| code (string, length 22 to 1.05M) | apis (list, length 1 to 3.31k) | extract_api (string, length 75 to 3.25M) |
|---|---|---|
import scrapy
from scrapy.crawler import CrawlerProcess
# Main spider class: it crawls each Upanishad's pages on the IITK Upanishads site.
class MantraSpider(scrapy.Spider):
name = "mantraspider"
def start_requests(self):
start_urls = [
"https://www.upanishads.iitk.ac.in/aitereya",
"https://www.upanishads.iitk.ac.in/isavasya",
"https://www.upanishads.iitk.ac.in/karika",
"https://www.upanishads.iitk.ac.in/katha",
"https://www.upanishads.iitk.ac.in/kena",
"https://www.upanishads.iitk.ac.in/mundaka",
"https://www.upanishads.iitk.ac.in/mandukya",
"https://www.upanishads.iitk.ac.in/prasna",
"https://www.upanishads.iitk.ac.in/brihadaranyaka",
"https://www.upanishads.iitk.ac.in/svetashvatra",
"https://www.upanishads.iitk.ac.in/taittiriya",
]
for url in start_urls:
yield scrapy.Request(url=url, callback=self.parse_mantra)
# Function to get all the mantras from the website
def parse_mantra(self, response):
for mantras in response.css("div.views-field.views-field-body"):
mantra = mantras.css("font::text").getall()
# remove new-line chars from string
mantra = list(map(lambda x: x.replace("\n", ""), mantra))
# remove empty strings
mantra = list(filter(lambda x: x != "", mantra))
yield {
"title": response.css("h1.page-title::text").get(),
"mantra": mantra,
}
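        # the fourth navigation link is "next"; crawling stops once it wraps back to the first page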
first_page = response.css("p.navigation_block a::attr(href)")[0].get()
next_page = response.css("p.navigation_block a::attr(href)")[3].get()
if next_page != first_page:
yield response.follow(next_page, callback=self.parse_mantra)
process = CrawlerProcess(
settings={
"FEED_FORMAT": "csv",
"FEED_URI": "../../data/raw/upanishads/upanishads.csv",
}
)
process.crawl(MantraSpider)
process.start()
|
[
"scrapy.Request",
"scrapy.crawler.CrawlerProcess"
] |
[((1818, 1925), 'scrapy.crawler.CrawlerProcess', 'CrawlerProcess', ([], {'settings': "{'FEED_FORMAT': 'csv', 'FEED_URI': '../../data/raw/upanishads/upanishads.csv'}"}), "(settings={'FEED_FORMAT': 'csv', 'FEED_URI':\n '../../data/raw/upanishads/upanishads.csv'})\n", (1832, 1925), False, 'from scrapy.crawler import CrawlerProcess\n'), ((942, 993), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'url', 'callback': 'self.parse_mantra'}), '(url=url, callback=self.parse_mantra)\n', (956, 993), False, 'import scrapy\n')]
|
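A note on the crawler above: the FEED_FORMAT/FEED_URI pair has been deprecated since Scrapy 2.1 in favor of the single FEEDS setting. A minimal sketch of the equivalent export configuration, assuming Scrapy 2.1+ and the MantraSpider class defined above:

from scrapy.crawler import CrawlerProcess

process = CrawlerProcess(
    settings={
        # one entry per output feed: URI -> options
        "FEEDS": {
            "../../data/raw/upanishads/upanishads.csv": {"format": "csv"},
        },
    }
)
process.crawl(MantraSpider)  # the spider class from the sample above
process.start()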
#!flask/bin/python
import base64
from flask import Flask, jsonify, make_response
from flask.ext.httpauth import HTTPBasicAuth
from flask.ext.restful import Api, Resource
from resources.repo_list import RepoList
from resources.repo_action import RepoAction
from resources.pkg import Pkg
# App Definition
auth = HTTPBasicAuth()
app = Flask(__name__, static_url_path="")
#api = Api(app)
api = Api(app, decorators=[auth.login_required])
# Retrieve User Password
@auth.get_password
def get_password(username):
if username == 'root':
return base64.b64decode('cHl0aG9u')
return None
# Deny unauthorized use
@auth.error_handler
def unauthorized():
return make_response(jsonify({'error': 'Unauthorized access'}), 403)
# Add Repo List Resource
api.add_resource(RepoList, '/packager/repo/list')
# Add Repo Operations Resource
api.add_resource(RepoAction, '/packager/repo', endpoint='repos')
# Add Package Operations Resource
api.add_resource(Pkg, '/packager', endpoint='pkg')
if __name__ == '__main__':
app.run(debug=True)
|
[
"flask.ext.httpauth.HTTPBasicAuth",
"flask.Flask",
"base64.b64decode",
"flask.jsonify",
"flask.ext.restful.Api"
] |
[((311, 326), 'flask.ext.httpauth.HTTPBasicAuth', 'HTTPBasicAuth', ([], {}), '()\n', (324, 326), False, 'from flask.ext.httpauth import HTTPBasicAuth\n'), ((334, 369), 'flask.Flask', 'Flask', (['__name__'], {'static_url_path': '""""""'}), "(__name__, static_url_path='')\n", (339, 369), False, 'from flask import Flask, jsonify, make_response\n'), ((394, 436), 'flask.ext.restful.Api', 'Api', (['app'], {'decorators': '[auth.login_required]'}), '(app, decorators=[auth.login_required])\n', (397, 436), False, 'from flask.ext.restful import Api, Resource\n'), ((552, 580), 'base64.b64decode', 'base64.b64decode', (['"""cHl0aG9u"""'], {}), "('cHl0aG9u')\n", (568, 580), False, 'import base64\n'), ((691, 732), 'flask.jsonify', 'jsonify', (["{'error': 'Unauthorized access'}"], {}), "({'error': 'Unauthorized access'})\n", (698, 732), False, 'from flask import Flask, jsonify, make_response\n')]
|
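The flask.ext.* namespace imported above was deprecated in Flask 0.11 and removed in Flask 1.0. A minimal sketch of the same wiring on the modern packages (flask-httpauth, flask-restful); the RepoList class here is a hypothetical stand-in for the resources.repo_list.RepoList import in the sample:

import base64
from flask import Flask
from flask_httpauth import HTTPBasicAuth
from flask_restful import Api, Resource

auth = HTTPBasicAuth()
app = Flask(__name__, static_url_path="")
api = Api(app, decorators=[auth.login_required])

@auth.get_password
def get_password(username):
    if username == 'root':
        # .decode() because b64decode returns bytes under Python 3
        return base64.b64decode('cHl0aG9u').decode()
    return None

class RepoList(Resource):
    # hypothetical stand-in for resources.repo_list.RepoList
    def get(self):
        return {'repos': []}

api.add_resource(RepoList, '/packager/repo/list')

if __name__ == '__main__':
    app.run(debug=True)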
import Nn
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from utils.tf2_utils import show_graph, get_TensorSpecs, gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy
from Algorithms.tf2algos.base.on_policy import On_Policy
class PPO(On_Policy):
'''
Proximal Policy Optimization, https://arxiv.org/abs/1707.06347
Emergence of Locomotion Behaviours in Rich Environments, http://arxiv.org/abs/1707.02286, DPPO
'''
def __init__(self,
s_dim,
visual_sources,
visual_resolution,
a_dim_or_list,
is_continuous,
policy_epoch=4,
value_epoch=4,
beta=1.0e-3,
lr=5.0e-4,
lambda_=0.95,
epsilon=0.2,
value_epsilon=0.2,
share_net=True,
actor_lr=3e-4,
critic_lr=1e-3,
kl_reverse=False,
kl_target=0.02,
kl_target_cutoff=2,
kl_target_earlystop=4,
kl_beta=[0.7, 1.3],
kl_alpha=1.5,
kl_coef=1.0,
hidden_units={
'share': {
'continuous': {
'share': [32, 32],
'mu': [32, 32],
'v': [32, 32]
},
'discrete': {
'share': [32, 32],
'logits': [32, 32],
'v': [32, 32]
}
},
'actor_continuous': [32, 32],
'actor_discrete': [32, 32],
'critic': [32, 32]
},
**kwargs):
super().__init__(
s_dim=s_dim,
visual_sources=visual_sources,
visual_resolution=visual_resolution,
a_dim_or_list=a_dim_or_list,
is_continuous=is_continuous,
**kwargs)
self.beta = beta
self.policy_epoch = policy_epoch
self.value_epoch = value_epoch
self.lambda_ = lambda_
self.epsilon = epsilon
self.value_epsilon = value_epsilon
self.share_net = share_net
self.kl_reverse = kl_reverse
self.kl_target = kl_target
self.kl_alpha = kl_alpha
self.kl_coef = tf.constant(kl_coef, dtype=tf.float32)
self.kl_cutoff = kl_target * kl_target_cutoff
self.kl_stop = kl_target * kl_target_earlystop
self.kl_low = kl_target * kl_beta[0]
self.kl_high = kl_target * kl_beta[-1]
if self.is_continuous:
self.log_std = tf.Variable(initial_value=-0.5 * np.ones(self.a_counts, dtype=np.float32), trainable=True)
if self.share_net:
# self.TensorSpecs = get_TensorSpecs([self.s_dim], self.visual_dim, [self.a_counts], [1], [1], [1])
if self.is_continuous:
self.net = Nn.a_c_v_continuous(self.rnn_net.hdim, self.a_counts, hidden_units['share']['continuous'])
self.net_tv = self.net.trainable_variables + [self.log_std] + self.other_tv
else:
self.net = Nn.a_c_v_discrete(self.rnn_net.hdim, self.a_counts, hidden_units['share']['discrete'])
self.net_tv = self.net.trainable_variables + self.other_tv
self.lr = self.init_lr(lr)
self.optimizer = self.init_optimizer(self.lr)
self.model_recorder(dict(
model=self.net,
optimizer=self.optimizer
))
else:
# self.actor_TensorSpecs = get_TensorSpecs([self.s_dim], self.visual_dim, [self.a_counts], [1], [1])
# self.critic_TensorSpecs = get_TensorSpecs([self.s_dim], self.visual_dim, [1])
if self.is_continuous:
self.actor_net = Nn.actor_mu(self.rnn_net.hdim, self.a_counts, hidden_units['actor_continuous'])
                self.actor_net_tv = self.actor_net.trainable_variables + [self.log_std]
else:
self.actor_net = Nn.actor_discrete(self.rnn_net.hdim, self.a_counts, hidden_units['actor_discrete'])
self.actor_net_tv = self.actor_net.trainable_variables
self.critic_net = Nn.critic_v(self.rnn_net.hdim, hidden_units['critic'])
self.critic_tv = self.critic_net.trainable_variables + self.other_tv
self.actor_lr, self.critic_lr = map(self.init_lr, [actor_lr, critic_lr])
self.optimizer_actor, self.optimizer_critic = map(self.init_optimizer, [self.actor_lr, self.critic_lr])
self.model_recorder(dict(
actor=self.actor_net,
critic=self.critic_net,
optimizer_actor=self.optimizer_actor,
optimizer_critic=self.optimizer_critic
))
self.initialize_data_buffer(
data_name_list=['s', 'visual_s', 'a', 'r', 's_', 'visual_s_', 'done', 'value', 'log_prob'])
def show_logo(self):
self.recorder.logger.info('''
　　　ｘｘｘｘｘｘｘｘ　　　　　　　ｘｘｘｘｘｘｘｘ　　　　　　　　　ｘｘｘｘｘ　　　　　
　　　　　ｘｘ　　ｘｘ　　　　　　　　　ｘｘ　　ｘｘ　　　　　　　　ｘｘｘ　ｘｘｘ　　　　
　　　　　ｘ　　　ｘｘｘ　　　　　　　　ｘ　　　ｘｘｘ　　　　　　　ｘｘ　　　ｘｘ　　　　
　　　　　ｘ　　　ｘｘｘ　　　　　　　　ｘ　　　ｘｘｘ　　　　　　　ｘｘ　　　ｘｘｘ　　　
　　　　　ｘｘｘｘｘｘ　　　　　　　　　ｘｘｘｘｘｘ　　　　　　　ｘｘｘ　　　ｘｘｘ　　　
　　　　　ｘ　　　　　　　　　　　　　　ｘ　　　　　　　　　　　　　ｘｘ　　　ｘｘｘ　　　
　　　　　ｘ　　　　　　　　　　　　　　ｘ　　　　　　　　　　　　　ｘｘ　　　ｘｘ　　　　
　　　　　ｘ　　　　　　　　　　　　　　ｘ　　　　　　　　　　　　　ｘｘ　　ｘｘｘ　　　　
　　　ｘｘｘｘｘ　　　　　　　　　　ｘｘｘｘｘ　　　　　　　　　　　　ｘｘｘｘｘ　　
''')
def choose_action(self, s, visual_s, evaluation=False):
a, value, log_prob, self.cell_state = self._get_action(s, visual_s, self.cell_state)
a = a.numpy()
self._value = np.squeeze(value.numpy())
self._log_prob = np.squeeze(log_prob.numpy()) + 1e-10
return a
@tf.function
def _get_action(self, s, visual_s, cell_state):
with tf.device(self.device):
feat, cell_state = self.get_feature(s, visual_s, cell_state=cell_state, record_cs=True, train=False)
if self.is_continuous:
if self.share_net:
mu, value = self.net(feat)
else:
mu = self.actor_net(feat)
value = self.critic_net(feat)
sample_op, _ = gaussian_clip_rsample(mu, self.log_std)
log_prob = gaussian_likelihood_sum(sample_op, mu, self.log_std)
else:
if self.share_net:
logits, value = self.net(feat)
else:
logits = self.actor_net(feat)
value = self.critic_net(feat)
norm_dist = tfp.distributions.Categorical(logits)
sample_op = norm_dist.sample()
log_prob = norm_dist.log_prob(sample_op)
return sample_op, value, log_prob, cell_state
def store_data(self, s, visual_s, a, r, s_, visual_s_, done):
        assert isinstance(a, np.ndarray), "store_data requires the action to be an np.ndarray"
        assert isinstance(r, np.ndarray), "store_data requires the reward to be an np.ndarray"
        assert isinstance(done, np.ndarray), "store_data requires done to be an np.ndarray"
self.data.add(s, visual_s, a, r, s_, visual_s_, done, self._value, self._log_prob)
@tf.function
def _get_value(self, feat):
with tf.device(self.device):
if self.share_net:
_, value = self.net(feat)
else:
value = self.critic_net(feat)
return value
def calculate_statistics(self):
feat, self.cell_state = self.get_feature(self.data.last_s(), self.data.last_visual_s(), cell_state=self.cell_state, record_cs=True, train=False)
init_value = np.squeeze(self._get_value(feat).numpy())
self.data.cal_dc_r(self.gamma, init_value)
self.data.cal_td_error(self.gamma, init_value)
self.data.cal_gae_adv(self.lambda_, self.gamma)
# @show_graph(name='ppo_net')
def learn(self, **kwargs):
self.episode = kwargs['episode']
def _train(data, crsty_loss, cell_state):
early_step = 0
if self.share_net:
for i in range(self.policy_epoch):
actor_loss, critic_loss, entropy, kl = self.train_share(
data,
self.kl_coef,
crsty_loss,
cell_state
)
if kl > self.kl_stop:
early_step = i
break
else:
for i in range(self.policy_epoch):
s, visual_s, a, dc_r, old_log_prob, advantage, old_value = data
actor_loss, entropy, kl = self.train_actor(
(s, visual_s, a, old_log_prob, advantage),
self.kl_coef,
cell_state
)
if kl > self.kl_stop:
early_step = i
break
for _ in range(self.value_epoch):
critic_loss = self.train_critic(
(s, visual_s, dc_r, old_value),
crsty_loss,
cell_state
)
# https://github.com/joschu/modular_rl/blob/6970cde3da265cf2a98537250fea5e0c0d9a7639/modular_rl/ppo.py#L93
if kl > self.kl_high:
self.kl_coef *= self.kl_alpha
elif kl < self.kl_low:
self.kl_coef /= self.kl_alpha
summaries = dict([
['LOSS/actor_loss', actor_loss],
['LOSS/critic_loss', critic_loss],
['Statistics/kl', kl],
['Statistics/kl_coef', self.kl_coef],
['Statistics/early_step', early_step],
['Statistics/entropy', entropy]
])
return summaries
if self.share_net:
summary_dict = dict([['LEARNING_RATE/lr', self.lr(self.episode)]])
else:
            summary_dict = dict([
['LEARNING_RATE/actor_lr', self.actor_lr(self.episode)],
['LEARNING_RATE/critic_lr', self.critic_lr(self.episode)]
])
self._learn(function_dict={
'calculate_statistics': self.calculate_statistics,
'train_function': _train,
'train_data_list': ['s', 'visual_s', 'a', 'discounted_reward', 'log_prob', 'gae_adv', 'value'],
'summary_dict': summary_dict
})
@tf.function(experimental_relax_shapes=True)
def train_share(self, memories, kl_coef, crsty_loss, cell_state):
s, visual_s, a, dc_r, old_log_prob, advantage, old_value = memories
with tf.device(self.device):
with tf.GradientTape() as tape:
feat = self.get_feature(s, visual_s, cell_state=cell_state)
if self.is_continuous:
mu, value = self.net(feat)
new_log_prob = gaussian_likelihood_sum(a, mu, self.log_std)
entropy = gaussian_entropy(self.log_std)
else:
logits, value = self.net(feat)
logp_all = tf.nn.log_softmax(logits)
new_log_prob = tf.reduce_sum(a * logp_all, axis=1, keepdims=True)
entropy = -tf.reduce_mean(tf.reduce_sum(tf.exp(logp_all) * logp_all, axis=1, keepdims=True))
ratio = tf.exp(new_log_prob - old_log_prob)
# https://github.com/joschu/modular_rl/blob/6970cde3da265cf2a98537250fea5e0c0d9a7639/modular_rl/ppo.py#L40
if self.kl_reverse:
kl = tf.reduce_mean(new_log_prob - old_log_prob)
else:
kl = tf.reduce_mean(old_log_prob - new_log_prob) # a sample estimate for KL-divergence, easy to compute
surrogate = ratio * advantage
# https://github.com/llSourcell/OpenAI_Five_vs_Dota2_Explained/blob/c5def7e57aa70785c2394ea2eeb3e5f66ad59a53/train.py#L154
value_clip = old_value + tf.clip_by_value(value - old_value, -self.value_epsilon, self.value_epsilon)
td_error = dc_r - value
td_error_clip = dc_r - value_clip
td_square = tf.maximum(tf.square(td_error), tf.square(td_error_clip))
pi_loss = -tf.reduce_mean(
tf.minimum(
surrogate,
tf.clip_by_value(ratio, 1.0 - self.epsilon, 1.0 + self.epsilon) * advantage
))
kl_loss = kl_coef * kl
extra_loss = 1000.0 * tf.square(tf.maximum(0., kl - self.kl_cutoff))
actor_loss = pi_loss + kl_loss + extra_loss
value_loss = 0.5 * tf.reduce_mean(td_square)
loss = actor_loss + 1.0 * value_loss - self.beta * entropy + crsty_loss
loss_grads = tape.gradient(loss, self.net_tv)
self.optimizer.apply_gradients(
zip(loss_grads, self.net_tv)
)
self.global_step.assign_add(1)
return actor_loss, value_loss, entropy, kl
@tf.function(experimental_relax_shapes=True)
def train_actor(self, memories, kl_coef, cell_state):
s, visual_s, a, old_log_prob, advantage = memories
with tf.device(self.device):
feat = self.get_feature(s, visual_s, cell_state=cell_state)
with tf.GradientTape() as tape:
if self.is_continuous:
mu = self.actor_net(feat)
new_log_prob = gaussian_likelihood_sum(a, mu, self.log_std)
entropy = gaussian_entropy(self.log_std)
else:
logits = self.actor_net(feat)
logp_all = tf.nn.log_softmax(logits)
new_log_prob = tf.reduce_sum(a * logp_all, axis=1, keepdims=True)
entropy = -tf.reduce_mean(tf.reduce_sum(tf.exp(logp_all) * logp_all, axis=1, keepdims=True))
ratio = tf.exp(new_log_prob - old_log_prob)
kl = tf.reduce_mean(old_log_prob - new_log_prob)
surrogate = ratio * advantage
min_adv = tf.where(advantage > 0, (1 + self.epsilon) * advantage, (1 - self.epsilon) * advantage)
pi_loss = -(tf.reduce_mean(tf.minimum(surrogate, min_adv)) + self.beta * entropy)
kl_loss = kl_coef * kl
extra_loss = 1000.0 * tf.square(tf.maximum(0., kl - self.kl_cutoff))
actor_loss = pi_loss + kl_loss + extra_loss
actor_grads = tape.gradient(actor_loss, self.actor_net_tv)
self.optimizer_actor.apply_gradients(
zip(actor_grads, self.actor_net_tv)
)
self.global_step.assign_add(1)
return actor_loss, entropy, kl
@tf.function(experimental_relax_shapes=True)
def train_critic(self, memories, crsty_loss, cell_state):
s, visual_s, dc_r, old_value = memories
with tf.device(self.device):
with tf.GradientTape() as tape:
feat = self.get_feature(s, visual_s, cell_state=cell_state)
value = self.critic_net(feat)
value_clip = old_value + tf.clip_by_value(value-old_value, -self.value_epsilon, self.value_epsilon)
td_error = dc_r - value
td_error_clip = dc_r - value_clip
td_square = tf.maximum(tf.square(td_error), tf.square(td_error_clip))
value_loss = 0.5 * tf.reduce_mean(td_square) + crsty_loss
critic_grads = tape.gradient(value_loss, self.critic_tv)
self.optimizer_critic.apply_gradients(
zip(critic_grads, self.critic_tv)
)
return value_loss
|
[
"Nn.critic_v",
"tensorflow.reduce_sum",
"tensorflow.clip_by_value",
"tensorflow.maximum",
"numpy.ones",
"Nn.actor_mu",
"tensorflow.nn.log_softmax",
"tensorflow_probability.distributions.Categorical",
"tensorflow.minimum",
"tensorflow.exp",
"utils.tf2_utils.gaussian_entropy",
"tensorflow.reduce_mean",
"tensorflow.constant",
"Nn.a_c_v_discrete",
"tensorflow.where",
"utils.tf2_utils.gaussian_likelihood_sum",
"tensorflow.device",
"tensorflow.square",
"tensorflow.function",
"Nn.actor_discrete",
"Nn.a_c_v_continuous",
"utils.tf2_utils.gaussian_clip_rsample",
"tensorflow.GradientTape"
] |
[((10804, 10847), 'tensorflow.function', 'tf.function', ([], {'experimental_relax_shapes': '(True)'}), '(experimental_relax_shapes=True)\n', (10815, 10847), True, 'import tensorflow as tf\n'), ((13473, 13516), 'tensorflow.function', 'tf.function', ([], {'experimental_relax_shapes': '(True)'}), '(experimental_relax_shapes=True)\n', (13484, 13516), True, 'import tensorflow as tf\n'), ((15189, 15232), 'tensorflow.function', 'tf.function', ([], {'experimental_relax_shapes': '(True)'}), '(experimental_relax_shapes=True)\n', (15200, 15232), True, 'import tensorflow as tf\n'), ((2524, 2562), 'tensorflow.constant', 'tf.constant', (['kl_coef'], {'dtype': 'tf.float32'}), '(kl_coef, dtype=tf.float32)\n', (2535, 2562), True, 'import tensorflow as tf\n'), ((4423, 4477), 'Nn.critic_v', 'Nn.critic_v', (['self.rnn_net.hdim', "hidden_units['critic']"], {}), "(self.rnn_net.hdim, hidden_units['critic'])\n", (4434, 4477), False, 'import Nn\n'), ((6032, 6054), 'tensorflow.device', 'tf.device', (['self.device'], {}), '(self.device)\n', (6041, 6054), True, 'import tensorflow as tf\n'), ((7485, 7507), 'tensorflow.device', 'tf.device', (['self.device'], {}), '(self.device)\n', (7494, 7507), True, 'import tensorflow as tf\n'), ((11007, 11029), 'tensorflow.device', 'tf.device', (['self.device'], {}), '(self.device)\n', (11016, 11029), True, 'import tensorflow as tf\n'), ((13647, 13669), 'tensorflow.device', 'tf.device', (['self.device'], {}), '(self.device)\n', (13656, 13669), True, 'import tensorflow as tf\n'), ((15356, 15378), 'tensorflow.device', 'tf.device', (['self.device'], {}), '(self.device)\n', (15365, 15378), True, 'import tensorflow as tf\n'), ((3116, 3211), 'Nn.a_c_v_continuous', 'Nn.a_c_v_continuous', (['self.rnn_net.hdim', 'self.a_counts', "hidden_units['share']['continuous']"], {}), "(self.rnn_net.hdim, self.a_counts, hidden_units['share']\n ['continuous'])\n", (3135, 3211), False, 'import Nn\n'), ((3344, 3435), 'Nn.a_c_v_discrete', 'Nn.a_c_v_discrete', (['self.rnn_net.hdim', 'self.a_counts', "hidden_units['share']['discrete']"], {}), "(self.rnn_net.hdim, self.a_counts, hidden_units['share'][\n 'discrete'])\n", (3361, 3435), False, 'import Nn\n'), ((4020, 4099), 'Nn.actor_mu', 'Nn.actor_mu', (['self.rnn_net.hdim', 'self.a_counts', "hidden_units['actor_continuous']"], {}), "(self.rnn_net.hdim, self.a_counts, hidden_units['actor_continuous'])\n", (4031, 4099), False, 'import Nn\n'), ((4238, 4326), 'Nn.actor_discrete', 'Nn.actor_discrete', (['self.rnn_net.hdim', 'self.a_counts', "hidden_units['actor_discrete']"], {}), "(self.rnn_net.hdim, self.a_counts, hidden_units[\n 'actor_discrete'])\n", (4255, 4326), False, 'import Nn\n'), ((6435, 6474), 'utils.tf2_utils.gaussian_clip_rsample', 'gaussian_clip_rsample', (['mu', 'self.log_std'], {}), '(mu, self.log_std)\n', (6456, 6474), False, 'from utils.tf2_utils import show_graph, get_TensorSpecs, gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy\n'), ((6502, 6554), 'utils.tf2_utils.gaussian_likelihood_sum', 'gaussian_likelihood_sum', (['sample_op', 'mu', 'self.log_std'], {}), '(sample_op, mu, self.log_std)\n', (6525, 6554), False, 'from utils.tf2_utils import show_graph, get_TensorSpecs, gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy\n'), ((6809, 6846), 'tensorflow_probability.distributions.Categorical', 'tfp.distributions.Categorical', (['logits'], {}), '(logits)\n', (6838, 6846), True, 'import tensorflow_probability as tfp\n'), ((11048, 11065), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (11063, 11065), True, 
'import tensorflow as tf\n'), ((11731, 11766), 'tensorflow.exp', 'tf.exp', (['(new_log_prob - old_log_prob)'], {}), '(new_log_prob - old_log_prob)\n', (11737, 11766), True, 'import tensorflow as tf\n'), ((13760, 13777), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (13775, 13777), True, 'import tensorflow as tf\n'), ((14365, 14400), 'tensorflow.exp', 'tf.exp', (['(new_log_prob - old_log_prob)'], {}), '(new_log_prob - old_log_prob)\n', (14371, 14400), True, 'import tensorflow as tf\n'), ((14422, 14465), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['(old_log_prob - new_log_prob)'], {}), '(old_log_prob - new_log_prob)\n', (14436, 14465), True, 'import tensorflow as tf\n'), ((14538, 14629), 'tensorflow.where', 'tf.where', (['(advantage > 0)', '((1 + self.epsilon) * advantage)', '((1 - self.epsilon) * advantage)'], {}), '(advantage > 0, (1 + self.epsilon) * advantage, (1 - self.epsilon) *\n advantage)\n', (14546, 14629), True, 'import tensorflow as tf\n'), ((15397, 15414), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (15412, 15414), True, 'import tensorflow as tf\n'), ((11272, 11316), 'utils.tf2_utils.gaussian_likelihood_sum', 'gaussian_likelihood_sum', (['a', 'mu', 'self.log_std'], {}), '(a, mu, self.log_std)\n', (11295, 11316), False, 'from utils.tf2_utils import show_graph, get_TensorSpecs, gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy\n'), ((11347, 11377), 'utils.tf2_utils.gaussian_entropy', 'gaussian_entropy', (['self.log_std'], {}), '(self.log_std)\n', (11363, 11377), False, 'from utils.tf2_utils import show_graph, get_TensorSpecs, gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy\n'), ((11482, 11507), 'tensorflow.nn.log_softmax', 'tf.nn.log_softmax', (['logits'], {}), '(logits)\n', (11499, 11507), True, 'import tensorflow as tf\n'), ((11543, 11593), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(a * logp_all)'], {'axis': '(1)', 'keepdims': '(True)'}), '(a * logp_all, axis=1, keepdims=True)\n', (11556, 11593), True, 'import tensorflow as tf\n'), ((11952, 11995), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['(new_log_prob - old_log_prob)'], {}), '(new_log_prob - old_log_prob)\n', (11966, 11995), True, 'import tensorflow as tf\n'), ((12043, 12086), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['(old_log_prob - new_log_prob)'], {}), '(old_log_prob - new_log_prob)\n', (12057, 12086), True, 'import tensorflow as tf\n'), ((12388, 12464), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['(value - old_value)', '(-self.value_epsilon)', 'self.value_epsilon'], {}), '(value - old_value, -self.value_epsilon, self.value_epsilon)\n', (12404, 12464), True, 'import tensorflow as tf\n'), ((12594, 12613), 'tensorflow.square', 'tf.square', (['td_error'], {}), '(td_error)\n', (12603, 12613), True, 'import tensorflow as tf\n'), ((12615, 12639), 'tensorflow.square', 'tf.square', (['td_error_clip'], {}), '(td_error_clip)\n', (12624, 12639), True, 'import tensorflow as tf\n'), ((13094, 13119), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['td_square'], {}), '(td_square)\n', (13108, 13119), True, 'import tensorflow as tf\n'), ((13907, 13951), 'utils.tf2_utils.gaussian_likelihood_sum', 'gaussian_likelihood_sum', (['a', 'mu', 'self.log_std'], {}), '(a, mu, self.log_std)\n', (13930, 13951), False, 'from utils.tf2_utils import show_graph, get_TensorSpecs, gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy\n'), ((13982, 14012), 'utils.tf2_utils.gaussian_entropy', 'gaussian_entropy', (['self.log_std'], {}), '(self.log_std)\n', (13998, 
14012), False, 'from utils.tf2_utils import show_graph, get_TensorSpecs, gaussian_clip_rsample, gaussian_likelihood_sum, gaussian_entropy\n'), ((14116, 14141), 'tensorflow.nn.log_softmax', 'tf.nn.log_softmax', (['logits'], {}), '(logits)\n', (14133, 14141), True, 'import tensorflow as tf\n'), ((14177, 14227), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['(a * logp_all)'], {'axis': '(1)', 'keepdims': '(True)'}), '(a * logp_all, axis=1, keepdims=True)\n', (14190, 14227), True, 'import tensorflow as tf\n'), ((15588, 15664), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['(value - old_value)', '(-self.value_epsilon)', 'self.value_epsilon'], {}), '(value - old_value, -self.value_epsilon, self.value_epsilon)\n', (15604, 15664), True, 'import tensorflow as tf\n'), ((15792, 15811), 'tensorflow.square', 'tf.square', (['td_error'], {}), '(td_error)\n', (15801, 15811), True, 'import tensorflow as tf\n'), ((15813, 15837), 'tensorflow.square', 'tf.square', (['td_error_clip'], {}), '(td_error_clip)\n', (15822, 15837), True, 'import tensorflow as tf\n'), ((2857, 2897), 'numpy.ones', 'np.ones', (['self.a_counts'], {'dtype': 'np.float32'}), '(self.a_counts, dtype=np.float32)\n', (2864, 2897), True, 'import numpy as np\n'), ((12962, 12998), 'tensorflow.maximum', 'tf.maximum', (['(0.0)', '(kl - self.kl_cutoff)'], {}), '(0.0, kl - self.kl_cutoff)\n', (12972, 12998), True, 'import tensorflow as tf\n'), ((14812, 14848), 'tensorflow.maximum', 'tf.maximum', (['(0.0)', '(kl - self.kl_cutoff)'], {}), '(0.0, kl - self.kl_cutoff)\n', (14822, 14848), True, 'import tensorflow as tf\n'), ((15875, 15900), 'tensorflow.reduce_mean', 'tf.reduce_mean', (['td_square'], {}), '(td_square)\n', (15889, 15900), True, 'import tensorflow as tf\n'), ((14669, 14699), 'tensorflow.minimum', 'tf.minimum', (['surrogate', 'min_adv'], {}), '(surrogate, min_adv)\n', (14679, 14699), True, 'import tensorflow as tf\n'), ((12776, 12839), 'tensorflow.clip_by_value', 'tf.clip_by_value', (['ratio', '(1.0 - self.epsilon)', '(1.0 + self.epsilon)'], {}), '(ratio, 1.0 - self.epsilon, 1.0 + self.epsilon)\n', (12792, 12839), True, 'import tensorflow as tf\n'), ((11654, 11670), 'tensorflow.exp', 'tf.exp', (['logp_all'], {}), '(logp_all)\n', (11660, 11670), True, 'import tensorflow as tf\n'), ((14288, 14304), 'tensorflow.exp', 'tf.exp', (['logp_all'], {}), '(logp_all)\n', (14294, 14304), True, 'import tensorflow as tf\n')]
|
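At the heart of train_share and train_actor above is PPO's clipped surrogate objective. A minimal NumPy sketch of that loss in isolation, using the same epsilon convention as the class's epsilon argument:

import numpy as np

def clipped_surrogate_loss(new_log_prob, old_log_prob, advantage, epsilon=0.2):
    # likelihood ratio r = pi_new(a|s) / pi_old(a|s)
    ratio = np.exp(new_log_prob - old_log_prob)
    unclipped = ratio * advantage
    clipped = np.clip(ratio, 1.0 - epsilon, 1.0 + epsilon) * advantage
    # the surrogate is maximized, so its negative mean serves as the loss
    return -np.mean(np.minimum(unclipped, clipped))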
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# @Author: SHF
# @Email: <EMAIL>
# @Date: 2020-09-15 16:06:14
import requests
AK = '<KEY>'
# AK = '<KEY>' # key
def Pos2Coord(name):
    '''
    @func: convert a place name to longitude/latitude via the Baidu Maps API
    @note: official docs at http://lbsyun.baidu.com/index.php?title=webapi/guide/webservice-geocoding
    @output:
        lng: longitude
        lat: latitude
        conf: absolute accuracy of the point (the error range of the coordinate)
        comp: degree to which the address was understood; 0-100, higher means the service understood the address better
        level: the type of address that could be precisely resolved
    '''
url = 'http://api.map.baidu.com/geocoding/v3/?address=%s&output=json&ak=%s' % (name, AK)
res = requests.get(url)
if res.status_code == 200:
val = res.json()
# print(val['status'])
if val['status'] == 0:
retVal = {'lng': val['result']['location']['lng'], 'lat': val['result']['location']['lat'],
'conf': val['result']['confidence'], 'comp': val['result']['comprehension'],
'level': val['result']['level']}
else:
retVal = None
return retVal
else:
        print('Unable to get the coordinates of %s' % name)
def Coord2Pos(lng, lat, town='true'):
    '''
    @func: convert longitude/latitude to a place name via the Baidu Maps API
    @input:
        lng: longitude
        lat: latitude
        town: whether to fetch township-level location info, fetched by default; optional (true/false)
    @output:
        address: resolved location name
        province: province name
        city: city name
        district: county-level district name
        town: township-level district name
        adcode: county-level district code
        town_code: township-level district code
    '''
url = 'http://api.map.baidu.com/reverse_geocoding/v3/?output=json&ak=%s&location=%s,%s&extensions_town=%s' % (
AK, lat, lng, town)
res = requests.get(url)
if res.status_code == 200:
val = res.json()
if val['status'] == 0:
val = val['result']
retVal = {'address': val['formatted_address'], 'province': val['addressComponent']['province'],
'city': val['addressComponent']['city'], 'district': val['addressComponent']['district'],
'town': val['addressComponent']['town'], 'adcode': val['addressComponent']['adcode'],
'town_code': val['addressComponent']['town_code']}
else:
retVal = None
return retVal
else:
        print('Unable to get the location info of (%s,%s)!' % (lat, lng))
def get_pos(origin0, destination0):
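    # note: despite the names, the *_ln values hold the rounded latitude and the *_la values the longitude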
origin1 = Pos2Coord(origin0)
origin_lng = origin1['lng']
origin_lat = origin1['lat']
origin_ln = str(round(origin_lat, 6))
origin_la = str(round(origin_lng, 6))
destination1 = Pos2Coord(destination0)
destination_lng = destination1['lng']
destination_lat = destination1['lat']
destination_ln = str(round(destination_lat, 6))
destination_la = str(round(destination_lng, 6))
return origin_ln, origin_la, destination_ln, destination_la
def get_pos1(origin):
origin1 = Pos2Coord(origin)
origin_lng = origin1['lng']
origin_lat = origin1['lat']
ln = str(round(origin_lat, 3))
la = str(round(origin_lng, 3))
pos = ln + "," + la
return pos
if __name__ == '__main__':
    # forward geocoding
    val1 = Pos2Coord('上海大学宝山校区')  # Shanghai University, Baoshan campus
    # {'lng': 121.39903048091482, 'lat': 31.32144004759091, 'conf': 70, 'comp': 100, 'level': '教育'}
    print(val1)
    # reverse geocoding
    val2 = Coord2Pos(121.399030, 31.321440)
    # {'address': '江苏省南京市江宁区苏源大道', 'province': '江苏省', 'city': '南京市', 'district': '江宁区', 'town': '秣陵街道', 'adcode': '320115', 'town_code': '320115011'}
print(val2)
|
[
"requests.get"
] |
[((610, 627), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (622, 627), False, 'import requests\n'), ((1672, 1689), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (1684, 1689), False, 'import requests\n')]
|
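Pos2Coord above returns None implicitly (after printing) whenever the HTTP status is not 200, and requests.get can also raise on network failures. A minimal sketch of the same forward-geocoding call hardened with a timeout and explicit error handling; the endpoint and response fields are taken from the sample, and ak is assumed to hold a valid key:

import requests

def pos2coord(name, ak, timeout=5):
    url = 'http://api.map.baidu.com/geocoding/v3/'
    params = {'address': name, 'output': 'json', 'ak': ak}
    try:
        res = requests.get(url, params=params, timeout=timeout)
        res.raise_for_status()
    except requests.RequestException as exc:
        print('Request for %s failed: %s' % (name, exc))
        return None
    val = res.json()
    if val.get('status') != 0:
        return None
    loc = val['result']['location']
    return {'lng': loc['lng'], 'lat': loc['lat']}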
from plivo.utils import jwt
import time
# using valid_from in epoch and lifetime in seconds
token = jwt.AccessToken('{authId}', '{authToken}', '{endpointUsername}', valid_from=time.time(), lifetime=300, uid='{uid}')
# grants(incoming:bool, outgoing:bool)
token.add_voice_grants(True, True)
print(token.to_jwt())
# using valid_from and valid_till in epoch
token = jwt.AccessToken('{authId}', '{authToken}', '{endpointUsername}', valid_from=time.time(), valid_till=1588751222)
token.add_voice_grants(False, True)
print(token.to_jwt())
|
[
"time.time"
] |
[((177, 188), 'time.time', 'time.time', ([], {}), '()\n', (186, 188), False, 'import time\n'), ((442, 453), 'time.time', 'time.time', ([], {}), '()\n', (451, 453), False, 'import time\n')]
|
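The tokens printed above are ordinary JWTs, so their claims can be inspected without the Plivo SDK. A minimal standard-library sketch that decodes (without verifying) the payload segment of whatever to_jwt() returns:

import base64
import json

def peek_claims(token):
    payload = token.split('.')[1]
    payload += '=' * (-len(payload) % 4)  # restore the stripped base64 padding
    return json.loads(base64.urlsafe_b64decode(payload))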
import sys
import os
import numpy as np
from pprint import pprint
from datetime import datetime
from datetime import timedelta
import mysql.connector
import math
import matplotlib.pyplot as plt
from scipy import stats
#database connection
cnx = mysql.connector.connect(user='root', password='<PASSWORD>', host='localhost', database='black_carbon')
cursor = cnx.cursor()
cursor.execute(('''SELECT mc.meas_mean_mass_conc, mc.meas_rel_err, mc.UNIX_UTC_6h_midtime, mc.cluster_number, measCO.CO_ppbv
FROM whi_gc_and_sp2_6h_mass_concs mc
JOIN whi_co_data measCO on mc.CO_meas_id = measCO.id
WHERE mc.RH_threshold = 90 and measCO.CO_ppbv < 250''')
)
data = cursor.fetchall()
cnx.close()
plot_data_Cont = []
plot_data_SPac = []
plot_data_NPac = []
plot_data_WPac = []
for row in data:
meas_rBC_v = row[0]/1.29 #density of air is 1.29kg/m3 at 0C and 1atm
meas_rel_err_v = row[1]
date = datetime.utcfromtimestamp(row[2])
cluster_number = row[3]
meas_CO_v = row[4]
if date.month == 4:
color = 'g'
if date.month == 5:
color = 'b'
if date.month == 6:
color = 'orange'
if date.month == 7:
color = 'red'
if date.month == 8:
color = 'm'
line = [meas_rBC_v,meas_rel_err_v,meas_CO_v,color]
if cluster_number in [4]: #Spac = 6,8,9 Npac = 1,3,5,10, Cont = 4 LRT = 2,7
plot_data_Cont.append(line)
if cluster_number in [6,8,9]: #Spac = 6,8,9 Npac = 1,3,5,10, Cont = 4 LRT = 2,7
plot_data_SPac.append(line)
if cluster_number in [1,3,5,10]: #Spac = 6,8,9 Npac = 1,3,5,10, Cont = 4 LRT = 2,7
plot_data_NPac.append(line)
if cluster_number in [2,7]: #Spac = 6,8,9 Npac = 1,3,5,10, Cont = 4 LRT = 2,7
plot_data_WPac.append(line)
Cont_bc = [row[0] for row in plot_data_Cont]
Cont_bc_err = [row[1]*row[0] for row in plot_data_Cont]
Cont_co = [row[2] for row in plot_data_Cont]
Cont_month = [row[3] for row in plot_data_Cont]
varx = np.array(Cont_co)
vary = np.array(Cont_bc)
mask = ~np.isnan(varx) & ~np.isnan(vary)
Cont_slope, Cont_intercept, Cont_r_value, Cont_p_value, Cont_std_err = stats.linregress(varx[mask], vary[mask])
Cont_line = Cont_slope*varx+Cont_intercept
SPacbc = [row[0] for row in plot_data_SPac]
SPacbc_err = [row[1]*row[0] for row in plot_data_SPac]
SPacco = [row[2] for row in plot_data_SPac]
SPacmonth = [row[3] for row in plot_data_SPac]
varx = np.array(SPacco)
vary = np.array(SPacbc)
mask = ~np.isnan(varx) & ~np.isnan(vary)
SPacslope, SPacintercept, SPacr_value, SPacp_value, SPacstd_err = stats.linregress(varx[mask], vary[mask])
SPacline = SPacslope*varx+SPacintercept
NPacbc = [row[0] for row in plot_data_NPac]
NPacbc_err = [row[1]*row[0] for row in plot_data_NPac]
NPacco = [row[2] for row in plot_data_NPac]
NPacmonth = [row[3] for row in plot_data_NPac]
varx = np.array(NPacco)
vary = np.array(NPacbc)
mask = ~np.isnan(varx) & ~np.isnan(vary)
NPacslope, NPacintercept, NPacr_value, NPacp_value, NPacstd_err = stats.linregress(varx[mask], vary[mask])
NPacline = NPacslope*varx+NPacintercept
WPacbc = [row[0] for row in plot_data_WPac]
WPacbc_err = [row[1]*row[0] for row in plot_data_WPac]
WPacco = [row[2] for row in plot_data_WPac]
WPacmonth = [row[3] for row in plot_data_WPac]
varx = np.array(WPacco)
vary = np.array(WPacbc)
mask = ~np.isnan(varx) & ~np.isnan(vary)
WPacslope, WPacintercept, WPacr_value, WPacp_value, WPacstd_err = stats.linregress(varx[mask], vary[mask])
WPacline = WPacslope*varx+WPacintercept
fig = plt.figure(figsize=(14,6))
ax1 = plt.subplot2grid((1,4), (0,3), colspan=1)
ax2 = plt.subplot2grid((1,4), (0,0), colspan=1)
ax3 = plt.subplot2grid((1,4), (0,1), colspan=1)
ax4 = plt.subplot2grid((1,4), (0,2), colspan=1)
CO_upper = 175
CO_lower = 75
BC_upper = 325
info_x_pos = 0.05
info_y_pos = 0.8
label_x_pos = 0.05
label_y_pos = 0.92
#ax1.scatter(Cont_co,Cont_bc,c=Cont_month, marker = 'o',s=40)
ax1.scatter(Cont_co,Cont_bc,c='r', marker = '>',s=40)
ax1.errorbar(Cont_co,Cont_bc,yerr = Cont_bc_err,fmt = 'none',zorder=0)  # fmt must be the string 'none', not None, on current Matplotlib
ax1.plot(Cont_co,Cont_line,color='k')
ax1.text(info_x_pos, info_y_pos+0.05 ,'r-square: ' + str(round(Cont_r_value**2,3)),transform=ax1.transAxes, color='grey')
ax1.text(info_x_pos, info_y_pos,'slope: ' + str(round(Cont_slope,3)),transform=ax1.transAxes, color='grey')
ax1.text(label_x_pos, label_y_pos,'Northern Canada',transform=ax1.transAxes, color='k')
#ax1.set_ylabel('rBC ng/kg')
ax1.set_xlabel('CO ppbv')
ax1.set_xlim(CO_lower,CO_upper)
ax1.set_ylim(0,BC_upper)
ax1.yaxis.tick_right()
ax2.scatter(NPacco,NPacbc,c='b', marker = '<',s=40)
ax2.errorbar(NPacco,NPacbc,yerr = NPacbc_err,fmt = 'none',zorder=0)
ax2.plot(NPacco,NPacline,color='k')
ax2.text(info_x_pos, (info_y_pos+0.05),'r-square: ' + str(round(NPacr_value**2,3)),transform=ax2.transAxes, color='grey')
ax2.text(info_x_pos, info_y_pos,'slope: ' + str(round(NPacslope,3)),transform=ax2.transAxes, color='grey')
ax2.text(label_x_pos,label_y_pos,'Northern Pacific',transform=ax2.transAxes, color='k')
ax2.set_ylabel('rBC ng/kg')
ax2.set_xlabel('CO ppbv')
ax2.set_xlim(CO_lower,CO_upper)
ax2.set_ylim(0,BC_upper)
ax3.scatter(SPacco,SPacbc,c='g', marker = 'o',s=40)
ax3.errorbar(SPacco,SPacbc,yerr = SPacbc_err,fmt = 'none',zorder=0)
ax3.plot(SPacco,SPacline,color='k')
ax3.text(info_x_pos, info_y_pos+0.05,'r-square: ' + str(round(SPacr_value**2,3)),transform=ax3.transAxes, color='grey')
ax3.text(info_x_pos, info_y_pos,'slope: ' + str(round(SPacslope,3)),transform=ax3.transAxes, color='grey')
ax3.text(label_x_pos, label_y_pos,'Southern Pacific',transform=ax3.transAxes, color='k')
#ax3.set_ylabel('rBC ng/kg')
ax3.set_xlabel('CO ppbv')
ax3.set_xlim(CO_lower,CO_upper)
ax3.set_ylim(0,BC_upper)
ax3.set_yticklabels([])
ax4.scatter(WPacco,WPacbc,c='orange', marker = 's',s=40)
ax4.errorbar(WPacco,WPacbc,yerr = WPacbc_err,fmt = 'none',zorder=0)
ax4.plot(WPacco,WPacline,color='k')
ax4.text(info_x_pos, info_y_pos+0.05,'r-square: ' + str(round(WPacr_value**2,3)),transform=ax4.transAxes, color='grey')
ax4.text(info_x_pos, info_y_pos,'slope: ' + str(round(WPacslope,3)),transform=ax4.transAxes, color='grey')
ax4.text(label_x_pos, label_y_pos,'Western Pacific/Asia',transform=ax4.transAxes, color='k')
#ax4.set_ylabel('rBC ng/kg')
ax4.set_xlabel('CO ppbv')
ax4.set_xlim(CO_lower,CO_upper)
ax4.set_ylim(0,BC_upper)
ax4.set_yticklabels([])
plt.subplots_adjust(wspace=0.0)
os.chdir('C:/Users/<NAME>/Documents/Data/WHI long term record/CO data/')
plt.savefig('BC vs CO - all clusters.png', bbox_inches='tight')
plt.show()
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.subplot2grid",
"numpy.isnan",
"datetime.datetime.utcfromtimestamp",
"matplotlib.pyplot.figure",
"numpy.array",
"scipy.stats.linregress",
"matplotlib.pyplot.subplots_adjust",
"os.chdir",
"matplotlib.pyplot.savefig"
] |
[((1869, 1886), 'numpy.array', 'np.array', (['Cont_co'], {}), '(Cont_co)\n', (1877, 1886), True, 'import numpy as np\n'), ((1894, 1911), 'numpy.array', 'np.array', (['Cont_bc'], {}), '(Cont_bc)\n', (1902, 1911), True, 'import numpy as np\n'), ((2024, 2064), 'scipy.stats.linregress', 'stats.linregress', (['varx[mask]', 'vary[mask]'], {}), '(varx[mask], vary[mask])\n', (2040, 2064), False, 'from scipy import stats\n'), ((2306, 2322), 'numpy.array', 'np.array', (['SPacco'], {}), '(SPacco)\n', (2314, 2322), True, 'import numpy as np\n'), ((2330, 2346), 'numpy.array', 'np.array', (['SPacbc'], {}), '(SPacbc)\n', (2338, 2346), True, 'import numpy as np\n'), ((2454, 2494), 'scipy.stats.linregress', 'stats.linregress', (['varx[mask]', 'vary[mask]'], {}), '(varx[mask], vary[mask])\n', (2470, 2494), False, 'from scipy import stats\n'), ((2733, 2749), 'numpy.array', 'np.array', (['NPacco'], {}), '(NPacco)\n', (2741, 2749), True, 'import numpy as np\n'), ((2757, 2773), 'numpy.array', 'np.array', (['NPacbc'], {}), '(NPacbc)\n', (2765, 2773), True, 'import numpy as np\n'), ((2881, 2921), 'scipy.stats.linregress', 'stats.linregress', (['varx[mask]', 'vary[mask]'], {}), '(varx[mask], vary[mask])\n', (2897, 2921), False, 'from scipy import stats\n'), ((3160, 3176), 'numpy.array', 'np.array', (['WPacco'], {}), '(WPacco)\n', (3168, 3176), True, 'import numpy as np\n'), ((3184, 3200), 'numpy.array', 'np.array', (['WPacbc'], {}), '(WPacbc)\n', (3192, 3200), True, 'import numpy as np\n'), ((3308, 3348), 'scipy.stats.linregress', 'stats.linregress', (['varx[mask]', 'vary[mask]'], {}), '(varx[mask], vary[mask])\n', (3324, 3348), False, 'from scipy import stats\n'), ((3399, 3426), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(14, 6)'}), '(figsize=(14, 6))\n', (3409, 3426), True, 'import matplotlib.pyplot as plt\n'), ((3443, 3486), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(1, 4)', '(0, 3)'], {'colspan': '(1)'}), '((1, 4), (0, 3), colspan=1)\n', (3459, 3486), True, 'import matplotlib.pyplot as plt\n'), ((3492, 3535), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(1, 4)', '(0, 0)'], {'colspan': '(1)'}), '((1, 4), (0, 0), colspan=1)\n', (3508, 3535), True, 'import matplotlib.pyplot as plt\n'), ((3541, 3584), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(1, 4)', '(0, 1)'], {'colspan': '(1)'}), '((1, 4), (0, 1), colspan=1)\n', (3557, 3584), True, 'import matplotlib.pyplot as plt\n'), ((3590, 3633), 'matplotlib.pyplot.subplot2grid', 'plt.subplot2grid', (['(1, 4)', '(0, 2)'], {'colspan': '(1)'}), '((1, 4), (0, 2), colspan=1)\n', (3606, 3633), True, 'import matplotlib.pyplot as plt\n'), ((6241, 6272), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'wspace': '(0.0)'}), '(wspace=0.0)\n', (6260, 6272), True, 'import matplotlib.pyplot as plt\n'), ((6276, 6348), 'os.chdir', 'os.chdir', (['"""C:/Users/<NAME>/Documents/Data/WHI long term record/CO data/"""'], {}), "('C:/Users/<NAME>/Documents/Data/WHI long term record/CO data/')\n", (6284, 6348), False, 'import os\n'), ((6349, 6412), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""BC vs CO - all clusters.png"""'], {'bbox_inches': '"""tight"""'}), "('BC vs CO - all clusters.png', bbox_inches='tight')\n", (6360, 6412), True, 'import matplotlib.pyplot as plt\n'), ((6416, 6426), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6424, 6426), True, 'import matplotlib.pyplot as plt\n'), ((894, 927), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['row[2]'], {}), '(row[2])\n', (919, 
927), False, 'from datetime import datetime\n'), ((1920, 1934), 'numpy.isnan', 'np.isnan', (['varx'], {}), '(varx)\n', (1928, 1934), True, 'import numpy as np\n'), ((1938, 1952), 'numpy.isnan', 'np.isnan', (['vary'], {}), '(vary)\n', (1946, 1952), True, 'import numpy as np\n'), ((2355, 2369), 'numpy.isnan', 'np.isnan', (['varx'], {}), '(varx)\n', (2363, 2369), True, 'import numpy as np\n'), ((2373, 2387), 'numpy.isnan', 'np.isnan', (['vary'], {}), '(vary)\n', (2381, 2387), True, 'import numpy as np\n'), ((2782, 2796), 'numpy.isnan', 'np.isnan', (['varx'], {}), '(varx)\n', (2790, 2796), True, 'import numpy as np\n'), ((2800, 2814), 'numpy.isnan', 'np.isnan', (['vary'], {}), '(vary)\n', (2808, 2814), True, 'import numpy as np\n'), ((3209, 3223), 'numpy.isnan', 'np.isnan', (['varx'], {}), '(varx)\n', (3217, 3223), True, 'import numpy as np\n'), ((3227, 3241), 'numpy.isnan', 'np.isnan', (['vary'], {}), '(vary)\n', (3235, 3241), True, 'import numpy as np\n')]
|
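The four cluster blocks above repeat the same mask-and-regress pattern. A minimal sketch of that pattern factored into one helper, with the same NaN masking and scipy call as the script:

import numpy as np
from scipy import stats

def regress(co, bc):
    x, y = np.array(co), np.array(bc)
    mask = ~np.isnan(x) & ~np.isnan(y)
    slope, intercept, r_value, p_value, std_err = stats.linregress(x[mask], y[mask])
    return slope, intercept, r_value, slope * x + intercept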
# Lint as: python3
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test keras Model training."""
from absl import flags
import numpy as np
from pyiree.tf.support import tf_test_utils
from pyiree.tf.support import tf_utils
from sklearn.preprocessing import PolynomialFeatures
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_string(
"optimizer_name", "sgd",
"optimizer name: sgd, rmsprop, nadam, adamax, adam, adagrad, adadelta")
_DEGREE = 3 # polynomial degree of input feature for regression test
_FEATURE_SIZE = _DEGREE + 1 # input feature size
_BATCH_SIZE = 8 # batch size has to be dynamic TODO(b/142948097)
_INPUT_DATA_SHAPE = [_BATCH_SIZE, _FEATURE_SIZE]
_OUTPUT_DATA_SHAPE = [_BATCH_SIZE, 1]
class ModelTrain(tf.Module):
"""A module for model training."""
@staticmethod
def CreateModule(input_dim=_FEATURE_SIZE, output_dim=1):
"""Creates a module for regression model training.
Args:
input_dim: input dimensionality
output_dim: output dimensionality
Returns:
model for linear regression
"""
tf_utils.set_random_seed()
# build a single layer model
inputs = tf.keras.layers.Input((input_dim))
outputs = tf.keras.layers.Dense(output_dim)(inputs)
model = tf.keras.Model(inputs, outputs)
return ModelTrain(model)
def __init__(self, model):
self.model = model
self.loss = tf.keras.losses.MeanSquaredError()
self.optimizer = tf.keras.optimizers.get(FLAGS.optimizer_name)
@tf.function(input_signature=[
tf.TensorSpec(_INPUT_DATA_SHAPE, tf.float32),
tf.TensorSpec(_OUTPUT_DATA_SHAPE, tf.float32)
])
def train_step(self, inputs, targets):
with tf.GradientTape() as tape:
predictions = self.model(inputs, training=True)
loss_value = self.loss(predictions, targets)
gradients = tape.gradient(loss_value, self.model.trainable_variables)
self.optimizer.apply_gradients(
zip(gradients, self.model.trainable_variables))
return loss_value
class ModelTrainTest(tf_test_utils.TracedModuleTestCase):
def generate_regression_data(self, size=8):
x = np.arange(size) - size // 2
y = 1.0 * x**3 + 1.0 * x**2 + 1.0 * x + np.random.randn(size) * size
return x, y
def test_model_train(self):
# Generate input and output data for regression problem.
inputs, targets = self.generate_regression_data()
# Normalize data.
inputs = inputs / max(inputs)
targets = targets / max(targets)
# Generate polynomial features.
inputs = np.expand_dims(inputs, axis=1)
polynomial = PolynomialFeatures(_DEGREE) # returns: [1, a, b, a^2, ab, b^2]
inputs = polynomial.fit_transform(inputs)
targets = np.expand_dims(targets, axis=1)
def train_step(module):
# Run one iteration of training step.
module.train_step(inputs, targets)
self.compare_backends(train_step, self._modules)
if __name__ == "__main__":
if hasattr(tf, "enable_v2_behavior"):
tf.enable_v2_behavior()
tf_test_utils.compile_tf_module(ModelTrain.CreateModule,
exported_names=["train_step"])
tf.test.main()
|
[
"tensorflow.test.main",
"pyiree.tf.support.tf_test_utils.compile_tf_module",
"tensorflow.keras.losses.MeanSquaredError",
"pyiree.tf.support.tf_utils.set_random_seed",
"tensorflow.keras.layers.Dense",
"tensorflow.enable_v2_behavior",
"numpy.random.randn",
"tensorflow.keras.optimizers.get",
"numpy.expand_dims",
"absl.flags.DEFINE_string",
"tensorflow.keras.Model",
"sklearn.preprocessing.PolynomialFeatures",
"numpy.arange",
"tensorflow.keras.layers.Input",
"tensorflow.TensorSpec",
"tensorflow.GradientTape"
] |
[((852, 972), 'absl.flags.DEFINE_string', 'flags.DEFINE_string', (['"""optimizer_name"""', '"""sgd"""', '"""optimizer name: sgd, rmsprop, nadam, adamax, adam, adagrad, adadelta"""'], {}), "('optimizer_name', 'sgd',\n 'optimizer name: sgd, rmsprop, nadam, adamax, adam, adagrad, adadelta')\n", (871, 972), False, 'from absl import flags\n'), ((3518, 3610), 'pyiree.tf.support.tf_test_utils.compile_tf_module', 'tf_test_utils.compile_tf_module', (['ModelTrain.CreateModule'], {'exported_names': "['train_step']"}), "(ModelTrain.CreateModule, exported_names=[\n 'train_step'])\n", (3549, 3610), False, 'from pyiree.tf.support import tf_test_utils\n'), ((3642, 3656), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (3654, 3656), True, 'import tensorflow as tf\n'), ((1601, 1627), 'pyiree.tf.support.tf_utils.set_random_seed', 'tf_utils.set_random_seed', ([], {}), '()\n', (1625, 1627), False, 'from pyiree.tf.support import tf_utils\n'), ((1675, 1707), 'tensorflow.keras.layers.Input', 'tf.keras.layers.Input', (['input_dim'], {}), '(input_dim)\n', (1696, 1707), True, 'import tensorflow as tf\n'), ((1778, 1809), 'tensorflow.keras.Model', 'tf.keras.Model', (['inputs', 'outputs'], {}), '(inputs, outputs)\n', (1792, 1809), True, 'import tensorflow as tf\n'), ((1908, 1942), 'tensorflow.keras.losses.MeanSquaredError', 'tf.keras.losses.MeanSquaredError', ([], {}), '()\n', (1940, 1942), True, 'import tensorflow as tf\n'), ((1964, 2009), 'tensorflow.keras.optimizers.get', 'tf.keras.optimizers.get', (['FLAGS.optimizer_name'], {}), '(FLAGS.optimizer_name)\n', (1987, 2009), True, 'import tensorflow as tf\n'), ((3046, 3076), 'numpy.expand_dims', 'np.expand_dims', (['inputs'], {'axis': '(1)'}), '(inputs, axis=1)\n', (3060, 3076), True, 'import numpy as np\n'), ((3094, 3121), 'sklearn.preprocessing.PolynomialFeatures', 'PolynomialFeatures', (['_DEGREE'], {}), '(_DEGREE)\n', (3112, 3121), False, 'from sklearn.preprocessing import PolynomialFeatures\n'), ((3219, 3250), 'numpy.expand_dims', 'np.expand_dims', (['targets'], {'axis': '(1)'}), '(targets, axis=1)\n', (3233, 3250), True, 'import numpy as np\n'), ((3492, 3515), 'tensorflow.enable_v2_behavior', 'tf.enable_v2_behavior', ([], {}), '()\n', (3513, 3515), True, 'import tensorflow as tf\n'), ((1724, 1757), 'tensorflow.keras.layers.Dense', 'tf.keras.layers.Dense', (['output_dim'], {}), '(output_dim)\n', (1745, 1757), True, 'import tensorflow as tf\n'), ((2203, 2220), 'tensorflow.GradientTape', 'tf.GradientTape', ([], {}), '()\n', (2218, 2220), True, 'import tensorflow as tf\n'), ((2638, 2653), 'numpy.arange', 'np.arange', (['size'], {}), '(size)\n', (2647, 2653), True, 'import numpy as np\n'), ((2050, 2094), 'tensorflow.TensorSpec', 'tf.TensorSpec', (['_INPUT_DATA_SHAPE', 'tf.float32'], {}), '(_INPUT_DATA_SHAPE, tf.float32)\n', (2063, 2094), True, 'import tensorflow as tf\n'), ((2102, 2147), 'tensorflow.TensorSpec', 'tf.TensorSpec', (['_OUTPUT_DATA_SHAPE', 'tf.float32'], {}), '(_OUTPUT_DATA_SHAPE, tf.float32)\n', (2115, 2147), True, 'import tensorflow as tf\n'), ((2710, 2731), 'numpy.random.randn', 'np.random.randn', (['size'], {}), '(size)\n', (2725, 2731), True, 'import numpy as np\n')]
|
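In the test above, _FEATURE_SIZE is _DEGREE + 1 because PolynomialFeatures applied to a single scalar feature expands it to [1, a, a^2, a^3]; the inline comment '[1, a, b, a^2, ab, b^2]' describes the two-feature case. A quick check:

import numpy as np
from sklearn.preprocessing import PolynomialFeatures

x = np.array([[2.0], [3.0]])  # one feature per sample
print(PolynomialFeatures(3).fit_transform(x))
# [[ 1.  2.  4.  8.]
#  [ 1.  3.  9. 27.]]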
import io
import gc
import traceback
import subprocess
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from fastprogress.fastprogress import master_bar, progress_bar
import torch
import torch.nn as nn
import torch.optim as optim
from torch.utils.data import Dataset, DataLoader
import albumentations as A
from albumentations.pytorch import ToTensorV2
# from copy import deepcopy
"""
Models
"""
class BaseModel(nn.Module):
def __init__(self, model):
super().__init__()
        if isinstance(model, str):
            # rebinding the local name `self` was a no-op; load the saved
            # wrapper written by save() and take its inner module instead
            self.model = torch.load(model, map_location='cpu').model
        else:
            self.model = model
def forward(self, x):
return self.model(x)
@property
def device(self):
return next(self.parameters()).device
def save(self, filename):
device = self.device
self.to('cpu')
torch.save(self, filename)
self.to(device)
class SegmentationModel(BaseModel):
def step(self, batch, criterion):
img, mask = batch["image"], (batch["mask"] > .5).long()
pred = self(img)
loss = criterion(pred, mask)
return loss
class RegressionModel(BaseModel):
def step(self, batch, criterion):
img, mask = batch["image"], batch["target"]
pred = self(img)
loss = criterion(pred, mask)
return loss
class ClassificationModel(BaseModel):
def step(self, batch, criterion):
img, label = batch["image"], batch["label"]
pred = self(img)
loss = criterion(pred, label)
return loss
"""
Datasets
"""
class BaseDataset(Dataset):
def __len__(self):
return len(self.df) * self.fold
def __getitem__(self, idx):
return self.aug(**self.df.iloc[idx % len(self.df)])
def __repr__(self):
sample = self[0]
print("N =", len(self))
for k, v in sample.items():
print(k, v.dtype, v.shape, sep="\t")
show(sample)
return ""
class SegmentationDataset(BaseDataset):
def __init__(self, df, aug=A.NoOp(), fold=1):
"""
df.iloc[:, 0]: (C, H, W) float32
df.iloc[:, 1]: (H, W) float32
"""
self.df = pd.DataFrame(dict(
image=df.iloc[:, 0],
mask=df.iloc[:, 1]
))
self.aug = A.Compose([
aug,
ToTensorV2()
])
self.fold = fold
class RegressionDataset(BaseDataset):
def __init__(self, df, aug=A.NoOp(), fold=1):
"""
df.iloc[:, 0]: (C, H, W) float32
df.iloc[:, 1]: (C, H, W) float32
"""
self.df = pd.DataFrame(dict(
image=df.iloc[:, 0],
target=df.iloc[:, 1]
))
self.aug = A.Compose([
aug,
ToTensorV2()
], additional_targets=dict(
target="image"
))
self.fold = fold
class ClassificationDataset(BaseDataset):
def __init__(self, df, aug=A.NoOp(), fold=1):
"""
df.iloc[:, 0]: (C, H, W) float32
df.iloc[:, 1]: long
"""
self.df = pd.DataFrame(dict(
image=df.iloc[:, 0],
label=df.iloc[:, 1]
))
self.aug = A.Compose([
aug,
ToTensorV2()
])
self.fold = fold
class ClassificationOnMaskDataset(BaseDataset):
def __init__(self, df, aug=A.NoOp(), fold=1):
"""
df.iloc[:, 0]: (C, H, W) float32
df.iloc[:, 1]: (H, W) float32
df.iloc[:, 2]: long
"""
self.df = pd.DataFrame(dict(
image=df.iloc[:, 0],
mask=df.iloc[:, 1],
label=df.iloc[:, 2]
))
self.aug = A.Compose([
aug,
ToTensorV2()
])
self.fold = fold
"""
Trainer
"""
class Trainer:
def __init__(self, name, model, batch_size, train_set, val_set=None, device=None):
self.name = name
self.model = model
self.batch_size = batch_size
self.train_set = train_set
self.val_set = val_set
self.device = device
self.best_model = None
self.last_model = None
plt.rcParams["figure.facecolor"] = "white"
def __auto_select():
output = subprocess.check_output([
"nvidia-smi", "--format=csv", "--query-gpu=memory.used"
])
df = pd.read_csv(io.BytesIO(output), names=["used_memory"], skiprows=1)
df.used_memory = (
df.used_memory
.apply(lambda used_memory:
int(used_memory[:-4]))
)
return torch.device(f"cuda:{df.used_memory.idxmin()}")
def fit(self, epochs, criterion):
try:
device = self.device if self.device else Trainer.__auto_select()
self.model.to(device)
optimizer = optim.Adam(self.model.parameters())
train_loader = DataLoader(
self.train_set,
shuffle=True,
batch_size=self.batch_size,
num_workers=8
)
if self.val_set:
val_loader = DataLoader(
self.val_set,
shuffle=False,
batch_size=self.batch_size,
num_workers=8
)
mb = master_bar(range(1, epochs + 1))
if self.val_set:
mb.names = ["valid", "train"]
else:
mb.names = ["train"]
train_losses, val_losses = [], []
for epoch in mb:
train_loss, val_loss = 0, 0
x = range(1, epoch+1)
self.model.train()
for batch in progress_bar(train_loader, parent=mb):
batch = {k: v.to(device) for k, v in batch.items()}
loss = self.model.step(batch, criterion)
train_loss += loss.item()
optimizer.zero_grad()
loss.backward()
optimizer.step()
batch = loss = None
train_loss /= len(self.train_set)
train_losses.append(train_loss)
if self.val_set:
self.model.eval()
with torch.no_grad():
for batch in progress_bar(val_loader, parent=mb):
batch = {k: v.to(device) for k, v in batch.items()}
loss = self.model.step(batch, criterion)
val_loss += loss.item()
batch = loss = None
val_loss /= len(self.val_set)
val_losses.append(val_loss)
graphs = [[x, val_losses], [x, train_losses]]
y = np.concatenate((train_losses, val_losses))
else:
graphs = [[x, train_losses]]
y = train_losses
x_margin = 0.2
y_margin = 0.05
x_bounds = [1-x_margin, epoch+x_margin]
y_bounds = [np.min(y)-y_margin, np.max(y)+y_margin]
mb.update_graph(graphs, x_bounds, y_bounds)
                # validation bookkeeping only applies when a val_set was given;
                # otherwise val_losses is empty and min() would raise
                if self.val_set and val_loss <= min(val_losses):
                    self.best_model = f"models/{self.name}_{epoch:04d}.pth"
                    self.model.save(self.best_model)
                    print(self.best_model, val_loss)
                self.last_model = f"models/{self.name}_{epoch:04d}.pth"
                self.model.save(self.last_model)
            print()
            print("last_model:", self.last_model, val_loss if self.val_set else train_loss)
            if self.val_set:
                print("best_model:", self.best_model, min(val_losses))
except:
traceback.print_exc()
finally:
batch = loss = optimizer = None
self.model.cpu()
gc.collect()
torch.cuda.empty_cache()
|
[
"albumentations.pytorch.ToTensorV2",
"albumentations.NoOp",
"io.BytesIO",
"traceback.print_exc",
"torch.utils.data.DataLoader",
"subprocess.check_output",
"torch.load",
"fastprogress.fastprogress.progress_bar",
"torch.save",
"gc.collect",
"numpy.min",
"numpy.max",
"torch.cuda.empty_cache",
"torch.no_grad",
"numpy.concatenate"
] |
[((891, 917), 'torch.save', 'torch.save', (['self', 'filename'], {}), '(self, filename)\n', (901, 917), False, 'import torch\n'), ((2098, 2106), 'albumentations.NoOp', 'A.NoOp', ([], {}), '()\n', (2104, 2106), True, 'import albumentations as A\n'), ((2519, 2527), 'albumentations.NoOp', 'A.NoOp', ([], {}), '()\n', (2525, 2527), True, 'import albumentations as A\n'), ((3004, 3012), 'albumentations.NoOp', 'A.NoOp', ([], {}), '()\n', (3010, 3012), True, 'import albumentations as A\n'), ((3426, 3434), 'albumentations.NoOp', 'A.NoOp', ([], {}), '()\n', (3432, 3434), True, 'import albumentations as A\n'), ((4327, 4413), 'subprocess.check_output', 'subprocess.check_output', (["['nvidia-smi', '--format=csv', '--query-gpu=memory.used']"], {}), "(['nvidia-smi', '--format=csv',\n '--query-gpu=memory.used'])\n", (4350, 4413), False, 'import subprocess\n'), ((562, 599), 'torch.load', 'torch.load', (['model'], {'map_location': '"""cpu"""'}), "(model, map_location='cpu')\n", (572, 599), False, 'import torch\n'), ((4449, 4467), 'io.BytesIO', 'io.BytesIO', (['output'], {}), '(output)\n', (4459, 4467), False, 'import io\n'), ((4968, 5055), 'torch.utils.data.DataLoader', 'DataLoader', (['self.train_set'], {'shuffle': '(True)', 'batch_size': 'self.batch_size', 'num_workers': '(8)'}), '(self.train_set, shuffle=True, batch_size=self.batch_size,\n num_workers=8)\n', (4978, 5055), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((8065, 8077), 'gc.collect', 'gc.collect', ([], {}), '()\n', (8075, 8077), False, 'import gc\n'), ((8090, 8114), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (8112, 8114), False, 'import torch\n'), ((2395, 2407), 'albumentations.pytorch.ToTensorV2', 'ToTensorV2', ([], {}), '()\n', (2405, 2407), False, 'from albumentations.pytorch import ToTensorV2\n'), ((2818, 2830), 'albumentations.pytorch.ToTensorV2', 'ToTensorV2', ([], {}), '()\n', (2828, 2830), False, 'from albumentations.pytorch import ToTensorV2\n'), ((3289, 3301), 'albumentations.pytorch.ToTensorV2', 'ToTensorV2', ([], {}), '()\n', (3299, 3301), False, 'from albumentations.pytorch import ToTensorV2\n'), ((3784, 3796), 'albumentations.pytorch.ToTensorV2', 'ToTensorV2', ([], {}), '()\n', (3794, 3796), False, 'from albumentations.pytorch import ToTensorV2\n'), ((5189, 5275), 'torch.utils.data.DataLoader', 'DataLoader', (['self.val_set'], {'shuffle': '(False)', 'batch_size': 'self.batch_size', 'num_workers': '(8)'}), '(self.val_set, shuffle=False, batch_size=self.batch_size,\n num_workers=8)\n', (5199, 5275), False, 'from torch.utils.data import Dataset, DataLoader\n'), ((5806, 5843), 'fastprogress.fastprogress.progress_bar', 'progress_bar', (['train_loader'], {'parent': 'mb'}), '(train_loader, parent=mb)\n', (5818, 5843), False, 'from fastprogress.fastprogress import master_bar, progress_bar\n'), ((7941, 7962), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (7960, 7962), False, 'import traceback\n'), ((6939, 6981), 'numpy.concatenate', 'np.concatenate', (['(train_losses, val_losses)'], {}), '((train_losses, val_losses))\n', (6953, 6981), True, 'import numpy as np\n'), ((6390, 6405), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (6403, 6405), False, 'import torch\n'), ((6444, 6479), 'fastprogress.fastprogress.progress_bar', 'progress_bar', (['val_loader'], {'parent': 'mb'}), '(val_loader, parent=mb)\n', (6456, 6479), False, 'from fastprogress.fastprogress import master_bar, progress_bar\n'), ((7254, 7263), 'numpy.min', 'np.min', (['y'], {}), '(y)\n', (7260, 7263), True, 
'import numpy as np\n'), ((7274, 7283), 'numpy.max', 'np.max', (['y'], {}), '(y)\n', (7280, 7283), True, 'import numpy as np\n')]
|
"""
This module is used to display the Density of States (DoS).
"""
from futile.Utils import write as safe_print
AU_eV = 27.21138386
class DiracSuperposition():
"""
    Defines a superposition of Dirac deltas which can be used to
plot the density of states
"""
def __init__(self, dos, wgts=[1.0]):
"""
Parameters:
        dos: array containing the density of states per each k-point.
           Should be a rank-2 array (one row per k-point)
wgts: contains the weights of each of the k-points
"""
import numpy as np
self.dos = dos
if isinstance(wgts, float):
self.norm = [wgts]
else:
self.norm = wgts
# set range for this distribution
e_min = 1.e100
e_max = -1.e100
ddos = np.ravel(dos)
if len(ddos) > 0:
e_min = min(e_min, np.min(ddos) - 0.05 *
(np.max(ddos) - np.min(ddos)))
e_max = max(e_max, np.max(ddos) + 0.05 *
(np.max(ddos) - np.min(ddos)))
self.xlim = (e_min, e_max)
def curve(self, xs, sigma, wgts=None):
from numpy import ones
dos_g = 0.0
idos = 0
for norm, dos in zip(self.norm, self.dos):
if wgts is not None:
norms = wgts[idos]*norm
idos += 1
else:
norms = ones(len(dos))*norm
kptcurve = self.peaks(xs, dos, norms, sigma)
dos_g += kptcurve
return xs, dos_g
def peak(self, omega, e, sigma):
"""
        Define whether a peak is a Gaussian or a Lorentzian (temporarily only
        the Gaussian is defined)
"""
import numpy as np
nfac = np.sqrt(2.0*np.pi)
val = np.exp(- (omega - e)**2 / (2.0 * sigma**2))/(nfac*sigma)
return val
def peaks(self, xs, dos, norms, sigma):
"""
Return the array of the whole set of peaks
"""
curve = 0.0
for e, nrm in zip(dos, norms):
curve += self.peak(xs, e, sigma)*nrm
return curve
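# A minimal usage sketch for DiracSuperposition (added for illustration, with
# made-up energies): broaden two eigenvalues with Gaussians of width sigma and
# evaluate the resulting curve on a uniform grid. Wrapped in a function so
# that importing the module stays side-effect free.
def _example_dirac_superposition(sigma=0.05):
    import numpy as np
    # one k-point holding two energies, with unit weight
    dsup = DiracSuperposition(np.array([[-0.5, -0.3]]), wgts=[1.0])
    xs = np.linspace(dsup.xlim[0], dsup.xlim[1], 200)
    # returns the grid and the broadened density of states
    return dsup.curve(xs, sigma)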
def _bandarray_to_data(jspin, bandarrays):
lbl = 'up' if jspin == 0 else 'dw'
kptlists = [[], []]
for orbs in bandarrays:
for ispin, norbs in enumerate(orbs.info):
if norbs == 0 or ispin != jspin:
continue
# energy values
kptlists[0].append(orbs[ispin, :norbs])
# normalization
kptlists[1].append(orbs.kwgt*(1.0-2*ispin))
# print 'kpt',kptlists
return kptlists, lbl
class DoS():
"""
Definition of the density of state class
"""
def __init__(self, bandarrays=None, energies=None, evals=None, units='eV',
label='1', sigma=0.2, npts=2500, fermi_level=None,
norm=1.0, sdos=None, e_min=None, e_max=None):
"""
        Extract a quantity associated with the DoS that can be plotted
"""
import numpy as np
self.ens = []
self.labels = []
self.ef = None
# self.norms=[]
self.npts = npts
if e_min is not None:
self.e_min = e_min
else:
self.e_min = 1.e100
if e_max is not None:
self.e_max = e_max
else:
self.e_max = -1.e100
if bandarrays is not None:
self.append_from_bandarray(bandarrays, label)
if evals is not None:
self.append_from_dict(evals, label)
if energies is not None:
self.append(np.array([energies]), label=label, units=units,
norm=(np.array([norm])
if isinstance(norm, float) else norm))
self.sigma = sigma
self.fermi_level(fermi_level, units=units)
if sdos is not None:
self._embed_sdos(sdos)
def _embed_sdos(self, sdos):
self.sdos = []
for i, xdos in enumerate(sdos):
self.sdos.append({'coord': xdos['coord']})
jdos = 0
for subspin in xdos['dos']:
if len(subspin[0]) == 0:
continue
d = {'doslist': subspin}
try:
self.ens[jdos]['sdos'].append(d)
except KeyError:
self.ens[jdos]['sdos'] = [d]
jdos += 1
def append_from_bandarray(self, bandarrays, label):
"""
Add a new band array to the previous DoS. Important for kpoints DOS
"""
import numpy as np
for jspin in range(2):
kptlists, lbl = _bandarray_to_data(jspin, bandarrays)
self.append(np.array(kptlists[0]), label=label+lbl, units='AU',
norm=np.array(kptlists[1]))
def append_from_dict(self, evals, label):
        "Get the energies from the different flavours given by the dict"
        import numpy as np
evs = [[], []]
ef = None
for ev in evals:
occ = self.get_ev(ev, ['e_occ', 'e_occupied'])
if occ:
ef = max(occ)
vrt = self.get_ev(ev, ['e_vrt', 'e_virt'])
eigen = False
if occ:
eigen = occ
if vrt:
eigen = vrt
if not eigen:
eigen = self.get_ev(ev)
if not occ and not vrt and eigen:
ef = max(eigen)
if not eigen:
continue
for i, e in enumerate(eigen):
if e:
evs[i].append(e)
for i, energs in enumerate(evs):
if len(energs) == 0:
continue
self.append(np.array(energs), label=label,
units='AU', norm=1.0-2.0*i)
if ef:
self.fermi_level(ef, units='AU')
def get_ev(self, ev, keys=None):
"Get the correct list of the energies for this eigenvalue"
res = False
if keys is None:
ener = ev.get('e')
spin = ev.get('s')
if ener and spin == 1:
res = [ener]
elif ener and spin == -1:
res = [None, ener]
else:
for k in keys:
if k in ev:
res = ev[k]
if not isinstance(res, list):
res = [res]
break
return res
def append(self, energies, label=None, units='eV', norm=1.0):
if not isinstance(norm, float) and len(norm) == 0:
return
dos = self.conversion_factor(units)*energies
self.ens.append({'dos': DiracSuperposition(dos, wgts=norm)})
lbl = label if label is not None else str(len(self.labels)+1)
self.labels.append(lbl)
# self.norms.append(norm)
self.range = self._set_range()
def conversion_factor(self, units):
if units == 'AU':
fac = AU_eV
elif units == 'eV':
fac = 1.0
else:
raise ValueError('Unrecognized units ('+units+')')
return fac
def fermi_level(self, fermi_level, units='eV'):
if fermi_level is not None:
self.ef = fermi_level*self.conversion_factor(units)
def _set_range(self, npts=None, e_min=None, e_max=None):
import numpy as np
if npts is None:
npts = self.npts
if e_min is None:
e_min = self.e_min
if e_max is None:
e_max = self.e_max
for dos in self.ens:
mn, mx = dos['dos'].xlim
e_min = min(e_min, mn)
e_max = max(e_max, mx)
return np.arange(e_min, e_max, (e_max-e_min)/npts)
def curve(self, dos, norm, sigma=None):
import numpy as np
if sigma is None:
sigma = self.sigma
nrm = np.sqrt(2.0*np.pi)*sigma/norm
dos_g = []
for e_i in self.range:
if len(dos.shape) == 2:
nkpt = dos.shape[0]
value = 0.0
for ikpt in range(nkpt):
value += np.sum(np.exp(- (e_i - dos[ikpt, :])**2 /
(2.0 * sigma**2))/nrm[ikpt])
else:
value = np.sum(
np.exp(- (e_i - dos[:])**2 / (2.0 * sigma**2))/nrm)
# Append data corresponding to each energy grid
dos_g.append(value)
return np.array(dos_g)
def dump(self, sigma=None):
"For Gnuplot"
if sigma is None:
sigma = self.sigma
# data=[self.curve(dos,norm=self.norms[i],sigma=sigma)
# for i,dos in enumerate(self.ens)]
data = [dos['dos'].curve(self.range, sigma=sigma)[1]
for dos in self.ens]
for i, e in enumerate(self.range):
safe_print(e, ' '.join(map(str, [d[i] for d in data])))
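    # Usage note (an assumption, not from the original docs): the dumped
    # columns can be redirected to a file and plotted with gnuplot, e.g.
    #   python mydos.py > dos.dat
    #   gnuplot> plot 'dos.dat' using 1:2 with lines
    # where column 1 is the energy grid and the following columns hold one
    # broadened DoS per appended label.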
def plot(self, sigma=None, legend=True, xlmin=None, xlmax=None, ylmin=None,
ylmax=None, width=6.4, height=4.8):
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider # , Button, RadioButtons
if sigma is None:
sigma = self.sigma
self.fig, self.ax1 = plt.subplots(figsize=(width, height))
self.plotl = []
for i, dos in enumerate(self.ens):
# self.plotl.append(self.ax1.plot(self.range,self.curve(dos,norm=self.norms[i],sigma=sigma),label=self.labels[i]))
self.plotl.append(self.ax1.plot(
*dos['dos'].curve(self.range, sigma=sigma),
label=self.labels[i]))
if xlmax is not None:
plt.xlim(xmax=xlmax)
if xlmin is not None:
plt.xlim(xmin=xlmin)
if ylmax is not None:
plt.ylim(ymax=ylmax)
if ylmin is not None:
plt.ylim(ymin=ylmin)
plt.xlabel('Energy [eV]', fontsize=18)
plt.ylabel('DoS', fontsize=18)
if self.ef is not None:
plt.axvline(self.ef, color='k', linestyle='--')
# self.ax1.annotate('Fermi level', xy=(self.ef,2),
# xytext=(self.ef, 10),
# arrowprops=dict(facecolor='white', shrink=0.05),
# )
if len(self.labels) > 1 and legend:
plt.legend(loc='best')
axcolor = 'lightgoldenrodyellow'
try:
axsigma = plt.axes([0.2, 0.93, 0.65, 0.03], facecolor=axcolor)
except AttributeError:
axsigma = plt.axes([0.2, 0.93, 0.65, 0.03], axisbg=axcolor)
self.ssig = Slider(axsigma, 'Smearing', 0.0, 0.4, valinit=sigma)
self.ssig.on_changed(self.update)
if hasattr(self, 'sdos') and self.sdos:
self._set_sdos_selector()
self._set_sdos()
plt.show()
def _set_sdos_selector(self):
import matplotlib.pyplot as plt
from matplotlib.widgets import RadioButtons
self.sdos_selector = RadioButtons(
plt.axes([0.93, 0.05, 0.04, 0.11], axisbg='lightgoldenrodyellow'),
('x', 'y', 'z'), active=1)
self.isdos = 1
self.sdos_selector.on_clicked(self._update_sdos)
def _set_sdos(self):
import numpy
xs = self.sdos[self.isdos]['coord']
self._set_sdos_sliders(numpy.min(xs), numpy.max(xs))
self._update_sdos(0.0) # fake value as it is unused
def _sdos_curve(self, sdos, vmin, vmax):
import numpy
xs = self.sdos[self.isdos]['coord']
imin = numpy.argmin(numpy.abs(xs-vmin))
imax = numpy.argmin(numpy.abs(xs-vmax))
doslist = sdos[self.isdos]['doslist']
# norms=self.sdos[self.isdos]['norms'][ispin]
tocurve = [0.0 for i in doslist[imin]]
for d in doslist[imin:imax+1]:
tocurve = [t+dd for t, dd in zip(tocurve, d)]
# tocurve=numpy.sum([ d[ispin] for d in doslist[imin:imax+1]],axis=0)
return tocurve
# float(len(xs))/float(imax+1-imin)*tocurve,norms
def _update_sdos(self, val):
isdos = self.isdos
if val == 'x':
isdos = 0
elif val == 'y':
isdos = 1
elif val == 'z':
isdos = 2
if isdos != self.isdos:
self.isdos = isdos
self._set_sdos()
vmin, vmax = (s.val for s in self.ssdos)
if vmax < vmin:
self.ssdos[1].set_val(vmin)
vmax = vmin
if vmin > vmax:
self.ssdos[0].set_val(vmax)
vmin = vmax
# now plot the sdos curve associated to the given value
sig = self.ssig.val
curves = []
for dos in self.ens:
if 'sdos' not in dos:
continue
renorms = self._sdos_curve(dos['sdos'], vmin, vmax)
curve = dos['dos'].curve(self.range, sigma=sig, wgts=renorms)
curves.append(curve)
if hasattr(self, '_sdos_plots'):
for pl, curve in zip(self._sdos_plots, curves):
pl[0].set_ydata(curve[1])
else:
self._sdos_plots = []
for c in curves:
self._sdos_plots.append(
self.ax1.plot(*c, label='sdos'))
self.ax1.relim()
self.ax1.autoscale_view()
self.fig.canvas.draw_idle()
def _set_sdos_sliders(self, cmin, cmax):
import matplotlib.pyplot as plt
from futile.Figures import VertSlider
if hasattr(self, 'ssdos'):
self.ssdos[0].ax.clear()
self.ssdos[0].__init__(
self.ssdos[0].ax, 'SDos', cmin, cmax, valinit=cmin)
self.ssdos[1].ax.clear()
self.ssdos[1].__init__(self.ssdos[1].ax, '',
cmin, cmax, valinit=cmax)
else:
axcolor = 'red'
axmin = plt.axes([0.93, 0.2, 0.02, 0.65], axisbg=axcolor)
axmax = plt.axes([0.95, 0.2, 0.02, 0.65], axisbg=axcolor)
self.ssdos = [
VertSlider(axmin, 'SDos', cmin, cmax, valinit=cmin),
VertSlider(axmax, '', cmin, cmax, valinit=cmax)]
self.ssdos[0].valtext.set_ha('right')
self.ssdos[1].valtext.set_ha('left')
self.ssdos[0].on_changed(self._update_sdos)
self.ssdos[1].on_changed(self._update_sdos)
def update(self, val):
sig = self.ssig.val
for i, dos in enumerate(self.ens):
self.plotl[i][0].set_ydata(
dos['dos'].curve(self.range, sigma=sig)[1])
# self.plotl[i][0].set_ydata(self.curve(dos,norm=self.norms[i],sigma=sig))
self.ax1.relim()
self.ax1.autoscale_view()
self.fig.canvas.draw_idle()
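# Sanity-check sketch (added for illustration): a single Dirac delta broadened
# by DiracSuperposition.peak is a normalized Gaussian, so its numerical
# integral over a wide enough grid should be close to one.
def _check_peak_normalization(sigma=0.05):
    import numpy as np
    dsup = DiracSuperposition(np.array([[0.0]]))
    xs = np.linspace(-1.0, 1.0, 2001)
    # trapezoidal integration of the unit-area Gaussian peak
    area = np.trapz(dsup.peak(xs, 0.0, sigma), xs)
    return abs(area - 1.0) < 1e-6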
if __name__ == "__main__":
import numpy as np
energies = np.array([-0.815924953235059, -0.803163374736654,
-0.780540200987971, -0.7508806541364,
-0.723626807289917, -0.714924448617026,
-0.710448085701742, -0.68799028016451,
-0.67247569974853, -0.659038909236607,
-0.625396293324399, -0.608009041659988,
-0.565337910777367, -0.561250536074343,
-0.551767438323268, -0.541295070404525,
-0.532326667587434, -0.515961980147107,
-0.474601108285518, -0.473408476151224,
-0.46509070541069, -0.445709086452906,
-0.433874403837837, -0.416121660651406,
-0.407871082254237, -0.406123490618786,
-0.403004188319382, -0.38974739285104,
-0.380837488456638, -0.375163102271681,
-0.375007771592681, -0.367898783582561,
-0.367518948507212, -0.359401585874402,
-0.358189406008502, -0.354517727598174,
-0.334286389724978, -0.332921810616845,
-0.315466259109401, -0.308028853904577,
-0.29864142362141, -0.294024743731349,
-0.292104129933301, -0.285165738729842,
-0.28419932605141, -0.267399999874122,
-0.259487769142101, -0.239899780812716,
-0.224858003804207, -0.20448050758473,
-0.164155133452971, -0.117617164459898,
-0.0717938081884113, -0.0526986239898579,
-0.0346031190163735, -0.0167949342608791,
-0.0135168064347152, -0.0102971895842409,
0.00759271179427191, 0.00974950976249545,
0.010176021051287, 0.0217652761059223,
0.0239924727094222, 0.0413057846713024,
0.0422334333464529, 0.0459150454793617,
0.0517637894860314])
dos = DoS(energies, fermi_level=-0.1)
dos.append(0.2+energies)
dos.dump(sigma=0.01)
dos.plot()
|
[
"numpy.abs",
"numpy.ravel",
"matplotlib.pyplot.axes",
"matplotlib.widgets.Slider",
"numpy.arange",
"numpy.exp",
"matplotlib.pyplot.axvline",
"futile.Figures.VertSlider",
"numpy.max",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.legend",
"numpy.min",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlim",
"numpy.array",
"matplotlib.pyplot.xlabel",
"numpy.sqrt"
] |
[((14705, 16163), 'numpy.array', 'np.array', (['[-0.815924953235059, -0.803163374736654, -0.780540200987971, -\n 0.7508806541364, -0.723626807289917, -0.714924448617026, -\n 0.710448085701742, -0.68799028016451, -0.67247569974853, -\n 0.659038909236607, -0.625396293324399, -0.608009041659988, -\n 0.565337910777367, -0.561250536074343, -0.551767438323268, -\n 0.541295070404525, -0.532326667587434, -0.515961980147107, -\n 0.474601108285518, -0.473408476151224, -0.46509070541069, -\n 0.445709086452906, -0.433874403837837, -0.416121660651406, -\n 0.407871082254237, -0.406123490618786, -0.403004188319382, -\n 0.38974739285104, -0.380837488456638, -0.375163102271681, -\n 0.375007771592681, -0.367898783582561, -0.367518948507212, -\n 0.359401585874402, -0.358189406008502, -0.354517727598174, -\n 0.334286389724978, -0.332921810616845, -0.315466259109401, -\n 0.308028853904577, -0.29864142362141, -0.294024743731349, -\n 0.292104129933301, -0.285165738729842, -0.28419932605141, -\n 0.267399999874122, -0.259487769142101, -0.239899780812716, -\n 0.224858003804207, -0.20448050758473, -0.164155133452971, -\n 0.117617164459898, -0.0717938081884113, -0.0526986239898579, -\n 0.0346031190163735, -0.0167949342608791, -0.0135168064347152, -\n 0.0102971895842409, 0.00759271179427191, 0.00974950976249545, \n 0.010176021051287, 0.0217652761059223, 0.0239924727094222, \n 0.0413057846713024, 0.0422334333464529, 0.0459150454793617, \n 0.0517637894860314]'], {}), '([-0.815924953235059, -0.803163374736654, -0.780540200987971, -\n 0.7508806541364, -0.723626807289917, -0.714924448617026, -\n 0.710448085701742, -0.68799028016451, -0.67247569974853, -\n 0.659038909236607, -0.625396293324399, -0.608009041659988, -\n 0.565337910777367, -0.561250536074343, -0.551767438323268, -\n 0.541295070404525, -0.532326667587434, -0.515961980147107, -\n 0.474601108285518, -0.473408476151224, -0.46509070541069, -\n 0.445709086452906, -0.433874403837837, -0.416121660651406, -\n 0.407871082254237, -0.406123490618786, -0.403004188319382, -\n 0.38974739285104, -0.380837488456638, -0.375163102271681, -\n 0.375007771592681, -0.367898783582561, -0.367518948507212, -\n 0.359401585874402, -0.358189406008502, -0.354517727598174, -\n 0.334286389724978, -0.332921810616845, -0.315466259109401, -\n 0.308028853904577, -0.29864142362141, -0.294024743731349, -\n 0.292104129933301, -0.285165738729842, -0.28419932605141, -\n 0.267399999874122, -0.259487769142101, -0.239899780812716, -\n 0.224858003804207, -0.20448050758473, -0.164155133452971, -\n 0.117617164459898, -0.0717938081884113, -0.0526986239898579, -\n 0.0346031190163735, -0.0167949342608791, -0.0135168064347152, -\n 0.0102971895842409, 0.00759271179427191, 0.00974950976249545, \n 0.010176021051287, 0.0217652761059223, 0.0239924727094222, \n 0.0413057846713024, 0.0422334333464529, 0.0459150454793617, \n 0.0517637894860314])\n', (14713, 16163), True, 'import numpy as np\n'), ((800, 813), 'numpy.ravel', 'np.ravel', (['dos'], {}), '(dos)\n', (808, 813), True, 'import numpy as np\n'), ((1736, 1756), 'numpy.sqrt', 'np.sqrt', (['(2.0 * np.pi)'], {}), '(2.0 * np.pi)\n', (1743, 1756), True, 'import numpy as np\n'), ((7652, 7699), 'numpy.arange', 'np.arange', (['e_min', 'e_max', '((e_max - e_min) / npts)'], {}), '(e_min, e_max, (e_max - e_min) / npts)\n', (7661, 7699), True, 'import numpy as np\n'), ((8432, 8447), 'numpy.array', 'np.array', (['dos_g'], {}), '(dos_g)\n', (8440, 8447), True, 'import numpy as np\n'), ((9211, 9248), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(width, 
height)'}), '(figsize=(width, height))\n', (9223, 9248), True, 'import matplotlib.pyplot as plt\n'), ((9847, 9885), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy [eV]"""'], {'fontsize': '(18)'}), "('Energy [eV]', fontsize=18)\n", (9857, 9885), True, 'import matplotlib.pyplot as plt\n'), ((9894, 9924), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""DoS"""'], {'fontsize': '(18)'}), "('DoS', fontsize=18)\n", (9904, 9924), True, 'import matplotlib.pyplot as plt\n'), ((10537, 10589), 'matplotlib.widgets.Slider', 'Slider', (['axsigma', '"""Smearing"""', '(0.0)', '(0.4)'], {'valinit': 'sigma'}), "(axsigma, 'Smearing', 0.0, 0.4, valinit=sigma)\n", (10543, 10589), False, 'from matplotlib.widgets import Slider\n'), ((10755, 10765), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (10763, 10765), True, 'import matplotlib.pyplot as plt\n'), ((1769, 1815), 'numpy.exp', 'np.exp', (['(-(omega - e) ** 2 / (2.0 * sigma ** 2))'], {}), '(-(omega - e) ** 2 / (2.0 * sigma ** 2))\n', (1775, 1815), True, 'import numpy as np\n'), ((9629, 9649), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {'xmax': 'xlmax'}), '(xmax=xlmax)\n', (9637, 9649), True, 'import matplotlib.pyplot as plt\n'), ((9692, 9712), 'matplotlib.pyplot.xlim', 'plt.xlim', ([], {'xmin': 'xlmin'}), '(xmin=xlmin)\n', (9700, 9712), True, 'import matplotlib.pyplot as plt\n'), ((9755, 9775), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {'ymax': 'ylmax'}), '(ymax=ylmax)\n', (9763, 9775), True, 'import matplotlib.pyplot as plt\n'), ((9818, 9838), 'matplotlib.pyplot.ylim', 'plt.ylim', ([], {'ymin': 'ylmin'}), '(ymin=ylmin)\n', (9826, 9838), True, 'import matplotlib.pyplot as plt\n'), ((9969, 10016), 'matplotlib.pyplot.axvline', 'plt.axvline', (['self.ef'], {'color': '"""k"""', 'linestyle': '"""--"""'}), "(self.ef, color='k', linestyle='--')\n", (9980, 10016), True, 'import matplotlib.pyplot as plt\n'), ((10261, 10283), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (10271, 10283), True, 'import matplotlib.pyplot as plt\n'), ((10360, 10412), 'matplotlib.pyplot.axes', 'plt.axes', (['[0.2, 0.93, 0.65, 0.03]'], {'facecolor': 'axcolor'}), '([0.2, 0.93, 0.65, 0.03], facecolor=axcolor)\n', (10368, 10412), True, 'import matplotlib.pyplot as plt\n'), ((10948, 11013), 'matplotlib.pyplot.axes', 'plt.axes', (['[0.93, 0.05, 0.04, 0.11]'], {'axisbg': '"""lightgoldenrodyellow"""'}), "([0.93, 0.05, 0.04, 0.11], axisbg='lightgoldenrodyellow')\n", (10956, 11013), True, 'import matplotlib.pyplot as plt\n'), ((11256, 11269), 'numpy.min', 'numpy.min', (['xs'], {}), '(xs)\n', (11265, 11269), False, 'import numpy\n'), ((11271, 11284), 'numpy.max', 'numpy.max', (['xs'], {}), '(xs)\n', (11280, 11284), False, 'import numpy\n'), ((11486, 11506), 'numpy.abs', 'numpy.abs', (['(xs - vmin)'], {}), '(xs - vmin)\n', (11495, 11506), False, 'import numpy\n'), ((11534, 11554), 'numpy.abs', 'numpy.abs', (['(xs - vmax)'], {}), '(xs - vmax)\n', (11543, 11554), False, 'import numpy\n'), ((13780, 13829), 'matplotlib.pyplot.axes', 'plt.axes', (['[0.93, 0.2, 0.02, 0.65]'], {'axisbg': 'axcolor'}), '([0.93, 0.2, 0.02, 0.65], axisbg=axcolor)\n', (13788, 13829), True, 'import matplotlib.pyplot as plt\n'), ((13850, 13899), 'matplotlib.pyplot.axes', 'plt.axes', (['[0.95, 0.2, 0.02, 0.65]'], {'axisbg': 'axcolor'}), '([0.95, 0.2, 0.02, 0.65], axisbg=axcolor)\n', (13858, 13899), True, 'import matplotlib.pyplot as plt\n'), ((3555, 3575), 'numpy.array', 'np.array', (['[energies]'], {}), '([energies])\n', (3563, 3575), True, 'import numpy as np\n'), ((4666, 
4687), 'numpy.array', 'np.array', (['kptlists[0]'], {}), '(kptlists[0])\n', (4674, 4687), True, 'import numpy as np\n'), ((5688, 5704), 'numpy.array', 'np.array', (['energs'], {}), '(energs)\n', (5696, 5704), True, 'import numpy as np\n'), ((7839, 7859), 'numpy.sqrt', 'np.sqrt', (['(2.0 * np.pi)'], {}), '(2.0 * np.pi)\n', (7846, 7859), True, 'import numpy as np\n'), ((10466, 10515), 'matplotlib.pyplot.axes', 'plt.axes', (['[0.2, 0.93, 0.65, 0.03]'], {'axisbg': 'axcolor'}), '([0.2, 0.93, 0.65, 0.03], axisbg=axcolor)\n', (10474, 10515), True, 'import matplotlib.pyplot as plt\n'), ((13943, 13994), 'futile.Figures.VertSlider', 'VertSlider', (['axmin', '"""SDos"""', 'cmin', 'cmax'], {'valinit': 'cmin'}), "(axmin, 'SDos', cmin, cmax, valinit=cmin)\n", (13953, 13994), False, 'from futile.Figures import VertSlider\n'), ((14012, 14059), 'futile.Figures.VertSlider', 'VertSlider', (['axmax', '""""""', 'cmin', 'cmax'], {'valinit': 'cmax'}), "(axmax, '', cmin, cmax, valinit=cmax)\n", (14022, 14059), False, 'from futile.Figures import VertSlider\n'), ((871, 883), 'numpy.min', 'np.min', (['ddos'], {}), '(ddos)\n', (877, 883), True, 'import numpy as np\n'), ((979, 991), 'numpy.max', 'np.max', (['ddos'], {}), '(ddos)\n', (985, 991), True, 'import numpy as np\n'), ((4747, 4768), 'numpy.array', 'np.array', (['kptlists[1]'], {}), '(kptlists[1])\n', (4755, 4768), True, 'import numpy as np\n'), ((3633, 3649), 'numpy.array', 'np.array', (['[norm]'], {}), '([norm])\n', (3641, 3649), True, 'import numpy as np\n'), ((8273, 8322), 'numpy.exp', 'np.exp', (['(-(e_i - dos[:]) ** 2 / (2.0 * sigma ** 2))'], {}), '(-(e_i - dos[:]) ** 2 / (2.0 * sigma ** 2))\n', (8279, 8322), True, 'import numpy as np\n'), ((918, 930), 'numpy.max', 'np.max', (['ddos'], {}), '(ddos)\n', (924, 930), True, 'import numpy as np\n'), ((933, 945), 'numpy.min', 'np.min', (['ddos'], {}), '(ddos)\n', (939, 945), True, 'import numpy as np\n'), ((1026, 1038), 'numpy.max', 'np.max', (['ddos'], {}), '(ddos)\n', (1032, 1038), True, 'import numpy as np\n'), ((1041, 1053), 'numpy.min', 'np.min', (['ddos'], {}), '(ddos)\n', (1047, 1053), True, 'import numpy as np\n'), ((8096, 8151), 'numpy.exp', 'np.exp', (['(-(e_i - dos[ikpt, :]) ** 2 / (2.0 * sigma ** 2))'], {}), '(-(e_i - dos[ikpt, :]) ** 2 / (2.0 * sigma ** 2))\n', (8102, 8151), True, 'import numpy as np\n')]
|
from textwrap import dedent
from jumpscale.sals.chatflows.chatflows import chatflow_step
from jumpscale.packages.vdc_dashboard.sals.solutions_chatflow import SolutionsChatflowDeploy
class Publisher(SolutionsChatflowDeploy):
SOLUTION_TYPE = "publishingtools"
HELM_REPO_NAME = "marketplace"
CHART_NAME = "publishingtools"
EXAMPLE_URL = "https://github.com/threefoldfoundation/info_gridmanual"
DOC_URL = "https://now.threefold.io/now/docs/publishing-tool/#repository-examples"
title = "Publisher"
steps = [
"init_chatflow",
"get_release_name",
"choose_flavor",
"set_config",
"create_subdomain",
"install_chart",
"initializing",
"success",
]
def get_config(self):
return {
"env.type": self.config.chart_config.site_type,
"env.url": self.config.chart_config.url,
"env.branch": self.config.chart_config.branch,
"env.srcdir": self.config.chart_config.srcdir,
"ingress.host": self.config.chart_config.domain,
}
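    # Illustrative only (the concrete values below are assumptions, not
    # defaults): for a wiki deployment of the example repository the mapping
    # above would produce something like
    #   {"env.type": "wiki",
    #    "env.url": "https://github.com/threefoldfoundation/info_gridmanual",
    #    "env.branch": "main",
    #    "env.srcdir": "src",
    #    "ingress.host": "wiki.example.com"}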
def get_mdconfig_msg(self):
msg = dedent(
f"""\
            A few parameters are needed to be able to publish your content online
- Create a github account
- Fork the following [template repository]({self.EXAMPLE_URL}) and add your content there.
- Copy your forked repository URL to this deployer
            - Select the branch you want to deploy, e.g. main
            - Identify the source directory where the content lives, e.g. src, html...
For more information about repository structure and examples please check [the manual]({self.DOC_URL}).
"""
)
return msg
@chatflow_step(title="Configurations")
def set_config(self):
form = self.new_form()
site_type = form.single_choice(
"Choose the publication type", options=["wiki", "www", "blog"], default="wiki", required=True
)
url = form.string_ask("Repository URL", required=True, is_git_url=True)
branch = form.string_ask("Branch", required=True)
srcdir = form.string_ask("Source directory", required=False, default="")
msg = self.get_mdconfig_msg()
form.ask(msg, md=True)
self.config.chart_config.site_type = site_type.value
self.config.chart_config.url = url.value
self.config.chart_config.branch = branch.value
self.config.chart_config.srcdir = srcdir.value
chat = Publisher
|
[
"textwrap.dedent",
"jumpscale.sals.chatflows.chatflows.chatflow_step"
] |
[((1732, 1769), 'jumpscale.sals.chatflows.chatflows.chatflow_step', 'chatflow_step', ([], {'title': '"""Configurations"""'}), "(title='Configurations')\n", (1745, 1769), False, 'from jumpscale.sals.chatflows.chatflows import chatflow_step\n'), ((1132, 1692), 'textwrap.dedent', 'dedent', (['f""" Few parameters are needed to be able to publish your content online\n - Create a github account\n - Fork the following [template repository]({self.EXAMPLE_URL}) and add your content there.\n - Copy your forked repository URL to this deployer\n - Select the branch you want to deploy, e.g: main\n - Identify which source directory where the content lives in, e.g. src, html...\n\n For more information about repository structure and examples please check [the manual]({self.DOC_URL}).\n """'], {}), '(\n f""" Few parameters are needed to be able to publish your content online\n - Create a github account\n - Fork the following [template repository]({self.EXAMPLE_URL}) and add your content there.\n - Copy your forked repository URL to this deployer\n - Select the branch you want to deploy, e.g: main\n - Identify which source directory where the content lives in, e.g. src, html...\n\n For more information about repository structure and examples please check [the manual]({self.DOC_URL}).\n """\n )\n', (1138, 1692), False, 'from textwrap import dedent\n')]
|
import requests
# EventsManager Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5002/eventsManager/v1.1/internal/')
print('{:30} - {:>20}'.format('EventsManager ', r.json().__str__()))
# END
# Facebook Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5003/facebook/v1.0/internal/')
print('{:30} - {:>20}'.format('Facebook ', r.json().__str__()))
# END
# Geolocation Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5004/geolocation/v1.0/internal/')
print('{:30} - {:>20}'.format('Geolocation ', r.json().__str__()))
# END
# LocationManager Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5005/locationManager/v1.1/internal/')
print('{:30} - {:>20}'.format('LocationManager ', r.json().__str__()))
# END
# ScheduleManager Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5006/scheduleManager/v1.0/internal/')
print('{:30} - {:>20}'.format('ScheduleManager ', r.json().__str__()))
# END
# UserProfiling Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5007/userProfiling/v1.0/internal/')
print('{:30} - {:>20}'.format('UserProfiling ', r.json().__str__()))
# END
# OpenWeather Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5009/Opw/v1.1/internal/')
print('{:30} - {:>20}'.format('OpenWeather ', r.json().__str__()))
# END
# UndergroundWeather Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5010/Underground/v1.1/internal/')
print('{:30} - {:>20}'.format('UndergroundWeather ', r.json().__str__()))
# END
# ChatManager Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5014/chatManager/v1.0/internal/')
print('{:30} - {:>20}'.format('ChatManager ', r.json().__str__()))
# END
# SmsManager Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5016/smsManager/v1.1/internal/')
print('{:30} - {:>20}'.format('SmsManager ', r.json().__str__()))
# END
# PushNotifications Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5017/pushNotification/v1.0/internal/')
print('{:30} - {:>20}'.format('PushNotifications ', r.json().__str__()))
# END
# WeatherProxy Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5008/weatherproxy/v1.0/internal/')
r = requests.post('http://192.168.1.100:5008/weatherproxy/v1.0/', json={'hello': 'world'})
r = requests.post('http://192.168.1.100:5008/weatherproxy/v1.0/', json={'hello': 'world'})
print('{:30} - {:>20}'.format('WeatherProxy ', r.json().__str__()))
# END
# AlarmManager Configuration
# BEGIN
r = requests.get('http://192.168.1.100:5001/alarmManager/v1.0/internal/')
r = requests.post('http://192.168.1.100:5001/alarmManager/v1.0/', json={'hello': 'world'})
r = requests.post('http://192.168.1.100:5001/alarmManager/v1.0/', json={'hello': 'world'})
print('{:30} - {:>20}'.format('AlarmManager ', r.json().__str__()))
# END
# App Server Configuration
# BEGIN
r = requests.post('http://192.168.1.100:5012/proxy/internal/authentication', json={'hello': 'world'})
print('{:30} - {:>20}'.format('App Server Authentication ', r.json().__str__()))
r = requests.post('http://192.168.1.100:5012/proxy/internal/get_credentials', json={'hello': 'world'})
print('{:30} - {:>20}'.format('App Server Credentials ', r.json().__str__()))
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
r = requests.get('http://192.168.1.100:5012/proxy/get_groups', json={'hello': 'world'})
# END
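# Possible refactor (a sketch, not part of the original script): the repeated
# GET/print pairs above can be driven from one table of services. Addresses
# and paths are copied from the calls above; the set is abbreviated here.
def check_services():
    services = {
        'EventsManager': 'http://192.168.1.100:5002/eventsManager/v1.1/internal/',
        'Facebook': 'http://192.168.1.100:5003/facebook/v1.0/internal/',
        'Geolocation': 'http://192.168.1.100:5004/geolocation/v1.0/internal/',
    }
    for name, url in services.items():
        # same request/report pattern as the blocks above
        r = requests.get(url)
        print('{:30} - {:>20}'.format(name, r.json().__str__()))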
|
[
"requests.post",
"requests.get"
] |
[((59, 129), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5002/eventsManager/v1.1/internal/"""'], {}), "('http://192.168.1.100:5002/eventsManager/v1.1/internal/')\n", (71, 129), False, 'import requests\n'), ((244, 309), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5003/facebook/v1.0/internal/"""'], {}), "('http://192.168.1.100:5003/facebook/v1.0/internal/')\n", (256, 309), False, 'import requests\n'), ((421, 489), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5004/geolocation/v1.0/internal/"""'], {}), "('http://192.168.1.100:5004/geolocation/v1.0/internal/')\n", (433, 489), False, 'import requests\n'), ((608, 680), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5005/locationManager/v1.1/internal/"""'], {}), "('http://192.168.1.100:5005/locationManager/v1.1/internal/')\n", (620, 680), False, 'import requests\n'), ((803, 875), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5006/scheduleManager/v1.0/internal/"""'], {}), "('http://192.168.1.100:5006/scheduleManager/v1.0/internal/')\n", (815, 875), False, 'import requests\n'), ((996, 1066), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5007/userProfiling/v1.0/internal/"""'], {}), "('http://192.168.1.100:5007/userProfiling/v1.0/internal/')\n", (1008, 1066), False, 'import requests\n'), ((1183, 1243), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5009/Opw/v1.1/internal/"""'], {}), "('http://192.168.1.100:5009/Opw/v1.1/internal/')\n", (1195, 1243), False, 'import requests\n'), ((1365, 1433), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5010/Underground/v1.1/internal/"""'], {}), "('http://192.168.1.100:5010/Underground/v1.1/internal/')\n", (1377, 1433), False, 'import requests\n'), ((1555, 1623), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5014/chatManager/v1.0/internal/"""'], {}), "('http://192.168.1.100:5014/chatManager/v1.0/internal/')\n", (1567, 1623), False, 'import requests\n'), ((1737, 1804), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5016/smsManager/v1.1/internal/"""'], {}), "('http://192.168.1.100:5016/smsManager/v1.1/internal/')\n", (1749, 1804), False, 'import requests\n'), ((1924, 1997), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5017/pushNotification/v1.0/internal/"""'], {}), "('http://192.168.1.100:5017/pushNotification/v1.0/internal/')\n", (1936, 1997), False, 'import requests\n'), ((2120, 2189), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5008/weatherproxy/v1.0/internal/"""'], {}), "('http://192.168.1.100:5008/weatherproxy/v1.0/internal/')\n", (2132, 2189), False, 'import requests\n'), ((2194, 2284), 'requests.post', 'requests.post', (['"""http://192.168.1.100:5008/weatherproxy/v1.0/"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5008/weatherproxy/v1.0/', json={'hello':\n 'world'})\n", (2207, 2284), False, 'import requests\n'), ((2285, 2375), 'requests.post', 'requests.post', (['"""http://192.168.1.100:5008/weatherproxy/v1.0/"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5008/weatherproxy/v1.0/', json={'hello':\n 'world'})\n", (2298, 2375), False, 'import requests\n'), ((2488, 2557), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5001/alarmManager/v1.0/internal/"""'], {}), "('http://192.168.1.100:5001/alarmManager/v1.0/internal/')\n", (2500, 2557), False, 'import requests\n'), ((2562, 2652), 'requests.post', 'requests.post', (['"""http://192.168.1.100:5001/alarmManager/v1.0/"""'], {'json': "{'hello': 'world'}"}), 
"('http://192.168.1.100:5001/alarmManager/v1.0/', json={'hello':\n 'world'})\n", (2575, 2652), False, 'import requests\n'), ((2653, 2743), 'requests.post', 'requests.post', (['"""http://192.168.1.100:5001/alarmManager/v1.0/"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5001/alarmManager/v1.0/', json={'hello':\n 'world'})\n", (2666, 2743), False, 'import requests\n'), ((2855, 2956), 'requests.post', 'requests.post', (['"""http://192.168.1.100:5012/proxy/internal/authentication"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/internal/authentication',\n json={'hello': 'world'})\n", (2868, 2956), False, 'import requests\n'), ((3039, 3141), 'requests.post', 'requests.post', (['"""http://192.168.1.100:5012/proxy/internal/get_credentials"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/internal/get_credentials',\n json={'hello': 'world'})\n", (3052, 3141), False, 'import requests\n'), ((3221, 3308), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (3233, 3308), False, 'import requests\n'), ((3309, 3396), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (3321, 3396), False, 'import requests\n'), ((3397, 3484), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (3409, 3484), False, 'import requests\n'), ((3485, 3572), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (3497, 3572), False, 'import requests\n'), ((3573, 3660), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (3585, 3660), False, 'import requests\n'), ((3661, 3748), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (3673, 3748), False, 'import requests\n'), ((3749, 3836), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (3761, 3836), False, 'import requests\n'), ((3837, 3924), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (3849, 3924), False, 'import requests\n'), ((3925, 4012), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (3937, 4012), False, 'import requests\n'), ((4013, 4100), 'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (4025, 4100), False, 'import requests\n'), ((4101, 4188), 
'requests.get', 'requests.get', (['"""http://192.168.1.100:5012/proxy/get_groups"""'], {'json': "{'hello': 'world'}"}), "('http://192.168.1.100:5012/proxy/get_groups', json={'hello':\n 'world'})\n", (4113, 4188), False, 'import requests\n')]
|
import pytz
import datetime
import bspump.common
import bspump.unittest
class CustomTimeNormalizer(bspump.common.TimeZoneNormalizer):
def process(self, context, event):
native_time = event["@timestamp"]
local_time = self.normalize(native_time)
return local_time
class TestCustomTimeZoneNormalizer(bspump.unittest.ProcessorTestCase):
def test_timezone_normalizer(self):
events = [(None, { # native time
"@timestamp": datetime.datetime(2000, 12, 30, 23, 59, 59, 59)
})]
self.set_up_processor(CustomTimeNormalizer)
output = self.execute(
events
)
self.assertEqual( # local time represented in UTC
[event for context, event in output],
[datetime.datetime(2000, 12, 30, 22, 59, 59, 59, tzinfo=pytz.UTC)]
)
class TestAbstractTimeZoneNormalizer(bspump.unittest.ProcessorTestCase):
def test_abstract_timezone_normalizer(self):
with self.assertRaises(TypeError):
self.set_up_processor(bspump.common.TimeZoneNormalizer)
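# Illustration of the conversion asserted above (an assumption: the
# processor's default timezone appears to be UTC+1 in winter, e.g.
# Europe/Prague, which maps local 23:59:59 to 22:59:59 UTC):
def _expected_utc(native_time, tz_name="Europe/Prague"):
    # localize the naive datetime, then express it in UTC
    local = pytz.timezone(tz_name).localize(native_time)
    return local.astimezone(pytz.UTC)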
|
[
"datetime.datetime"
] |
[((676, 740), 'datetime.datetime', 'datetime.datetime', (['(2000)', '(12)', '(30)', '(22)', '(59)', '(59)', '(59)'], {'tzinfo': 'pytz.UTC'}), '(2000, 12, 30, 22, 59, 59, 59, tzinfo=pytz.UTC)\n', (693, 740), False, 'import datetime\n'), ((436, 483), 'datetime.datetime', 'datetime.datetime', (['(2000)', '(12)', '(30)', '(23)', '(59)', '(59)', '(59)'], {}), '(2000, 12, 30, 23, 59, 59, 59)\n', (453, 483), False, 'import datetime\n')]
|
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
import shutil
from pre_wigs_validation.enums import ValidationEnforcement, ValidationResult
from pre_wigs_validation.instance import ValidationInstance
from pre_wigs_validation.dataclasses import ValidationOutput
from pre_wigs_validation.utils import check_validation_config
class FreeDiskSpace:
"""Validate that there is enough free disk space on the root volume."""
validation = "Free Disk Space"
enforcement = ValidationEnforcement.REQUIRED
@classmethod
def validate(
cls, *, min_gb: int = 5, enabled: bool = True, instance: ValidationInstance
) -> ValidationOutput:
"""
Parameters:
min_gb (int): the minimum amount of gigabytes in free space to check for
enabled (bool): whether or not to run this validation function
instance (ValidationInstance): the instance object being validated
Returns:
ValidationOutput: output of validation
"""
if not enabled:
return ValidationOutput(
validation=cls.validation,
result=ValidationResult.NOT_RUN,
enforcement=cls.enforcement,
)
pass_message = None
config = check_validation_config(
default_params=cls.validate.__kwdefaults__, local_params=locals()
)
total, used, free = shutil.disk_usage("/")
giga = 2 ** 30
utilization = used / total
diff = min_gb - (free // giga)
pass_message = f"There are {free // giga}gb free, {min_gb}gb are required"
verbose_message = None
if diff <= 0:
if utilization >= 0.85:
verbose_message = (
"Warning, disk utilization seems to be"
f" {round(utilization * 100)}%,"
" we recommend less than 85%"
)
return ValidationOutput(
validation=cls.validation,
result=ValidationResult.PASS,
enforcement=cls.enforcement,
config=config,
message=pass_message,
verbose_message=verbose_message,
)
fail_message = (
f"Please free up about {diff}gb on the root volume, at least"
f" {min_gb}gb are required"
)
return ValidationOutput(
validation=cls.validation,
result=ValidationResult.FAIL,
enforcement=cls.enforcement,
config=config,
message=fail_message,
)
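# Standalone sketch of the core check (added for illustration, outside the
# validation framework): shutil.disk_usage returns a named tuple of
# (total, used, free) byte counts for the filesystem containing `path`.
def free_gigabytes(path="/"):
    total, used, free = shutil.disk_usage(path)
    # integer gigabytes free, matching the `free // giga` arithmetic above
    return free // 2 ** 30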
|
[
"shutil.disk_usage",
"pre_wigs_validation.dataclasses.ValidationOutput"
] |
[((1452, 1474), 'shutil.disk_usage', 'shutil.disk_usage', (['"""/"""'], {}), "('/')\n", (1469, 1474), False, 'import shutil\n'), ((2428, 2571), 'pre_wigs_validation.dataclasses.ValidationOutput', 'ValidationOutput', ([], {'validation': 'cls.validation', 'result': 'ValidationResult.FAIL', 'enforcement': 'cls.enforcement', 'config': 'config', 'message': 'fail_message'}), '(validation=cls.validation, result=ValidationResult.FAIL,\n enforcement=cls.enforcement, config=config, message=fail_message)\n', (2444, 2571), False, 'from pre_wigs_validation.dataclasses import ValidationOutput\n'), ((1095, 1204), 'pre_wigs_validation.dataclasses.ValidationOutput', 'ValidationOutput', ([], {'validation': 'cls.validation', 'result': 'ValidationResult.NOT_RUN', 'enforcement': 'cls.enforcement'}), '(validation=cls.validation, result=ValidationResult.NOT_RUN,\n enforcement=cls.enforcement)\n', (1111, 1204), False, 'from pre_wigs_validation.dataclasses import ValidationOutput\n'), ((1980, 2160), 'pre_wigs_validation.dataclasses.ValidationOutput', 'ValidationOutput', ([], {'validation': 'cls.validation', 'result': 'ValidationResult.PASS', 'enforcement': 'cls.enforcement', 'config': 'config', 'message': 'pass_message', 'verbose_message': 'verbose_message'}), '(validation=cls.validation, result=ValidationResult.PASS,\n enforcement=cls.enforcement, config=config, message=pass_message,\n verbose_message=verbose_message)\n', (1996, 2160), False, 'from pre_wigs_validation.dataclasses import ValidationOutput\n')]
|
#!/usr/bin/python3
import pyspark.sql
import pyspark.sql.functions
import pyspark.sql.types
import time
import datetime
KAFKA_ADDRESS = "localhost:9092"
KAFKA_TOPIC = "Logs"
FILENAME = "apache-access-log.txt"
# Create spark session
sparksession = pyspark.sql.SparkSession.builder.appName("LogAggregator").getOrCreate()
# Setup Schema
data_type = pyspark.sql.types.StructField("value", pyspark.sql.types.StringType())
schema = pyspark.sql.types.StructType([data_type])
# Read in data
log_df = sparksession.read.text(FILENAME)
log_df = log_df.withColumn("key", pyspark.sql.functions.monotonically_increasing_id().cast("string"))
# Parse timestamps. These are used to slow down input to Kafka. If all
# the logs are dumped into Kafka at once, there will be no output in
# the DDoS detector until more data is given. Since we only have
# limited data, it must not be put in all at once.
string_time_col = pyspark.sql.functions.regexp_extract(log_df['value'], r'\[(.*)\]', 1).alias("TimestampString")
log_df = log_df.withColumn("Timestamp", pyspark.sql.functions.to_timestamp(string_time_col, "dd/MMM/yyyy:HH:mm:ss Z"))
# Determine start and end time of the input data
start_time = log_df.select(pyspark.sql.functions.min(log_df['Timestamp']).alias("min")).collect()[0]['min']
end_time = log_df.select(pyspark.sql.functions.max(log_df['Timestamp']).alias("max")).collect()[0]['max']
num_time_steps = (end_time - start_time).seconds + 1
# Here is the loop where we send the logs to Kafka. The first
# iteration of the loop puts in all the logs with the starting
# timestamp. Each subsequent iteration puts in the next second of
# data. The timer is checked to ensure data is not entered too fast.
system_start_time = time.perf_counter()
for i in range(num_time_steps):
current_timestamp = start_time + datetime.timedelta(seconds=i)
# Ensure logs aren't being processed faster than real time:
while((system_start_time + i) > time.perf_counter()):
time.sleep(1)
ds = log_df.where(log_df['Timestamp'] == current_timestamp) \
.write \
.format("kafka") \
.option("kafka.bootstrap.servers", KAFKA_ADDRESS) \
.option("topic", KAFKA_TOPIC) \
.save()
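# The pacing logic above in isolation (a sketch): iteration i may not start
# before i wall-clock seconds have passed since the loop began, so roughly
# one second of log data enters Kafka per real second.
def paced_steps(num_steps):
    start = time.perf_counter()
    for i in range(num_steps):
        # busy-wait (in one-second naps) until real time catches up
        while (start + i) > time.perf_counter():
            time.sleep(1)
        yield i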
|
[
"datetime.timedelta",
"time.perf_counter",
"time.sleep"
] |
[((1720, 1739), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (1737, 1739), False, 'import time\n'), ((1809, 1838), 'datetime.timedelta', 'datetime.timedelta', ([], {'seconds': 'i'}), '(seconds=i)\n', (1827, 1838), False, 'import datetime\n'), ((1944, 1963), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (1961, 1963), False, 'import time\n'), ((1974, 1987), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1984, 1987), False, 'import time\n')]
|
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import vaex.dataset as dataset
import numpy as np
import unittest
import vaex as vx
import tempfile
import vaex.server.tornado_server
import astropy.io.fits
import astropy.units
import pandas as pd
import vaex.execution
import contextlib
a = vaex.execution.buffer_size_default # will crash if we decide to rename it
basedir = os.path.dirname(__file__)
# this will make the test execute more code and may show up bugs
#vaex.execution.buffer_size_default = 3
@contextlib.contextmanager
def small_buffer(ds, size=3):
if ds.is_local():
previous = ds.executor.buffer_size
ds.executor.buffer_size = size
ds._invalidate_caches()
try:
yield
finally:
ds.executor.buffer_size = previous
else:
yield # for remote dfs we don't support this ... or should we?
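# Typical use of small_buffer (as in the tests below): shrinking the executor
# buffer forces evaluation through many small chunks, which exercises the
# chunked code paths, e.g.
#   with small_buffer(ds, size=2):
#       ds.evaluate('x')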
# these need to be global for pickling
def function_upper(x):
return np.array(x.upper())
import vaex.serialize
@vaex.serialize.register
class Multiply:
def __init__(self, scale=0): self.scale = scale
@classmethod
def state_from(cls, state, trusted=True):
return cls(scale=state)
def state_get(self): return self.scale
def __call__(self, x): return x * self.scale
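# Round-trip sketch for the serializable Multiply transformer above:
# state_get() captures the scale and state_from() rebuilds an equivalent
# callable, which is presumably what the @vaex.serialize.register machinery
# relies on when (de)serializing dataset state.
def _multiply_roundtrip():
    m = Multiply(scale=3)
    m2 = Multiply.state_from(m.state_get())
    assert m2(2) == m(2) == 6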
vx.set_log_level_exception()
#vx.set_log_level_off()
#vx.set_log_level_debug()
def from_scalars(**kwargs):
return vx.from_arrays(**{k:np.array([v]) for k, v in kwargs.items()})
class CallbackCounter(object):
def __init__(self, return_value=None):
self.counter = 0
self.return_value = return_value
self.last_args = None
self.last_kwargs = None
def __call__(self, *args, **kwargs):
self.counter += 1
self.last_args = args
self.last_kwargs = kwargs
return self.return_value
class TestDataset(unittest.TestCase):
def setUp(self):
self.dataset = dataset.DatasetArrays("dataset")
# x is non-c
# same as np.arange(10, dtype=">f8")., but with strides == 16, instead of 8
use_filtering = True
if use_filtering:
self.zero_index = 2
self.x = x = np.arange(-2, 40, dtype=">f8").reshape((-1,21)).T.copy()[:,0]
self.y = y = x ** 2
self.ints = np.arange(-2,19, dtype="i8")
self.ints[1] = 2**62+1
self.ints[2] = -2**62+1
self.ints[3] = -2**62-1
self.ints[1+10] = 2**62+1
self.ints[2+10] = -2**62+1
self.ints[3+10] = -2**62-1
self.dataset.add_column("x", x)
self.dataset.add_column("y", y)
mo = m = np.arange(-2, 40, dtype=">f8").reshape((-1,21)).T.copy()[:,0]
ma_value = 77777
m[-1+10+2] = ma_value
m[-1+20] = ma_value
else:
self.x = x = np.arange(20, dtype=">f8").reshape((-1,10)).T.copy()[:,0]
self.y = y = x ** 2
self.ints = np.arange(10, dtype="i8")
self.ints[0] = 2**62+1
self.ints[1] = -2**62+1
self.ints[2] = -2**62-1
self.dataset.add_column("x", x)
self.dataset.add_column("y", y)
m = x.copy()
ma_value = 77777
m[-1] = ma_value
self.m = m = np.ma.array(m, mask=m==ma_value)
self.mi = mi = np.ma.array(m.data.astype(np.int64), mask=m.data==ma_value, fill_value=88888)
self.dataset.add_column("m", m)
self.dataset.add_column("mi", mi)
self.dataset.add_column("ints", self.ints)
self.dataset.set_variable("t", 1.)
self.dataset.add_virtual_column("z", "x+t*y")
self.dataset.units["x"] = astropy.units.Unit("km")
self.dataset.units["y"] = astropy.units.Unit("km/s")
self.dataset.units["t"] = astropy.units.Unit("s")
self.dataset.add_column("f", np.arange(len(self.dataset), dtype=np.float64))
self.dataset.ucds["x"] = "some;ucd"
name = np.array(list(map(lambda x: str(x) + "bla" + ('_' * int(x)), self.x)), dtype='U') #, dtype=np.string_)
self.names = self.dataset.get_column_names()
self.dataset.add_column("name", np.array(name))
self.dataset.add_column("name_arrow", vaex.string_column(name))
if use_filtering:
self.dataset.select('(x >= 0) & (x < 10)', name=vaex.dataset.FILTER_SELECTION_NAME)
self.x = x = self.x[2:12]
self.y = y = self.y[2:12]
self.m = m = self.m[2:12]
self.ints = ints = self.ints[2:12]
# TODO; better virtual and variables support
# TODO: this is a copy since concatenated dfs do not yet support
# global selections
# a 'deep' copy
self.dataset_no_filter = vaex.from_items(*self.dataset.to_items(virtual=False, strings=True))
self.dataset_no_filter.add_virtual_column("z", "x+t*y")
self.dataset_no_filter.set_variable("t", 1.)
#self.jobsManager = dataset.JobsManager()
x = np.array([0., 1])
y = np.array([-1., 1])
self.datasetxy = vx.dataset.DatasetArrays("datasetxy")
self.datasetxy.add_column("x", x)
self.datasetxy.add_column("y", y)
x1 = np.array([1., 3])
x2 = np.array([2., 3, 4,])
x3 = np.array([5.])
self.x_concat = np.concatenate((x1, x2, x3))
dataset1 = vx.dataset.DatasetArrays("dataset1")
dataset2 = vx.dataset.DatasetArrays("dataset2")
dataset3 = vx.dataset.DatasetArrays("dataset3")
dataset1.add_column("x", x1)
dataset2.add_column("x", x2)
dataset3.add_column("x", x3)
dataset3.add_column("y", x3**2)
self.dataset_concat = vx.dataset.DatasetConcatenated([dataset1, dataset2, dataset3], name="dataset_concat")
self.dataset_concat_dup = vx.dataset.DatasetConcatenated([self.dataset_no_filter, self.dataset_no_filter, self.dataset_no_filter], name="dataset_concat_dup")
self.dataset_local = self.dataset
self.datasetxy_local = self.datasetxy
self.dataset_concat_local = self.dataset_concat
self.dataset_concat_dup_local = self.dataset_concat_dup
np.random.seed(0) # fix seed so that test never fails randomly
self.df = self.dataset.to_pandas_df()
def test_function(self):
def multiply(factor=2):
def f(x):
return x*factor
return f
ds = self.dataset
f = ds.add_function('mul2', multiply(2))
ds['x2'] = f(ds.x)
self.assertEqual((self.x * 2).tolist(), ds.evaluate('x2').tolist())
ds.state_get()
def test_apply(self):
ds_copy = self.dataset.copy()
ds = self.dataset
with small_buffer(ds, 2):
upper = ds.apply(function_upper, arguments=[ds['name']])
ds['NAME'] = upper
name = ds.evaluate('NAME')
self.assertEqual(name[0], u'0.0BLA')
ds_copy.state_set(ds.state_get())
name = ds_copy.evaluate('NAME')
self.assertEqual(name[0], u'0.0BLA')
ds['a1'] = ds.apply(lambda x: x+1, arguments=['x'])
ds['a2'] = ds.apply(lambda x: x+2, arguments=['x'])
assert (ds['a1']+1).evaluate().tolist() == ds['a2'].evaluate().tolist()
def test_filter(self):
ds = self.dataset
if ds.is_local(): # remote doesn't have a cache
ds._invalidate_selection_cache()
with small_buffer(ds):
ds1 = ds.copy()
ds1.select(ds1.x > 4, name=vaex.dataset.FILTER_SELECTION_NAME, mode='and')
ds1._invalidate_caches()
ds2 = ds[ds.x > 4]
ds1.x.evaluate()
# self.assertEqual(ds1.x.evaluate().tolist(), ds2.x.evaluate().tolist())
ds2.select(ds.x < 6)
x = ds2.x.evaluate(selection=True)
self.assertEqual(x.tolist(), [5])
# print("=" * 70)
def test_default_selection(self):
ds = self.dataset
ds._invalidate_selection_cache()
with small_buffer(ds):
indices = ds._filtered_range_to_unfiltered_indices(0, 2)
self.assertEqual(indices.tolist(), [self.zero_index+0, self.zero_index+1])
ds = ds[ds.x > 2]
indices = ds._filtered_range_to_unfiltered_indices(0, 2)
assert indices.tolist() == [self.zero_index+3, self.zero_index+4]
x = ds.x.evaluate(0, 2)
indices = ds._filtered_range_to_unfiltered_indices(0, 2)
assert len(x) == 2
assert x[0] == 3
x = ds.x.evaluate(4, 7)
indices = ds._filtered_range_to_unfiltered_indices(4, 7)
assert len(x) == 3
assert x[0] == 3+4
def test_unique(self):
ds = vaex.from_arrays(x=np.array([2,2,1,0,1,1,2]))
with small_buffer(ds):
classes = ds.unique('x')
assert np.sort(classes).tolist() == [0, 1, 2]
def test_amuse(self):
ds = vx.open(os.path.join(basedir, "files", "default_amuse_plummer.hdf5"))
self.assertGreater(len(ds), 0)
self.assertGreater(len(ds.get_column_names()), 0)
self.assertIsNotNone(ds.unit("x"))
self.assertIsNotNone(ds.unit("vx"))
self.assertIsNotNone(ds.unit("mass"))
ds.close_files()
def test_masked_array_output(self):
fn = tempfile.mktemp(".hdf5")
print(fn)
self.dataset.export_hdf5(fn, sort="x")
output = vaex.open(fn)
self.assertEqual(self.dataset.sum("m"), output.sum("m"))
table = self.dataset.to_astropy_table()
fn = tempfile.mktemp(".vot")
print(fn)
from astropy.io.votable import from_table, writeto
votable = from_table(table)
writeto(votable, fn)
output = vaex.open(fn)
self.assertEqual(self.dataset.sum("m"), output.sum("m"))
def test_formats(self):
return # TODO: not workign ATM because of fits + strings
ds_fits = vx.open(os.path.join(basedir, "files", "gaia-small-fits-basic.fits"))
ds_fits_plus = vx.open(os.path.join(basedir, "files", "gaia-small-fits-plus.fits"))
ds_colfits = vx.open(os.path.join(basedir, "files", "gaia-small-colfits-basic.fits"))
ds_colfits_plus = vx.open(os.path.join(basedir, "files", "gaia-small-colfits-plus.fits"))
ds_vot = vx.open(os.path.join(basedir, "files", "gaia-small-votable.vot"))
# FIXME: the votable gives issues
dslist = [ds_fits, ds_fits_plus, ds_colfits, ds_colfits_plus]#, ds_vot]
for ds1 in dslist:
path_hdf5 = tempfile.mktemp(".hdf5")
ds1.export_hdf5(path_hdf5)
ds2 = vx.open(path_hdf5)
diff, missing, types, meta = ds1.compare(ds2)
self.assertEqual(diff, [], "difference between %s and %s" % (ds1.path, ds2.path))
self.assertEqual(missing, [], "missing columns %s and %s" % (ds1.path, ds2.path))
self.assertEqual(meta, [], "meta mismatch between columns %s and %s" % (ds1.path, ds2.path))
path_fits = tempfile.mktemp(".fits")
ds1.export_fits(path_fits)
ds2 = vx.open(path_fits)
diff, missing, types, meta = ds1.compare(ds2)
self.assertEqual(diff, [], "difference between %s and %s" % (ds1.path, ds2.path))
self.assertEqual(missing, [], "missing columns %s and %s" % (ds1.path, ds2.path))
self.assertEqual(meta, [], "meta mismatch between columns %s and %s" % (ds1.path, ds2.path))
if 0:
N = len(dslist)
for i in range(N):
for j in range(i+1, N):
ds1 = dslist[i]
ds2 = dslist[j]
diff, missing, types, meta = ds1.compare(ds2)
self.assertEqual(diff, [], "difference between %s and %s" % (ds1.path, ds2.path))
self.assertEqual(missing, [], "missing columns %s and %s" % (ds1.path, ds2.path))
self.assertEqual(meta, [], "meta mismatch between columns %s and %s" % (ds1.path, ds2.path))
def test_to(self):
def test_equal(ds1, ds2, units=True, ucds=True, description=True, descriptions=True, skip=[]):
if description:
self.assertEqual(ds1.description, ds2.description)
for name in ds1.get_column_names(virtual=False):
if name in skip:
continue
self.assertIn(name, ds2.get_column_names(virtual=False))
np.testing.assert_array_equal(ds1.evaluate(name), ds2.evaluate(name), err_msg='mismatch in ' +name)
if units:
self.assertEqual(ds1.units.get(name), ds2.units.get(name))
if ucds:
self.assertEqual(ds1.ucds.get(name), ds2.ucds.get(name))
if descriptions:
self.assertEqual(ds1.descriptions.get(name), ds2.descriptions.get(name))
# as numpy dict
ds2 = vx.from_arrays(**self.dataset.to_dict())
test_equal(self.dataset, ds2, ucds=False, units=False, description=False, descriptions=False)
# as pandas
ds2 = vx.from_pandas(self.dataset.to_pandas_df())
# skip masked arrays, pandas doesn't understand that, converts it to nan, so we can't compare
test_equal(self.dataset, ds2, ucds=False, units=False, description=False, descriptions=False, skip=['m', 'mi'])
df = self.dataset.to_pandas_df(index_name="name")
ds2 = vx.from_pandas(df, index_name="name", copy_index=True)
test_equal(self.dataset, ds2, ucds=False, units=False, description=False, descriptions=False, skip=['m', 'mi'])
ds2 = vx.from_pandas(self.dataset.to_pandas_df(index_name="name"))
assert "name" not in ds2.get_column_names()
# as astropy table
ds2 = vx.from_astropy_table(self.dataset.to_astropy_table())
test_equal(self.dataset, ds2)
# as arrow table
ds2 = vx.from_arrow_table(self.dataset.to_arrow_table(), as_numpy=False)
test_equal(self.dataset, ds2, ucds=False, units=False, description=False, descriptions=False, )
# return a copy
ds2 = self.dataset.to_copy(virtual=True)
assert "z" not in ds2.columns
assert "z" in ds2.virtual_columns
test_equal(self.dataset, ds2)
def test_add_column(self):
columns = self.dataset.get_column_names()
self.dataset.add_column("x", self.dataset.data.x)
self.assertSequenceEqual(columns, self.dataset.get_column_names())
self.dataset.add_column("extra", self.dataset.data.x)
extra = self.dataset.evaluate("extra")
np.testing.assert_array_almost_equal(extra, self.dataset.data.x[self.zero_index:self.zero_index+10])
with self.assertRaises(ValueError):
self.dataset.add_column("unequal", self.dataset.data.x[:10])
with self.assertRaises(ValueError):
self.dataset.add_column("unequal", self.dataset.data.x[:11])
def test_rename_column(self):
self.dataset.rename("x", "xx")
self.assertNotIn("x", self.dataset.columns)
self.assertNotIn("x", self.dataset.column_names)
self.assertNotIn("x", self.dataset.units)
self.assertNotIn("x", self.dataset.ucds)
self.assertIn("xx", self.dataset.columns)
self.assertIn("xx", self.dataset.column_names)
self.assertIn("xx", self.dataset.units)
self.assertIn("xx", self.dataset.ucds)
def test_csv(self):
separator = ","
fn = tempfile.mktemp(".csv")
#print(fn)
with open(fn, "w") as f:
print(separator.join(["x", "y", "m", "mi", "name", "name_arrow", "ints", "f"]), file=f)
values = [self.dataset.evaluate(k) for k in 'x y m mi name name_arrow ints f'.split()]
for x, y, m, mi, name, name_arrow, i, f_ in zip(*values):#zip(self.x, self.y, self.dataset.data.m, self.dataset.data.mi, self.dataset.data.name, self.dataset.data.ints, self.dataset.data.f):
print(separator.join(map(str, [x, y, m, mi, name, name_arrow, i, f_])), file=f)
ds = vx.from_csv(fn, index_col=False, copy_index=True)
changes = self.dataset.compare(ds, report_difference=True)
diff = changes[0]
#print(diff)
self.assertEqual(changes[0], [], "changes in dataset")
self.assertEqual(changes[1], ['index'], "mssing columns")
def test_ascii(self):
for seperator in " \t,":
for use_header in [True, False]:
#print(">>>", repr(seperator), use_header)
fn = tempfile.mktemp("asc")
with open(fn, "w") as f:
if use_header:
print(seperator.join(["x", "y"]), file=f)
					for x, y in zip(self.x, self.y):
print(seperator.join(map(repr, [x, y])), file=f)
#with open(fn) as f:
# print(f.read())
sep = seperator
if seperator == " ":
sep = None
if use_header:
ds = vx.from_ascii(fn, seperator=sep)
else:
ds = vx.from_ascii(fn, seperator=seperator, names="x y".split())
np.testing.assert_array_almost_equal(ds.data.x, self.x)
np.testing.assert_array_almost_equal(ds.data.y, self.y)
#np.testing.assert_array_equal(ds.data.names, self.dataset.data.name)
#if seperator == ",":
# df = pd.read_csv(fn)
# ds = vx.from_pandas(df)
# np.testing.assert_array_almost_equal(ds.data.x, self.x)
# np.testing.assert_array_almost_equal(ds.data.y, self.y)
#np.testing.assert_array_equal(ds.data.names, self.dataset.data.name)
def tearDown(self):
self.dataset.remove_virtual_meta()
self.dataset_concat.remove_virtual_meta()
self.dataset_concat_dup.remove_virtual_meta()
def test_mixed_endian(self):
x = np.arange(10., dtype=">f8")
y = np.arange(10, dtype="<f8")
ds = vx.from_arrays(x=x, y=y)
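		# statistics should work with big- and little-endian columns mixed in one dataset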
ds.count()
ds.count(binby=["x", "y"])
def test_join(self):
np.random.seed(42)
x = np.arange(10, dtype=np.float64)
indices = np.arange(10)
i = x.astype(np.int64)
np.random.shuffle(indices)
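		# shuffled copy: the join below must match rows on the key 'x', not on row position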
xs = x[indices]
y = x**2
z = ys = y[indices]
names = np.array(list(map(lambda x: str(x) + "bla", self.x)), dtype='S')[indices]
ds = vaex.from_arrays(x=x, y=y)
ds2 = vaex.from_arrays(x=xs, z=ys, i=i, names=names)
ds.join(ds2[['x', 'z', 'i', 'names']], 'x', rsuffix='r', inplace=True)
self.assertEqual(ds.sum('x*y'), np.sum(x*y))
self.assertEqual(ds.sum('x*z'), np.sum(x*y))
self.assertEqual(ds.sum('x*y'), np.sum(x[indices]*z))
self.assertEqual(ds.sum('x*y'), np.sum(x[indices]*z))
# test with incomplete data
ds = vaex.from_arrays(x=x, y=y)
ds2 = vaex.from_arrays(x=xs[:4], z=ys[:4], i=i[:4], names=names[:4])
ds.join(ds2, 'x', rsuffix='r', inplace=True)
self.assertEqual(ds.sum('x*y'), np.sum(x*y))
self.assertEqual(ds.sum('x*z'), np.sum(x[indices][:4]*y[indices][:4]))
# test with incomplete data, but other way around
ds = vaex.from_arrays(x=x[:4], y=y[:4])
ds2 = vaex.from_arrays(x=xs, z=ys, i=i, names=names)
ds.join(ds2, 'x', inplace=True, rsuffix='r')
self.assertEqual(ds.sum('x*y'), np.sum(x[:4]*y[:4]))
self.assertEqual(ds.sum('x*z'), np.sum(x[:4]*y[:4]))
def test_healpix_count(self):
# only test when healpy is present
try:
import healpy as hp
except ImportError:
			self.skipTest("healpy is not installed")
max_order = 6
nside = hp.order2nside(max_order)
npix = hp.nside2npix(nside)
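		# healpix: npix = 12 * nside**2; degrading by one level merges 4 pixels, hence the 4**(max_order - order) scaling below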
healpix = np.arange(npix)
ds = vx.from_arrays(healpix=healpix)
for order in range(max_order):
counts = ds.healpix_count(healpix_expression="healpix", healpix_max_level=max_order, healpix_level=order)
scaling = 4**(max_order-order)
ones = np.ones(npix//scaling) * scaling
np.testing.assert_array_almost_equal(counts, ones)
self.assertEqual(counts.sum(), npix)
def test_uncertainty_propagation(self):
N = 100000
# distance
parallaxes = np.random.normal(1, 0.1, N)
ds_many = vx.from_arrays(parallax=parallaxes)
ds_many.add_virtual_columns_distance_from_parallax("parallax", "distance")
distance_std_est = ds_many.std("distance").item()
ds_1 = vx.from_arrays(parallax=np.array([1.]), parallax_uncertainty=np.array([0.1]))
ds_1.add_virtual_columns_distance_from_parallax("parallax", "distance", "parallax_uncertainty")
distance_std = ds_1.evaluate("distance_uncertainty")[0]
self.assertAlmostEqual(distance_std, distance_std_est,2)
def test_virtual_column_storage(self):
self.dataset.write_meta()
ds = vaex.zeldovich()
ds.write_meta()
def test_add_virtual_columns_cartesian_velocities_to_polar(self):
if 1:
def dfs(x, y, velx, vely):
ds_1 = from_scalars(x=x, y=y, vx=velx, vy=vely, x_e=0.01, y_e=0.02, vx_e=0.03, vy_e=0.04)
ds_1.add_virtual_columns_cartesian_velocities_to_polar(propagate_uncertainties=True)
N = 100000
# distance
x = np.random.normal(x, 0.01, N)
y = np.random.normal(y, 0.02, N)
velx = np.random.normal(velx, 0.03, N)
vely = np.random.normal(vely, 0.04, N)
ds_many = vx.from_arrays(x=x, y=y, vx=vely, vy=vely)
ds_many.add_virtual_columns_cartesian_velocities_to_polar()
return ds_1, ds_many
ds_1, ds_many = dfs(0, 2, 3, 4)
vr_polar_e = ds_1.evaluate("vr_polar_uncertainty")[0]
vphi_polar_e = ds_1.evaluate("vphi_polar_uncertainty")[0]
self.assertAlmostEqual(vr_polar_e, ds_many.std("vr_polar").item(), delta=0.02)
self.assertAlmostEqual(vphi_polar_e, ds_many.std("vphi_polar").item(), delta=0.02)
		# rotation is anti-clockwise
ds_1 = from_scalars(x=0, y=2, vx=0, vy=2)
ds_1.add_virtual_columns_cartesian_velocities_to_polar()
vr_polar = ds_1.evaluate("vr_polar")[0]
vphi_polar = ds_1.evaluate("vphi_polar")[0]
self.assertAlmostEqual(vr_polar, 2)
self.assertAlmostEqual(vphi_polar, 0)
ds_1 = from_scalars(x=0, y=2, vx=-2, vy=0)
ds_1.add_virtual_columns_cartesian_velocities_to_polar()
vr_polar = ds_1.evaluate("vr_polar")[0]
vphi_polar = ds_1.evaluate("vphi_polar")[0]
self.assertAlmostEqual(vr_polar, 0)
self.assertAlmostEqual(vphi_polar, 2)
def test_add_virtual_columns_cartesian_velocities_to_spherical(self):
if 0: # TODO: errors in spherical velocities
pass
def test(vr_expect, vlong_expect, vlat_expect, **kwargs):
ds_1 = from_scalars(**kwargs)
ds_1.add_virtual_columns_cartesian_velocities_to_spherical()
vr, vlong, vlat = ds_1.evaluate("vr")[0], ds_1.evaluate("vlong")[0], ds_1.evaluate("vlat")[0]
self.assertAlmostEqual(vr, vr_expect)
self.assertAlmostEqual(vlong, vlong_expect)
self.assertAlmostEqual(vlat, vlat_expect)
test(0, -1, 0, x=1, y=0, z=0, vx=0, vy=-1, vz=0)
test(0, -1, 0, x=10, y=0, z=0, vx=0, vy=-1, vz=0)
test(0, 0, 1, x=1, y=0, z=0, vx=0, vy= 0, vz=1)
test(1, 0, 0, x=1, y=0, z=0, vx=1, vy= 0, vz=0)
a = 1./np.sqrt(2.)
test(0, 0, 1, x=a, y=0, z=a, vx=-a, vy= 0, vz=a)
def test_add_virtual_columns_cartesian_velocities_to_pmvr(self):
if 0: # TODO: errors in spherical velocities
pass
def test(vr_expect, pm_long_expect, pm_lat_expect, **kwargs):
ds_1 = from_scalars(**kwargs)
ds_1.add_variable("k", 1) # easier for comparison
ds_1.add_virtual_columns_cartesian_velocities_to_pmvr()
vr, pm_long, pm_lat = ds_1.evaluate("vr")[0], ds_1.evaluate("pm_long")[0], ds_1.evaluate("pm_lat")[0]
self.assertAlmostEqual(vr, vr_expect)
self.assertAlmostEqual(pm_long, pm_long_expect)
self.assertAlmostEqual(pm_lat, pm_lat_expect)
test(0, -1, 0, x=1, y=0, z=0, vx=0, vy=-1, vz=0)
test(0, -0.1, 0, x=10, y=0, z=0, vx=0, vy=-1, vz=0)
test(0, 0, 1, x=1, y=0, z=0, vx=0, vy= 0, vz=1)
test(1, 0, 0, x=1, y=0, z=0, vx=1, vy= 0, vz=0)
a = 1./np.sqrt(2.)
test(0, 0, 1, x=a, y=0, z=a, vx=-a, vy= 0, vz=a)
test(0, 0, 1*10, x=a/10, y=0, z=a/10, vx=-a, vy= 0, vz=a)
def test_add_virtual_columns_cartesian_to_polar(self):
for radians in [True, False]:
def dfs(x, y, radians=radians):
ds_1 = from_scalars(x=x, y=y, x_e=0.01, y_e=0.02)
ds_1.add_virtual_columns_cartesian_to_polar(propagate_uncertainties=True, radians=radians)
N = 100000
# distance
x = np.random.normal(x, 0.01, N)
y = np.random.normal(y, 0.02, N)
ds_many = vx.from_arrays(x=x, y=y)
ds_many.add_virtual_columns_cartesian_to_polar(radians=radians)
return ds_1, ds_many
ds_1, ds_many = dfs(0, 2)
r_polar_e = ds_1.evaluate("r_polar_uncertainty")[0]
phi_polar_e = ds_1.evaluate("phi_polar_uncertainty")[0]
self.assertAlmostEqual(r_polar_e, ds_many.std("r_polar").item(), delta=0.02)
self.assertAlmostEqual(phi_polar_e, ds_many.std("phi_polar").item(), delta=0.02)
			# rotation is anti-clockwise
r_polar = ds_1.evaluate("r_polar")[0]
phi_polar = ds_1.evaluate("phi_polar")[0]
self.assertAlmostEqual(r_polar, 2)
self.assertAlmostEqual(phi_polar, np.pi/2 if radians else 90)
def test_add_virtual_columns_proper_motion2vperpendicular(self):
def dfs(distance, pm_l, pm_b):
ds_1 = from_scalars(pm_l=pm_l, pm_b=pm_b, distance=distance, distance_e=0.1, pm_l_e=0.3, pm_b_e=0.4)
ds_1.add_virtual_columns_proper_motion2vperpendicular(propagate_uncertainties=True)
N = 100000
# distance
distance = np.random.normal(0, 0.1, N) + distance
pm_l = np.random.normal(0, 0.3, N) + pm_l
pm_b = np.random.normal(0, 0.4, N) + pm_b
ds_many = vx.from_arrays(pm_l=pm_l, pm_b=pm_b, distance=distance)
ds_many.add_virtual_columns_proper_motion2vperpendicular()
return ds_1, ds_many
ds_1, ds_many = dfs(2, 3, 4)
vl_e = ds_1.evaluate("vl_uncertainty")[0]
vb_e = ds_1.evaluate("vb_uncertainty")[0]
self.assertAlmostEqual(vl_e, ds_many.std("vl").item(), delta=0.02)
self.assertAlmostEqual(vb_e, ds_many.std("vb").item(), delta=0.02)
k = 4.74057
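		# k = 4.74057 km/s is 1 AU/yr expressed in km/s; v = k * distance * proper_motion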
self.assertAlmostEqual(ds_1.evaluate("vl")[0], 2*k*3)
self.assertAlmostEqual(ds_1.evaluate("vb")[0], 2*k*4)
def test_virtual_columns_lbrvr_proper_motion2vcartesian(self):
for radians in [True, False]:
def dfs(l, b, distance, vr, pm_l, pm_b, radians=radians):
ds_1 = from_scalars(l=l, b=b, pm_l=pm_l, pm_b=pm_b, vr=vr, distance=distance, distance_e=0.1, vr_e=0.2, pm_l_e=0.3, pm_b_e=0.4)
ds_1.add_virtual_columns_lbrvr_proper_motion2vcartesian(propagate_uncertainties=True, radians=radians)
N = 100000
# distance
l = np.random.normal(0, 0.1, N) * 0 + l
b = np.random.normal(0, 0.1, N) * 0 + b
distance = np.random.normal(0, 0.1, N) + distance
vr = np.random.normal(0, 0.2, N) + vr
pm_l = np.random.normal(0, 0.3, N) + pm_l
pm_b = np.random.normal(0, 0.4, N) + pm_b
ds_many = vx.from_arrays(l=l, b=b, pm_l=pm_l, pm_b=pm_b, vr=vr, distance=distance)
ds_many.add_virtual_columns_lbrvr_proper_motion2vcartesian(radians=radians)
return ds_1, ds_many
ds_1, ds_many = dfs(0, 0, 1, 1, 2, 3)
vx_e = ds_1.evaluate("vx_uncertainty")[0]
vy_e = ds_1.evaluate("vy_uncertainty")[0]
vz_e = ds_1.evaluate("vz_uncertainty")[0]
self.assertAlmostEqual(vx_e, ds_many.std("vx").item(), delta=0.02)
self.assertAlmostEqual(vy_e, ds_many.std("vy").item(), delta=0.02)
self.assertAlmostEqual(vz_e, ds_many.std("vz").item(), delta=0.02)
self.assertAlmostEqual(vx_e, 0.2,2)
self.assertAlmostEqual(ds_1.evaluate("vx")[0], 1)
k = 4.74057
self.assertAlmostEqual(ds_1.evaluate("vy")[0], k*2)
self.assertAlmostEqual(ds_1.evaluate("vz")[0], k*3)
ds = vx.from_scalars(l=90, b=0, pm_l=-1, pm_b=0, distance=1, vr=0)
ds.add_virtual_columns_lbrvr_proper_motion2vcartesian()
self.assertAlmostEqual(ds.evaluate("vx")[0], k*1)
self.assertAlmostEqual(ds.evaluate("vy")[0], 0)
ds = vx.from_scalars(l=90, b=0, pm_l=-1, pm_b=0, distance=2, vr=0)
ds.add_virtual_columns_lbrvr_proper_motion2vcartesian()
self.assertAlmostEqual(ds.evaluate("vx")[0], k*2)
self.assertAlmostEqual(ds.evaluate("vy")[0], 0)
ds = vx.from_scalars(l=0, b=90, pm_l=0, pm_b=-1, distance=1, vr=0)
ds.add_virtual_columns_lbrvr_proper_motion2vcartesian()
self.assertAlmostEqual(ds.evaluate("vx")[0], k*1)
self.assertAlmostEqual(ds.evaluate("vy")[0], 0)
ds = vx.from_scalars(l=0, b=90, pm_l=0, pm_b=-1, distance=2, vr=0)
ds.add_virtual_columns_lbrvr_proper_motion2vcartesian()
self.assertAlmostEqual(ds.evaluate("vx")[0], k*2)
self.assertAlmostEqual(ds.evaluate("vy")[0], 0)
ds = vx.from_scalars(l=90, b=0, pm_l=0, pm_b=0, distance=1, vr=1)
ds.add_virtual_columns_lbrvr_proper_motion2vcartesian()
self.assertAlmostEqual(ds.evaluate("vx")[0], 0)
self.assertAlmostEqual(ds.evaluate("vy")[0], 1)
ds = vx.from_scalars(l=90, b=0, pm_l=0, pm_b=0, distance=2, vr=1)
ds.add_virtual_columns_lbrvr_proper_motion2vcartesian()
self.assertAlmostEqual(ds.evaluate("vx")[0], 0)
self.assertAlmostEqual(ds.evaluate("vy")[0], 1)
def test_state(self):
mul = Multiply(3)
ds = self.dataset
copy = ds.copy(virtual=False)
statefile = tempfile.mktemp('.json')
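		# selections, virtual columns and registered functions should all survive a state_write/state_load round-trip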
ds.select('x > 5', name='test')
ds.add_virtual_column('xx', 'x**2')
fmul = ds.add_function('fmul', mul)
ds['mul'] = fmul(ds.x)
count = ds.count('x', selection='test')
sum = ds.sum('xx', selection='test')
summul = ds.sum('mul', selection='test')
ds.state_write(statefile)
copy.state_load(statefile)
self.assertEqual(count, copy.count('x', selection='test'))
self.assertEqual(sum, copy.sum('xx', selection='test'))
self.assertEqual(summul, copy.sum('3*x', selection='test'))
self.assertEqual(summul, copy.sum('mul', selection='test'))
def test_strings(self):
# TODO: concatenated dfs with strings of different length
self.assertEqual(["x", "y", "m", "mi", "ints", "f"], self.dataset.get_column_names(virtual=False, strings=False))
names = ["x", "y", "m", "mi", "ints", "f", "name", "name_arrow"]
self.assertEqual(names, self.dataset.get_column_names(strings=True, virtual=False))
if self.dataset.is_local():
# check if strings are exported
path_hdf5 = tempfile.mktemp(".hdf5")
self.dataset.export_hdf5(path_hdf5, virtual=False)
exported_dataset = vx.open(path_hdf5)
self.assertEqual(names, exported_dataset.get_column_names(strings=True))
path_arrow = tempfile.mktemp(".arrow")
self.dataset.export_arrow(path_arrow, virtual=False)
exported_dataset = vx.open(path_arrow)
self.assertEqual(names, exported_dataset.get_column_names(strings=True))
# for fits we do not support arrow like strings
# TODO: get back support for SXX (string/binary) format
self.dataset.drop("name_arrow", inplace=True)
self.dataset.drop("name", inplace=True)
names.remove("name_arrow")
names.remove("name")
path_fits = tempfile.mktemp(".fits")
self.dataset.export_fits(path_fits, virtual=False)
exported_dataset = vx.open(path_fits)
self.assertEqual(names, exported_dataset.get_column_names(strings=True))
path_fits_astropy = tempfile.mktemp(".fits")
with astropy.io.fits.open(path_fits) as fitsfile:
# make sure astropy can read the data
bla = fitsfile[1].data
try:
fitsfile.writeto(path_fits_astropy)
finally:
os.remove(path_fits_astropy)
def histogram_cumulative(self):
self.dataset("x").histogram()
def test_units(self):
assert self.dataset.unit("x") == astropy.units.km
assert self.dataset.unit("y") == astropy.units.km/astropy.units.second
assert self.dataset.unit("t") == astropy.units.second
assert self.dataset.unit("z") == astropy.units.km
assert self.dataset.unit("x+y") == None
def test_dtype(self):
self.assertEqual(self.dataset.data_type("x"), np.dtype(">f8"))
self.assertEqual(self.dataset.data_type("f"), np.float64)
self.assertEqual(self.dataset.data_type("x*f"), np.float64)
def test_byte_size(self):
arrow_size = self.dataset.columns['name_arrow'].nbytes +\
self.dataset.columns['name'].nbytes
self.assertEqual(self.dataset.byte_size(), (8*6 + 2)*len(self.dataset) + arrow_size)
self.dataset.select("x < 1")
self.assertEqual(self.dataset.byte_size(selection=True), 8*6 + 2 + arrow_size)
def test_ucd_find(self):
self.dataset.ucds["x"] = "a;b;c"
self.dataset.ucds["y"] = "b;c;d"
self.dataset.ucds["z"] = "b;c;d"
self.assertEqual(self.dataset.ucd_find("a"), "x")
self.assertEqual(self.dataset.ucd_find("b"), "x")
self.assertEqual(self.dataset.ucd_find("^b"), "y")
self.assertEqual(self.dataset.ucd_find("c"), "x")
self.assertEqual(self.dataset.ucd_find("d"), "y")
self.assertEqual(self.dataset.ucd_find("b;c"), "x")
self.assertEqual(self.dataset.ucd_find("^b;c"), "y")
def test_data_access(self):
assert (all(self.dataset.data.x == self.dataset.columns["x"]))
def test_not_implemented(self):
subspace = vaex.legacy.Subspace(self.dataset, ["x", "y"], self.dataset.executor, False)
with self.assertRaises(NotImplementedError):
subspace.minmax()
with self.assertRaises(NotImplementedError):
subspace.mean()
with self.assertRaises(NotImplementedError):
subspace.var()
with self.assertRaises(NotImplementedError):
subspace.sum()
with self.assertRaises(NotImplementedError):
subspace.histogram([])
with self.assertRaises(NotImplementedError):
subspace.limits_sigma()
def test_length(self):
self.assertEqual(len(self.dataset), 10)
def t_est_length_mask(self):
self.dataset._set_mask(self.dataset.columns['x'] < 5)
self.assertEqual(self.dataset.length(selection=True), 5)
def test_evaluate(self):
for t in [2, 3]:
self.dataset.set_variable("t", t)
x = self.dataset.evaluate("x")
y = self.dataset.evaluate("y")
z = self.dataset.evaluate("z")
z_test = x + t * y
np.testing.assert_array_almost_equal(z, z_test)
x = self.dataset.evaluate("x", selection="x < 4")
self.assertEqual(x.tolist(), x[:4].tolist())
def test_invalid_expression(self):
with self.assertRaises(SyntaxError):
self.dataset.validate_expression("x/")
with self.assertRaises(NameError):
self.dataset.validate_expression("hoeba(x)")
with self.assertRaises(NameError):
self.dataset.validate_expression("x()")
self.dataset.validate_expression("sin(x)+tan(y)")
with self.assertRaises((KeyError, NameError)): # TODO: should we have just one error type?
self.dataset.validate_expression("doesnotexist")
self.dataset.validate_expression("x / y * z + x - x - -x")
self.dataset.validate_expression("x < 0")
self.dataset.validate_expression("x <= 0")
self.dataset.validate_expression("x > 0")
self.dataset.validate_expression("x >= 0")
self.dataset.validate_expression("x == 0")
self.dataset.validate_expression("x != 0")
def test_evaluate_nested(self):
self.dataset.add_virtual_column("z2", "-z")
self.dataset.add_virtual_column("z3", "z+z2")
zeros = self.dataset.evaluate("z3")
np.testing.assert_array_almost_equal(zeros, np.zeros(len(self.dataset)))
def test_count(self):
self.dataset.select("x < 5")
ds = self.dataset[self.dataset.x < 5]
df = self.df[self.df.x < 5]
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=None), 10)
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=True), 5)
np.testing.assert_array_almost_equal(self.dataset.x.count(selection=True), 5)
np.testing.assert_array_almost_equal(self.dataset['x'].count(), 10)
np.testing.assert_array_almost_equal(self.df['x'].count(), 10)
np.testing.assert_array_almost_equal(ds['x'].count(), 5)
np.testing.assert_array_almost_equal(df['x'].count(), 5)
self.dataset.select("x >= 5")
ds = self.dataset[self.dataset.x >= 5]
df = self.df[self.df.x >= 5]
np.testing.assert_array_almost_equal(self.dataset.count("m", selection=None), 9)
np.testing.assert_array_almost_equal(self.dataset.count("m", selection=True), 4)
np.testing.assert_array_almost_equal(self.dataset['m'].count(), 9)
np.testing.assert_array_almost_equal(self.df['m'].count(), 9)
np.testing.assert_array_almost_equal(ds['m'].count(), 4)
np.testing.assert_array_almost_equal(df['m'].count(), 4)
# convert to float
self.dataset_local.columns["x"] = self.dataset_local.columns["x"] * 1.
self.dataset_local.columns["x"][self.zero_index] = np.nan
if self.dataset.is_local():
self.dataset._invalidate_caches()
else:
if hasattr(self, 'webserver1'):
self.webserver1.dfs[0]._invalidate_caches()
self.webserver2.dfs[0]._invalidate_caches()
self.dataset_local._invalidate_caches()
self.df = self.dataset_local.to_pandas_df()
self.dataset.select("x < 5")
ds = self.dataset[self.dataset.x < 5]
df = self.df[self.df.x < 5]
# import pdb
# pdb.set_trace()
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=None), 9)
np.testing.assert_array_almost_equal(self.dataset['x'].count(), 9)
np.testing.assert_array_almost_equal(self.df['x'].count(), 9)
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=True), 4)
np.testing.assert_array_almost_equal(ds['x'].count(), 4)
np.testing.assert_array_almost_equal(df['x'].count(), 4)
np.testing.assert_array_almost_equal(self.dataset.count("y", selection=None), 9) # this is because of the filter x<10
np.testing.assert_array_almost_equal(self.dataset_no_filter.count("y", selection=None), 10)
np.testing.assert_array_almost_equal(self.dataset['y'].count(), 9)
np.testing.assert_array_almost_equal(self.dataset_no_filter['y'].count(), 10)
np.testing.assert_array_almost_equal(self.df['y'].count(), 9)
np.testing.assert_array_almost_equal(self.dataset.count("y", selection=True), 4)
np.testing.assert_array_almost_equal(ds['y'].count(), 4)
np.testing.assert_array_almost_equal(df['y'].count(), 4)
np.testing.assert_array_almost_equal(self.dataset.count(selection=None), 9)
np.testing.assert_array_almost_equal(self.dataset_no_filter.count(selection=None), 10)
np.testing.assert_array_almost_equal(self.dataset.count(), 9)
np.testing.assert_array_almost_equal(self.dataset_no_filter.count(), 10)
#np.testing.assert_array_almost_equal(self.df.count(), 9) # TODO: this is different in pandas
# we modified the data.. so actually this should be 4..
np.testing.assert_array_almost_equal(self.dataset.count(selection=True), 4)
np.testing.assert_array_almost_equal(ds.count(), 4)
np.testing.assert_array_almost_equal(self.dataset.count("*", selection=None), 9)
np.testing.assert_array_almost_equal(self.dataset_no_filter.count("*", selection=None), 10)
np.testing.assert_array_almost_equal(self.dataset.count(), 9)
np.testing.assert_array_almost_equal(self.dataset_no_filter.count(), 10)
np.testing.assert_array_almost_equal(self.dataset.count("*", selection=True), 4)
np.testing.assert_array_almost_equal(ds.count(), 4)
task = self.dataset.count("x", selection=True, delay=True)
self.dataset.execute()
np.testing.assert_array_almost_equal(task.get(), 4)
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=None, binby=["x"], limits=[0, 10], shape=1), [9])
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=True, binby=["x"], limits=[0, 10], shape=1), [4])
np.testing.assert_array_almost_equal(self.dataset.count("*", selection=None, binby=["x"], limits=[0, 10], shape=1), [9])
np.testing.assert_array_almost_equal(self.dataset.count("*", selection=True, binby=["x"], limits=[0, 10], shape=1), [4])
np.testing.assert_array_almost_equal(self.dataset .count("*", selection=None, binby=["y"], limits=[0, 9**2+1], shape=1), [9])
np.testing.assert_array_almost_equal(self.dataset_no_filter.count("*", selection=None, binby=["y"], limits=[0, 9**2+1], shape=1), [10])
np.testing.assert_array_almost_equal(self.dataset.count("*", selection=True, binby=["y"], limits=[0, 9**2+1], shape=1), [4])
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=None, binby=["y"], limits=[0, 9**2+1], shape=1), [9])
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=True, binby=["y"], limits=[0, 9**2+1], shape=1), [4])
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=None, binby=["x"], limits=[0, 10], shape=2), [4, 5])
np.testing.assert_array_almost_equal(self.dataset.count("x", selection=True, binby=["x"], limits=[0, 10], shape=2), [4, 0])
ds = self.dataset
a = ds.count("x", binby="y", limits=[0, 100], shape=2)
ds.select("(y >= 0) & (y < 50)")
b = ds.count("x", selection=True)
ds.select("(y >= 50) & (y < 100)")
c = ds.count("x", selection=True)
np.testing.assert_array_almost_equal(a, [b, c])
ds = self.dataset[(self.dataset.y >= 0) & (self.dataset.y < 50)]
b = ds.count('x')
ds = self.dataset[(self.dataset.y >= 50) & (self.dataset.y < 100)]
c = ds.count('x')
np.testing.assert_array_almost_equal(a, [b, c])
df = self.df[(self.df.y >= 0) & (self.df.y < 50)]
b = df['x'].count()
df = self.df[(self.df.y >= 50) & (self.df.y < 100)]
c = df['x'].count()
np.testing.assert_array_almost_equal(a, [b, c])
def test_cov(self):
# convert to float
x = self.dataset_local.columns["x"][:10] = self.dataset_local.columns["x"][:10] * 1.
y = self.y
def cov(*args):
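			# bias=1: normalize by N (population covariance), matching what vaex computes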
return np.cov(args, bias=1)
self.dataset.select("x < 5")
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=None), cov(x, y))
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=True), cov(x[:5], y[:5]))
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=[False, True]), [cov(x, y), cov(x[:5], y[:5])])
#self.dataset.columns["x"][0] = np.nan
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=None), cov(x, y))
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=True), cov(x[:5], y[:5]))
task = self.dataset.cov("x", "y", selection=True, delay=True)
self.dataset.execute()
np.testing.assert_array_almost_equal(task.get(), cov(x[:5], y[:5]))
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=None, binby=["x"], limits=[0, 10], shape=1), [cov(x, y)])
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=True, binby=["x"], limits=[0, 10], shape=1), [cov(x[:5], y[:5])])
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=None, binby=["y"], limits=[0, 9**2+1], shape=1), [cov(x, y)])
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=True, binby=["y"], limits=[0, 9**2+1], shape=1), [cov(x[:5], y[:5])])
nan22 = [[np.nan, np.nan], [np.nan, np.nan]]
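		# bins containing no selected rows yield an all-nan covariance matrix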
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=None, binby=["x"], limits=[0, 10], shape=2), [cov(x[:5], y[:5]), cov(x[5:], y[5:])])
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=True, binby=["x"], limits=[0, 10], shape=2), [cov(x[:5], y[:5]), nan22])
i = 7
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=None, binby=["y"], limits=[0, 9**2+1], shape=2), [cov(x[:i], y[:i]), cov(x[i:], y[i:])])
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=True, binby=["y"], limits=[0, 9**2+1], shape=2), [cov(x[:5], y[:5]), nan22])
i = 5
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=None, binby=["x"], limits=[0, 10], shape=2), [cov(x[:i], y[:i]), cov(x[i:], y[i:])])
np.testing.assert_array_almost_equal(self.dataset.cov("x", "y", selection=True, binby=["x"], limits=[0, 10], shape=2), [cov(x[:i], y[:i]), nan22])
		# include a 3rd variable
self.dataset.add_virtual_column("z", "x*y")
z = self.dataset.evaluate("z")
np.testing.assert_array_almost_equal(self.dataset.cov(["x", "y", "z"], selection=None), cov(x, y, z))
nan33 = [[np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan], [np.nan, np.nan, np.nan]]
np.testing.assert_array_almost_equal(self.dataset.cov(["x", "y", "z"], selection=None, binby=["x"], limits=[0, 10], shape=2), [cov(x[:5], y[:5], z[:5]), cov(x[5:], y[5:], z[5:])])
np.testing.assert_array_almost_equal(self.dataset.cov(["x", "y", "z"], selection=True, binby=["x"], limits=[0, 10], shape=2), [cov(x[:5], y[:5], z[:5]), nan33])
i = 7
np.testing.assert_array_almost_equal(self.dataset.cov(["x", "y", "z"], selection=None, binby=["y"], limits=[0, 9**2+1], shape=2), [cov(x[:i], y[:i], z[:i]), cov(x[i:], y[i:], z[i:])])
np.testing.assert_array_almost_equal(self.dataset.cov(["x", "y", "z"], selection=True, binby=["y"], limits=[0, 9**2+1], shape=2), [cov(x[:5], y[:5], z[:5]), nan33])
i = 5
np.testing.assert_array_almost_equal(self.dataset.cov(["x", "y", "z"], selection=None, binby=["x"], limits=[0, 10], shape=2), [cov(x[:i], y[:i], z[:i]), cov(x[i:], y[i:], z[i:])])
np.testing.assert_array_almost_equal(self.dataset.cov(["x", "y", "z"], selection=True, binby=["x"], limits=[0, 10], shape=2), [cov(x[:i], y[:i], z[:i]), nan33])
# including nan
n = np.arange(-self.zero_index, 20.-1)
n[self.zero_index+1] = np.nan
self.dataset_local.add_column('n', n)
assert not np.any(np.isnan(self.dataset.cov("x", "n")))
def test_covar(self):
# convert to float
x = self.dataset_local.columns["x"][:10] = self.dataset_local.columns["x"][:10] * 1.
y = self.y
def covar(x, y):
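			# reference value: covariance over the finite (non-nan, non-masked) pairs only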
mask = np.isfinite(x * y)
#w = np.isfinite(x * y) * 1.0
x = x[mask]
y = y[mask]
return np.cov([x, y], bias=1)[1,0]
self.dataset.select("x < 5")
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=None), covar(x, y))
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=True), covar(x[:5], y[:5]))
#self.dataset.columns["x"][0] = np.nan
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=None), covar(x, y))
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=True), covar(x[:5], y[:5]))
task = self.dataset.covar("x", "y", selection=True, delay=True)
self.dataset.execute()
np.testing.assert_array_almost_equal(task.get(), covar(x[:5], y[:5]))
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=None, binby=["x"], limits=[0, 10], shape=1), [covar(x, y)])
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=True, binby=["x"], limits=[0, 10], shape=1), [covar(x[:5], y[:5])])
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=None, binby=["y"], limits=[0, 9**2+1], shape=1), [covar(x, y)])
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=True, binby=["y"], limits=[0, 9**2+1], shape=1), [covar(x[:5], y[:5])])
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=None, binby=["x"], limits=[0, 10], shape=2), [covar(x[:5], y[:5]), covar(x[5:], y[5:])])
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=True, binby=["x"], limits=[0, 10], shape=2), [covar(x[:5], y[:5]), np.nan])
i = 7
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=None, binby=["y"], limits=[0, 9**2+1], shape=2), [covar(x[:i], y[:i]), covar(x[i:], y[i:])])
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=True, binby=["y"], limits=[0, 9**2+1], shape=2), [covar(x[:5], y[:5]), np.nan])
i = 5
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=None, binby=["x"], limits=[0, 10], shape=2), [covar(x[:i], y[:i]), covar(x[i:], y[i:])])
np.testing.assert_array_almost_equal(self.dataset.covar("x", "y", selection=True, binby=["x"], limits=[0, 10], shape=2), [covar(x[:i], y[:i]), np.nan])
def test_percentile(self):
ds = vx.example()
#ds.median_approx('z', binby=['x'], limits=[-10, 10], shape=16)
#ds.median_approx('z', binby=['x', 'y'], limits=[-10, 10], shape=4)
#m = ds.median_approx('z+x/10', binby=['x'], limits=[-10, 10], shape=32, percentile_shape=128*10 , percentile_limits=[-10,10])
		m = ds.median_approx('z+x/10', binby=['x'], limits=[6.875000, 7.500000], shape=1, percentile_shape=128*10, percentile_limits=[-10, 10])
		mc = ds.median_approx("z+x/10", selection='(x > 6.875000) & (x <= 7.500000)', percentile_shape=128*10, percentile_limits=[-10, 10])
#print(m, m[32-5], mc)
print(m, mc)
return
dsodd = vx.from_arrays(x=np.arange(3)) # 0,1,2
dseven = vx.from_arrays(x=np.arange(4)) # 0,1,2,3
self.dataset.select("x < 5")
		o = 0  # 10/30/2.
#x = dsodd.data.x
ds = dsodd
#ds = dseven
x = ds.data.x
print("median", np.median(x))
for offset in [-0.99, -0.5, 0.0]:#[0:1]:
print()
print("offset", offset)
limits = [0+offset, x.max()+1+offset]
print(">>>", ds.percentile_approx("x", selection=None, percentile_limits=limits, percentile_shape=len(x)),)
#np.testing.assert_array_almost_equal(
# ds.percentile_approx("x", selection=None, percentile_limits=limits, percentile_shape=4),
# np.median(x), decimal=2)
#return
np.testing.assert_array_almost_equal(
self.dataset.percentile_approx("x", selection=None, percentile_limits=[0-o, 10-o], percentile_shape=100),
np.median(self.x), decimal=1)
np.testing.assert_array_almost_equal(
self.dataset.percentile_approx("x", selection=None, percentile_limits=[0-o, 10-o], percentile_shape=1000),
np.median(self.x), decimal=2)
np.testing.assert_array_almost_equal(
self.dataset.percentile_approx(["x", "y"], selection=None, percentile_shape=10000),
[np.median(self.x), np.median(self.y)],
decimal=3)
return
np.testing.assert_array_almost_equal(self.dataset.percentile_approx("x", selection=True), np.median(self.x[:5]))
# convert to float
x = self.dataset.columns["x"] = self.dataset.columns["x"] * 1.
y = self.y
self.dataset.columns["x"][0] = np.nan
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=None), np.nansum(x))
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=True), np.nansum(x[:5]))
task = self.dataset.sum("x", selection=True, delay=True)
self.dataset.execute()
np.testing.assert_array_almost_equal(task.get(), np.nansum(x[:5]))
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=None, binby=["x"], limits=[0, 10], shape=1), [np.nansum(x)])
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=True, binby=["x"], limits=[0, 10], shape=1), [np.nansum(x[:5])])
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=None, binby=["y"], limits=[0, 9**2+1], shape=1), [np.nansum(x)])
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=True, binby=["y"], limits=[0, 9**2+1], shape=1), [np.nansum(x[:5])])
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=None, binby=["x"], limits=[0, 10], shape=2), [np.nansum(x[:5]), np.nansum(x[5:])])
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=True, binby=["x"], limits=[0, 10], shape=2), [np.nansum(x[:5]), 0])
i = 7
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=None, binby=["y"], limits=[0, 9**2+1], shape=2), [np.nansum(x[:i]), np.nansum(x[i:])])
np.testing.assert_array_almost_equal(self.dataset.sum("x", selection=True, binby=["y"], limits=[0, 9**2+1], shape=2), [np.nansum(x[:5]), 0])
i = 5
np.testing.assert_array_almost_equal(self.dataset.sum("y", selection=None, binby=["x"], limits=[0, 10], shape=2), [np.nansum(y[:i]), np.nansum(y[i:])])
np.testing.assert_array_almost_equal(self.dataset.sum("y", selection=True, binby=["x"], limits=[0, 10], shape=2), [np.nansum(y[:5]), 0])
def test_concat(self):
dc = self.dataset_concat_dup
self.assertEqual(len(self.dataset_concat_dup), len(self.dataset)*3)
self.assertEqual(self.dataset_concat.get_column_names(), ["x"])
N = len(self.x_concat)
# try out every possible slice
for i1 in range(N-1):
for i2 in range(i1+1,N):
#print "***", i1, i2
a = self.dataset_concat.columns["x"][i1:i2]
b = self.x_concat[i1:i2]
#print a, b
np.testing.assert_array_almost_equal(a, b)
def concat(*types):
arrays = [np.arange(3, dtype=dtype) for dtype in types]
N = len(arrays)
dfs = [vx.dataset.DatasetArrays("dataset-%i" % i) for i in range(N)]
for dataset, array in zip(dfs, arrays):
dataset.add_column("x", array)
dataset_concat = vx.dataset.DatasetConcatenated(dfs, name="dataset_concat")
return dataset_concat
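		# concatenation promotes columns to a common dtype (e.g. float32 + int64 -> float64), as asserted below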
self.assertEqual(concat(np.float32, np.float64).columns["x"].dtype, np.float64)
self.assertEqual(concat(np.float32, np.int64).columns["x"].dtype, np.float64)
self.assertEqual(concat(np.float32, np.byte).columns["x"].dtype, np.float32)
self.assertEqual(concat(np.float64, np.byte, np.int64).columns["x"].dtype, np.float64)
ar1 = np.zeros((10, 2))
		ar2 = np.zeros(20)
arrays = [ar1, ar2]
N = len(arrays)
dfs = [vx.dataset.DatasetArrays("dataset1") for i in range(N)]
for dataset, array in zip(dfs, arrays):
dataset.add_column("x", array)
with self.assertRaises(ValueError):
dataset_concat = vx.dataset.DatasetConcatenated(dfs, name="dataset_concat")
		ar1 = np.zeros(10)
		ar2 = np.zeros(20)
arrays = [ar1, ar2]
N = len(arrays)
dfs = [vx.dataset.DatasetArrays("dataset1") for i in range(N)]
for dataset, array in zip(dfs, arrays):
dataset.add_column("x", array)
dataset_concat = vx.dataset.DatasetConcatenated(dfs, name="dataset_concat")
dataset_concat1 = vx.dataset.DatasetConcatenated(dfs, name="dataset_concat")
dataset_concat2 = vx.dataset.DatasetConcatenated(dfs, name="dataset_concat")
self.assertEqual(len(dataset_concat1.concat(dataset_concat2).dfs), 4)
self.assertEqual(len(dataset_concat1.concat(dfs[0]).dfs), 3)
self.assertEqual(len(dfs[0].concat(dataset_concat1).dfs), 3)
self.assertEqual(len(dfs[0].concat(dfs[0]).dfs), 2)
def test_export_concat(self):
x1 = np.arange(1000, dtype=np.float32)
x2 = np.arange(100, dtype=np.float32)
self.x_concat = np.concatenate((x1, x2))
dataset1 = vx.dataset.DatasetArrays("dataset1")
dataset2 = vx.dataset.DatasetArrays("dataset2")
dataset1.add_column("x", x1)
dataset2.add_column("x", x2)
self.dataset_concat = vx.dataset.DatasetConcatenated([dataset1, dataset2], name="dataset_concat")
path_hdf5 = tempfile.mktemp(".hdf5")
self.dataset_concat.export_hdf5(path_hdf5)
def test_export_sorted(self):
self.dataset.add_column("s", 100-self.dataset.data.x)
path_hdf5 = tempfile.mktemp(".hdf5")
self.dataset.export_hdf5(path_hdf5, sort="s")
ds2 = vaex.open(path_hdf5)
np.testing.assert_array_equal(self.dataset.data.x[self.zero_index:self.zero_index+10], ds2.data.x[::-1])
def test_export_sorted_arrow(self):
self.dataset.add_column("s", 100-self.dataset.data.x)
path_arrow = tempfile.mktemp(".arrow")
self.dataset.export_arrow(path_arrow, sort="s")
ds2 = vaex.open(path_arrow, as_numpy=False)
np.testing.assert_array_equal(self.dataset.data.x[self.zero_index:self.zero_index+10], np.array(ds2.data.x)[::-1])
def test_export(self):
path = path_hdf5 = tempfile.mktemp(".hdf5")
path_fits = tempfile.mktemp(".fits")
path_fits_astropy = tempfile.mktemp(".fits")
#print path
#with self.assertRaises(AssertionError):
# self.dataset.export_hdf5(path, selection=True)
for dataset in [self.dataset_concat_dup, self.dataset]:
#print dataset.virtual_columns
for fraction in [1, 0.5]:
dataset.set_active_fraction(fraction)
dataset.select("x > 3")
dataset.select("x > 2", name="named")
length = len(dataset)
for column_names in [["x", "y", "z"], ["x"], ["y"], ["z"], None]:
for byteorder in "<=>":
for shuffle in [False, True]:
for selection in [False, True, "named"]:
for virtual in [False, True]:
for export in [dataset.export_fits, dataset.export_hdf5, dataset.export_arrow, dataset.export_parquet]: #if byteorder == ">" else [dataset.export_hdf5]:
#print (">>>", dataset, path, column_names, byteorder, shuffle, selection, fraction, dataset.length_unfiltered(), virtual)
#byteorder = "<"
if export == dataset.export_fits and byteorder != ">":
#print("skip", export == dataset.export_fits, byteorder != ">", byteorder)
continue # fits only does big endian
if export == dataset.export_fits and byteorder == ">":
continue # arrow only little endian
if vx.utils.osname == "windows" and export == dataset.export_hdf5 and byteorder == ">":
#print("skip", vx.utils.osname)
continue # TODO: IS this a bug for h5py on win32?, leads to an open file
# same issue on windows for arrow, closing the mmapped file does not help
# for the moment we create a new temp file
path_arrow = tempfile.mktemp(".arrow")
path_parquet = tempfile.mktemp(".parquet")
#print dataset.length_unfiltered()
#print len(dataset)
if export == dataset.export_hdf5:
path = path_hdf5
export(path, column_names=column_names, byteorder=byteorder, shuffle=shuffle, selection=selection, progress=False, virtual=virtual)
elif export == dataset.export_arrow:
path = path_arrow
export(path, column_names=column_names, byteorder=byteorder, shuffle=shuffle, selection=selection, progress=False, virtual=virtual)
elif export == dataset.export_parquet:
path = path_parquet
export(path, column_names=column_names, byteorder=byteorder, shuffle=shuffle, selection=selection, progress=False, virtual=virtual)
else:
path = path_fits
export(path, column_names=column_names, shuffle=shuffle, selection=selection, progress=False, virtual=virtual)
with astropy.io.fits.open(path) as fitsfile:
# make sure astropy can read the data
bla = fitsfile[1].data
try:
fitsfile.writeto(path_fits_astropy)
finally:
os.remove(path_fits_astropy)
if path.endswith('arrow') or path.endswith('parquet'):
compare = vx.open(path, as_numpy=False)
else:
compare = vx.open(path)
if column_names is None:
column_names = ["x", "y", "m", "mi", "ints", "f", "z", "name", "name_arrow"] if virtual else ["x", "y", "m", "mi", "ints", "f", "name", "name_arrow"]
#if not virtual:
# if "z" in column_names:
# column_names.remove("z")
# TODO: does the order matter?
self.assertEqual((compare.get_column_names(strings=True)), (column_names + (["random_index"] if shuffle else [])))
def make_masked(ar):
if export == dataset.export_fits: # for fits the missing values will be filled in with nan
if ar.dtype.kind == "f":
nanmask = np.isnan(ar)
if np.any(nanmask):
ar = np.ma.array(ar, mask=nanmask)
return ar
for column_name in column_names:
#values = dataset.columns[column_name][dataset._index_start:dataset._index_end] if column_name in dataset.get_column_names(virtual=False) else dataset.evaluate(column_name)
values = dataset.evaluate(column_name, filtered=False)
if selection:
values = dataset.evaluate(column_name, array_type="numpy")
mask = dataset.evaluate_selection_mask(selection)#, 0, len(dataset))
if len(values[::]) != len(mask):
													self.fail("selection mask length does not match values length")
# for concatenated columns, we get a plain numpy array copy using [::]
a = np.ma.compressed(make_masked(compare.evaluate(column_name, array_type="numpy")))
b = np.ma.compressed(make_masked(values[::][mask]))
if len(a) != len(b):
													self.fail("compressed arrays differ in length")
self.assertEqual(sorted(a), sorted(b))
else:
values = dataset.evaluate(column_name, array_type="numpy")
if shuffle:
indices = compare.columns["random_index"]
a = np.ma.compressed(make_masked(compare.evaluate(column_name, array_type="numpy")))
b = np.ma.compressed(make_masked(values[::][indices]))
self.assertEqual(sorted(a), sorted(b))
else:
dtype = np.array(compare.columns[column_name]).dtype # we don't want any casting
compare_values = compare.columns[column_name]
if isinstance(compare_values, vaex.column.Column):
compare_values = compare_values.to_numpy()
np.testing.assert_array_equal(compare_values, values[:length].astype(dtype))
compare.close_files()
#os.remove(path)
# self.dataset_concat_dup references self.dataset, so set it's active_fraction to 1 again
dataset.set_active_fraction(1)
path_arrow = tempfile.mktemp(".arrow")
path_hdf5 = tempfile.mktemp(".hdf5")
dataset = self.dataset
dataset.export(path_arrow)
name = "vaex export"
#print(path_fits)
vaex.export.main([name, "--no-progress", "-q", "file", path_arrow, path_hdf5])
		backup = vaex.utils.check_memory_usage
try:
			vaex.utils.check_memory_usage = lambda *args: False
assert vaex.export.main([name, "--no-progress", "-q", "soneira", "--dimension=2", "-m=40", path_hdf5]) == 1
finally:
vaex.utils.check_memory_usage = backup
assert vaex.export.main([name, "--no-progress", "-q", "soneira", "--dimension=2", "-m=20", path_hdf5]) == 0
def test_fraction(self):
counter_selection = CallbackCounter()
counter_current_row = CallbackCounter()
self.dataset.signal_pick.connect(counter_current_row)
self.dataset.signal_selection_changed.connect(counter_selection)
self.dataset.set_active_fraction(1.0) # this shouldn't trigger
self.assertEqual(counter_selection.counter, 0)
self.assertEqual(counter_current_row.counter, 0)
length = len(self.dataset)
self.dataset.set_active_fraction(0.1) # this should trigger
self.assertEqual(counter_selection.counter, 1)
self.assertEqual(counter_current_row.counter, 1)
# test for event and the effect of the length
# the active_fraction only applies to the underlying length, which is 20
self.dataset.set_active_fraction(0.25)
self.assertEqual(counter_selection.counter, 2)
self.assertEqual(counter_current_row.counter, 2)
self.assertEqual(length/2 - self.zero_index, len(self.dataset))
self.dataset.select("x > 5")
self.assertEqual(counter_selection.counter, 3)
self.assertEqual(counter_current_row.counter, 2)
self.assertTrue(self.dataset.has_selection())
self.dataset.set_active_fraction(0.25) # nothing should happen, still the same
self.assertTrue(self.dataset.has_selection())
self.dataset.set_active_fraction(0.4999)
self.assertFalse(self.dataset.has_selection())
self.dataset.set_current_row(1)
self.assertTrue(self.dataset.has_current_row())
self.dataset.set_active_fraction(0.25)
self.assertFalse(self.dataset.has_current_row())
if self.dataset.is_local(): # this part doesn't work for remote dfs
for dataset in [self.dataset, self.dataset_concat]:
dataset.set_active_fraction(1.0)
x = dataset.columns["x"][:] * 1. # make a copy
dataset.set_active_fraction(0.25)
length = len(dataset)
a = x[:length]
b = dataset.columns["x"][:len(dataset)]
np.testing.assert_array_almost_equal(a, b)
self.assertLess(length, dataset.length_original())
# TODO: test if statistics and histogram work on the active_fraction
self.dataset.set_active_fraction(1)
total = self.dataset["x"].sum()
self.dataset.set_active_fraction(0.25)
total_half = self.dataset["x"].sum()
self.assertLess(total_half, total)
limits = [(-100, 100)]
self.dataset.set_active_fraction(1)
total = self.dataset["x"].count(limits=limits).sum()
self.dataset.set_active_fraction(0.25)
total_half = self.dataset["x"].count(limits=limits).sum()
self.assertLess(total_half, total)
def test_current_row(self):
counter_current_row = CallbackCounter()
self.dataset.signal_pick.connect(counter_current_row)
self.dataset.set_current_row(0)
self.assertEqual(counter_current_row.counter, 1)
with self.assertRaises(IndexError):
self.dataset.set_current_row(-1)
with self.assertRaises(IndexError):
self.dataset.set_current_row(len(self.dataset))
def t_not_needed_est_current(self):
for dataset in [self.dataset, self.dataset_concat]:
for i in range(len(dataset)):
dataset.set_current_row(i)
values = dataset("x", "x**2").current()
value = dataset.columns["x"][:][i]
self.assertEqual([value, value**2], values)
def test_dropna(self):
ds = self.dataset
ds.select_non_missing(column_names=['m'])
self.assertEqual(ds.count(selection=True), 9)
ds.select_non_missing(drop_masked=False, column_names=['m'])
self.assertEqual(ds.count(selection=True), 10)
self.dataset_local.data.x[self.zero_index] = np.nan
ds.select_non_missing(column_names=['x'])
self.assertEqual(ds.count(selection=True), 9)
ds.select_non_missing(drop_nan=False, column_names=['x'])
if ds.is_local():
self.assertEqual(ds.count(selection=True), 10)
else:
# TODO: on the server, the filter selection gets re-executed (x < 10)
# causing it to skip the nan anyway, find a good way to test this?
self.assertEqual(ds.count(selection=True), 9)
ds.select_non_missing()
self.assertEqual(ds.count(selection=True), 8)
ds.select_non_missing(drop_masked=False)
self.assertEqual(ds.count(selection=True), 9)
ds.select_non_missing(drop_nan=False)
if ds.is_local():
self.assertEqual(ds.count(selection=True), 9)
else:
# TODO: same as above
self.assertEqual(ds.count(selection=True), 8)
def test_selection_in_handler(self):
self.dataset.select("x > 5")
		# in the handler, we should see that there is no selection
def check(*ignore):
self.assertFalse(self.dataset.has_selection())
self.dataset.signal_selection_changed.connect(check)
self.dataset.select_nothing()
def test_nearest(self):
index, distance, (value,) = self.dataset("x").nearest([3])
self.assertEqual(index, 3 + self.zero_index)
self.assertEqual(distance, 0)
self.assertEqual(value, 3)
index, distance, (value,) = self.dataset("x").nearest([3.7])
self.assertEqual(index, 4 + self.zero_index)
self.assertAlmostEqual(distance, 0.3)
self.assertEqual(value, 4)
self.dataset.select("x > 5")
index, distance, (value,) = self.dataset("x").selected().nearest([3.7])
self.assertEqual(index, 6 + self.zero_index)
self.assertEqual(distance, 2.3)
self.assertEqual(value, 6)
def test_select_circle(self):
# Circular selection
self.dataset.select_circle('x', 'y', 0.5, 0.5, 1, name='circ')
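		# with x = 0..9 and y = x**2 (as this dataset is set up earlier), only (0, 0) and (1, 1) fall inside this circle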
# Assert
np.testing.assert_equal(2, self.dataset.count(selection='circ'))
def test_select_ellipse(self):
		# Ellipse selection
self.dataset.select_ellipse('x', 'y', 3, 10, 2, 15, -10, name='elli')
# Assert
np.testing.assert_equal(3, self.dataset.count(selection='elli'))
# allow multiple python versions on one machine to run the test
import sys
test_port = 29110 + sys.version_info[0] * 10 + sys.version_info[1]
#class A:#class estDatasetRemote(TestDataset):
class TestDatasetRemote(TestDataset):
#class A:
use_websocket = True
@classmethod
def setUpClass(cls):
global test_port
cls.webserver = vaex.server.WebServer(dfs=[], port=test_port, cache_byte_size=0).tornado_server
#print "serving"
cls.webserver.serve_threaded()
#print "getting server object"
scheme = "ws" if cls.use_websocket else "http"
cls.server = vx.server("%s://localhost:%d" % (scheme, test_port))
test_port += 1
@classmethod
def tearDownClass(cls):
cls.server.close()
cls.webserver.stop_serving()
def setUp(self):
# run all tests from TestDataset, but now served at the server
super(TestDatasetRemote, self).setUp()
# for the webserver we don't support filters on top of filters
# so the server always uses the full dataset
# self.dataset_no_filter.name = 'dataset'
# self.dataset = self.dataset_no_filter
self.dataset_local = self.dataset
self.datasetxy_local = self.datasetxy
self.dataset_concat_local = self.dataset_concat
self.dataset_concat_dup_local = self.dataset_concat_dup
dfs = [self.dataset_local, self.datasetxy_local, self.dataset_concat_local, self.dataset_concat_dup_local]
#print "get dfs"
self.webserver.set_dfs(dfs)
dfs = self.server.dfs(as_dict=True)
#print "got it", dfs
self.dataset = dfs["dataset"]
self.datasetxy = dfs["datasetxy"]
self.dataset_concat = dfs["dataset_concat"]
self.dataset_concat_dup = dfs["dataset_concat_dup"]
#print "all done"
def tearDown(self):
TestDataset.tearDown(self)
#print "stop serving"
def test_to(self):
pass # not supported
def test_amuse(self):
pass # no need
def test_ascii(self):
pass # no need
def test_csv(self):
pass # no need
def test_export(self):
pass # we can't export atm
def test_concat(self):
pass # doesn't make sense to test this for remote
def test_data_access(self):
pass
def test_byte_size(self):
pass # we don't know the selection's length for dataset remote..
def test_add_column(self):
		pass # can't add a column to remote objects
def test_rename_column(self):
pass # TODO: we cannot do that now
def test_masked_array_output(self):
pass # cannot test exporting
def test_export_sorted(self):
pass # cannot test exporting
def test_formats(self):
pass # cannot test exporting
def test_default_selection(self):
pass # uses local information
#def test_selection(self):
# pass
#def test_count(self):
# pass
#def test_sum(self):
# pass
#def test_cov(self):
# pass
#def test_correlation(self):
# pass
#def test_covar(self):
# pass
#def test_mean(self):
# pass
#def test_minmax(self):
# pass
#def test_var_and_std(self):
# pass
#def test_limits(self):
# pass
import vaex.distributed  # used by TestDatasetDistributed below
#class A:#class T_estDatasetDistributed(unittest.TestCase):
#class TestDatasetDistributed(unittest.TestCase):
class TestDatasetDistributed(TestDatasetRemote):
use_websocket = False
def setUp(self):
TestDataset.setUp(self)
global test_port
# self.dataset_local = self.dataset = dataset.DatasetArrays("dataset")
self.dataset_local = self.dataset
dfs = [self.dataset]
dfs_copy = [k.copy() for k in dfs] # otherwise we share the selection cache
for ds in dfs_copy:
ds.name = self.dataset_local.name
dfs = [self.dataset]
self.webserver1 = vaex.server.WebServer(dfs=dfs, port=test_port).tornado_server
self.webserver1.serve_threaded()
test_port += 1
self.webserver2 = vaex.server.WebServer(dfs=dfs_copy, port=test_port).tornado_server
self.webserver2.serve_threaded()
test_port += 1
scheme = "ws" if self.use_websocket else "http"
self.server1 = vx.server("%s://localhost:%d" % (scheme, test_port-2))
self.server2 = vx.server("%s://localhost:%d" % (scheme, test_port-1))
test_port += 1
dfs1 = self.server1.dfs(as_dict=True)
dfs2 = self.server2.dfs(as_dict=True)
self.dfs = [dfs1["dataset"], dfs2["dataset"]]
self.dataset = vaex.distributed.DatasetDistributed(self.dfs)
def tearDown(self):
#TestDataset.tearDown(self)
#print "stop serving"
self.webserver1.stop_serving()
self.webserver2.stop_serving()
def test_histogram(self):
#print self.dataset, self.dataset.__call__
#print self.dataset.subspace("x")
#self.dataset_local.set_active_range(5, 10)
counts = self.dataset("x").histogram([[0,10]], size=10)
#import pdb
#pdb.set_trace()
self.assertTrue(all(counts == 1), "counts is %r" % counts)
return
sums = self.dataset("x").histogram([[0,10]], size=10, weight="y")
		assert all(sums == self.y)
self.dataset.select("x < 5")
mask = self.x < 5
counts = self.dataset("x").selected().histogram([[0,10]], size=10)
mod_counts = counts * 1.
mod_counts[~mask] = 0
		assert all(counts == mod_counts)
mod_sums = self.y * 1.
mod_sums[~mask] = 0
sums = self.dataset("x").selected().histogram([[0,10]], size=10, weight="y")
		assert all(sums == mod_sums)
x = np.array([0, 1, 0, 1])
y = np.array([0, 0, 1, 1])
dataset = vx.from_arrays(x=x, y=y)
counts = dataset("x", "y").histogram([[0.,2.], [0.,2.]], size=2)
		assert np.all(counts == 1)
x = np.array([0, 1, 0, 1, 0, 1, 0, 1])
y = np.array([0, 0, 1, 1, 0, 0, 1, 1])
z = np.array([0, 0, 0, 0, 1, 1, 1, 1])
dataset = vx.from_arrays(x=x, y=y, z=z)
counts = dataset("x", "y", "z").histogram([[0.,2.], [0.,2.], [0.,2.]], size=2)
		assert np.all(counts == 1)
x = np.array([0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1])
y = np.array([0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1])
z = np.array([0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1])
w = np.array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1,])
dataset = vx.from_arrays(x=x, y=y, z=z, w=w)
counts = dataset("x", "y", "z", "w").histogram([[0.,2.], [0.,2.], [0.,2.], [0.,2.]], size=2)
		assert np.all(counts == 1)
return
#class TestDatasetRemotePlain(TestDatasetRemote):
# use_websocket = False
"""
class T_stWebServer(unittest.TestCase):
def setUp(self):
self.dataset = dataset.DatasetArrays()
self.x = x = np.arange(10)
self.y = y = x ** 2
self.dataset.add_column("x", x)
self.dataset.add_column("y", y)
self.webserver = vaex.server.WebServer(dfs=[self.dataset], port=test_port).tornado_server
self.webserver.serve_threaded()
self.server = vx.server("http://localhost:%d" % test_port)
self.dataset_remote = self.server.dfs()[0]
def tearDown(self):
self.webserver.stop_serving()
def test_list(self):
dfs = self.server.dfs()
self.assertTrue(len(dfs) == 1)
dataset_remote = dfs[0]
self.assertEqual(dataset_remote.name, self.dataset.name)
self.assertEqual(dataset_remote.get_column_names(), self.dataset.get_column_names())
self.assertEqual(len(dataset_remote), len(self.dataset))
def test_minmax(self):
self.assertEqual(self.dataset_remote("x", "y").minmax().tolist(), self.dataset("x", "y").minmax().tolist())
def test_var(self):
self.assertEqual(self.dataset_remote("x", "y").var().tolist(), self.dataset("x", "y").var().tolist())
def test_histogram(self):
grid1 = self.dataset("x").bounded().gridded(32).grid
grid2 = self.dataset_remote("x").bounded().gridded(32).grid
self.assertEqual(grid1.tolist(), grid2.tolist())
"""
if __name__ == '__main__':
unittest.main()
|
[
"vaex.dataset.select",
"os.remove",
"vaex.dataset.length_original",
"numpy.random.seed",
"numpy.sum",
"vaex.from_scalars",
"vaex.server",
"numpy.ones",
"vaex.set_log_level_exception",
"vaex.dataset.set_current_row",
"numpy.isnan",
"vaex.dataset.DatasetArrays",
"numpy.arange",
"numpy.random.normal",
"vaex.example",
"numpy.testing.assert_array_almost_equal",
"vaex.dataset.DatasetConcatenated",
"os.path.join",
"vaex.open",
"unittest.main",
"vaex.dataset.add_column",
"os.path.dirname",
"healpy.order2nside",
"numpy.isfinite",
"vaex.dataset.export",
"numpy.cov",
"tempfile.mktemp",
"vaex.from_csv",
"numpy.random.shuffle",
"vaex.dataset.evaluate",
"numpy.nansum",
"numpy.median",
"numpy.testing.assert_array_equal",
"vaex.from_ascii",
"vaex.dataset.set_active_fraction",
"numpy.sort",
"vaex.dataset.evaluate_selection_mask",
"numpy.concatenate",
"numpy.all",
"astropy.io.votable.writeto",
"vaex.dataset",
"vaex.from_arrays",
"vaex.from_pandas",
"numpy.dtype",
"numpy.zeros",
"astropy.io.votable.from_table",
"numpy.ma.array",
"healpy.nside2npix",
"numpy.any",
"numpy.array",
"pdb.set_trace",
"numpy.sqrt"
] |
[((399, 424), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (414, 424), False, 'import os\n'), ((1216, 1244), 'vaex.set_log_level_exception', 'vx.set_log_level_exception', ([], {}), '()\n', (1242, 1244), True, 'import vaex as vx\n'), ((71768, 71783), 'unittest.main', 'unittest.main', ([], {}), '()\n', (71781, 71783), False, 'import unittest\n'), ((1783, 1815), 'vaex.dataset.DatasetArrays', 'dataset.DatasetArrays', (['"""dataset"""'], {}), "('dataset')\n", (1804, 1815), True, 'import vaex.dataset as dataset\n'), ((2870, 2904), 'numpy.ma.array', 'np.ma.array', (['m'], {'mask': '(m == ma_value)'}), '(m, mask=m == ma_value)\n', (2881, 2904), True, 'import numpy as np\n'), ((4394, 4412), 'numpy.array', 'np.array', (['[0.0, 1]'], {}), '([0.0, 1])\n', (4402, 4412), True, 'import numpy as np\n'), ((4418, 4437), 'numpy.array', 'np.array', (['[-1.0, 1]'], {}), '([-1.0, 1])\n', (4426, 4437), True, 'import numpy as np\n'), ((4456, 4493), 'vaex.dataset.DatasetArrays', 'vx.dataset.DatasetArrays', (['"""datasetxy"""'], {}), "('datasetxy')\n", (4480, 4493), True, 'import vaex as vx\n'), ((4574, 4592), 'numpy.array', 'np.array', (['[1.0, 3]'], {}), '([1.0, 3])\n', (4582, 4592), True, 'import numpy as np\n'), ((4599, 4620), 'numpy.array', 'np.array', (['[2.0, 3, 4]'], {}), '([2.0, 3, 4])\n', (4607, 4620), True, 'import numpy as np\n'), ((4628, 4643), 'numpy.array', 'np.array', (['[5.0]'], {}), '([5.0])\n', (4636, 4643), True, 'import numpy as np\n'), ((4661, 4689), 'numpy.concatenate', 'np.concatenate', (['(x1, x2, x3)'], {}), '((x1, x2, x3))\n', (4675, 4689), True, 'import numpy as np\n'), ((4704, 4740), 'vaex.dataset.DatasetArrays', 'vx.dataset.DatasetArrays', (['"""dataset1"""'], {}), "('dataset1')\n", (4728, 4740), True, 'import vaex as vx\n'), ((4754, 4790), 'vaex.dataset.DatasetArrays', 'vx.dataset.DatasetArrays', (['"""dataset2"""'], {}), "('dataset2')\n", (4778, 4790), True, 'import vaex as vx\n'), ((4804, 4840), 'vaex.dataset.DatasetArrays', 'vx.dataset.DatasetArrays', (['"""dataset3"""'], {}), "('dataset3')\n", (4828, 4840), True, 'import vaex as vx\n'), ((4992, 5082), 'vaex.dataset.DatasetConcatenated', 'vx.dataset.DatasetConcatenated', (['[dataset1, dataset2, dataset3]'], {'name': '"""dataset_concat"""'}), "([dataset1, dataset2, dataset3], name=\n 'dataset_concat')\n", (5022, 5082), True, 'import vaex as vx\n'), ((5107, 5243), 'vaex.dataset.DatasetConcatenated', 'vx.dataset.DatasetConcatenated', (['[self.dataset_no_filter, self.dataset_no_filter, self.dataset_no_filter]'], {'name': '"""dataset_concat_dup"""'}), "([self.dataset_no_filter, self.\n dataset_no_filter, self.dataset_no_filter], name='dataset_concat_dup')\n", (5137, 5243), True, 'import vaex as vx\n'), ((5426, 5443), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (5440, 5443), True, 'import numpy as np\n'), ((8092, 8116), 'tempfile.mktemp', 'tempfile.mktemp', (['""".hdf5"""'], {}), "('.hdf5')\n", (8107, 8116), False, 'import tempfile\n'), ((8304, 8327), 'tempfile.mktemp', 'tempfile.mktemp', (['""".vot"""'], {}), "('.vot')\n", (8319, 8327), False, 'import tempfile\n'), ((8405, 8422), 'astropy.io.votable.from_table', 'from_table', (['table'], {}), '(table)\n', (8415, 8422), False, 'from astropy.io.votable import from_table, writeto\n'), ((8425, 8445), 'astropy.io.votable.writeto', 'writeto', (['votable', 'fn'], {}), '(votable, fn)\n', (8432, 8445), False, 'from astropy.io.votable import from_table, writeto\n'), ((11638, 11692), 'vaex.from_pandas', 'vx.from_pandas', (['df'], {'index_name': 
'"""name"""', 'copy_index': '(True)'}), "(df, index_name='name', copy_index=True)\n", (11652, 11692), True, 'import vaex as vx\n'), ((12688, 12795), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['extra', 'self.dataset.data.x[self.zero_index:self.zero_index + 10]'], {}), '(extra, self.dataset.data.x[self.\n zero_index:self.zero_index + 10])\n', (12724, 12795), True, 'import numpy as np\n'), ((13466, 13489), 'tempfile.mktemp', 'tempfile.mktemp', (['""".csv"""'], {}), "('.csv')\n", (13481, 13489), False, 'import tempfile\n'), ((13997, 14046), 'vaex.from_csv', 'vx.from_csv', (['fn'], {'index_col': '(False)', 'copy_index': '(True)'}), '(fn, index_col=False, copy_index=True)\n', (14008, 14046), True, 'import vaex as vx\n'), ((15563, 15591), 'numpy.arange', 'np.arange', (['(10.0)'], {'dtype': '""">f8"""'}), "(10.0, dtype='>f8')\n", (15572, 15591), True, 'import numpy as np\n'), ((15597, 15623), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': '"""<f8"""'}), "(10, dtype='<f8')\n", (15606, 15623), True, 'import numpy as np\n'), ((15631, 15655), 'vaex.from_arrays', 'vx.from_arrays', ([], {'x': 'x', 'y': 'y'}), '(x=x, y=y)\n', (15645, 15655), True, 'import vaex as vx\n'), ((15723, 15741), 'numpy.random.seed', 'np.random.seed', (['(42)'], {}), '(42)\n', (15737, 15741), True, 'import numpy as np\n'), ((15748, 15779), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': 'np.float64'}), '(10, dtype=np.float64)\n', (15757, 15779), True, 'import numpy as np\n'), ((15792, 15805), 'numpy.arange', 'np.arange', (['(10)'], {}), '(10)\n', (15801, 15805), True, 'import numpy as np\n'), ((15833, 15859), 'numpy.random.shuffle', 'np.random.shuffle', (['indices'], {}), '(indices)\n', (15850, 15859), True, 'import numpy as np\n'), ((17131, 17156), 'healpy.order2nside', 'hp.order2nside', (['max_order'], {}), '(max_order)\n', (17145, 17156), True, 'import healpy as hp\n'), ((17166, 17186), 'healpy.nside2npix', 'hp.nside2npix', (['nside'], {}), '(nside)\n', (17179, 17186), True, 'import healpy as hp\n'), ((17199, 17214), 'numpy.arange', 'np.arange', (['npix'], {}), '(npix)\n', (17208, 17214), True, 'import numpy as np\n'), ((17222, 17253), 'vaex.from_arrays', 'vx.from_arrays', ([], {'healpix': 'healpix'}), '(healpix=healpix)\n', (17236, 17253), True, 'import vaex as vx\n'), ((17651, 17678), 'numpy.random.normal', 'np.random.normal', (['(1)', '(0.1)', 'N'], {}), '(1, 0.1, N)\n', (17667, 17678), True, 'import numpy as np\n'), ((17691, 17726), 'vaex.from_arrays', 'vx.from_arrays', ([], {'parallax': 'parallaxes'}), '(parallax=parallaxes)\n', (17705, 17726), True, 'import vaex as vx\n'), ((25162, 25223), 'vaex.from_scalars', 'vx.from_scalars', ([], {'l': '(90)', 'b': '(0)', 'pm_l': '(-1)', 'pm_b': '(0)', 'distance': '(1)', 'vr': '(0)'}), '(l=90, b=0, pm_l=-1, pm_b=0, distance=1, vr=0)\n', (25177, 25223), True, 'import vaex as vx\n'), ((25391, 25452), 'vaex.from_scalars', 'vx.from_scalars', ([], {'l': '(90)', 'b': '(0)', 'pm_l': '(-1)', 'pm_b': '(0)', 'distance': '(2)', 'vr': '(0)'}), '(l=90, b=0, pm_l=-1, pm_b=0, distance=2, vr=0)\n', (25406, 25452), True, 'import vaex as vx\n'), ((25621, 25682), 'vaex.from_scalars', 'vx.from_scalars', ([], {'l': '(0)', 'b': '(90)', 'pm_l': '(0)', 'pm_b': '(-1)', 'distance': '(1)', 'vr': '(0)'}), '(l=0, b=90, pm_l=0, pm_b=-1, distance=1, vr=0)\n', (25636, 25682), True, 'import vaex as vx\n'), ((25850, 25911), 'vaex.from_scalars', 'vx.from_scalars', ([], {'l': '(0)', 'b': '(90)', 'pm_l': '(0)', 'pm_b': '(-1)', 'distance': '(2)', 'vr': '(0)'}), '(l=0, b=90, 
pm_l=0, pm_b=-1, distance=2, vr=0)\n', (25865, 25911), True, 'import vaex as vx\n'), ((26080, 26140), 'vaex.from_scalars', 'vx.from_scalars', ([], {'l': '(90)', 'b': '(0)', 'pm_l': '(0)', 'pm_b': '(0)', 'distance': '(1)', 'vr': '(1)'}), '(l=90, b=0, pm_l=0, pm_b=0, distance=1, vr=1)\n', (26095, 26140), True, 'import vaex as vx\n'), ((26307, 26367), 'vaex.from_scalars', 'vx.from_scalars', ([], {'l': '(90)', 'b': '(0)', 'pm_l': '(0)', 'pm_b': '(0)', 'distance': '(2)', 'vr': '(1)'}), '(l=90, b=0, pm_l=0, pm_b=0, distance=2, vr=1)\n', (26322, 26367), True, 'import vaex as vx\n'), ((26636, 26660), 'tempfile.mktemp', 'tempfile.mktemp', (['""".json"""'], {}), "('.json')\n", (26651, 26660), False, 'import tempfile\n'), ((38072, 38119), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['a', '[b, c]'], {}), '(a, [b, c])\n', (38108, 38119), True, 'import numpy as np\n'), ((38299, 38346), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['a', '[b, c]'], {}), '(a, [b, c])\n', (38335, 38346), True, 'import numpy as np\n'), ((38501, 38548), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['a', '[b, c]'], {}), '(a, [b, c])\n', (38537, 38548), True, 'import numpy as np\n'), ((42445, 42482), 'numpy.arange', 'np.arange', (['(-self.zero_index)', '(20.0 - 1)'], {}), '(-self.zero_index, 20.0 - 1)\n', (42454, 42482), True, 'import numpy as np\n'), ((45156, 45168), 'vaex.example', 'vx.example', ([], {}), '()\n', (45166, 45168), True, 'import vaex as vx\n'), ((50187, 50204), 'numpy.zeros', 'np.zeros', (['(10, 2)'], {}), '((10, 2))\n', (50195, 50204), True, 'import numpy as np\n'), ((50213, 50225), 'numpy.zeros', 'np.zeros', (['(20)'], {}), '(20)\n', (50221, 50225), True, 'import numpy as np\n'), ((50536, 50548), 'numpy.zeros', 'np.zeros', (['(10)'], {}), '(10)\n', (50544, 50548), True, 'import numpy as np\n'), ((50559, 50571), 'numpy.zeros', 'np.zeros', (['(20)'], {}), '(20)\n', (50567, 50571), True, 'import numpy as np\n'), ((50774, 50832), 'vaex.dataset.DatasetConcatenated', 'vx.dataset.DatasetConcatenated', (['dfs'], {'name': '"""dataset_concat"""'}), "(dfs, name='dataset_concat')\n", (50804, 50832), True, 'import vaex as vx\n'), ((50855, 50913), 'vaex.dataset.DatasetConcatenated', 'vx.dataset.DatasetConcatenated', (['dfs'], {'name': '"""dataset_concat"""'}), "(dfs, name='dataset_concat')\n", (50885, 50913), True, 'import vaex as vx\n'), ((50934, 50992), 'vaex.dataset.DatasetConcatenated', 'vx.dataset.DatasetConcatenated', (['dfs'], {'name': '"""dataset_concat"""'}), "(dfs, name='dataset_concat')\n", (50964, 50992), True, 'import vaex as vx\n'), ((51284, 51317), 'numpy.arange', 'np.arange', (['(1000)'], {'dtype': 'np.float32'}), '(1000, dtype=np.float32)\n', (51293, 51317), True, 'import numpy as np\n'), ((51325, 51357), 'numpy.arange', 'np.arange', (['(100)'], {'dtype': 'np.float32'}), '(100, dtype=np.float32)\n', (51334, 51357), True, 'import numpy as np\n'), ((51376, 51400), 'numpy.concatenate', 'np.concatenate', (['(x1, x2)'], {}), '((x1, x2))\n', (51390, 51400), True, 'import numpy as np\n'), ((51415, 51451), 'vaex.dataset.DatasetArrays', 'vx.dataset.DatasetArrays', (['"""dataset1"""'], {}), "('dataset1')\n", (51439, 51451), True, 'import vaex as vx\n'), ((51465, 51501), 'vaex.dataset.DatasetArrays', 'vx.dataset.DatasetArrays', (['"""dataset2"""'], {}), "('dataset2')\n", (51489, 51501), True, 'import vaex as vx\n'), ((51589, 51664), 'vaex.dataset.DatasetConcatenated', 'vx.dataset.DatasetConcatenated', 
(['[dataset1, dataset2]'], {'name': '"""dataset_concat"""'}), "([dataset1, dataset2], name='dataset_concat')\n", (51619, 51664), True, 'import vaex as vx\n'), ((51680, 51704), 'tempfile.mktemp', 'tempfile.mktemp', (['""".hdf5"""'], {}), "('.hdf5')\n", (51695, 51704), False, 'import tempfile\n'), ((51852, 51876), 'tempfile.mktemp', 'tempfile.mktemp', (['""".hdf5"""'], {}), "('.hdf5')\n", (51867, 51876), False, 'import tempfile\n'), ((51956, 52067), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['self.dataset.data.x[self.zero_index:self.zero_index + 10]', 'ds2.data.x[::-1]'], {}), '(self.dataset.data.x[self.zero_index:self.\n zero_index + 10], ds2.data.x[::-1])\n', (51985, 52067), True, 'import numpy as np\n'), ((52170, 52195), 'tempfile.mktemp', 'tempfile.mktemp', (['""".arrow"""'], {}), "('.arrow')\n", (52185, 52195), False, 'import tempfile\n'), ((52455, 52479), 'tempfile.mktemp', 'tempfile.mktemp', (['""".hdf5"""'], {}), "('.hdf5')\n", (52470, 52479), False, 'import tempfile\n'), ((52494, 52518), 'tempfile.mktemp', 'tempfile.mktemp', (['""".fits"""'], {}), "('.fits')\n", (52509, 52518), False, 'import tempfile\n'), ((52541, 52565), 'tempfile.mktemp', 'tempfile.mktemp', (['""".fits"""'], {}), "('.fits')\n", (52556, 52565), False, 'import tempfile\n'), ((58281, 58306), 'tempfile.mktemp', 'tempfile.mktemp', (['""".arrow"""'], {}), "('.arrow')\n", (58296, 58306), False, 'import tempfile\n'), ((58321, 58345), 'tempfile.mktemp', 'tempfile.mktemp', (['""".hdf5"""'], {}), "('.hdf5')\n", (58336, 58345), False, 'import tempfile\n'), ((58373, 58399), 'vaex.dataset.export', 'dataset.export', (['path_arrow'], {}), '(path_arrow)\n', (58387, 58399), True, 'import vaex.dataset as dataset\n'), ((64969, 65021), 'vaex.server', 'vx.server', (["('%s://localhost:%d' % (scheme, test_port))"], {}), "('%s://localhost:%d' % (scheme, test_port))\n", (64978, 65021), True, 'import vaex as vx\n'), ((68197, 68253), 'vaex.server', 'vx.server', (["('%s://localhost:%d' % (scheme, test_port - 2))"], {}), "('%s://localhost:%d' % (scheme, test_port - 2))\n", (68206, 68253), True, 'import vaex as vx\n'), ((68269, 68325), 'vaex.server', 'vx.server', (["('%s://localhost:%d' % (scheme, test_port - 1))"], {}), "('%s://localhost:%d' % (scheme, test_port - 1))\n", (68278, 68325), True, 'import vaex as vx\n'), ((69467, 69489), 'numpy.array', 'np.array', (['[0, 1, 0, 1]'], {}), '([0, 1, 0, 1])\n', (69475, 69489), True, 'import numpy as np\n'), ((69496, 69518), 'numpy.array', 'np.array', (['[0, 0, 1, 1]'], {}), '([0, 0, 1, 1])\n', (69504, 69518), True, 'import numpy as np\n'), ((69531, 69555), 'vaex.from_arrays', 'vx.from_arrays', ([], {'x': 'x', 'y': 'y'}), '(x=x, y=y)\n', (69545, 69555), True, 'import vaex as vx\n'), ((69632, 69651), 'numpy.all', 'np.all', (['(counts == 1)'], {}), '(counts == 1)\n', (69638, 69651), True, 'import numpy as np\n'), ((69660, 69694), 'numpy.array', 'np.array', (['[0, 1, 0, 1, 0, 1, 0, 1]'], {}), '([0, 1, 0, 1, 0, 1, 0, 1])\n', (69668, 69694), True, 'import numpy as np\n'), ((69701, 69735), 'numpy.array', 'np.array', (['[0, 0, 1, 1, 0, 0, 1, 1]'], {}), '([0, 0, 1, 1, 0, 0, 1, 1])\n', (69709, 69735), True, 'import numpy as np\n'), ((69742, 69776), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 1, 1, 1, 1]'], {}), '([0, 0, 0, 0, 1, 1, 1, 1])\n', (69750, 69776), True, 'import numpy as np\n'), ((69789, 69818), 'vaex.from_arrays', 'vx.from_arrays', ([], {'x': 'x', 'y': 'y', 'z': 'z'}), '(x=x, y=y, z=z)\n', (69803, 69818), True, 'import vaex as vx\n'), ((69909, 69928), 'numpy.all', 
'np.all', (['(counts == 1)'], {}), '(counts == 1)\n', (69915, 69928), True, 'import numpy as np\n'), ((69937, 69995), 'numpy.array', 'np.array', (['[0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1]'], {}), '([0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1])\n', (69945, 69995), True, 'import numpy as np\n'), ((70002, 70060), 'numpy.array', 'np.array', (['[0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1]'], {}), '([0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1, 0, 0, 1, 1])\n', (70010, 70060), True, 'import numpy as np\n'), ((70067, 70125), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1]'], {}), '([0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 1, 1])\n', (70075, 70125), True, 'import numpy as np\n'), ((70132, 70190), 'numpy.array', 'np.array', (['[0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1]'], {}), '([0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1])\n', (70140, 70190), True, 'import numpy as np\n'), ((70204, 70238), 'vaex.from_arrays', 'vx.from_arrays', ([], {'x': 'x', 'y': 'y', 'z': 'z', 'w': 'w'}), '(x=x, y=y, z=z, w=w)\n', (70218, 70238), True, 'import vaex as vx\n'), ((70343, 70362), 'numpy.all', 'np.all', (['(counts == 1)'], {}), '(counts == 1)\n', (70349, 70362), True, 'import numpy as np\n'), ((2093, 2122), 'numpy.arange', 'np.arange', (['(-2)', '(19)'], {'dtype': '"""i8"""'}), "(-2, 19, dtype='i8')\n", (2102, 2122), True, 'import numpy as np\n'), ((2623, 2648), 'numpy.arange', 'np.arange', (['(10)'], {'dtype': '"""i8"""'}), "(10, dtype='i8')\n", (2632, 2648), True, 'import numpy as np\n'), ((3670, 3684), 'numpy.array', 'np.array', (['name'], {}), '(name)\n', (3678, 3684), True, 'import numpy as np\n'), ((7766, 7826), 'os.path.join', 'os.path.join', (['basedir', '"""files"""', '"""default_amuse_plummer.hdf5"""'], {}), "(basedir, 'files', 'default_amuse_plummer.hdf5')\n", (7778, 7826), False, 'import os\n'), ((8636, 8696), 'os.path.join', 'os.path.join', (['basedir', '"""files"""', '"""gaia-small-fits-basic.fits"""'], {}), "(basedir, 'files', 'gaia-small-fits-basic.fits')\n", (8648, 8696), False, 'import os\n'), ((8723, 8782), 'os.path.join', 'os.path.join', (['basedir', '"""files"""', '"""gaia-small-fits-plus.fits"""'], {}), "(basedir, 'files', 'gaia-small-fits-plus.fits')\n", (8735, 8782), False, 'import os\n'), ((8807, 8870), 'os.path.join', 'os.path.join', (['basedir', '"""files"""', '"""gaia-small-colfits-basic.fits"""'], {}), "(basedir, 'files', 'gaia-small-colfits-basic.fits')\n", (8819, 8870), False, 'import os\n'), ((8900, 8962), 'os.path.join', 'os.path.join', (['basedir', '"""files"""', '"""gaia-small-colfits-plus.fits"""'], {}), "(basedir, 'files', 'gaia-small-colfits-plus.fits')\n", (8912, 8962), False, 'import os\n'), ((8983, 9039), 'os.path.join', 'os.path.join', (['basedir', '"""files"""', '"""gaia-small-votable.vot"""'], {}), "(basedir, 'files', 'gaia-small-votable.vot')\n", (8995, 9039), False, 'import os\n'), ((9187, 9211), 'tempfile.mktemp', 'tempfile.mktemp', (['""".hdf5"""'], {}), "('.hdf5')\n", (9202, 9211), False, 'import tempfile\n'), ((9251, 9269), 'vaex.open', 'vx.open', (['path_hdf5'], {}), '(path_hdf5)\n', (9258, 9269), True, 'import vaex as vx\n'), ((9601, 9625), 'tempfile.mktemp', 'tempfile.mktemp', (['""".fits"""'], {}), "('.fits')\n", (9616, 9625), False, 'import tempfile\n'), ((9665, 9683), 'vaex.open', 'vx.open', (['path_fits'], {}), '(path_fits)\n', (9672, 9683), True, 'import vaex as vx\n'), ((16191, 16204), 'numpy.sum', 'np.sum', (['(x * y)'], {}), '(x * y)\n', (16197, 16204), True, 'import numpy as np\n'), ((16238, 16251), 
'numpy.sum', 'np.sum', (['(x * y)'], {}), '(x * y)\n', (16244, 16251), True, 'import numpy as np\n'), ((16285, 16307), 'numpy.sum', 'np.sum', (['(x[indices] * z)'], {}), '(x[indices] * z)\n', (16291, 16307), True, 'import numpy as np\n'), ((16341, 16363), 'numpy.sum', 'np.sum', (['(x[indices] * z)'], {}), '(x[indices] * z)\n', (16347, 16363), True, 'import numpy as np\n'), ((16580, 16593), 'numpy.sum', 'np.sum', (['(x * y)'], {}), '(x * y)\n', (16586, 16593), True, 'import numpy as np\n'), ((16627, 16666), 'numpy.sum', 'np.sum', (['(x[indices][:4] * y[indices][:4])'], {}), '(x[indices][:4] * y[indices][:4])\n', (16633, 16666), True, 'import numpy as np\n'), ((16897, 16918), 'numpy.sum', 'np.sum', (['(x[:4] * y[:4])'], {}), '(x[:4] * y[:4])\n', (16903, 16918), True, 'import numpy as np\n'), ((16952, 16973), 'numpy.sum', 'np.sum', (['(x[:4] * y[:4])'], {}), '(x[:4] * y[:4])\n', (16958, 16973), True, 'import numpy as np\n'), ((17476, 17526), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['counts', 'ones'], {}), '(counts, ones)\n', (17512, 17526), True, 'import numpy as np\n'), ((20565, 20577), 'numpy.sqrt', 'np.sqrt', (['(2.0)'], {}), '(2.0)\n', (20572, 20577), True, 'import numpy as np\n'), ((21429, 21441), 'numpy.sqrt', 'np.sqrt', (['(2.0)'], {}), '(2.0)\n', (21436, 21441), True, 'import numpy as np\n'), ((23096, 23151), 'vaex.from_arrays', 'vx.from_arrays', ([], {'pm_l': 'pm_l', 'pm_b': 'pm_b', 'distance': 'distance'}), '(pm_l=pm_l, pm_b=pm_b, distance=distance)\n', (23110, 23151), True, 'import vaex as vx\n'), ((27657, 27681), 'tempfile.mktemp', 'tempfile.mktemp', (['""".hdf5"""'], {}), "('.hdf5')\n", (27672, 27681), False, 'import tempfile\n'), ((27758, 27776), 'vaex.open', 'vx.open', (['path_hdf5'], {}), '(path_hdf5)\n', (27765, 27776), True, 'import vaex as vx\n'), ((27870, 27895), 'tempfile.mktemp', 'tempfile.mktemp', (['""".arrow"""'], {}), "('.arrow')\n", (27885, 27895), False, 'import tempfile\n'), ((27974, 27993), 'vaex.open', 'vx.open', (['path_arrow'], {}), '(path_arrow)\n', (27981, 27993), True, 'import vaex as vx\n'), ((28342, 28366), 'tempfile.mktemp', 'tempfile.mktemp', (['""".fits"""'], {}), "('.fits')\n", (28357, 28366), False, 'import tempfile\n'), ((28443, 28461), 'vaex.open', 'vx.open', (['path_fits'], {}), '(path_fits)\n', (28450, 28461), True, 'import vaex as vx\n'), ((28562, 28586), 'tempfile.mktemp', 'tempfile.mktemp', (['""".fits"""'], {}), "('.fits')\n", (28577, 28586), False, 'import tempfile\n'), ((29245, 29260), 'numpy.dtype', 'np.dtype', (['""">f8"""'], {}), "('>f8')\n", (29253, 29260), True, 'import numpy as np\n'), ((31279, 31326), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['z', 'z_test'], {}), '(z, z_test)\n', (31315, 31326), True, 'import numpy as np\n'), ((38721, 38741), 'numpy.cov', 'np.cov', (['args'], {'bias': '(1)'}), '(args, bias=1)\n', (38727, 38741), True, 'import numpy as np\n'), ((42785, 42803), 'numpy.isfinite', 'np.isfinite', (['(x * y)'], {}), '(x * y)\n', (42796, 42803), True, 'import numpy as np\n'), ((45990, 46002), 'numpy.median', 'np.median', (['x'], {}), '(x)\n', (45999, 46002), True, 'import numpy as np\n'), ((46567, 46584), 'numpy.median', 'np.median', (['self.x'], {}), '(self.x)\n', (46576, 46584), True, 'import numpy as np\n'), ((46750, 46767), 'numpy.median', 'np.median', (['self.x'], {}), '(self.x)\n', (46759, 46767), True, 'import numpy as np\n'), ((47065, 47086), 'numpy.median', 'np.median', (['self.x[:5]'], {}), '(self.x[:5])\n', (47074, 47086), 
True, 'import numpy as np\n'), ((47306, 47318), 'numpy.nansum', 'np.nansum', (['x'], {}), '(x)\n', (47315, 47318), True, 'import numpy as np\n'), ((47398, 47414), 'numpy.nansum', 'np.nansum', (['x[:5]'], {}), '(x[:5])\n', (47407, 47414), True, 'import numpy as np\n'), ((47552, 47568), 'numpy.nansum', 'np.nansum', (['x[:5]'], {}), '(x[:5])\n', (47561, 47568), True, 'import numpy as np\n'), ((49763, 49821), 'vaex.dataset.DatasetConcatenated', 'vx.dataset.DatasetConcatenated', (['dfs'], {'name': '"""dataset_concat"""'}), "(dfs, name='dataset_concat')\n", (49793, 49821), True, 'import vaex as vx\n'), ((50277, 50313), 'vaex.dataset.DatasetArrays', 'vx.dataset.DatasetArrays', (['"""dataset1"""'], {}), "('dataset1')\n", (50301, 50313), True, 'import vaex as vx\n'), ((50378, 50408), 'vaex.dataset.add_column', 'dataset.add_column', (['"""x"""', 'array'], {}), "('x', array)\n", (50396, 50408), True, 'import vaex.dataset as dataset\n'), ((50467, 50525), 'vaex.dataset.DatasetConcatenated', 'vx.dataset.DatasetConcatenated', (['dfs'], {'name': '"""dataset_concat"""'}), "(dfs, name='dataset_concat')\n", (50497, 50525), True, 'import vaex as vx\n'), ((50623, 50659), 'vaex.dataset.DatasetArrays', 'vx.dataset.DatasetArrays', (['"""dataset1"""'], {}), "('dataset1')\n", (50647, 50659), True, 'import vaex as vx\n'), ((50724, 50754), 'vaex.dataset.add_column', 'dataset.add_column', (['"""x"""', 'array'], {}), "('x', array)\n", (50742, 50754), True, 'import vaex.dataset as dataset\n'), ((1352, 1365), 'numpy.array', 'np.array', (['[v]'], {}), '([v])\n', (1360, 1365), True, 'import numpy as np\n'), ((7592, 7623), 'numpy.array', 'np.array', (['[2, 2, 1, 0, 1, 1, 2]'], {}), '([2, 2, 1, 0, 1, 1, 2])\n', (7600, 7623), True, 'import numpy as np\n'), ((14404, 14426), 'tempfile.mktemp', 'tempfile.mktemp', (['"""asc"""'], {}), "('asc')\n", (14419, 14426), False, 'import tempfile\n'), ((14902, 14957), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['ds.data.x', 'self.x'], {}), '(ds.data.x, self.x)\n', (14938, 14957), True, 'import numpy as np\n'), ((14962, 15017), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['ds.data.y', 'self.y'], {}), '(ds.data.y, self.y)\n', (14998, 15017), True, 'import numpy as np\n'), ((17440, 17464), 'numpy.ones', 'np.ones', (['(npix // scaling)'], {}), '(npix // scaling)\n', (17447, 17464), True, 'import numpy as np\n'), ((17890, 17905), 'numpy.array', 'np.array', (['[1.0]'], {}), '([1.0])\n', (17898, 17905), True, 'import numpy as np\n'), ((17927, 17942), 'numpy.array', 'np.array', (['[0.1]'], {}), '([0.1])\n', (17935, 17942), True, 'import numpy as np\n'), ((18604, 18632), 'numpy.random.normal', 'np.random.normal', (['x', '(0.01)', 'N'], {}), '(x, 0.01, N)\n', (18620, 18632), True, 'import numpy as np\n'), ((18648, 18676), 'numpy.random.normal', 'np.random.normal', (['y', '(0.02)', 'N'], {}), '(y, 0.02, N)\n', (18664, 18676), True, 'import numpy as np\n'), ((18695, 18726), 'numpy.random.normal', 'np.random.normal', (['velx', '(0.03)', 'N'], {}), '(velx, 0.03, N)\n', (18711, 18726), True, 'import numpy as np\n'), ((18745, 18776), 'numpy.random.normal', 'np.random.normal', (['vely', '(0.04)', 'N'], {}), '(vely, 0.04, N)\n', (18761, 18776), True, 'import numpy as np\n'), ((18791, 18833), 'vaex.from_arrays', 'vx.from_arrays', ([], {'x': 'x', 'y': 'y', 'vx': 'vely', 'vy': 'vely'}), '(x=x, y=y, vx=vely, vy=vely)\n', (18805, 18833), True, 'import vaex as vx\n'), ((21874, 21902), 'numpy.random.normal', 'np.random.normal', (['x', 
'(0.01)', 'N'], {}), '(x, 0.01, N)\n', (21890, 21902), True, 'import numpy as np\n'), ((21918, 21946), 'numpy.random.normal', 'np.random.normal', (['y', '(0.02)', 'N'], {}), '(y, 0.02, N)\n', (21934, 21946), True, 'import numpy as np\n'), ((21961, 21985), 'vaex.from_arrays', 'vx.from_arrays', ([], {'x': 'x', 'y': 'y'}), '(x=x, y=y)\n', (21975, 21985), True, 'import vaex as vx\n'), ((22943, 22970), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.1)', 'N'], {}), '(0, 0.1, N)\n', (22959, 22970), True, 'import numpy as np\n'), ((22997, 23024), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.3)', 'N'], {}), '(0, 0.3, N)\n', (23013, 23024), True, 'import numpy as np\n'), ((23047, 23074), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.4)', 'N'], {}), '(0, 0.4, N)\n', (23063, 23074), True, 'import numpy as np\n'), ((24371, 24443), 'vaex.from_arrays', 'vx.from_arrays', ([], {'l': 'l', 'b': 'b', 'pm_l': 'pm_l', 'pm_b': 'pm_b', 'vr': 'vr', 'distance': 'distance'}), '(l=l, b=b, pm_l=pm_l, pm_b=pm_b, vr=vr, distance=distance)\n', (24385, 24443), True, 'import vaex as vx\n'), ((42877, 42899), 'numpy.cov', 'np.cov', (['[x, y]'], {'bias': '(1)'}), '([x, y], bias=1)\n', (42883, 42899), True, 'import numpy as np\n'), ((45785, 45797), 'numpy.arange', 'np.arange', (['(3)'], {}), '(3)\n', (45794, 45797), True, 'import numpy as np\n'), ((45835, 45847), 'numpy.arange', 'np.arange', (['(4)'], {}), '(4)\n', (45844, 45847), True, 'import numpy as np\n'), ((46911, 46928), 'numpy.median', 'np.median', (['self.x'], {}), '(self.x)\n', (46920, 46928), True, 'import numpy as np\n'), ((46930, 46947), 'numpy.median', 'np.median', (['self.y'], {}), '(self.y)\n', (46939, 46947), True, 'import numpy as np\n'), ((47689, 47701), 'numpy.nansum', 'np.nansum', (['x'], {}), '(x)\n', (47698, 47701), True, 'import numpy as np\n'), ((47821, 47837), 'numpy.nansum', 'np.nansum', (['x[:5]'], {}), '(x[:5])\n', (47830, 47837), True, 'import numpy as np\n'), ((47961, 47973), 'numpy.nansum', 'np.nansum', (['x'], {}), '(x)\n', (47970, 47973), True, 'import numpy as np\n'), ((48097, 48113), 'numpy.nansum', 'np.nansum', (['x[:5]'], {}), '(x[:5])\n', (48106, 48113), True, 'import numpy as np\n'), ((48234, 48250), 'numpy.nansum', 'np.nansum', (['x[:5]'], {}), '(x[:5])\n', (48243, 48250), True, 'import numpy as np\n'), ((48252, 48268), 'numpy.nansum', 'np.nansum', (['x[5:]'], {}), '(x[5:])\n', (48261, 48268), True, 'import numpy as np\n'), ((48388, 48404), 'numpy.nansum', 'np.nansum', (['x[:5]'], {}), '(x[:5])\n', (48397, 48404), True, 'import numpy as np\n'), ((48540, 48556), 'numpy.nansum', 'np.nansum', (['x[:i]'], {}), '(x[:i])\n', (48549, 48556), True, 'import numpy as np\n'), ((48558, 48574), 'numpy.nansum', 'np.nansum', (['x[i:]'], {}), '(x[i:])\n', (48567, 48574), True, 'import numpy as np\n'), ((48698, 48714), 'numpy.nansum', 'np.nansum', (['x[:5]'], {}), '(x[:5])\n', (48707, 48714), True, 'import numpy as np\n'), ((48846, 48862), 'numpy.nansum', 'np.nansum', (['y[:i]'], {}), '(y[:i])\n', (48855, 48862), True, 'import numpy as np\n'), ((48864, 48880), 'numpy.nansum', 'np.nansum', (['y[i:]'], {}), '(y[i:])\n', (48873, 48880), True, 'import numpy as np\n'), ((49000, 49016), 'numpy.nansum', 'np.nansum', (['y[:5]'], {}), '(y[:5])\n', (49009, 49016), True, 'import numpy as np\n'), ((49448, 49490), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['a', 'b'], {}), '(a, b)\n', (49484, 49490), True, 'import numpy as np\n'), ((49527, 49552), 'numpy.arange', 'np.arange', (['(3)'], {'dtype': 
'dtype'}), '(3, dtype=dtype)\n', (49536, 49552), True, 'import numpy as np\n'), ((49602, 49644), 'vaex.dataset.DatasetArrays', 'vx.dataset.DatasetArrays', (["('dataset-%i' % i)"], {}), "('dataset-%i' % i)\n", (49626, 49644), True, 'import vaex as vx\n'), ((49712, 49742), 'vaex.dataset.add_column', 'dataset.add_column', (['"""x"""', 'array'], {}), "('x', array)\n", (49730, 49742), True, 'import vaex.dataset as dataset\n'), ((52381, 52401), 'numpy.array', 'np.array', (['ds2.data.x'], {}), '(ds2.data.x)\n', (52389, 52401), True, 'import numpy as np\n'), ((52801, 52838), 'vaex.dataset.set_active_fraction', 'dataset.set_active_fraction', (['fraction'], {}), '(fraction)\n', (52828, 52838), True, 'import vaex.dataset as dataset\n'), ((52843, 52866), 'vaex.dataset.select', 'dataset.select', (['"""x > 3"""'], {}), "('x > 3')\n", (52857, 52866), True, 'import vaex.dataset as dataset\n'), ((52871, 52908), 'vaex.dataset.select', 'dataset.select', (['"""x > 2"""'], {'name': '"""named"""'}), "('x > 2', name='named')\n", (52885, 52908), True, 'import vaex.dataset as dataset\n'), ((58235, 58265), 'vaex.dataset.set_active_fraction', 'dataset.set_active_fraction', (['(1)'], {}), '(1)\n', (58262, 58265), True, 'import vaex.dataset as dataset\n'), ((60539, 60571), 'vaex.dataset.set_active_fraction', 'dataset.set_active_fraction', (['(1.0)'], {}), '(1.0)\n', (60566, 60571), True, 'import vaex.dataset as dataset\n'), ((60627, 60660), 'vaex.dataset.set_active_fraction', 'dataset.set_active_fraction', (['(0.25)'], {}), '(0.25)\n', (60654, 60660), True, 'import vaex.dataset as dataset\n'), ((60754, 60796), 'numpy.testing.assert_array_almost_equal', 'np.testing.assert_array_almost_equal', (['a', 'b'], {}), '(a, b)\n', (60790, 60796), True, 'import numpy as np\n'), ((61878, 61904), 'vaex.dataset.set_current_row', 'dataset.set_current_row', (['i'], {}), '(i)\n', (61901, 61904), True, 'import vaex.dataset as dataset\n'), ((69567, 69584), 'vaex.dataset', 'dataset', (['"""x"""', '"""y"""'], {}), "('x', 'y')\n", (69574, 69584), True, 'import vaex.dataset as dataset\n'), ((69830, 69852), 'vaex.dataset', 'dataset', (['"""x"""', '"""y"""', '"""z"""'], {}), "('x', 'y', 'z')\n", (69837, 69852), True, 'import vaex.dataset as dataset\n'), ((70250, 70277), 'vaex.dataset', 'dataset', (['"""x"""', '"""y"""', '"""z"""', '"""w"""'], {}), "('x', 'y', 'z', 'w')\n", (70257, 70277), True, 'import vaex.dataset as dataset\n'), ((14784, 14816), 'vaex.from_ascii', 'vx.from_ascii', (['fn'], {'seperator': 'sep'}), '(fn, seperator=sep)\n', (14797, 14816), True, 'import vaex as vx\n'), ((24166, 24193), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.1)', 'N'], {}), '(0, 0.1, N)\n', (24182, 24193), True, 'import numpy as np\n'), ((24221, 24248), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.2)', 'N'], {}), '(0, 0.2, N)\n', (24237, 24248), True, 'import numpy as np\n'), ((24270, 24297), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.3)', 'N'], {}), '(0, 0.3, N)\n', (24286, 24297), True, 'import numpy as np\n'), ((24321, 24348), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.4)', 'N'], {}), '(0, 0.4, N)\n', (24337, 24348), True, 'import numpy as np\n'), ((28777, 28805), 'os.remove', 'os.remove', (['path_fits_astropy'], {}), '(path_fits_astropy)\n', (28786, 28805), False, 'import os\n'), ((60825, 60850), 'vaex.dataset.length_original', 'dataset.length_original', ([], {}), '()\n', (60848, 60850), True, 'import vaex.dataset as dataset\n'), ((7688, 7704), 'numpy.sort', 'np.sort', (['classes'], {}), '(classes)\n', 
(7695, 7704), True, 'import numpy as np\n'), ((24064, 24091), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.1)', 'N'], {}), '(0, 0.1, N)\n', (24080, 24091), True, 'import numpy as np\n'), ((24115, 24142), 'numpy.random.normal', 'np.random.normal', (['(0)', '(0.1)', 'N'], {}), '(0, 0.1, N)\n', (24131, 24142), True, 'import numpy as np\n'), ((61918, 61938), 'vaex.dataset', 'dataset', (['"""x"""', '"""x**2"""'], {}), "('x', 'x**2')\n", (61925, 61938), True, 'import vaex.dataset as dataset\n'), ((1993, 2023), 'numpy.arange', 'np.arange', (['(-2)', '(40)'], {'dtype': '""">f8"""'}), "(-2, 40, dtype='>f8')\n", (2002, 2023), True, 'import numpy as np\n'), ((2373, 2403), 'numpy.arange', 'np.arange', (['(-2)', '(40)'], {'dtype': '""">f8"""'}), "(-2, 40, dtype='>f8')\n", (2382, 2403), True, 'import numpy as np\n'), ((2527, 2553), 'numpy.arange', 'np.arange', (['(20)'], {'dtype': '""">f8"""'}), "(20, dtype='>f8')\n", (2536, 2553), True, 'import numpy as np\n'), ((54174, 54199), 'tempfile.mktemp', 'tempfile.mktemp', (['""".arrow"""'], {}), "('.arrow')\n", (54189, 54199), False, 'import tempfile\n'), ((54225, 54252), 'tempfile.mktemp', 'tempfile.mktemp', (['""".parquet"""'], {}), "('.parquet')\n", (54240, 54252), False, 'import tempfile\n'), ((55507, 55536), 'vaex.open', 'vx.open', (['path'], {'as_numpy': '(False)'}), '(path, as_numpy=False)\n', (55514, 55536), True, 'import vaex as vx\n'), ((55574, 55587), 'vaex.open', 'vx.open', (['path'], {}), '(path)\n', (55581, 55587), True, 'import vaex as vx\n'), ((56608, 56653), 'vaex.dataset.evaluate', 'dataset.evaluate', (['column_name'], {'filtered': '(False)'}), '(column_name, filtered=False)\n', (56624, 56653), True, 'import vaex.dataset as dataset\n'), ((56700, 56749), 'vaex.dataset.evaluate', 'dataset.evaluate', (['column_name'], {'array_type': '"""numpy"""'}), "(column_name, array_type='numpy')\n", (56716, 56749), True, 'import vaex.dataset as dataset\n'), ((56769, 56811), 'vaex.dataset.evaluate_selection_mask', 'dataset.evaluate_selection_mask', (['selection'], {}), '(selection)\n', (56800, 56811), True, 'import vaex.dataset as dataset\n'), ((57348, 57397), 'vaex.dataset.evaluate', 'dataset.evaluate', (['column_name'], {'array_type': '"""numpy"""'}), "(column_name, array_type='numpy')\n", (57364, 57397), True, 'import vaex.dataset as dataset\n'), ((56244, 56256), 'numpy.isnan', 'np.isnan', (['ar'], {}), '(ar)\n', (56252, 56256), True, 'import numpy as np\n'), ((56273, 56288), 'numpy.any', 'np.any', (['nanmask'], {}), '(nanmask)\n', (56279, 56288), True, 'import numpy as np\n'), ((56913, 56928), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (56926, 56928), False, 'import pdb\n'), ((57243, 57258), 'pdb.set_trace', 'pdb.set_trace', ([], {}), '()\n', (57256, 57258), False, 'import pdb\n'), ((56309, 56338), 'numpy.ma.array', 'np.ma.array', (['ar'], {'mask': 'nanmask'}), '(ar, mask=nanmask)\n', (56320, 56338), True, 'import numpy as np\n'), ((57734, 57772), 'numpy.array', 'np.array', (['compare.columns[column_name]'], {}), '(compare.columns[column_name])\n', (57742, 57772), True, 'import numpy as np\n'), ((55392, 55420), 'os.remove', 'os.remove', (['path_fits_astropy'], {}), '(path_fits_astropy)\n', (55401, 55420), False, 'import os\n')]
|
from collections import namedtuple
from datetime import datetime
import pytz
import requests
from dateutil import parser
from requests import RequestException
from commcare_cloud.alias import commcare_cloud
from commcare_cloud.cli_utils import ask
from commcare_cloud.colors import color_warning, color_notice, color_summary
from commcare_cloud.commands.ansible import ansible_playbook
from commcare_cloud.commands.ansible.helpers import AnsibleContext
from commcare_cloud.commands.deploy.sentry import update_sentry_post_deploy
from commcare_cloud.commands.deploy.utils import announce_deploy_start, announce_deploy_failed, \
announce_deploy_success, create_release_tag
from commcare_cloud.commands.terraform.aws import get_default_username
from commcare_cloud.commands.utils import timeago
from commcare_cloud.events import publish_deploy_event
from commcare_cloud.fab.deploy_diff import DeployDiff
from commcare_cloud.github import github_repo
FORMPLAYER = "Formplayer"
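# Base S3 locations of the latest successful Formplayer build artifacts;
# AWS_BASE_URL_ENV holds per-environment overrides (currently staging only).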
AWS_BASE_URL_ENV = {
"staging": "https://s3.amazonaws.com/dimagi-formplayer-jars/staging/latest-successful"
}
AWS_BASE_URL_DEFAULT = "https://s3.amazonaws.com/dimagi-formplayer-jars/latest-successful"
GIT_PROPERTIES = "git.properties"
BUILD_INFO_PROPERTIES = "build-info.properties"
class VersionInfo(namedtuple("VersionInfo", "commit, message, time, build_time")):
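    """Version metadata for a Formplayer build, parsed from its
    git.properties and build-info.properties files."""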
@property
def commit_time_ago(self):
return self._format_time_ago(self.time)
@property
def build_time_ago(self):
return self._format_time_ago(self.build_time)
@staticmethod
def _format_time_ago(time):
build_time = parser.parse(time)
if build_time.tzinfo:
build_time = build_time.astimezone(pytz.utc).replace(tzinfo=None)
delta = datetime.utcnow() - build_time
return timeago(delta)
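# Deploy flow: show the pending diff, confirm with the user, announce the
# start, run the Ansible play, restart the supervisor process, then record
# the outcome.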
def deploy_formplayer(environment, args):
print(color_notice("\nPreparing to deploy Formplayer to: "), end="")
print(f"{environment.name}\n")
tag_commits = environment.fab_settings_config.tag_deploy_commits
repo = github_repo('dimagi/formplayer', require_write_permissions=tag_commits)
diff = get_deploy_diff(environment, repo)
diff.print_deployer_diff()
if not ask('Continue with deploy?', quiet=args.quiet):
return 1
start = datetime.utcnow()
announce_deploy_start(environment, FORMPLAYER)
rc = run_ansible_playbook_command(environment, args)
if rc != 0:
announce_deploy_failed(environment, FORMPLAYER)
return rc
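    # Reload supervisor's config and restart the Formplayer web process.
    # ("formsplayer" matches the supervisor program name as configured for
    # CommCare HQ deploys.)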
rc = commcare_cloud(
args.env_name, 'run-shell-command', 'formplayer',
('supervisorctl reread; '
'supervisorctl update {project}-{deploy_env}-formsplayer-spring; '
'supervisorctl restart {project}-{deploy_env}-formsplayer-spring').format(
project='commcare-hq',
deploy_env=environment.meta_config.deploy_env,
), '-b',
)
if rc != 0:
announce_deploy_failed(environment, FORMPLAYER)
return rc
record_deploy_success(environment, repo, diff, start)
return 0
def record_deploy_success(environment, repo, diff, start):
end = datetime.utcnow()
create_release_tag(environment, repo, diff)
record_deploy_in_datadog(environment, diff, end - start)
update_sentry_post_deploy(environment, "formplayer", repo, diff, start, end)
announce_deploy_success(environment, FORMPLAYER, diff.get_email_diff())
publish_deploy_event("deploy_success", "formplayer", environment)
def get_deploy_diff(environment, repo):
print(color_summary(">> Compiling deploy summary"))
current_commit = get_current_formplayer_version(environment)
latest_version = get_latest_formplayer_version(environment.name)
new_version_details = {}
if latest_version:
new_version_details["Release Name"] = environment.new_release_name()
new_version_details["Commit"] = latest_version.commit
new_version_details["Commit message"] = latest_version.message
new_version_details["Commit date"] = f"{latest_version.commit_time_ago} ({latest_version.time})"
new_version_details["Build time"] = f"{latest_version.build_time_ago} ({latest_version.build_time})"
    # Guard against a failed metadata fetch: get_latest_formplayer_version
    # returns None on request errors.
    latest_commit = latest_version.commit if latest_version else None
    diff = DeployDiff(
        repo, current_commit, latest_commit,
        new_version_details=new_version_details
    )
return diff
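# Run deploy_stack.yml against the formplayer hosts only, restricted to the
# formplayer_deploy tag.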
def run_ansible_playbook_command(environment, args):
skip_check = True
environment.create_generated_yml()
ansible_context = AnsibleContext(args)
return ansible_playbook.run_ansible_playbook(
environment, 'deploy_stack.yml', ansible_context,
skip_check=skip_check, quiet=skip_check, always_skip_check=skip_check, limit='formplayer',
use_factory_auth=False, unknown_args=('--tags=formplayer_deploy',),
respect_ansible_skip=True,
)
def record_deploy_in_datadog(environment, diff, tdelta):
if environment.public_vars.get('DATADOG_ENABLED', False):
        print(color_summary(">> Recording deploy in Datadog"))
diff_url = f"\nDiff link: [Git Diff]({diff.url})"
deploy_notification_text = (
"Formplayer has been successfully deployed to "
"*{}* by *{}* in *{}* minutes.\nRelease Name: {}{}".format(
environment.name,
get_default_username(),
int(tdelta.total_seconds() / 60) or '?',
environment.new_release_name(),
diff_url
)
)
commcare_cloud(
environment.name, 'send-datadog-event',
'Formplayer Deploy Success',
deploy_notification_text,
'--alert_type', "success",
show_command=False
)
def get_current_formplayer_version(environment):
"""Get version of currently deployed Formplayer by querying
the Formplayer management endpoint to get the build info.
"""
formplayer0 = environment.groups["formplayer"][0]
try:
res = requests.get(f"http://{formplayer0}:8081/info", timeout=5)
res.raise_for_status()
except RequestException as e:
print(color_warning(f"Error getting current formplayer version: {e}"))
return
info = res.json()
return info.get("git", {}).get("commit", {}).get("id", None)
def get_latest_formplayer_version(env_name):
"""Get version info of latest available version. This fetches
meta files from S3 and parses them to get the data.
"""
def get_url_content(url):
res = requests.get(url)
res.raise_for_status()
return res.text
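    # Parse simple "key=value" property lines, keeping only the mapped keys.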
def extract_vals_from_property_data(data, mapping):
out = {}
for line in data.splitlines(keepends=False):
if not line.strip():
continue
key, value = line.strip().split("=")
if key in mapping:
out[mapping[key]] = strip_escapes(value)
return out
git_info_url, build_info_url = get_info_urls(env_name)
try:
git_info = get_url_content(git_info_url)
build_info = get_url_content(build_info_url)
except RequestException as e:
print(color_warning(f"Error getting latest formplayer version: {e}"))
return
git_data = extract_vals_from_property_data(git_info, {
"git.commit.id": "commit",
"git.commit.message.short": "message",
"git.commit.time": "time"
})
build_data = extract_vals_from_property_data(build_info, {"build.time": "build_time"})
return VersionInfo(**git_data, **build_data)
def strip_escapes(value):
return value.replace("\\", "")
def get_info_urls(env_name):
"""
:return: tuple(git_info_url, build_info_url)
"""
base_url = AWS_BASE_URL_ENV.get(env_name, AWS_BASE_URL_DEFAULT)
return f"{base_url}/{GIT_PROPERTIES}", f"{base_url}/{BUILD_INFO_PROPERTIES}"
|
[
"commcare_cloud.colors.color_warning",
"commcare_cloud.commands.deploy.utils.announce_deploy_start",
"commcare_cloud.commands.ansible.helpers.AnsibleContext",
"datetime.datetime.utcnow",
"commcare_cloud.commands.utils.timeago",
"commcare_cloud.commands.terraform.aws.get_default_username",
"commcare_cloud.events.publish_deploy_event",
"commcare_cloud.colors.color_summary",
"requests.get",
"commcare_cloud.cli_utils.ask",
"commcare_cloud.colors.color_notice",
"commcare_cloud.alias.commcare_cloud",
"commcare_cloud.fab.deploy_diff.DeployDiff",
"dateutil.parser.parse",
"commcare_cloud.github.github_repo",
"commcare_cloud.commands.ansible.ansible_playbook.run_ansible_playbook",
"commcare_cloud.commands.deploy.sentry.update_sentry_post_deploy",
"commcare_cloud.commands.deploy.utils.create_release_tag",
"collections.namedtuple",
"commcare_cloud.commands.deploy.utils.announce_deploy_failed"
] |
[((1288, 1350), 'collections.namedtuple', 'namedtuple', (['"""VersionInfo"""', '"""commit, message, time, build_time"""'], {}), "('VersionInfo', 'commit, message, time, build_time')\n", (1298, 1350), False, 'from collections import namedtuple\n'), ((2054, 2125), 'commcare_cloud.github.github_repo', 'github_repo', (['"""dimagi/formplayer"""'], {'require_write_permissions': 'tag_commits'}), "('dimagi/formplayer', require_write_permissions=tag_commits)\n", (2065, 2125), False, 'from commcare_cloud.github import github_repo\n'), ((2294, 2311), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (2309, 2311), False, 'from datetime import datetime\n'), ((2316, 2362), 'commcare_cloud.commands.deploy.utils.announce_deploy_start', 'announce_deploy_start', (['environment', 'FORMPLAYER'], {}), '(environment, FORMPLAYER)\n', (2337, 2362), False, 'from commcare_cloud.commands.deploy.utils import announce_deploy_start, announce_deploy_failed, announce_deploy_success, create_release_tag\n'), ((3139, 3156), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (3154, 3156), False, 'from datetime import datetime\n'), ((3161, 3204), 'commcare_cloud.commands.deploy.utils.create_release_tag', 'create_release_tag', (['environment', 'repo', 'diff'], {}), '(environment, repo, diff)\n', (3179, 3204), False, 'from commcare_cloud.commands.deploy.utils import announce_deploy_start, announce_deploy_failed, announce_deploy_success, create_release_tag\n'), ((3270, 3346), 'commcare_cloud.commands.deploy.sentry.update_sentry_post_deploy', 'update_sentry_post_deploy', (['environment', '"""formplayer"""', 'repo', 'diff', 'start', 'end'], {}), "(environment, 'formplayer', repo, diff, start, end)\n", (3295, 3346), False, 'from commcare_cloud.commands.deploy.sentry import update_sentry_post_deploy\n'), ((3427, 3492), 'commcare_cloud.events.publish_deploy_event', 'publish_deploy_event', (['"""deploy_success"""', '"""formplayer"""', 'environment'], {}), "('deploy_success', 'formplayer', environment)\n", (3447, 3492), False, 'from commcare_cloud.events import publish_deploy_event\n'), ((4213, 4314), 'commcare_cloud.fab.deploy_diff.DeployDiff', 'DeployDiff', (['repo', 'current_commit', 'latest_version.commit'], {'new_version_details': 'new_version_details'}), '(repo, current_commit, latest_version.commit, new_version_details\n =new_version_details)\n', (4223, 4314), False, 'from commcare_cloud.fab.deploy_diff import DeployDiff\n'), ((4486, 4506), 'commcare_cloud.commands.ansible.helpers.AnsibleContext', 'AnsibleContext', (['args'], {}), '(args)\n', (4500, 4506), False, 'from commcare_cloud.commands.ansible.helpers import AnsibleContext\n'), ((4518, 4809), 'commcare_cloud.commands.ansible.ansible_playbook.run_ansible_playbook', 'ansible_playbook.run_ansible_playbook', (['environment', '"""deploy_stack.yml"""', 'ansible_context'], {'skip_check': 'skip_check', 'quiet': 'skip_check', 'always_skip_check': 'skip_check', 'limit': '"""formplayer"""', 'use_factory_auth': '(False)', 'unknown_args': "('--tags=formplayer_deploy',)", 'respect_ansible_skip': '(True)'}), "(environment, 'deploy_stack.yml',\n ansible_context, skip_check=skip_check, quiet=skip_check,\n always_skip_check=skip_check, limit='formplayer', use_factory_auth=\n False, unknown_args=('--tags=formplayer_deploy',), respect_ansible_skip\n =True)\n", (4555, 4809), False, 'from commcare_cloud.commands.ansible import ansible_playbook\n'), ((1617, 1635), 'dateutil.parser.parse', 'parser.parse', (['time'], {}), '(time)\n', (1629, 1635), False, 'from 
dateutil import parser\n'), ((1806, 1820), 'commcare_cloud.commands.utils.timeago', 'timeago', (['delta'], {}), '(delta)\n', (1813, 1820), False, 'from commcare_cloud.commands.utils import timeago\n'), ((1875, 1931), 'commcare_cloud.colors.color_notice', 'color_notice', (['"""\nPreparing to deploy Formplayer to: """'], {}), '("""\nPreparing to deploy Formplayer to: """)\n', (1887, 1931), False, 'from commcare_cloud.colors import color_warning, color_notice, color_summary\n'), ((2216, 2262), 'commcare_cloud.cli_utils.ask', 'ask', (['"""Continue with deploy?"""'], {'quiet': 'args.quiet'}), "('Continue with deploy?', quiet=args.quiet)\n", (2219, 2262), False, 'from commcare_cloud.cli_utils import ask\n'), ((2445, 2492), 'commcare_cloud.commands.deploy.utils.announce_deploy_failed', 'announce_deploy_failed', (['environment', 'FORMPLAYER'], {}), '(environment, FORMPLAYER)\n', (2467, 2492), False, 'from commcare_cloud.commands.deploy.utils import announce_deploy_start, announce_deploy_failed, announce_deploy_success, create_release_tag\n'), ((2930, 2977), 'commcare_cloud.commands.deploy.utils.announce_deploy_failed', 'announce_deploy_failed', (['environment', 'FORMPLAYER'], {}), '(environment, FORMPLAYER)\n', (2952, 2977), False, 'from commcare_cloud.commands.deploy.utils import announce_deploy_start, announce_deploy_failed, announce_deploy_success, create_release_tag\n'), ((3545, 3589), 'commcare_cloud.colors.color_summary', 'color_summary', (['""">> Compiling deploy summary"""'], {}), "('>> Compiling deploy summary')\n", (3558, 3589), False, 'from commcare_cloud.colors import color_warning, color_notice, color_summary\n'), ((5479, 5643), 'commcare_cloud.alias.commcare_cloud', 'commcare_cloud', (['environment.name', '"""send-datadog-event"""', '"""Formplayer Deploy Success"""', 'deploy_notification_text', '"""--alert_type"""', '"""success"""'], {'show_command': '(False)'}), "(environment.name, 'send-datadog-event',\n 'Formplayer Deploy Success', deploy_notification_text, '--alert_type',\n 'success', show_command=False)\n", (5493, 5643), False, 'from commcare_cloud.alias import commcare_cloud\n'), ((5967, 6025), 'requests.get', 'requests.get', (['f"""http://{formplayer0}:8081/info"""'], {'timeout': '(5)'}), "(f'http://{formplayer0}:8081/info', timeout=5)\n", (5979, 6025), False, 'import requests\n'), ((6494, 6511), 'requests.get', 'requests.get', (['url'], {}), '(url)\n', (6506, 6511), False, 'import requests\n'), ((1760, 1777), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1775, 1777), False, 'from datetime import datetime\n'), ((4966, 5014), 'commcare_cloud.colors.color_summary', 'color_summary', (['f""">> Recording deploy in Datadog"""'], {}), "(f'>> Recording deploy in Datadog')\n", (4979, 5014), False, 'from commcare_cloud.colors import color_warning, color_notice, color_summary\n'), ((5293, 5315), 'commcare_cloud.commands.terraform.aws.get_default_username', 'get_default_username', ([], {}), '()\n', (5313, 5315), False, 'from commcare_cloud.commands.terraform.aws import get_default_username\n'), ((6105, 6168), 'commcare_cloud.colors.color_warning', 'color_warning', (['f"""Error getting current formplayer version: {e}"""'], {}), "(f'Error getting current formplayer version: {e}')\n", (6118, 6168), False, 'from commcare_cloud.colors import color_warning, color_notice, color_summary\n'), ((7127, 7189), 'commcare_cloud.colors.color_warning', 'color_warning', (['f"""Error getting latest formplayer version: {e}"""'], {}), "(f'Error getting latest formplayer version: {e}')\n", 
(7140, 7189), False, 'from commcare_cloud.colors import color_warning, color_notice, color_summary\n')]
|
import numpy as np
import pytest
import torch
from torch.utils.data import TensorDataset
from doppelganger import (ContinuousOutput, DGTorch, DiscreteOutput,
Normalization, Output, OutputType, prepare_data)
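# Fixtures: a small DGTorch model plus random attribute/feature arrays,
# each mixing one continuous and one discrete output.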
@pytest.fixture
def dg_model() -> DGTorch:
attribute_outputs = [
ContinuousOutput(
name="a",
normalization=Normalization.ZERO_ONE,
global_min=0,
global_max=1,
is_feature_normalized=False,
is_example_normalized=False,
),
DiscreteOutput(name="b", dim=3),
]
feature_outputs = [
DiscreteOutput(name="c", dim=4),
ContinuousOutput(
name="d",
normalization=Normalization.ZERO_ONE,
global_min=0,
global_max=1,
is_feature_normalized=False,
is_example_normalized=False,
),
]
dg = DGTorch(attribute_outputs, [], feature_outputs, 20, 5)
return dg
@pytest.fixture
def attribute_data():
n = 100
attributes = np.concatenate(
(
np.random.rand(n, 1),
np.random.randint(0, 3, size=(n, 1)),
),
axis=1,
)
return (attributes, [OutputType.CONTINUOUS, OutputType.DISCRETE])
@pytest.fixture
def feature_data():
n = 100
features = np.concatenate(
(
np.random.randint(0, 4, size=(n, 20, 1)),
np.random.rand(n, 20, 1),
),
axis=2,
)
return (features, [OutputType.DISCRETE, OutputType.CONTINUOUS])
def test_generate(dg_model: DGTorch):
attributes, features = dg_model.generate(8)
assert attributes.shape == (8, 2)
assert features.shape == (8, 20, 2)
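# The train tests below share a pattern: normalize the raw arrays with
# prepare_data, build a DGTorch model from the resulting output specs, wrap
# the tensors in a TensorDataset, and run a short training loop.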
def test_train(attribute_data, feature_data):
attributes, attribute_types = attribute_data
features, feature_types = feature_data
dg_data = prepare_data(
attributes,
attribute_types,
features,
feature_types,
is_feature_normalized=False,
is_example_normalized=False,
)
dg = DGTorch(
attribute_outputs=dg_data.attribute_outputs,
additional_attribute_outputs=None,
feature_outputs=dg_data.feature_outputs,
max_sequence_len=20,
sample_len=5,
)
    dataset = TensorDataset(
torch.Tensor(dg_data.attributes),
torch.Tensor(dg_data.features),
)
dg.train(dataset, batch_size=10, num_epochs=2)
def test_train_with_attribute_discriminator(attribute_data, feature_data):
attributes, attribute_types = attribute_data
features, feature_types = feature_data
dg_data = prepare_data(
attributes,
attribute_types,
features,
feature_types,
is_feature_normalized=False,
is_example_normalized=False,
)
dg = DGTorch(
attribute_outputs=dg_data.attribute_outputs,
additional_attribute_outputs=None,
feature_outputs=dg_data.feature_outputs,
max_sequence_len=20,
sample_len=5,
use_attribute_discriminator=True,
)
    dataset = TensorDataset(
torch.Tensor(dg_data.attributes),
torch.Tensor(dg_data.features),
)
dg.train(dataset, batch_size=10, num_epochs=2)
def test_train_with_additional_attributes(attribute_data, feature_data):
attributes, attribute_types = attribute_data
features, feature_types = feature_data
dg_data = prepare_data(
attributes,
attribute_types,
features,
feature_types,
is_feature_normalized=False,
is_example_normalized=True,
)
dg = DGTorch(
attribute_outputs=dg_data.attribute_outputs,
additional_attribute_outputs=dg_data.additional_attribute_outputs,
feature_outputs=dg_data.feature_outputs,
max_sequence_len=20,
sample_len=5,
)
    dataset = TensorDataset(
torch.Tensor(dg_data.attributes),
torch.Tensor(dg_data.additional_attributes),
torch.Tensor(dg_data.features),
)
dg.train(dataset, batch_size=10, num_epochs=2)
def test_train_with_additional_attributes_and_discriminator(
attribute_data, feature_data
):
attributes, attribute_types = attribute_data
features, feature_types = feature_data
dg_data = prepare_data(
attributes,
attribute_types,
features,
feature_types,
is_feature_normalized=False,
is_example_normalized=True,
)
dg = DGTorch(
attribute_outputs=dg_data.attribute_outputs,
additional_attribute_outputs=dg_data.additional_attribute_outputs,
feature_outputs=dg_data.feature_outputs,
max_sequence_len=20,
sample_len=5,
use_attribute_discriminator=True,
)
    dataset = TensorDataset(
torch.Tensor(dg_data.attributes),
torch.Tensor(dg_data.additional_attributes),
torch.Tensor(dg_data.features),
)
dg.train(dataset, batch_size=10, num_epochs=2)
def test_output():
o1 = Output(name="foo")
assert o1.name == "foo"
assert o1.get_dim() == 1
o2 = DiscreteOutput(name="foo", dim=4)
assert o2.name == "foo"
assert o2.dim == 4
assert o2.get_dim() == 4
o3 = ContinuousOutput(
name="foo",
normalization=Normalization.ZERO_ONE,
global_min=0.0,
global_max=1.0,
is_feature_normalized=False,
is_example_normalized=False,
)
assert o3.get_dim() == 1
with pytest.raises(TypeError):
DiscreteOutput(name="bad")
with pytest.raises(TypeError):
ContinuousOutput(
name="bad",
normalization=Normalization.ZERO_ONE,
global_min=0,
global_max=1,
dim=5,
is_feature_normalized=False,
is_example_normalized=False,
)
def test_prepare_data():
original_attributes = np.concatenate(
(
np.random.rand(100, 1),
np.random.randint(0, 3, size=(100, 1)),
),
axis=1,
)
original_features = np.concatenate(
(
np.random.rand(100, 20, 1),
np.random.randint(0, 2, size=(100, 20, 1)),
),
axis=2,
)
attribute_types = [OutputType.CONTINUOUS, OutputType.DISCRETE]
feature_types = [OutputType.CONTINUOUS, OutputType.DISCRETE]
dg_data = prepare_data(
original_attributes, attribute_types, original_features, feature_types
)
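    # Expected widths, assuming discrete outputs are one-hot encoded:
    # attributes 1 continuous + 3 one-hot = 4 columns; features 1 + 2 = 3
    # channels. The (100, 2) additional attributes presumably carry
    # per-example scaling parameters for the continuous feature.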
assert dg_data.attributes.shape == (100, 4)
assert dg_data.additional_attributes.shape == (100, 2)
assert dg_data.features.shape == (100, 20, 3)
|
[
"doppelganger.DGTorch",
"doppelganger.prepare_data",
"doppelganger.ContinuousOutput",
"pytest.raises",
"doppelganger.DiscreteOutput",
"torch.Tensor",
"numpy.random.randint",
"numpy.random.rand",
"doppelganger.Output"
] |
[((918, 972), 'doppelganger.DGTorch', 'DGTorch', (['attribute_outputs', '[]', 'feature_outputs', '(20)', '(5)'], {}), '(attribute_outputs, [], feature_outputs, 20, 5)\n', (925, 972), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((1875, 2003), 'doppelganger.prepare_data', 'prepare_data', (['attributes', 'attribute_types', 'features', 'feature_types'], {'is_feature_normalized': '(False)', 'is_example_normalized': '(False)'}), '(attributes, attribute_types, features, feature_types,\n is_feature_normalized=False, is_example_normalized=False)\n', (1887, 2003), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((2065, 2237), 'doppelganger.DGTorch', 'DGTorch', ([], {'attribute_outputs': 'dg_data.attribute_outputs', 'additional_attribute_outputs': 'None', 'feature_outputs': 'dg_data.feature_outputs', 'max_sequence_len': '(20)', 'sample_len': '(5)'}), '(attribute_outputs=dg_data.attribute_outputs,\n additional_attribute_outputs=None, feature_outputs=dg_data.\n feature_outputs, max_sequence_len=20, sample_len=5)\n', (2072, 2237), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((2646, 2774), 'doppelganger.prepare_data', 'prepare_data', (['attributes', 'attribute_types', 'features', 'feature_types'], {'is_feature_normalized': '(False)', 'is_example_normalized': '(False)'}), '(attributes, attribute_types, features, feature_types,\n is_feature_normalized=False, is_example_normalized=False)\n', (2658, 2774), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((2836, 3046), 'doppelganger.DGTorch', 'DGTorch', ([], {'attribute_outputs': 'dg_data.attribute_outputs', 'additional_attribute_outputs': 'None', 'feature_outputs': 'dg_data.feature_outputs', 'max_sequence_len': '(20)', 'sample_len': '(5)', 'use_attribute_discriminator': '(True)'}), '(attribute_outputs=dg_data.attribute_outputs,\n additional_attribute_outputs=None, feature_outputs=dg_data.\n feature_outputs, max_sequence_len=20, sample_len=5,\n use_attribute_discriminator=True)\n', (2843, 3046), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((3457, 3584), 'doppelganger.prepare_data', 'prepare_data', (['attributes', 'attribute_types', 'features', 'feature_types'], {'is_feature_normalized': '(False)', 'is_example_normalized': '(True)'}), '(attributes, attribute_types, features, feature_types,\n is_feature_normalized=False, is_example_normalized=True)\n', (3469, 3584), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((3646, 3849), 'doppelganger.DGTorch', 'DGTorch', ([], {'attribute_outputs': 'dg_data.attribute_outputs', 'additional_attribute_outputs': 'dg_data.additional_attribute_outputs', 'feature_outputs': 'dg_data.feature_outputs', 'max_sequence_len': '(20)', 'sample_len': '(5)'}), '(attribute_outputs=dg_data.attribute_outputs,\n additional_attribute_outputs=dg_data.additional_attribute_outputs,\n feature_outputs=dg_data.feature_outputs, max_sequence_len=20, sample_len=5)\n', (3653, 3849), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((4334, 4461), 'doppelganger.prepare_data', 
'prepare_data', (['attributes', 'attribute_types', 'features', 'feature_types'], {'is_feature_normalized': '(False)', 'is_example_normalized': '(True)'}), '(attributes, attribute_types, features, feature_types,\n is_feature_normalized=False, is_example_normalized=True)\n', (4346, 4461), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((4523, 4764), 'doppelganger.DGTorch', 'DGTorch', ([], {'attribute_outputs': 'dg_data.attribute_outputs', 'additional_attribute_outputs': 'dg_data.additional_attribute_outputs', 'feature_outputs': 'dg_data.feature_outputs', 'max_sequence_len': '(20)', 'sample_len': '(5)', 'use_attribute_discriminator': '(True)'}), '(attribute_outputs=dg_data.attribute_outputs,\n additional_attribute_outputs=dg_data.additional_attribute_outputs,\n feature_outputs=dg_data.feature_outputs, max_sequence_len=20,\n sample_len=5, use_attribute_discriminator=True)\n', (4530, 4764), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((5077, 5095), 'doppelganger.Output', 'Output', ([], {'name': '"""foo"""'}), "(name='foo')\n", (5083, 5095), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((5163, 5196), 'doppelganger.DiscreteOutput', 'DiscreteOutput', ([], {'name': '"""foo"""', 'dim': '(4)'}), "(name='foo', dim=4)\n", (5177, 5196), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((5287, 5451), 'doppelganger.ContinuousOutput', 'ContinuousOutput', ([], {'name': '"""foo"""', 'normalization': 'Normalization.ZERO_ONE', 'global_min': '(0.0)', 'global_max': '(1.0)', 'is_feature_normalized': '(False)', 'is_example_normalized': '(False)'}), "(name='foo', normalization=Normalization.ZERO_ONE,\n global_min=0.0, global_max=1.0, is_feature_normalized=False,\n is_example_normalized=False)\n", (5303, 5451), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((6425, 6513), 'doppelganger.prepare_data', 'prepare_data', (['original_attributes', 'attribute_types', 'original_features', 'feature_types'], {}), '(original_attributes, attribute_types, original_features,\n feature_types)\n', (6437, 6513), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((313, 468), 'doppelganger.ContinuousOutput', 'ContinuousOutput', ([], {'name': '"""a"""', 'normalization': 'Normalization.ZERO_ONE', 'global_min': '(0)', 'global_max': '(1)', 'is_feature_normalized': '(False)', 'is_example_normalized': '(False)'}), "(name='a', normalization=Normalization.ZERO_ONE, global_min\n =0, global_max=1, is_feature_normalized=False, is_example_normalized=False)\n", (329, 468), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((556, 587), 'doppelganger.DiscreteOutput', 'DiscreteOutput', ([], {'name': '"""b"""', 'dim': '(3)'}), "(name='b', dim=3)\n", (570, 587), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((627, 658), 'doppelganger.DiscreteOutput', 'DiscreteOutput', ([], {'name': '"""c"""', 'dim': '(4)'}), "(name='c', dim=4)\n", (641, 658), False, 'from doppelganger import ContinuousOutput, DGTorch, 
DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((668, 823), 'doppelganger.ContinuousOutput', 'ContinuousOutput', ([], {'name': '"""d"""', 'normalization': 'Normalization.ZERO_ONE', 'global_min': '(0)', 'global_max': '(1)', 'is_feature_normalized': '(False)', 'is_example_normalized': '(False)'}), "(name='d', normalization=Normalization.ZERO_ONE, global_min\n =0, global_max=1, is_feature_normalized=False, is_example_normalized=False)\n", (684, 823), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((2331, 2363), 'torch.Tensor', 'torch.Tensor', (['dg_data.attributes'], {}), '(dg_data.attributes)\n', (2343, 2363), False, 'import torch\n'), ((2373, 2403), 'torch.Tensor', 'torch.Tensor', (['dg_data.features'], {}), '(dg_data.features)\n', (2385, 2403), False, 'import torch\n'), ((3144, 3176), 'torch.Tensor', 'torch.Tensor', (['dg_data.attributes'], {}), '(dg_data.attributes)\n', (3156, 3176), False, 'import torch\n'), ((3186, 3216), 'torch.Tensor', 'torch.Tensor', (['dg_data.features'], {}), '(dg_data.features)\n', (3198, 3216), False, 'import torch\n'), ((3944, 3976), 'torch.Tensor', 'torch.Tensor', (['dg_data.attributes'], {}), '(dg_data.attributes)\n', (3956, 3976), False, 'import torch\n'), ((3986, 4029), 'torch.Tensor', 'torch.Tensor', (['dg_data.additional_attributes'], {}), '(dg_data.additional_attributes)\n', (3998, 4029), False, 'import torch\n'), ((4039, 4069), 'torch.Tensor', 'torch.Tensor', (['dg_data.features'], {}), '(dg_data.features)\n', (4051, 4069), False, 'import torch\n'), ((4863, 4895), 'torch.Tensor', 'torch.Tensor', (['dg_data.attributes'], {}), '(dg_data.attributes)\n', (4875, 4895), False, 'import torch\n'), ((4905, 4948), 'torch.Tensor', 'torch.Tensor', (['dg_data.additional_attributes'], {}), '(dg_data.additional_attributes)\n', (4917, 4948), False, 'import torch\n'), ((4958, 4988), 'torch.Tensor', 'torch.Tensor', (['dg_data.features'], {}), '(dg_data.features)\n', (4970, 4988), False, 'import torch\n'), ((5538, 5562), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (5551, 5562), False, 'import pytest\n'), ((5572, 5598), 'doppelganger.DiscreteOutput', 'DiscreteOutput', ([], {'name': '"""bad"""'}), "(name='bad')\n", (5586, 5598), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((5609, 5633), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (5622, 5633), False, 'import pytest\n'), ((5643, 5810), 'doppelganger.ContinuousOutput', 'ContinuousOutput', ([], {'name': '"""bad"""', 'normalization': 'Normalization.ZERO_ONE', 'global_min': '(0)', 'global_max': '(1)', 'dim': '(5)', 'is_feature_normalized': '(False)', 'is_example_normalized': '(False)'}), "(name='bad', normalization=Normalization.ZERO_ONE,\n global_min=0, global_max=1, dim=5, is_feature_normalized=False,\n is_example_normalized=False)\n", (5659, 5810), False, 'from doppelganger import ContinuousOutput, DGTorch, DiscreteOutput, Normalization, Output, OutputType, prepare_data\n'), ((1094, 1114), 'numpy.random.rand', 'np.random.rand', (['n', '(1)'], {}), '(n, 1)\n', (1108, 1114), True, 'import numpy as np\n'), ((1128, 1164), 'numpy.random.randint', 'np.random.randint', (['(0)', '(3)'], {'size': '(n, 1)'}), '(0, 3, size=(n, 1))\n', (1145, 1164), True, 'import numpy as np\n'), ((1372, 1412), 'numpy.random.randint', 'np.random.randint', (['(0)', '(4)'], {'size': '(n, 20, 1)'}), '(0, 4, size=(n, 
20, 1))\n', (1389, 1412), True, 'import numpy as np\n'), ((1426, 1450), 'numpy.random.rand', 'np.random.rand', (['n', '(20)', '(1)'], {}), '(n, 20, 1)\n', (1440, 1450), True, 'import numpy as np\n'), ((5989, 6011), 'numpy.random.rand', 'np.random.rand', (['(100)', '(1)'], {}), '(100, 1)\n', (6003, 6011), True, 'import numpy as np\n'), ((6025, 6063), 'numpy.random.randint', 'np.random.randint', (['(0)', '(3)'], {'size': '(100, 1)'}), '(0, 3, size=(100, 1))\n', (6042, 6063), True, 'import numpy as np\n'), ((6160, 6186), 'numpy.random.rand', 'np.random.rand', (['(100)', '(20)', '(1)'], {}), '(100, 20, 1)\n', (6174, 6186), True, 'import numpy as np\n'), ((6200, 6242), 'numpy.random.randint', 'np.random.randint', (['(0)', '(2)'], {'size': '(100, 20, 1)'}), '(0, 2, size=(100, 20, 1))\n', (6217, 6242), True, 'import numpy as np\n')]
|
import torch
import torch.nn.functional as F
from torch import nn
class DropBlock1D(nn.Module):
"""
Args:
drop_prob (float): probability of an element to be dropped.
block_size (int): size of the block to drop
Shape:
- Input: `(N, C, S)`
- Output: `(N, C, S)`
.. _DropBlock: A regularization method for convolutional networks:
https://arxiv.org/abs/1810.12890
"""
def __init__(self, drop_prob, block_size):
super(DropBlock1D, self).__init__()
self.drop_prob = drop_prob
self.block_size = block_size
def forward(self, x):
# shape: (bsize, channels, series)
assert (
x.dim() == 3
), "Expected input with 3 dimensions (bsize, channels, series)"
if not self.training or self.drop_prob == 0.0:
return x
else:
# get gamma value
gamma = self._compute_gamma(x)
# sample mask
mask = (torch.rand(x.shape[0], x.shape[2]) < gamma).float()
# place mask on input device
mask = mask.to(x.device)
# compute block mask
block_mask = self._compute_block_mask(mask)
# apply block mask
out = x * block_mask[:, None, :]
# scale output
out = out * block_mask.numel() / block_mask.sum()
return out
def _compute_block_mask(self, mask):
block_mask = F.max_pool1d(
input=mask[:, None, :],
kernel_size=self.block_size,
stride=1,
padding=self.block_size // 2,
)
if self.block_size % 2 == 0:
block_mask = block_mask[:, :, :-1]
block_mask = 1 - block_mask.squeeze(1)
return block_mask
    def _compute_gamma(self, x):
        # 1D blocks cover `block_size` steps, so gamma scales with block_size;
        # the 2D DropBlock paper uses block_size ** 2 for square blocks
        return self.drop_prob / self.block_size
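# --- Editor's usage sketch (added, not part of the original module): a minimal
# --- smoke test of the documented (N, C, S) contract.
if __name__ == "__main__":
    drop = DropBlock1D(drop_prob=0.1, block_size=5)
    x = torch.rand(4, 8, 100)  # (batch, channels, series)
    drop.train()
    out = drop(x)  # contiguous runs of ~5 steps zeroed, survivors rescaled
    print(out.shape)  # torch.Size([4, 8, 100])
    drop.eval()
    assert torch.equal(drop(x), x)  # identity at inference time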
|
[
"torch.nn.functional.max_pool1d",
"torch.rand"
] |
[((1461, 1570), 'torch.nn.functional.max_pool1d', 'F.max_pool1d', ([], {'input': 'mask[:, None, :]', 'kernel_size': 'self.block_size', 'stride': '(1)', 'padding': '(self.block_size // 2)'}), '(input=mask[:, None, :], kernel_size=self.block_size, stride=1,\n padding=self.block_size // 2)\n', (1473, 1570), True, 'import torch.nn.functional as F\n'), ((986, 1020), 'torch.rand', 'torch.rand', (['x.shape[0]', 'x.shape[2]'], {}), '(x.shape[0], x.shape[2])\n', (996, 1020), False, 'import torch\n')]
|
from flask import render_template, jsonify
from app import app
import random
import io
from flask import Response
from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas
from matplotlib.figure import Figure
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
import os
def saveplot(filename, extension):
strFile = filename + '.' + extension
if os.path.isfile(strFile):
        os.remove(strFile)  # alternatively: os.system("rm " + strFile)
    plt.savefig(strFile)
data = pd.read_csv("DIS.csv")
data['Date'] = pd.to_datetime(data['Date']).dt.date
#data = data.set_index('Date')
data90 = data[len(data)-90:]
preds_d1 = pd.read_csv("preds_d1.csv")
preds_d1['Date'] = pd.to_datetime(preds_d1['Date']).dt.date
#preds_dt = preds_d1.set_index('Date')
preds = pd.read_csv("preds.csv")
preds['Date'] = pd.to_datetime(preds['Date']).dt.date
#preds = preds.set_index('Date')
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html', title='Home')
@app.route('/map')
def map():
return render_template('map.html', title='Map')
@app.route('/map/refresh', methods=['POST'])
def map_refresh():
points = [(random.uniform(48.8434100, 48.8634100),
random.uniform(2.3388000, 2.3588000))
for _ in range(random.randint(2, 9))]
return jsonify({'points': points})
@app.route('/contact')
def contact():
return render_template('contact.html', title='Contact')
@app.route('/dif1.png')
def dif_png():
fig = create_dif1()
output = io.BytesIO()
FigureCanvas(fig).print_png(output)
return Response(output.getvalue(), mimetype='image/png')
def create_dif1():
    fig, ax = plt.subplots()  # fresh figure/axes pair for this plot
ax.plot(preds_d1['Date'], preds_d1['Adj Close'], label='close')
ax.plot(preds_d1['Date'], preds_d1['reg'], label='reg')
ax.plot(preds_d1['Date'], preds_d1['krr'], label='krr')
ax.plot(preds_d1['Date'], preds_d1['mlp'], label='mlp')
months = mdates.MonthLocator() # every month
mdays = mdates.DayLocator(bymonthday=range(1, 32)) # every day
months_fmt = mdates.DateFormatter('%Y-%m-%d')
# format the ticks
ax.xaxis.set_major_locator(months)
ax.xaxis.set_major_formatter(months_fmt)
ax.xaxis.set_minor_locator(mdays)
    # clamp the x-axis to the data range (day resolution)
datemin = np.datetime64(np.amin(preds_d1['Date']), 'D')
datemax = np.datetime64(np.amax(preds_d1['Date']), 'D')
ax.set_xlim(datemin, datemax)
# format the coords message box
ax.format_xdata = mdates.DateFormatter('%Y-%m-%d')
ax.format_ydata = lambda x: '$%1.2f' % x # format the price.
ax.grid(True)
# rotates and right aligns the x labels, and moves the bottom of the
# axes up to make room for them
fig.autofmt_xdate()
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles, labels)
saveplot('dif1', 'png')
return fig
@app.route('/price.png')
def price_png():
fig = create_price()
output = io.BytesIO()
FigureCanvas(fig).print_png(output)
return Response(output.getvalue(), mimetype='image/png')
def create_price():
    fig, ax = plt.subplots()  # fresh figure/axes pair for this plot
ax.plot(preds['Date'], preds['Adj Close'], label='close')
ax.plot(preds['Date'], preds['reg'], label='reg')
ax.plot(preds['Date'], preds['krr'], label='krr')
ax.plot(preds['Date'], preds['mlp'], label='mlp')
months = mdates.MonthLocator() # every month
mdays = mdates.DayLocator(bymonthday=range(1, 32)) # every day
months_fmt = mdates.DateFormatter('%Y-%m-%d')
# format the ticks
ax.xaxis.set_major_locator(months)
ax.xaxis.set_major_formatter(months_fmt)
ax.xaxis.set_minor_locator(mdays)
    # clamp the x-axis to the data range (day resolution)
datemin = np.datetime64(np.amin(preds['Date']), 'D')
datemax = np.datetime64(np.amax(preds['Date']), 'D')
ax.set_xlim(datemin, datemax)
# format the coords message box
ax.format_xdata = mdates.DateFormatter('%Y-%m-%d')
ax.format_ydata = lambda x: '$%1.2f' % x # format the price.
ax.grid(True)
# rotates and right aligns the x labels, and moves the bottom of the
# axes up to make room for them
fig.autofmt_xdate()
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles, labels)
saveplot('price', 'png')
return fig
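# --- Editor's note (added): hedged dev-server entry point; assumes `app` is the
# --- Flask instance exposed by the `app` package imported at the top.
if __name__ == '__main__':
    app.run(debug=True)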
|
[
"os.remove",
"matplotlib.dates.MonthLocator",
"numpy.amin",
"pandas.read_csv",
"os.path.isfile",
"flask.jsonify",
"matplotlib.pyplot.figure",
"random.randint",
"matplotlib.backends.backend_agg.FigureCanvasAgg",
"matplotlib.dates.DateFormatter",
"flask.render_template",
"matplotlib.pyplot.subplots",
"io.BytesIO",
"pandas.to_datetime",
"app.app.route",
"random.uniform",
"pandas.plotting.register_matplotlib_converters",
"numpy.amax",
"matplotlib.pyplot.savefig"
] |
[((593, 625), 'pandas.plotting.register_matplotlib_converters', 'register_matplotlib_converters', ([], {}), '()\n', (623, 625), False, 'from pandas.plotting import register_matplotlib_converters\n'), ((883, 905), 'pandas.read_csv', 'pd.read_csv', (['"""DIS.csv"""'], {}), "('DIS.csv')\n", (894, 905), True, 'import pandas as pd\n'), ((1037, 1064), 'pandas.read_csv', 'pd.read_csv', (['"""preds_d1.csv"""'], {}), "('preds_d1.csv')\n", (1048, 1064), True, 'import pandas as pd\n'), ((1179, 1203), 'pandas.read_csv', 'pd.read_csv', (['"""preds.csv"""'], {}), "('preds.csv')\n", (1190, 1203), True, 'import pandas as pd\n'), ((1299, 1313), 'app.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (1308, 1313), False, 'from app import app\n'), ((1316, 1335), 'app.app.route', 'app.route', (['"""/index"""'], {}), "('/index')\n", (1325, 1335), False, 'from app import app\n'), ((1412, 1429), 'app.app.route', 'app.route', (['"""/map"""'], {}), "('/map')\n", (1421, 1429), False, 'from app import app\n'), ((1501, 1544), 'app.app.route', 'app.route', (['"""/map/refresh"""'], {'methods': "['POST']"}), "('/map/refresh', methods=['POST'])\n", (1510, 1544), False, 'from app import app\n'), ((1774, 1795), 'app.app.route', 'app.route', (['"""/contact"""'], {}), "('/contact')\n", (1783, 1795), False, 'from app import app\n'), ((1877, 1899), 'app.app.route', 'app.route', (['"""/dif1.png"""'], {}), "('/dif1.png')\n", (1886, 1899), False, 'from app import app\n'), ((3390, 3413), 'app.app.route', 'app.route', (['"""/price.png"""'], {}), "('/price.png')\n", (3399, 3413), False, 'from app import app\n'), ((729, 752), 'os.path.isfile', 'os.path.isfile', (['strFile'], {}), '(strFile)\n', (743, 752), False, 'import os\n'), ((852, 872), 'matplotlib.pyplot.savefig', 'plt.savefig', (['strFile'], {}), '(strFile)\n', (863, 872), True, 'import matplotlib.pyplot as plt\n'), ((1362, 1405), 'flask.render_template', 'render_template', (['"""index.html"""'], {'title': '"""Home"""'}), "('index.html', title='Home')\n", (1377, 1405), False, 'from flask import render_template, jsonify\n'), ((1454, 1494), 'flask.render_template', 'render_template', (['"""map.html"""'], {'title': '"""Map"""'}), "('map.html', title='Map')\n", (1469, 1494), False, 'from flask import render_template, jsonify\n'), ((1740, 1767), 'flask.jsonify', 'jsonify', (["{'points': points}"], {}), "({'points': points})\n", (1747, 1767), False, 'from flask import render_template, jsonify\n'), ((1824, 1872), 'flask.render_template', 'render_template', (['"""contact.html"""'], {'title': '"""Contact"""'}), "('contact.html', title='Contact')\n", (1839, 1872), False, 'from flask import render_template, jsonify\n'), ((1955, 1967), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (1965, 1967), False, 'import io\n'), ((2103, 2116), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (2113, 2116), True, 'import matplotlib.pyplot as plt\n'), ((2134, 2148), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2146, 2148), True, 'import matplotlib.pyplot as plt\n'), ((2417, 2438), 'matplotlib.dates.MonthLocator', 'mdates.MonthLocator', ([], {}), '()\n', (2436, 2438), True, 'import matplotlib.dates as mdates\n'), ((2542, 2574), 'matplotlib.dates.DateFormatter', 'mdates.DateFormatter', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (2562, 2574), True, 'import matplotlib.dates as mdates\n'), ((2978, 3010), 'matplotlib.dates.DateFormatter', 'mdates.DateFormatter', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (2998, 3010), True, 'import matplotlib.dates as mdates\n'), 
((3472, 3484), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (3482, 3484), False, 'import io\n'), ((3639, 3652), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (3649, 3652), True, 'import matplotlib.pyplot as plt\n'), ((3670, 3684), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3682, 3684), True, 'import matplotlib.pyplot as plt\n'), ((3929, 3950), 'matplotlib.dates.MonthLocator', 'mdates.MonthLocator', ([], {}), '()\n', (3948, 3950), True, 'import matplotlib.dates as mdates\n'), ((4054, 4086), 'matplotlib.dates.DateFormatter', 'mdates.DateFormatter', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (4074, 4086), True, 'import matplotlib.dates as mdates\n'), ((4484, 4516), 'matplotlib.dates.DateFormatter', 'mdates.DateFormatter', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (4504, 4516), True, 'import matplotlib.dates as mdates\n'), ((763, 781), 'os.remove', 'os.remove', (['strFile'], {}), '(strFile)\n', (772, 781), False, 'import os\n'), ((826, 846), 'matplotlib.pyplot.savefig', 'plt.savefig', (['strFile'], {}), '(strFile)\n', (837, 846), True, 'import matplotlib.pyplot as plt\n'), ((922, 950), 'pandas.to_datetime', 'pd.to_datetime', (["data['Date']"], {}), "(data['Date'])\n", (936, 950), True, 'import pandas as pd\n'), ((1085, 1117), 'pandas.to_datetime', 'pd.to_datetime', (["preds_d1['Date']"], {}), "(preds_d1['Date'])\n", (1099, 1117), True, 'import pandas as pd\n'), ((1221, 1250), 'pandas.to_datetime', 'pd.to_datetime', (["preds['Date']"], {}), "(preds['Date'])\n", (1235, 1250), True, 'import pandas as pd\n'), ((2788, 2813), 'numpy.amin', 'np.amin', (["preds_d1['Date']"], {}), "(preds_d1['Date'])\n", (2795, 2813), True, 'import numpy as np\n'), ((2849, 2874), 'numpy.amax', 'np.amax', (["preds_d1['Date']"], {}), "(preds_d1['Date'])\n", (2856, 2874), True, 'import numpy as np\n'), ((4300, 4322), 'numpy.amin', 'np.amin', (["preds['Date']"], {}), "(preds['Date'])\n", (4307, 4322), True, 'import numpy as np\n'), ((4358, 4380), 'numpy.amax', 'np.amax', (["preds['Date']"], {}), "(preds['Date'])\n", (4365, 4380), True, 'import numpy as np\n'), ((1581, 1615), 'random.uniform', 'random.uniform', (['(48.84341)', '(48.86341)'], {}), '(48.84341, 48.86341)\n', (1595, 1615), False, 'import random\n'), ((1637, 1667), 'random.uniform', 'random.uniform', (['(2.3388)', '(2.3588)'], {}), '(2.3388, 2.3588)\n', (1651, 1667), False, 'import random\n'), ((1973, 1990), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvas', (['fig'], {}), '(fig)\n', (1985, 1990), True, 'from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\n'), ((3490, 3507), 'matplotlib.backends.backend_agg.FigureCanvasAgg', 'FigureCanvas', (['fig'], {}), '(fig)\n', (3502, 3507), True, 'from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas\n'), ((1705, 1725), 'random.randint', 'random.randint', (['(2)', '(9)'], {}), '(2, 9)\n', (1719, 1725), False, 'import random\n')]
|
import logging
from typing import Any, Dict
from pytest_zebrunner.context import zebrunner_context
logger = logging.getLogger(__name__)
class SeleniumSession:
def __init__(self, reporting_service) -> None: # type: ignore
self._active_sessions: Dict[str, Any] = {}
self.reporting_service = reporting_service
def start_session(self, session_id: str, capabilities: dict, desired_capabilities: dict) -> None:
self._active_sessions[session_id] = {"related_tests": []}
zebrunner_session_id = self.reporting_service.start_test_session(
session_id, capabilities, desired_capabilities
)
if zebrunner_session_id:
self._active_sessions[session_id]["zebrunner_session_id"] = zebrunner_session_id
def finish_session(self, session_id: str) -> None:
self.reporting_service.finish_test_session(
self._active_sessions[session_id]["zebrunner_session_id"],
self._active_sessions[session_id]["related_tests"],
)
del self._active_sessions[session_id]
def finish_all_sessions(self) -> None:
for session_id in list(self._active_sessions):
self.finish_session(session_id)
def add_test(self, test_id: int) -> None:
for session_id in self._active_sessions:
if self._active_sessions[session_id].get("related_tests") is not None:
self._active_sessions[session_id]["related_tests"].append(test_id)
else:
self._active_sessions[session_id]["related_tests"] = [test_id]
def inject_driver(session_manager: SeleniumSession) -> None:
try:
from selenium.webdriver.remote.webdriver import WebDriver
base_init = WebDriver.__init__
        base_quit = WebDriver.quit  # keep the original quit; calling close() alone would leak the session
def init(session, *args, **kwargs) -> None: # type: ignore
base_init(session, *args, **kwargs)
session_manager.start_session(
session.session_id, session.capabilities, kwargs.get("desired_capabilities", {})
)
if zebrunner_context.test_is_active:
session_manager.add_test(zebrunner_context.test_id)
def quit(session) -> None: # type: ignore
session_manager.finish_session(session.session_id)
            base_quit(session)
WebDriver.__init__ = init
WebDriver.quit = quit
except ImportError:
logger.warning("Selenium library is not installed.")
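# --- Editor's usage sketch (added): wiring the patch with a stub reporting
# --- service. `_StubReportingService` is hypothetical; the real service is
# --- supplied by the pytest-zebrunner plugin itself.
class _StubReportingService:
    def start_test_session(self, session_id, capabilities, desired_capabilities):
        return "zebrunner-session-1"
    def finish_test_session(self, zebrunner_session_id, related_tests):
        pass
# inject_driver(SeleniumSession(_StubReportingService()))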
|
[
"logging.getLogger"
] |
[((110, 137), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (127, 137), False, 'import logging\n')]
|
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""PSROIPoolingGrad op"""
from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType
p_s_r_o_i_pooling_grad_op_info = TBERegOp("PSROIPoolingGrad") \
.fusion_type("OPAQUE") \
.async_flag(False) \
.binfile_name("p_s_r_o_i_pooling_grad_v2_d.so") \
.compute_cost(10) \
.kernel_name("p_s_r_o_i_pooling_grad_v2_d") \
.partial_flag(True) \
.attr("output_dim", "required", "int", "all") \
.attr("group_size", "required", "int", "all") \
.attr("spatial_scale", "required", "float", "all") \
.attr("input_size", "required", "listInt", "all") \
.input(0, "x", False, "required", "all") \
.input(1, "rois", False, "required", "all") \
.output(0, "y", False, "required", "all") \
.dtype_format(DataType.F32_5HD, DataType.F32_Default, DataType.F32_5HD) \
.get_op_info()
@op_info_register(p_s_r_o_i_pooling_grad_op_info)
def _p_s_r_o_i_pooling_grad_tbe():
"""PSROIPoolingGrad TBE register"""
return
|
[
"mindspore.ops.op_info_register.TBERegOp",
"mindspore.ops.op_info_register.op_info_register"
] |
[((1545, 1593), 'mindspore.ops.op_info_register.op_info_register', 'op_info_register', (['p_s_r_o_i_pooling_grad_op_info'], {}), '(p_s_r_o_i_pooling_grad_op_info)\n', (1561, 1593), False, 'from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType\n'), ((826, 854), 'mindspore.ops.op_info_register.TBERegOp', 'TBERegOp', (['"""PSROIPoolingGrad"""'], {}), "('PSROIPoolingGrad')\n", (834, 854), False, 'from mindspore.ops.op_info_register import op_info_register, TBERegOp, DataType\n')]
|
import h5py
import numpy as np
from sklearn import svm
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.utils import shuffle
rbfSigma = 0.1
def readFile(filename):
with h5py.File(filename, 'r') as f:
a_group_key = list(f.keys())
X = list(f[a_group_key[0]])
y = list(f[a_group_key[1]])
return np.array(X), np.array(y)
def form_mesh(x, y, h=0.1):
x_min, x_max = x.min() - 1, x.max() + 1
y_min, y_max = y.min() - 1, y.max() + 1
return np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
def linear(X1, X2):
gram_matrix = np.zeros((X1.shape[0], X2.shape[0]))
for i, x1 in enumerate(X1):
for j, x2 in enumerate(X2):
x1 = x1.flatten()
x2 = x2.flatten()
gram_matrix[i, j] = np.dot(x1, x2)
return gram_matrix
def rbfKernel(X1, X2):
gamma = 1 / float( 2*(rbfSigma**2))
gram_matrix = np.zeros((X1.shape[0], X2.shape[0]))
for i, x1 in enumerate(X1):
for j, x2 in enumerate(X2):
x1 = x1.flatten()
x2 = x2.flatten()
gram_matrix[i, j] = np.exp(- np.sum(np.power((x1 - x2), 2) ) * gamma )
return gram_matrix
class SVM():
def __init__(self, kernel = linear, C = 1, sigma=None):
self.C = C
self.kernel = kernel
self.sigma = sigma
self.clf = svm.SVC(kernel=self.kernel, C = self.C)
def fit(self, X, y):
self.clf.fit(X, y)
def predict(self, X):
return self.clf.predict(X)
def accuracy(self, X, y):
m = X.shape[0]
return round(np.sum(self.predict(X) == y) / m, 3)
def custom_predict(self, pX, X):
m, n = pX.shape
b = self.clf._intercept_[0]
a = self.clf.dual_coef_[0]
take_feature = self.clf.support_
sv = X[take_feature]
val = np.zeros(m)
for j in range(m):
for i in range(len(sv)):
s = sv[i].reshape(n, 1)
p = pX[j].reshape(n, 1)
val[j] += a[i] * self.kernel(s.T, p.T)
pred = (val - b >= 0)*1
return pred
def custom_accuracy(self, X, y, xTrain):
pred = self.custom_predict(X, xTrain)
return round(np.mean(pred == y), 3)
def __str__(self):
print('sv = ' + str(self.clf.support_vectors_))
print('nv = ' + str(self.clf.n_support_))
print('a = ' + str(self.clf.dual_coef_))
print('a.shape = ' + str(self.clf.dual_coef_.shape))
print('b = ' + str(self.clf._intercept_))
print('cs = ' + str(self.clf.classes_))
print(str(self.clf.support_))
return ""
def get_data(fItr):
filename = "./dataset_q1/data_" + str(fItr) + ".h5"
X, y = readFile(filename)
return X, y
def plot_graph(X, y, clfs, h, s, X_b, y_b):
X0, X1 = X[:, 0], X[:, 1]
xx, yy = form_mesh(X0, X1, h=h)
meshVals = np.c_[xx.ravel(), yy.ravel()]
Z = None
try:
Zs = []
for model in clfs:
Z = model.predict(meshVals)
Zs.append(Z)
Zs = np.array(Zs)
Z = np.argmax(Zs, axis=0)
    except TypeError:  # clfs is a single model rather than a list of binary classifiers
Z = clfs.predict(meshVals)
Z = Z.reshape(xx.shape)
plt.contourf(xx, yy, Z, cmap=plt.cm.coolwarm, alpha=0.7)
plt.scatter(X[:, 0], X[:, 1], c=y, s=s, cmap=plt.cm.coolwarm)
try:
plt.scatter(X_b[:, 0], X_b[:, 1], c=y_b, s=20, cmap=plt.cm.cool)
    except TypeError:  # no outlier points to overlay (X_b is an empty list)
pass
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.title("Decision Boundary Plot")
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.show()
def get_model(X, y, ker, C = 1, sigma = 1, h = 0.1, visualize=True, s=None, X_b = [], y_b = []):
global rbfSigma
m, n = X.shape
rbfSigma = sigma
clf = SVM(kernel=ker, C=C, sigma=sigma)
clf.fit(X, y)
if(visualize):
plot_graph(X, y, clf, h, s, X_b, y_b)
return clf
def getOutliers(X, y, f):
scaler = StandardScaler()
X0 = X[y == 0]
y0 = y[y == 0]
X1 = X[y == 1]
y1 = y[y == 1]
X0t = scaler.fit_transform(X0)
X1t = scaler.fit_transform(X1)
out0 = np.where(np.absolute(X0t) > f[0])[0]
out1 = np.where(np.absolute(X1t) > f[1])[0]
clusters0 = np.zeros(X0.shape[0])
clusters1 = np.zeros(X1.shape[0])
clusters0[out0] = -1
clusters1[out1] = -1
X0, y0, X_b0, y_b0 = refineData(X0, y0, clusters0)
X1, y1, X_b1, y_b1 = refineData(X1, y1, clusters1)
X = np.vstack((X0, X1))
X_b = np.vstack((X_b0, X_b1))
y = np.append(y0, y1)
y_b = np.append(y_b0, y_b1)
X, y = shuffle(X, y)
return X, y, X_b, y_b
def refineData(X, y, f):
X_b = X[f == -1]
y_b = y[f == -1]
X = np.delete(X, np.where(f == -1), axis = 0)
y = np.delete(y, np.where(f == -1), axis = 0)
return X, y, X_b, y_b
if __name__ == '__main__':
X, y = get_data(1)
clf1 = get_model(X, y, ker=rbfKernel, C=100, sigma=1, h=0.05, s=5)
X, y = get_data(2)
clf2 = get_model(X, y, ker=rbfKernel, C=1000, sigma=1, h=0.05, s=5)
X, y = get_data(3)
clf3 = []
Y = y
for i in range(3):
y = np.copy(Y)
y[y == (i + 2) % 3] = -1
y[y == (i + 1) % 3] = -1
y[y == i] = 1
y[y == -1] = 0
clf = get_model(X, y, ker=linear, visualize=False)
clf3.append(clf)
plot_graph(X, Y, clf3, h=0.3, s=10, X_b = [], y_b = [])
X, y = get_data(4)
X, y, X_b, y_b = getOutliers(X, y, (1.8, 1.9))
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
clf4Linear = get_model(X_train, y_train, ker=linear, s=1, X_b=X_b, y_b=y_b)
clf4Rbf = get_model(X_train, y_train, ker=rbfKernel, C=0.01, sigma=1, h=0.1, s=1, X_b=X_b, y_b=y_b)
accuracyLinear_train = clf4Linear.custom_accuracy(X_train, y_train, X_train)
accuracyLinear_test = clf4Linear.custom_accuracy(X_test, y_test, X_train)
accuracyRbf_train = clf4Rbf.custom_accuracy(X_train, y_train, X_train)
accuracyRbf_test = clf4Rbf.custom_accuracy(X_test, y_test, X_train)
print("Accuracy for Prediciton with Custom Method")
print(" "*10, "Linear".center(20), "RBF".center(20))
print("Train".center(10), str(accuracyLinear_train).center(20), str(accuracyRbf_train).center(20))
print("Test".center(10), str(accuracyLinear_test).center(20), str(accuracyRbf_test).center(20))
accuracyLinear_train = clf4Linear.accuracy(X_train, y_train)
accuracyLinear_test = clf4Linear.accuracy(X_test, y_test)
accuracyRbf_train = clf4Rbf.accuracy(X_train, y_train)
accuracyRbf_test = clf4Rbf.accuracy(X_test, y_test)
print("Accuracy for Prediciton with Inbuilt Method")
print(" "*10, "Linear".center(20), "RBF".center(20))
print("Train".center(10), str(accuracyLinear_train).center(20), str(accuracyRbf_train).center(20))
print("Test".center(10), str(accuracyLinear_test).center(20), str(accuracyRbf_test).center(20))
X, y = get_data(5)
X, y, X_b, y_b = getOutliers(X, y, (1.8, 1.9))
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
clf5Linear = get_model(X_train, y_train, ker=linear, s=1, X_b=X_b, y_b=y_b)
clf5Rbf = get_model(X_train, y_train, ker=rbfKernel, C=10, sigma=2, h=0.1, s=1, X_b=X_b, y_b=y_b)
accuracyLinear_train = clf5Linear.custom_accuracy(X_train, y_train, X_train)
accuracyLinear_test = clf5Linear.custom_accuracy(X_test, y_test, X_train)
accuracyRbf_train = clf5Rbf.custom_accuracy(X_train, y_train, X_train)
accuracyRbf_test = clf5Rbf.custom_accuracy(X_test, y_test, X_train)
print("Accuracy for Prediciton with Custom Method")
print(" "*10, "Linear".center(20), "RBF".center(20))
print("Train".center(10), str(accuracyLinear_train).center(20), str(accuracyRbf_train).center(20))
print("Test".center(10), str(accuracyLinear_test).center(20), str(accuracyRbf_test).center(20))
accuracyLinear_train = clf5Linear.accuracy(X_train, y_train)
accuracyLinear_test = clf5Linear.accuracy(X_test, y_test)
accuracyRbf_train = clf5Rbf.accuracy(X_train, y_train)
accuracyRbf_test = clf5Rbf.accuracy(X_test, y_test)
print("Accuracy for Prediciton with Inbuilt Method")
print(" "*10, "Linear".center(20), "RBF".center(20))
print("Train".center(10), str(accuracyLinear_train).center(20), str(accuracyRbf_train).center(20))
print("Test".center(10), str(accuracyLinear_test).center(20), str(accuracyRbf_test).center(20))
|
[
"matplotlib.pyplot.title",
"numpy.absolute",
"sklearn.preprocessing.StandardScaler",
"numpy.argmax",
"sklearn.model_selection.train_test_split",
"numpy.mean",
"matplotlib.pyplot.contourf",
"numpy.arange",
"sklearn.svm.SVC",
"numpy.copy",
"numpy.power",
"numpy.append",
"h5py.File",
"matplotlib.pyplot.show",
"matplotlib.pyplot.ylabel",
"numpy.dot",
"numpy.vstack",
"matplotlib.pyplot.scatter",
"numpy.zeros",
"numpy.where",
"numpy.array",
"sklearn.utils.shuffle",
"matplotlib.pyplot.xlabel"
] |
[((685, 721), 'numpy.zeros', 'np.zeros', (['(X1.shape[0], X2.shape[0])'], {}), '((X1.shape[0], X2.shape[0]))\n', (693, 721), True, 'import numpy as np\n'), ((1002, 1038), 'numpy.zeros', 'np.zeros', (['(X1.shape[0], X2.shape[0])'], {}), '((X1.shape[0], X2.shape[0]))\n', (1010, 1038), True, 'import numpy as np\n'), ((3272, 3328), 'matplotlib.pyplot.contourf', 'plt.contourf', (['xx', 'yy', 'Z'], {'cmap': 'plt.cm.coolwarm', 'alpha': '(0.7)'}), '(xx, yy, Z, cmap=plt.cm.coolwarm, alpha=0.7)\n', (3284, 3328), True, 'import matplotlib.pyplot as plt\n'), ((3333, 3394), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X[:, 0]', 'X[:, 1]'], {'c': 'y', 's': 's', 'cmap': 'plt.cm.coolwarm'}), '(X[:, 0], X[:, 1], c=y, s=s, cmap=plt.cm.coolwarm)\n', (3344, 3394), True, 'import matplotlib.pyplot as plt\n'), ((3506, 3529), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Feature 1"""'], {}), "('Feature 1')\n", (3516, 3529), True, 'import matplotlib.pyplot as plt\n'), ((3534, 3557), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Feature 2"""'], {}), "('Feature 2')\n", (3544, 3557), True, 'import matplotlib.pyplot as plt\n'), ((3562, 3597), 'matplotlib.pyplot.title', 'plt.title', (['"""Decision Boundary Plot"""'], {}), "('Decision Boundary Plot')\n", (3571, 3597), True, 'import matplotlib.pyplot as plt\n'), ((3668, 3678), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3676, 3678), True, 'import matplotlib.pyplot as plt\n'), ((4026, 4042), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (4040, 4042), False, 'from sklearn.preprocessing import StandardScaler\n'), ((4301, 4322), 'numpy.zeros', 'np.zeros', (['X0.shape[0]'], {}), '(X0.shape[0])\n', (4309, 4322), True, 'import numpy as np\n'), ((4339, 4360), 'numpy.zeros', 'np.zeros', (['X1.shape[0]'], {}), '(X1.shape[0])\n', (4347, 4360), True, 'import numpy as np\n'), ((4529, 4548), 'numpy.vstack', 'np.vstack', (['(X0, X1)'], {}), '((X0, X1))\n', (4538, 4548), True, 'import numpy as np\n'), ((4559, 4582), 'numpy.vstack', 'np.vstack', (['(X_b0, X_b1)'], {}), '((X_b0, X_b1))\n', (4568, 4582), True, 'import numpy as np\n'), ((4591, 4608), 'numpy.append', 'np.append', (['y0', 'y1'], {}), '(y0, y1)\n', (4600, 4608), True, 'import numpy as np\n'), ((4619, 4640), 'numpy.append', 'np.append', (['y_b0', 'y_b1'], {}), '(y_b0, y_b1)\n', (4628, 4640), True, 'import numpy as np\n'), ((4652, 4665), 'sklearn.utils.shuffle', 'shuffle', (['X', 'y'], {}), '(X, y)\n', (4659, 4665), False, 'from sklearn.utils import shuffle\n'), ((5576, 5613), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.2)'}), '(X, y, test_size=0.2)\n', (5592, 5613), False, 'from sklearn.model_selection import train_test_split\n'), ((7101, 7138), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'test_size': '(0.2)'}), '(X, y, test_size=0.2)\n', (7117, 7138), False, 'from sklearn.model_selection import train_test_split\n'), ((274, 298), 'h5py.File', 'h5py.File', (['filename', '"""r"""'], {}), "(filename, 'r')\n", (283, 298), False, 'import h5py\n'), ((425, 436), 'numpy.array', 'np.array', (['X'], {}), '(X)\n', (433, 436), True, 'import numpy as np\n'), ((438, 449), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (446, 449), True, 'import numpy as np\n'), ((590, 616), 'numpy.arange', 'np.arange', (['x_min', 'x_max', 'h'], {}), '(x_min, x_max, h)\n', (599, 616), True, 'import numpy as np\n'), ((618, 644), 'numpy.arange', 'np.arange', (['y_min', 'y_max', 'h'], {}), '(y_min, y_max, h)\n', (627, 644), True, 
'import numpy as np\n'), ((1443, 1480), 'sklearn.svm.SVC', 'svm.SVC', ([], {'kernel': 'self.kernel', 'C': 'self.C'}), '(kernel=self.kernel, C=self.C)\n', (1450, 1480), False, 'from sklearn import svm\n'), ((1927, 1938), 'numpy.zeros', 'np.zeros', (['m'], {}), '(m)\n', (1935, 1938), True, 'import numpy as np\n'), ((3146, 3158), 'numpy.array', 'np.array', (['Zs'], {}), '(Zs)\n', (3154, 3158), True, 'import numpy as np\n'), ((3171, 3192), 'numpy.argmax', 'np.argmax', (['Zs'], {'axis': '(0)'}), '(Zs, axis=0)\n', (3180, 3192), True, 'import numpy as np\n'), ((3412, 3476), 'matplotlib.pyplot.scatter', 'plt.scatter', (['X_b[:, 0]', 'X_b[:, 1]'], {'c': 'y_b', 's': '(20)', 'cmap': 'plt.cm.cool'}), '(X_b[:, 0], X_b[:, 1], c=y_b, s=20, cmap=plt.cm.cool)\n', (3423, 3476), True, 'import matplotlib.pyplot as plt\n'), ((4781, 4798), 'numpy.where', 'np.where', (['(f == -1)'], {}), '(f == -1)\n', (4789, 4798), True, 'import numpy as np\n'), ((4831, 4848), 'numpy.where', 'np.where', (['(f == -1)'], {}), '(f == -1)\n', (4839, 4848), True, 'import numpy as np\n'), ((5196, 5206), 'numpy.copy', 'np.copy', (['Y'], {}), '(Y)\n', (5203, 5206), True, 'import numpy as np\n'), ((882, 896), 'numpy.dot', 'np.dot', (['x1', 'x2'], {}), '(x1, x2)\n', (888, 896), True, 'import numpy as np\n'), ((2307, 2325), 'numpy.mean', 'np.mean', (['(pred == y)'], {}), '(pred == y)\n', (2314, 2325), True, 'import numpy as np\n'), ((4209, 4225), 'numpy.absolute', 'np.absolute', (['X0t'], {}), '(X0t)\n', (4220, 4225), True, 'import numpy as np\n'), ((4257, 4273), 'numpy.absolute', 'np.absolute', (['X1t'], {}), '(X1t)\n', (4268, 4273), True, 'import numpy as np\n'), ((1216, 1236), 'numpy.power', 'np.power', (['(x1 - x2)', '(2)'], {}), '(x1 - x2, 2)\n', (1224, 1236), True, 'import numpy as np\n')]
|
import os
import re
import sublime
import sublime_plugin
try: # Python 3
from .haxe_helper import HaxeComplete_inst
from .haxe_generate_code_helper import is_haxe_scope
except (ValueError): # Python 2
from haxe_helper import HaxeComplete_inst
from haxe_generate_code_helper import is_haxe_scope
re_type = re.compile(r'(abstract|class|enum|interface|typedef)\s*\b([\w]*)\b')
re_package = re.compile(r'package\s*([\w.]*);')
def gen_package_decl(package):
if package == '':
return 'package;'
return 'package %s;' % package
class HaxeFixModule(sublime_plugin.TextCommand):
@staticmethod
def poll(ctx):
view = ctx.view
cmds = []
classpath = HaxeComplete_inst().get_build(view).get_classpath(view)
if classpath is None:
return cmds
filename = os.path.splitext(os.path.basename(view.file_name()))[0]
filedir = os.path.dirname(view.file_name())
src = ctx.src
mos = [mo for mo in re_type.finditer(src)]
if len(mos) == 1:
mo = mos[0]
if filename != mo.group(2):
cmds.append((
'Rename %s %s to %s' % (
mo.group(1), mo.group(2), filename),
'haxe_fix_module',
{'cname': filename}))
mo = re_package.search(src)
cur_package = mo.group(1) if mo else ''
if classpath in filedir:
package = os.path.relpath(filedir, classpath)
if package == '.':
package = ''
package = package.replace(os.sep, '.')
if package != cur_package:
cmds.append((
'Rename package \'%s\' to \'%s\'' % (
cur_package, package),
'haxe_fix_module',
{'package': package}))
return cmds
def run(self, edit, cname=None, package=None):
view = self.view
if view is None or view.is_loading() or not is_haxe_scope(view):
return
src = view.substr(sublime.Region(0, self.view.size()))
if cname is not None:
mo = re_type.search(src)
view.replace(edit, sublime.Region(mo.start(2), mo.end(2)), cname)
sublime.status_message(
'%s %s renamed to %s' % (mo.group(1), mo.group(2), cname))
if package is not None:
mo = re_package.search(src)
if mo:
view.replace(
edit, sublime.Region(mo.start(0), mo.end(0)),
gen_package_decl(package))
else:
view.insert(edit, 0, gen_package_decl(package) + '\n')
sublime.status_message('package renamed')
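# --- Editor's note (added): gen_package_decl examples ---
# gen_package_decl('')            -> 'package;'
# gen_package_decl('com.example') -> 'package com.example;'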
|
[
"haxe_helper.HaxeComplete_inst",
"haxe_generate_code_helper.is_haxe_scope",
"sublime.status_message",
"os.path.relpath",
"re.compile"
] |
[((325, 396), 're.compile', 're.compile', (['"""(abstract|class|enum|interface|typedef)\\\\s*\\\\b([\\\\w]*)\\\\b"""'], {}), "('(abstract|class|enum|interface|typedef)\\\\s*\\\\b([\\\\w]*)\\\\b')\n", (335, 396), False, 'import re\n'), ((407, 442), 're.compile', 're.compile', (['"""package\\\\s*([\\\\w.]*);"""'], {}), "('package\\\\s*([\\\\w.]*);')\n", (417, 442), False, 'import re\n'), ((1469, 1504), 'os.path.relpath', 'os.path.relpath', (['filedir', 'classpath'], {}), '(filedir, classpath)\n', (1484, 1504), False, 'import os\n'), ((2737, 2778), 'sublime.status_message', 'sublime.status_message', (['"""package renamed"""'], {}), "('package renamed')\n", (2759, 2778), False, 'import sublime\n'), ((2040, 2059), 'haxe_generate_code_helper.is_haxe_scope', 'is_haxe_scope', (['view'], {}), '(view)\n', (2053, 2059), False, 'from haxe_generate_code_helper import is_haxe_scope\n'), ((710, 729), 'haxe_helper.HaxeComplete_inst', 'HaxeComplete_inst', ([], {}), '()\n', (727, 729), False, 'from haxe_helper import HaxeComplete_inst\n')]
|
from django.shortcuts import render, redirect, get_object_or_404
from .models import HottProfile
def profile_view(request, username=None):
owner = False
# import pdb; pdb.set_trace()
if not username:
username = request.user.get_username()
owner = True
if username == '':
return redirect('/')
profile = get_object_or_404(HottProfile, user__username=username)
if not owner:
return redirect('/')
context = {
'profile': profile,
}
return render(request, 'templates/home.html', context)
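# --- Editor's usage sketch (added): a plausible URLconf wiring for this view;
# --- the route names and paths are assumptions, not from the original app.
# from django.urls import path
# urlpatterns = [
#     path('profile/', profile_view, name='own_profile'),
#     path('profile/<str:username>/', profile_view, name='profile'),
# ]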
|
[
"django.shortcuts.render",
"django.shortcuts.get_object_or_404",
"django.shortcuts.redirect"
] |
[((533, 588), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['HottProfile'], {'user__username': 'username'}), '(HottProfile, user__username=username)\n', (550, 588), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((699, 746), 'django.shortcuts.render', 'render', (['request', '"""templates/home.html"""', 'context'], {}), "(request, 'templates/home.html', context)\n", (705, 746), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((623, 636), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (631, 636), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((504, 517), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (512, 517), False, 'from django.shortcuts import render, redirect, get_object_or_404\n')]
|
"""
Class for creating and using connections to Google Drive.
Special note: API access is presumed to be through a user and not a service acct.
Last modified: Dec 2016
By: <NAME>
"""
from __future__ import print_function
import httplib2
import os
import sys
import logging
import base64
from email.mime.text import MIMEText
from os.path import basename
from apiclient import discovery
from apiclient import errors
from apiclient.http import MediaFileUpload
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client.service_account import ServiceAccountCredentials
class gclient:
def __init__(self, scopes, client_secret_file, application_name, delegate):
"""
params:
scopes: Google API scopes to use
client_secret_file: filename of secret file downloaded from Google
application_name: name of application utilizing the Google API
delegate: for use with delegate accounts
"""
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/
self.flags = tools.argparser.parse_args([])
self.scopes = scopes
self.client_secret_file = client_secret_file
self.application_name = application_name
self.delegate = delegate
def get_credentials(self):
"""
Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
if getattr(sys, 'frozen', False):
# frozen
run_path = os.path.dirname(sys._MEIPASS)
else:
# unfrozen
run_path = os.path.dirname(os.path.realpath(__file__))
credential_path = run_path+"/"+'.googleapis_config.json'
store = Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(run_path+"/"+self.client_secret_file, self.scopes)
flow.params['access_type'] = 'offline'
flow.approval_prompt = 'force'
flow.user_agent = self.application_name
# approval_prompt to 'force'
if self.flags:
credentials = tools.run_flow(flow, store, self.flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run_flow(flow, store)
return credentials
#for appending data to specified Google sheet by ID
def sheet_insert_request(self, spreadsheet_id, insert_values, sheet_range='A:B'):
"""
Inserts new row into Google spreadsheet with provided data.
params:
spreadsheet_id: ID of Google spreadsheet to insert values into
insert_values: values to insert into Google spreadsheet
sheet_range: range to use when inserting the values into Google spreadsheet
"""
#Creates a Sheets API service object
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
discoveryUrl = ('https://sheets.googleapis.com/$discovery/rest?version=v4')
service = discovery.build('sheets', 'v4', http=http, discoveryServiceUrl=discoveryUrl)
#create dummy range and var for data to insert
append_data = {"values":[insert_values]}
logging.info("Appending row of data to Google spreadsheet with id: "+spreadsheet_id)
#send the data to be appended
result = service.spreadsheets().values().append(
spreadsheetId=spreadsheet_id, valueInputOption="USER_ENTERED", range=sheet_range, body=append_data).execute()
#for uploading files to Google Drive using source filepath and Google Drive folder ID
def drive_upload_request(self, path_to_source_file, drive_folder_id):
"""
Uploads file to Google Drive folder based on ID
params:
path_to_source_file: path to file which will be uploaded to Google Drive
drive_folder_id: ID of Google Drive folder to upload file to
"""
#Creates a Drive API service object
credentials = self.get_credentials()
http = credentials.authorize(httplib2.Http())
service = discovery.build('drive', 'v3', http=http)
#create MediaFileUpload object for file to upload
media_body = MediaFileUpload(path_to_source_file)
#create meta data for file to upload
body = {
'name': basename(path_to_source_file),
'parents':[drive_folder_id]
}
logging.info("Uploading "+basename(path_to_source_file)+" to Google Drive folder with id: "+drive_folder_id)
#upload the file
file = service.files().create(body=body, media_body=media_body).execute()
#for sending content through email automatically (uses gmail)
def gmail_send(self, mail_to, mail_reply_to, mail_cc, mail_subject, mail_content):
"""
Function for sending Gmail email based on provided information in params
params:
mail_to: what email adddresses to send to delimited by commas
mail_reply_to: who the email reply-to should be set as
mail_cc: what emails adddresses to cc to delimited by commas
mail_subject: email subject line
mail_content: email body content
"""
#Creates a Drive API service object
credentials = self.get_credentials()
#delegated_credentials = credentials.create_delegated(self.delegate)
http = credentials.authorize(httplib2.Http())
service = discovery.build('gmail', 'v1', http=http)
logging.info("Building email message")
#Create a message for an email - uses html formatting for better spacing options
message = MIMEText(mail_content, 'html')
message['to'] = mail_to
message['cc'] = mail_cc
message['reply-to'] = mail_reply_to
message['from'] = self.delegate
message['subject'] = mail_subject
message_content = {'raw': base64.urlsafe_b64encode(message.as_string().encode()).decode('utf-8')}
logging.info("Sending email message")
#Send an email message.
sent_message = (service.users().messages().send(userId=self.delegate, body=message_content).execute())
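# --- Editor's usage sketch (added); the scope, file names and folder ID below
# --- are placeholders, not values from the original deployment.
# gc = gclient(scopes='https://www.googleapis.com/auth/drive.file',
#              client_secret_file='client_secret.json',
#              application_name='example-app',
#              delegate='[email protected]')
# gc.drive_upload_request('/tmp/report.csv', 'DRIVE_FOLDER_ID')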
|
[
"oauth2client.tools.argparser.parse_args",
"oauth2client.file.Storage",
"apiclient.discovery.build",
"httplib2.Http",
"os.path.basename",
"email.mime.text.MIMEText",
"os.path.dirname",
"oauth2client.client.flow_from_clientsecrets",
"os.path.realpath",
"oauth2client.tools.run_flow",
"logging.info",
"apiclient.http.MediaFileUpload"
] |
[((1233, 1263), 'oauth2client.tools.argparser.parse_args', 'tools.argparser.parse_args', (['[]'], {}), '([])\n', (1259, 1263), False, 'from oauth2client import tools\n'), ((2075, 2099), 'oauth2client.file.Storage', 'Storage', (['credential_path'], {}), '(credential_path)\n', (2082, 2099), False, 'from oauth2client.file import Storage\n'), ((3480, 3556), 'apiclient.discovery.build', 'discovery.build', (['"""sheets"""', '"""v4"""'], {'http': 'http', 'discoveryServiceUrl': 'discoveryUrl'}), "('sheets', 'v4', http=http, discoveryServiceUrl=discoveryUrl)\n", (3495, 3556), False, 'from apiclient import discovery\n'), ((3676, 3766), 'logging.info', 'logging.info', (["('Appending row of data to Google spreadsheet with id: ' + spreadsheet_id)"], {}), "('Appending row of data to Google spreadsheet with id: ' +\n spreadsheet_id)\n", (3688, 3766), False, 'import logging\n'), ((4578, 4619), 'apiclient.discovery.build', 'discovery.build', (['"""drive"""', '"""v3"""'], {'http': 'http'}), "('drive', 'v3', http=http)\n", (4593, 4619), False, 'from apiclient import discovery\n'), ((4703, 4739), 'apiclient.http.MediaFileUpload', 'MediaFileUpload', (['path_to_source_file'], {}), '(path_to_source_file)\n', (4718, 4739), False, 'from apiclient.http import MediaFileUpload\n'), ((5981, 6022), 'apiclient.discovery.build', 'discovery.build', (['"""gmail"""', '"""v1"""'], {'http': 'http'}), "('gmail', 'v1', http=http)\n", (5996, 6022), False, 'from apiclient import discovery\n'), ((6034, 6072), 'logging.info', 'logging.info', (['"""Building email message"""'], {}), "('Building email message')\n", (6046, 6072), False, 'import logging\n'), ((6182, 6212), 'email.mime.text.MIMEText', 'MIMEText', (['mail_content', '"""html"""'], {}), "(mail_content, 'html')\n", (6190, 6212), False, 'from email.mime.text import MIMEText\n'), ((6526, 6563), 'logging.info', 'logging.info', (['"""Sending email message"""'], {}), "('Sending email message')\n", (6538, 6563), False, 'import logging\n'), ((1853, 1882), 'os.path.dirname', 'os.path.dirname', (['sys._MEIPASS'], {}), '(sys._MEIPASS)\n', (1868, 1882), False, 'import os\n'), ((2209, 2298), 'oauth2client.client.flow_from_clientsecrets', 'client.flow_from_clientsecrets', (["(run_path + '/' + self.client_secret_file)", 'self.scopes'], {}), "(run_path + '/' + self.client_secret_file,\n self.scopes)\n", (2239, 2298), False, 'from oauth2client import client\n'), ((3359, 3374), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (3372, 3374), False, 'import httplib2\n'), ((4542, 4557), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (4555, 4557), False, 'import httplib2\n'), ((4827, 4856), 'os.path.basename', 'basename', (['path_to_source_file'], {}), '(path_to_source_file)\n', (4835, 4856), False, 'from os.path import basename\n'), ((5945, 5960), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (5958, 5960), False, 'import httplib2\n'), ((1962, 1988), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1978, 1988), False, 'import os\n'), ((2541, 2580), 'oauth2client.tools.run_flow', 'tools.run_flow', (['flow', 'store', 'self.flags'], {}), '(flow, store, self.flags)\n', (2555, 2580), False, 'from oauth2client import tools\n'), ((2679, 2706), 'oauth2client.tools.run_flow', 'tools.run_flow', (['flow', 'store'], {}), '(flow, store)\n', (2693, 2706), False, 'from oauth2client import tools\n'), ((4951, 4980), 'os.path.basename', 'basename', (['path_to_source_file'], {}), '(path_to_source_file)\n', (4959, 4980), False, 'from os.path import basename\n')]
|
from urllib.request import urlopen
from bs4 import BeautifulSoup
import pandas as pd
#import re
#from urllib.parse import urlencode
#from urllib.error import HTTPError
uasgs_url = "http://www.comprasnet.gov.br/livre/uasg/Catalogo_Resp.asp"
response = urlopen(uasgs_url)
html = response.read()
soup = BeautifulSoup(html, 'html.parser')
uasgs = []
first_tag = 7
last_tag = 11095
for tr_tag in soup.find_all('tr')[first_tag:last_tag]:
uasg_data = {}
tds = tr_tag.find_all('td')
uasg_data['id'] = tds[0].string.strip()
uasg_data['nome'] = tds[1].string.strip()
uasg_data['sigla_uf'] = tds[2].a.string
uasgs.append(uasg_data)
table = pd.DataFrame(data=uasgs, dtype=str, columns=['id', 'nome', 'sigla_uf'])
table.to_json("uasgs.json", orient='records')
|
[
"bs4.BeautifulSoup",
"urllib.request.urlopen",
"pandas.DataFrame"
] |
[((252, 270), 'urllib.request.urlopen', 'urlopen', (['uasgs_url'], {}), '(uasgs_url)\n', (259, 270), False, 'from urllib.request import urlopen\n'), ((302, 336), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""html.parser"""'], {}), "(html, 'html.parser')\n", (315, 336), False, 'from bs4 import BeautifulSoup\n'), ((641, 712), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'uasgs', 'dtype': 'str', 'columns': "['id', 'nome', 'sigla_uf']"}), "(data=uasgs, dtype=str, columns=['id', 'nome', 'sigla_uf'])\n", (653, 712), True, 'import pandas as pd\n')]
|
#Session model stores the session data
from django.contrib.sessions.models import Session
from .models import LoggedInUser
from .views import single_user, time_up
from django.utils import timezone
from mysite.settings import t_out
from django.contrib.auth.models import User
from runcode.runcodefuncs import kill_stop_clear
class OneSessionPerUserMiddleware:
# Called only once when the web server starts
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
# Code to be executed for each request before
# the view (and later middleware) are called.
print(LoggedInUser.objects.count())
if request.user.is_authenticated:
if LoggedInUser.objects.count() > 1:
obj = LoggedInUser.objects.exclude(user_id=request.user.id)
for i in obj:
if ((timezone.now()-i.user.last_login).seconds > t_out and not i.user.is_staff) or (i.user.is_staff and (timezone.now()-i.user.last_login).seconds > 3*t_out):
user=User.objects.get(pk=i.user_id)
[s.delete() for s in Session.objects.all() if str(s.get_decoded().get('_auth_user_id')) == str(user.id)]
i.delete()
else:
return single_user(request)
tnow = timezone.now()
tlogin = request.user.logged_in_user.login_time
if (tnow - tlogin).seconds > t_out:
if request.user.is_staff or request.user.is_superuser:
pass
else:
kill_stop_clear()
return time_up(request)
# if there is a stored_session_key in our database and it is
# different from the current session, delete the stored_session_key
# session_key with from the Session table
stored_session_key = request.user.logged_in_user.session_key
if stored_session_key and stored_session_key != request.session.session_key:
Session.objects.get(session_key=stored_session_key).delete()
request.user.logged_in_user.session_key = request.session.session_key
request.user.logged_in_user.save()
        response = self.get_response(request)
        # Any per-request code to run after the view is called would go here;
        # this middleware only needs the pre-view checks above.
        return response
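# --- Editor's usage note (added): enable the middleware in settings.py after
# --- Django's session and auth middleware (the module path below is assumed).
# MIDDLEWARE += ['accounts.middleware.OneSessionPerUserMiddleware']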
|
[
"runcode.runcodefuncs.kill_stop_clear",
"django.contrib.auth.models.User.objects.get",
"django.utils.timezone.now",
"django.contrib.sessions.models.Session.objects.get",
"django.contrib.sessions.models.Session.objects.all"
] |
[((1375, 1389), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1387, 1389), False, 'from django.utils import timezone\n'), ((1636, 1653), 'runcode.runcodefuncs.kill_stop_clear', 'kill_stop_clear', ([], {}), '()\n', (1651, 1653), False, 'from runcode.runcodefuncs import kill_stop_clear\n'), ((1083, 1113), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'pk': 'i.user_id'}), '(pk=i.user_id)\n', (1099, 1113), False, 'from django.contrib.auth.models import User\n'), ((2084, 2135), 'django.contrib.sessions.models.Session.objects.get', 'Session.objects.get', ([], {'session_key': 'stored_session_key'}), '(session_key=stored_session_key)\n', (2103, 2135), False, 'from django.contrib.sessions.models import Session\n'), ((1159, 1180), 'django.contrib.sessions.models.Session.objects.all', 'Session.objects.all', ([], {}), '()\n', (1178, 1180), False, 'from django.contrib.sessions.models import Session\n'), ((900, 914), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (912, 914), False, 'from django.utils import timezone\n'), ((1000, 1014), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (1012, 1014), False, 'from django.utils import timezone\n')]
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
''' File '''
import os
import subprocess
import hashlib
import string_helpers
from chunked import chunked
from cached import cached
class File(object):
''' Wrapper around a filesystem object '''
TYPE_MUSIC = 'music'
EXT_MUSIC = (
'.mp3',
)
TYPE_MISC = 'misc'
TYPE_FOLDER = 'folder'
def __init__(self, path):
self.path = unicode(path)
def __unicode__(self):
return self.filename
def __repr__(self):
return "'" + unicode(self) + "'"
def rename(self, name):
''' Renames a file (keeping the existing extension) '''
# The name is "empty" aka its really a removal...
if not len(name):
raise Exception("No name Passed in")
new_path = os.path.join(self.dir, name + self.ext)
if os.path.exists(new_path):
raise RuntimeError("Rename would overwrite... Aborting: '{}' to '{}'".format(
self.name,
name
))
subprocess.check_call(["mv", "-n", self.path, new_path])
self.path = new_path
def add_prefix(self, prefix):
self.rename(prefix + self.name)
def change_ext(self, ext):
''' Swaps the file extension '''
new_path = os.path.join(self.dir, self.name + ext)
subprocess.check_call(["mv", "-n", self.path, new_path])
self.path = new_path
# TODO: this should re-initialize the file, in case its now a music/img/etc file
def remove(self, force=False):
''' Removes the file (trashes), pass in force=True to delete permanently '''
# TODO: Make it multi-os
# Linux: gvfs-trash
if force:
subprocess.check_call(["rm", self.path])
else:
subprocess.check_call(["trash", self.path])
def capitalize(self):
self.rename(string_helpers.capitalize_words(self.name))
@property
def dir(self):
return os.path.dirname(self.path)
@property
def name(self):
return os.path.splitext(self.filename)[0]
@property
def filename(self):
return os.path.basename(self.path)
@property
def fullname(self):
return os.path.basename(self.path)
@property
def ext(self):
return os.path.splitext(self.path)[1]
@property
@cached
def stat(self):
        return os.stat(self.path)  # os.stat already returns an os.stat_result
#########################################
# File Types
def type(self):
if self.ext in File.EXT_MUSIC:
return File.TYPE_MUSIC
return File.TYPE_MISC
# File Hashing
HASH_CHUNK_SIZE = 64
@cached
def hash(self):
self.__hash = hashlib.sha1()
with open(self.path, 'r') as fh:
for data in chunked(fh, self.HASH_CHUNK_SIZE):
self.__hash.update(data)
return self.__hash.hexdigest()
def compare(self, other):
if self.hash() == other.hash():
return True
return False
|
[
"hashlib.sha1",
"os.path.basename",
"os.stat",
"os.path.dirname",
"string_helpers.capitalize_words",
"os.path.exists",
"os.path.splitext",
"chunked.chunked",
"os.path.join",
"subprocess.check_call"
] |
[((801, 840), 'os.path.join', 'os.path.join', (['self.dir', '(name + self.ext)'], {}), '(self.dir, name + self.ext)\n', (813, 840), False, 'import os\n'), ((852, 876), 'os.path.exists', 'os.path.exists', (['new_path'], {}), '(new_path)\n', (866, 876), False, 'import os\n'), ((1040, 1096), 'subprocess.check_call', 'subprocess.check_call', (["['mv', '-n', self.path, new_path]"], {}), "(['mv', '-n', self.path, new_path])\n", (1061, 1096), False, 'import subprocess\n'), ((1293, 1332), 'os.path.join', 'os.path.join', (['self.dir', '(self.name + ext)'], {}), '(self.dir, self.name + ext)\n', (1305, 1332), False, 'import os\n'), ((1341, 1397), 'subprocess.check_call', 'subprocess.check_call', (["['mv', '-n', self.path, new_path]"], {}), "(['mv', '-n', self.path, new_path])\n", (1362, 1397), False, 'import subprocess\n'), ((1981, 2007), 'os.path.dirname', 'os.path.dirname', (['self.path'], {}), '(self.path)\n', (1996, 2007), False, 'import os\n'), ((2147, 2174), 'os.path.basename', 'os.path.basename', (['self.path'], {}), '(self.path)\n', (2163, 2174), False, 'import os\n'), ((2229, 2256), 'os.path.basename', 'os.path.basename', (['self.path'], {}), '(self.path)\n', (2245, 2256), False, 'import os\n'), ((2723, 2737), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (2735, 2737), False, 'import hashlib\n'), ((1730, 1770), 'subprocess.check_call', 'subprocess.check_call', (["['rm', self.path]"], {}), "(['rm', self.path])\n", (1751, 1770), False, 'import subprocess\n'), ((1797, 1840), 'subprocess.check_call', 'subprocess.check_call', (["['trash', self.path]"], {}), "(['trash', self.path])\n", (1818, 1840), False, 'import subprocess\n'), ((1888, 1930), 'string_helpers.capitalize_words', 'string_helpers.capitalize_words', (['self.name'], {}), '(self.name)\n', (1919, 1930), False, 'import string_helpers\n'), ((2058, 2089), 'os.path.splitext', 'os.path.splitext', (['self.filename'], {}), '(self.filename)\n', (2074, 2089), False, 'import os\n'), ((2306, 2333), 'os.path.splitext', 'os.path.splitext', (['self.path'], {}), '(self.path)\n', (2322, 2333), False, 'import os\n'), ((2414, 2432), 'os.stat', 'os.stat', (['self.path'], {}), '(self.path)\n', (2421, 2432), False, 'import os\n'), ((2804, 2837), 'chunked.chunked', 'chunked', (['fh', 'self.HASH_CHUNK_SIZE'], {}), '(fh, self.HASH_CHUNK_SIZE)\n', (2811, 2837), False, 'from chunked import chunked\n')]
|
import torch
import numpy as np
def suit4pytorch(X, Y):
X = np.swapaxes(X, 1, 3)
X_norm = X/255
X_torch = torch.from_numpy(X_norm).float()
Y_torch = torch.from_numpy(Y).long()
return X_torch, Y_torch
|
[
"numpy.swapaxes",
"torch.from_numpy"
] |
[((66, 86), 'numpy.swapaxes', 'np.swapaxes', (['X', '(1)', '(3)'], {}), '(X, 1, 3)\n', (77, 86), True, 'import numpy as np\n'), ((120, 144), 'torch.from_numpy', 'torch.from_numpy', (['X_norm'], {}), '(X_norm)\n', (136, 144), False, 'import torch\n'), ((167, 186), 'torch.from_numpy', 'torch.from_numpy', (['Y'], {}), '(Y)\n', (183, 186), False, 'import torch\n')]
|
#needs pytorch_transformers version 1.2.0
#!/usr/bin/env python
# coding: utf-8
import argparse
import re
import os
import _pickle as cPickle
import numpy as np
import pandas as pd
import torch
from pytorch_transformers.tokenization_bert import BertTokenizer
def assert_eq(real, expected):
assert real == expected, "%s (true) vs %s (expected)" % (real, expected)
# the same tokenize function from BERT adapted for this task
def tokenize(entries, tokenizer, max_length=16, padding_index=0):
"""Tokenizes the captions.
This will add c_token in each entry of the dataset.
-1 represent nil, and should be treated as padding_index in embedding
"""
for entry in entries:
tokens = tokenizer.encode(entry["caption"])
tokens = tokens[: max_length - 2]
tokens = tokenizer.add_special_tokens_single_sentence(tokens)
segment_ids = [0] * len(tokens)
input_mask = [1] * len(tokens)
if len(tokens) < max_length:
# Note here we pad in front of the sentence
padding = [padding_index] * (max_length - len(tokens))
tokens = tokens + padding
input_mask += padding
segment_ids += padding
assert_eq(len(tokens), max_length)
entry["c_token"] = tokens
entry["c_input_mask"] = input_mask
entry["c_segment_ids"] = segment_ids
# the same tensorize function from BERT adapted for this task
def tensorize(entries, split='trainval'):
for entry in entries:
caption = torch.from_numpy(np.array(entry["c_token"]))
entry["c_token"] = caption
c_input_mask = torch.from_numpy(np.array(entry["c_input_mask"]))
entry["c_input_mask"] = c_input_mask
c_segment_ids = torch.from_numpy(np.array(entry["c_segment_ids"]))
entry["c_segment_ids"] = c_segment_ids
if "scores" in entry:
scores = np.array(entry["scores"], dtype=np.float32)
scores = torch.from_numpy(scores)
entry["scores"] = scores
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"--bert_model",
default="bert-base-uncased",
type=str,
help="Bert pre-trained model selected in the list: bert-base-uncased, "
"bert-large-uncased, bert-base-cased, bert-base-multilingual, bert-base-chinese.",
)
parser.add_argument(
"--captions_path",
type=str,
default="/aloui/MediaEval/dev-set/dev-set_video-captions.txt",
help="Captions .txt file"
)
parser.add_argument(
"--gt_path",
type=str,
default="/MediaEval/dev-set/ground-truth/ground-truth_dev-set.csv",
help="Ground truth .csv file"
)
parser.add_argument(
"--do_lower_case",
default=True,
type=bool,
help="Whether to lower case the input text. True for uncased models, False for cased models.",
)
parser.add_argument(
"--split",
required=True,
type=str,
help="which split to use trainval or test"
)
parser.add_argument(
"--dc",
action="store_true",
help="Whether to use deep captions or not"
)
args = parser.parse_args()
try:
assert args.split in ["trainval", "test"]
except Exception as error:
print("Split must be trainval or test")
raise
#deep_coptions_path = "/MediaEval/alto_titles_danny.csv"
#train_caption_path = '/aloui/MediaEval/dev-set/dev-set_video-captions.txt'
dataroot = 'datasets/ME2020'
max_length = 23
tokenizer = BertTokenizer.from_pretrained(args.bert_model, do_lower_case=args.do_lower_case)
entries = []
if args.dc:
deep_coptions_df = pd.read_csv(args.captions_path)
entries = []
for r in deep_coptions_df.itertuples():
sample = {}
vid_id = int(r.video)
caption = r.caption.rstrip().replace('-', ' ')
sample['video_id'] = vid_id
sample['caption'] = caption
entries.append(sample)
else:
df=pd.read_csv(args.captions_path)
df= df.groupby('video_id').agg({'video_url':'first',
'description': ' '.join}).reset_index()
print(df.description)
for r in df.itertuples():
#print(r)
sample = {}
#vid_id,video_url, caption = line.split(','
#vid_id = re.findall(r'\d+', vid_name)[0]
#caption = caption.rstrip().replace('-', ' ')
sample['video_id'] = int(r.video_id)
sample['caption'] = r.description
entries.append(sample)
train_df = pd.read_csv(args.gt_path)
score_dict = {}
for r in train_df.itertuples():
vid_id = r.video_id
vid_id = int(vid_id)
score_dict[vid_id] = [r.part_1_scores, r.part_2_scores]
train_score_list = []
for sample in entries:
if sample['video_id'] in score_dict:
sample['scores'] = score_dict[sample['video_id']]
train_score_list.append(sample)
tokenize(train_score_list, tokenizer, max_length=max_length)
tensorize(train_score_list, split=args.split)
#print(len(train_score_list))
#print(train_score_list[0])
train_cache_path = os.path.join(dataroot, 'cache', 'ME2020' + '_' + args.split + '_' + str(max_length) + '_cleaned' + '.pkl')
print("Saving cache file with {} samples under {}".format(len(train_score_list), train_cache_path))
cPickle.dump(train_score_list, open(train_cache_path, 'wb'))
if __name__ == "__main__":
main()
|
[
"argparse.ArgumentParser",
"pandas.read_csv",
"pytorch_transformers.tokenization_bert.BertTokenizer.from_pretrained",
"numpy.array",
"torch.from_numpy"
] |
[((2052, 2077), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2075, 2077), False, 'import argparse\n'), ((3632, 3717), 'pytorch_transformers.tokenization_bert.BertTokenizer.from_pretrained', 'BertTokenizer.from_pretrained', (['args.bert_model'], {'do_lower_case': 'args.do_lower_case'}), '(args.bert_model, do_lower_case=args.do_lower_case\n )\n', (3661, 3717), False, 'from pytorch_transformers.tokenization_bert import BertTokenizer\n'), ((4703, 4728), 'pandas.read_csv', 'pd.read_csv', (['args.gt_path'], {}), '(args.gt_path)\n', (4714, 4728), True, 'import pandas as pd\n'), ((3779, 3810), 'pandas.read_csv', 'pd.read_csv', (['args.captions_path'], {}), '(args.captions_path)\n', (3790, 3810), True, 'import pandas as pd\n'), ((4133, 4164), 'pandas.read_csv', 'pd.read_csv', (['args.captions_path'], {}), '(args.captions_path)\n', (4144, 4164), True, 'import pandas as pd\n'), ((1542, 1568), 'numpy.array', 'np.array', (["entry['c_token']"], {}), "(entry['c_token'])\n", (1550, 1568), True, 'import numpy as np\n'), ((1646, 1677), 'numpy.array', 'np.array', (["entry['c_input_mask']"], {}), "(entry['c_input_mask'])\n", (1654, 1677), True, 'import numpy as np\n'), ((1766, 1798), 'numpy.array', 'np.array', (["entry['c_segment_ids']"], {}), "(entry['c_segment_ids'])\n", (1774, 1798), True, 'import numpy as np\n'), ((1899, 1942), 'numpy.array', 'np.array', (["entry['scores']"], {'dtype': 'np.float32'}), "(entry['scores'], dtype=np.float32)\n", (1907, 1942), True, 'import numpy as np\n'), ((1964, 1988), 'torch.from_numpy', 'torch.from_numpy', (['scores'], {}), '(scores)\n', (1980, 1988), False, 'import torch\n')]
|
##############################################################################
#
# Copyright (c) 2001 Zope Foundation and Contributors
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this
# distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" Classes: LocalRolePlugin
"""
from AccessControl import ClassSecurityInfo
from AccessControl.class_init import InitializeClass
from Products.PageTemplates.PageTemplateFile import PageTemplateFile
from zope.interface import Interface
from ..interfaces.plugins import IRolesPlugin
from ..utils import classImplements
from .BasePlugin import BasePlugin
class ILocalRolePlugin(Interface):
""" Marker interface.
"""
manage_addLocalRolePluginForm = PageTemplateFile(
'www/lrpAdd', globals(), __name__='manage_addLocalRolePluginForm')
def addLocalRolePlugin(dispatcher, id, title='', RESPONSE=None):
""" Add a Local Role Plugin to 'dispatcher'.
"""
lrp = LocalRolePlugin(id, title)
dispatcher._setObject(id, lrp)
if RESPONSE is not None:
msg = '%s/manage_main?manage_tabs_message=%s'
goto = dispatcher.absolute_url()
RESPONSE.redirect(msg % (goto, 'LocalRolePlugin+added.'))
class LocalRolePlugin(BasePlugin):
""" Provide roles during Authentication from local roles
assignments made on the root object.
"""
meta_type = 'Local Role Plugin'
zmi_icon = 'fas fa-user-tag'
security = ClassSecurityInfo()
def __init__(self, id, title=None):
self._setId(id)
self.title = title
#
# IRolesPlugin implementation
#
@security.private
def getRolesForPrincipal(self, principal, request=None):
""" See IRolesPlugin.
"""
local_roles = getattr(self.getPhysicalRoot(),
'__ac_local_roles__', None)
if local_roles is None:
return None
return local_roles.get(principal.getId())
classImplements(LocalRolePlugin, ILocalRolePlugin, IRolesPlugin)
InitializeClass(LocalRolePlugin)
|
[
"AccessControl.ClassSecurityInfo",
"AccessControl.class_init.InitializeClass"
] |
[((2360, 2392), 'AccessControl.class_init.InitializeClass', 'InitializeClass', (['LocalRolePlugin'], {}), '(LocalRolePlugin)\n', (2375, 2392), False, 'from AccessControl.class_init import InitializeClass\n'), ((1787, 1806), 'AccessControl.ClassSecurityInfo', 'ClassSecurityInfo', ([], {}), '()\n', (1804, 1806), False, 'from AccessControl import ClassSecurityInfo\n')]
|
from compas import PRECISION
class SmoothUnion(object):
"""The smooth union between two volumetric objects.
Parameters
----------
a: volumetric object
First object to add.
b: volumetric object
Second object to add.
r: float
Intensity factor, the higher the number, the smoother the result. Default value `1.0`
Examples
--------
>>> s = Sphere(Point(5, 6, 0), 9)
>>> b = Box(Frame.worldXY(), 20, 15, 10)
>>> vs = VolSphere(s)
>>> vb = VolBox(b, 2.5)
>>> u = SmoothUnion(vs, vb, 1.5)
"""
def __init__(self, a=None, b=None, r=1.0):
self.a = a
self.b = b
self.r = r
def __repr__(self):
return 'SmoothUnion({0},{1},{2:.{3}f})'.format(str(self.a), str(self.b), self.r, PRECISION[:1])
def get_distance_alt(self, x, y, z):
da = self.a.get_distance(x, y, z)
db = self.b.get_distance(x, y, z)
e = max(self.r - abs(da - db), 0)
return min(da, db) - e**2 * 0.25 / self.r
def get_distance(self, point):
"""
single point distance function
"""
da = self.a.get_distance(point)
db = self.b.get_distance(point)
k = self.r
h = min(max(0.5 + 0.5 * (db - da) / k, 0), 1)
return (db * (1 - h) + h * da) - k * h * (1 - h)
def get_distance_numpy(self, x, y, z):
"""
vectorized distance function
"""
import numpy as np
da = self.a.get_distance_numpy(x, y, z)
db = self.b.get_distance_numpy(x, y, z)
h = np.minimum(np.maximum(0.5 + 0.5 * (db - da)/self.r, 0), 1)
return (db * (1 - h) + h * da) - self.r * h * (1 - h)
# ==============================================================================
# Main
# ==============================================================================
if __name__ == "__main__":
from compas_vol.primitives import VolSphere, VolBox
from compas.geometry import Box, Frame, Point, Sphere
import numpy as np
import matplotlib.pyplot as plt
s = Sphere(Point(4, 5, 0), 7)
b = Box(Frame.worldXY(), 20, 15, 10)
vs = VolSphere(s)
vb = VolBox(b, 2.5)
u = SmoothUnion(vs, vb, 6.5)
# for y in range(-15, 15):
# s = ''
# for x in range(-30, 30):
# d = u.get_distance(Point(x*0.5, y, 0))
# if d < 0:
# s += 'x'
# else:
# s += '.'
# print(s)
x, y, z = np.ogrid[-15:15:100j, -15:15:100j, -15:15:100j]
d = u.get_distance_numpy(x, y, z)
m = d[:, :, 50].T
plt.imshow(-np.tanh(m*5), cmap='Greys')
# plt.colorbar()
plt.show()
|
[
"compas_vol.primitives.VolSphere",
"matplotlib.pyplot.show",
"numpy.maximum",
"compas.geometry.Point",
"numpy.tanh",
"compas_vol.primitives.VolBox",
"compas.geometry.Frame.worldXY"
] |
[((2149, 2161), 'compas_vol.primitives.VolSphere', 'VolSphere', (['s'], {}), '(s)\n', (2158, 2161), False, 'from compas_vol.primitives import VolSphere, VolBox\n'), ((2171, 2185), 'compas_vol.primitives.VolBox', 'VolBox', (['b', '(2.5)'], {}), '(b, 2.5)\n', (2177, 2185), False, 'from compas_vol.primitives import VolSphere, VolBox\n'), ((2664, 2674), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2672, 2674), True, 'import matplotlib.pyplot as plt\n'), ((2080, 2094), 'compas.geometry.Point', 'Point', (['(4)', '(5)', '(0)'], {}), '(4, 5, 0)\n', (2085, 2094), False, 'from compas.geometry import Box, Frame, Point, Sphere\n'), ((2111, 2126), 'compas.geometry.Frame.worldXY', 'Frame.worldXY', ([], {}), '()\n', (2124, 2126), False, 'from compas.geometry import Box, Frame, Point, Sphere\n'), ((1582, 1627), 'numpy.maximum', 'np.maximum', (['(0.5 + 0.5 * (db - da) / self.r)', '(0)'], {}), '(0.5 + 0.5 * (db - da) / self.r, 0)\n', (1592, 1627), True, 'import numpy as np\n'), ((2611, 2625), 'numpy.tanh', 'np.tanh', (['(m * 5)'], {}), '(m * 5)\n', (2618, 2625), True, 'import numpy as np\n')]
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import eventlet
import importlib
import six
from collections import defaultdict
from kombu import Connection
from st2actions.query.base import QueryContext
from st2common import log as logging
from st2common.models.db.executionstate import ActionExecutionStateDB
from st2common.persistence.executionstate import ActionExecutionState
from st2common.transport import actionexecutionstate, consumers, publishers
from st2common.transport import utils as transport_utils
LOG = logging.getLogger(__name__)
ACTIONSTATE_WORK_Q = actionexecutionstate.get_queue('st2.resultstracker.work',
routing_key=publishers.CREATE_RK)
class ResultsTracker(consumers.MessageHandler):
message_type = ActionExecutionStateDB
def __init__(self, connection, queues):
super(ResultsTracker, self).__init__(connection, queues)
self._queriers = {}
self._query_threads = []
self._failed_imports = set()
def start(self, wait=False):
self._bootstrap()
super(ResultsTracker, self).start(wait=wait)
def wait(self):
super(ResultsTracker, self).wait()
for thread in self._query_threads:
thread.wait()
def shutdown(self):
super(ResultsTracker, self).shutdown()
LOG.info('Stats from queriers:')
self._print_stats()
def _print_stats(self):
for _, querier in six.iteritems(self._queriers):
if querier:
querier.print_stats()
def _bootstrap(self):
all_states = ActionExecutionState.get_all()
LOG.info('Found %d pending states in db.' % len(all_states))
query_contexts_dict = defaultdict(list)
for state_db in all_states:
try:
context = QueryContext.from_model(state_db)
except:
LOG.exception('Invalid state object: %s', state_db)
continue
query_module_name = state_db.query_module
querier = self.get_querier(query_module_name)
if querier is not None:
query_contexts_dict[querier].append(context)
for querier, contexts in six.iteritems(query_contexts_dict):
LOG.info('Found %d pending actions for query module %s', len(contexts), querier)
querier.add_queries(query_contexts=contexts)
def process(self, query_context):
querier = self.get_querier(query_context.query_module)
context = QueryContext.from_model(query_context)
querier.add_queries(query_contexts=[context])
return
def get_querier(self, query_module_name):
if (query_module_name not in self._queriers and
query_module_name not in self._failed_imports):
try:
query_module = self._import_query_module(query_module_name)
except:
LOG.exception('Failed importing query module: %s', query_module_name)
self._failed_imports.add(query_module_name)
self._queriers[query_module_name] = None
else:
querier = query_module.get_instance()
self._queriers[query_module_name] = querier
self._query_threads.append(eventlet.spawn(querier.start))
return self._queriers[query_module_name]
def _import_query_module(self, module_name):
return importlib.import_module(module_name, package=None)
def get_tracker():
with Connection(transport_utils.get_messaging_urls()) as conn:
return ResultsTracker(conn, [ACTIONSTATE_WORK_Q])
|
[
"eventlet.spawn",
"importlib.import_module",
"st2common.log.getLogger",
"st2common.transport.utils.get_messaging_urls",
"collections.defaultdict",
"st2common.transport.actionexecutionstate.get_queue",
"st2actions.query.base.QueryContext.from_model",
"six.iteritems",
"st2common.persistence.executionstate.ActionExecutionState.get_all"
] |
[((1256, 1283), 'st2common.log.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1273, 1283), True, 'from st2common import log as logging\n'), ((1306, 1402), 'st2common.transport.actionexecutionstate.get_queue', 'actionexecutionstate.get_queue', (['"""st2.resultstracker.work"""'], {'routing_key': 'publishers.CREATE_RK'}), "('st2.resultstracker.work', routing_key=\n publishers.CREATE_RK)\n", (1336, 1402), False, 'from st2common.transport import actionexecutionstate, consumers, publishers\n'), ((2192, 2221), 'six.iteritems', 'six.iteritems', (['self._queriers'], {}), '(self._queriers)\n', (2205, 2221), False, 'import six\n'), ((2333, 2363), 'st2common.persistence.executionstate.ActionExecutionState.get_all', 'ActionExecutionState.get_all', ([], {}), '()\n', (2361, 2363), False, 'from st2common.persistence.executionstate import ActionExecutionState\n'), ((2464, 2481), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (2475, 2481), False, 'from collections import defaultdict\n'), ((2952, 2986), 'six.iteritems', 'six.iteritems', (['query_contexts_dict'], {}), '(query_contexts_dict)\n', (2965, 2986), False, 'import six\n'), ((3258, 3296), 'st2actions.query.base.QueryContext.from_model', 'QueryContext.from_model', (['query_context'], {}), '(query_context)\n', (3281, 3296), False, 'from st2actions.query.base import QueryContext\n'), ((4170, 4220), 'importlib.import_module', 'importlib.import_module', (['module_name'], {'package': 'None'}), '(module_name, package=None)\n', (4193, 4220), False, 'import importlib\n'), ((4262, 4298), 'st2common.transport.utils.get_messaging_urls', 'transport_utils.get_messaging_urls', ([], {}), '()\n', (4296, 4298), True, 'from st2common.transport import utils as transport_utils\n'), ((2561, 2594), 'st2actions.query.base.QueryContext.from_model', 'QueryContext.from_model', (['state_db'], {}), '(state_db)\n', (2584, 2594), False, 'from st2actions.query.base import QueryContext\n'), ((4024, 4053), 'eventlet.spawn', 'eventlet.spawn', (['querier.start'], {}), '(querier.start)\n', (4038, 4053), False, 'import eventlet\n')]
|
from file_and_system.config_utils import ConfigUtils
import os
class GlobalParam:
project_path = os.path.dirname(os.getcwd())
conf_path = ''.join((project_path + r'\test file\cf.properties'))
# print(sys.path[0])
# print(os.path.dirname(os.getcwd()))
# print(os.path.dirname(os.path.realpath(__file__)))
# print(sys.path[1])
# conf_path = r'D:\ivanovsky\IdeaProjects\cowabunga-potato\test file\cf.properties'
section_test_path = 'test_path'
section_opencv_utils = 'opencv_utils'
section_machine_learning = 'machine_learning'
section_appium = 'appium'
section_selenium = 'selenium'
section_databases = 'databases'
section_test_reports = 'testReports'
section_gif_utils = 'image_utils'
# test_path section
@staticmethod
def get_test_image_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_test_path, 'test_image_path')[2]))
@staticmethod
def get_test_video_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_test_path, 'test_video_path')[2]))
@staticmethod
def get_test_file_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_test_path, 'test_file_path')[2]))
# opencv_utils section
@staticmethod
def get_system_font_path():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'system_font_path')[2]
@staticmethod
def get_tesseract_path():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'tesseract_path')[2]
@staticmethod
def get_image_input():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'image_input')[2]))
@staticmethod
def get_image_output():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'image_output')[2]))
@staticmethod
def get_character_output():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'character_output')[2]))
@staticmethod
def get_sentence_output():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'sentence_output')[2]))
@staticmethod
def get_video_input():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'video_input')[2]))
@staticmethod
def get_video_output():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'video_output')[2]))
@staticmethod
def get_face_detect_face_xml():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'face_detect_face_xml')[
2]))
@staticmethod
def get_face_detect_eyes_xml():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_opencv_utils, 'face_detect_eyes_xml')[
2]))
# appium section
@staticmethod
def get_aapt_path():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'aapt_path')[2]))
@staticmethod
def get_android_apk_list():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'android_apk_list')[2]))
@staticmethod
def get_appium_screenshot_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'appium_screenshot_path')[2]))
@staticmethod
def get_appium_screenrecord_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'appium_screenrecord_path')[2]))
@staticmethod
def get_qr_code_image_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_appium, 'qr_code_image_path')[2]))
# machine learning section
@staticmethod
def get_ml_ch2_housing_data():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_ch2_housing_data')[2]))
@staticmethod
def get_ml_ch2_housing_image():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_ch2_housing_image')[2]))
@staticmethod
def get_ml_ch3_sklearn_data_home():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_ch3_sklearn_data_home')[2]))
@staticmethod
def get_ml_numpy_array_save_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_numpy_array_save_path')[2]))
@staticmethod
def get_ml_matplotlib_figure_save_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_machine_learning,
'ml_matplotlib_figure_save_path')[2]))
# selenium section
@staticmethod
def get_chrome_driver_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_selenium, 'chrome_driver_path')[2]))
@staticmethod
def get_ie_driver_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_selenium, 'ie_driver_path')[2]))
@staticmethod
def get_edge_driver_path():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_selenium, 'edge_driver_path')[2]))
@staticmethod
def get_chromium_path():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_selenium, 'chromium_path')[2]
# databases section
@staticmethod
def get_mariadb_url():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'mariadb_url')[2]
@staticmethod
def get_mariadb_user():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'mariadb_user')[2]
@staticmethod
def get_mariadb_password():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'mariadb_password')[2]
@staticmethod
def get_pgsql_url():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'pgsql_url')[2]
@staticmethod
def get_pgsql_user():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'pgsql_user')[2]
@staticmethod
def get_pgsql_password():
return ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'pgsql_password')[2]
@staticmethod
def get_excel_datasets():
return ''.join((GlobalParam.project_path,
ConfigUtils.read_conf_file(GlobalParam.conf_path, GlobalParam.section_databases, 'excel_datasets')[2]))
@staticmethod
def get_csv_datasets():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_databases, 'csv_datasets')[2]))
# test_reports section
@staticmethod
def get_unittest_reports():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_test_reports, 'unittest_reports')[2]))
@staticmethod
def get_pytest_reports():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_test_reports, 'pytest_reports')[2]))
@staticmethod
def get_word_report():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_test_reports, 'word_report')[2]))
# gif_utils section
@staticmethod
def get_gif_import():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_gif_utils, 'gif_import')[2]))
@staticmethod
def get_gif_export():
return ''.join(
(GlobalParam.project_path, ConfigUtils.read_conf_file(GlobalParam.conf_path,
GlobalParam.section_gif_utils, 'gif_export')[2]))
|
[
"os.getcwd",
"file_and_system.config_utils.ConfigUtils.read_conf_file"
] |
[((119, 130), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (128, 130), False, 'import os\n'), ((1548, 1656), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""system_font_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'system_font_path')\n", (1574, 1656), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((1719, 1825), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""tesseract_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'tesseract_path')\n", (1745, 1825), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((7215, 7316), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_selenium', '"""chromium_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_selenium, 'chromium_path')\n", (7241, 7316), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((7400, 7500), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_databases', '"""mariadb_url"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_databases, 'mariadb_url')\n", (7426, 7500), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((7561, 7662), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_databases', '"""mariadb_user"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_databases, 'mariadb_user')\n", (7587, 7662), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((7727, 7832), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_databases', '"""mariadb_password"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_databases, 'mariadb_password')\n", (7753, 7832), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((7890, 7988), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_databases', '"""pgsql_url"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_databases, 'pgsql_url')\n", (7916, 7988), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((8047, 8146), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_databases', '"""pgsql_user"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_databases, 'pgsql_user')\n", (8073, 8146), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((8209, 8312), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_databases', '"""pgsql_password"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_databases, 'pgsql_password')\n", (8235, 8312), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((894, 998), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_test_path', '"""test_image_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_test_path, 'test_image_path')\n", (920, 998), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((1123, 1227), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_test_path', '"""test_video_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_test_path, 'test_video_path')\n", (1149, 1227), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((1351, 1454), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_test_path', '"""test_file_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_test_path, 'test_file_path')\n", (1377, 1454), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((1944, 2047), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""image_input"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'image_input')\n", (1970, 2047), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((2169, 2273), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""image_output"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'image_output')\n", (2195, 2273), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((2399, 2507), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""character_output"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'character_output')\n", (2425, 2507), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((2632, 2739), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""sentence_output"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'sentence_output')\n", (2658, 2739), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((2860, 2963), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""video_input"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'video_input')\n", (2886, 2963), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((3085, 3189), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""video_output"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'video_output')\n", (3111, 3189), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((3319, 3431), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""face_detect_face_xml"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'face_detect_face_xml')\n", (3345, 3431), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((3590, 3702), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_opencv_utils', '"""face_detect_eyes_xml"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_opencv_utils, 'face_detect_eyes_xml')\n", (3616, 3702), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((3860, 3955), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_appium', '"""aapt_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_appium, 'aapt_path')\n", (3886, 3955), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((4070, 4172), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_appium', '"""android_apk_list"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_appium, 'android_apk_list')\n", (4096, 4172), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((4304, 4412), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_appium', '"""appium_screenshot_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_appium, 'appium_screenshot_path')\n", (4330, 4412), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((4546, 4656), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_appium', '"""appium_screenrecord_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_appium, 'appium_screenrecord_path')\n", (4572, 4656), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((4784, 4888), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_appium', '"""qr_code_image_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_appium, 'qr_code_image_path')\n", (4810, 4888), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((5048, 5163), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_machine_learning', '"""ml_ch2_housing_data"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_machine_learning, 'ml_ch2_housing_data')\n", (5074, 5163), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((5344, 5460), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_machine_learning', '"""ml_ch2_housing_image"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_machine_learning, 'ml_ch2_housing_image')\n", (5370, 5460), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((5645, 5765), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_machine_learning', '"""ml_ch3_sklearn_data_home"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_machine_learning, 'ml_ch3_sklearn_data_home')\n", (5671, 5765), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((5950, 6070), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_machine_learning', '"""ml_numpy_array_save_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_machine_learning, 'ml_numpy_array_save_path')\n", (5976, 6070), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((6261, 6387), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_machine_learning', '"""ml_matplotlib_figure_save_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_machine_learning, 'ml_matplotlib_figure_save_path')\n", (6287, 6387), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((6589, 6695), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_selenium', '"""chrome_driver_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_selenium, 'chrome_driver_path')\n", (6615, 6695), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((6819, 6921), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_selenium', '"""ie_driver_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_selenium, 'ie_driver_path')\n", (6845, 6921), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((7047, 7151), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_selenium', '"""edge_driver_path"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_selenium, 'edge_driver_path')\n", (7073, 7151), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((8434, 8537), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_databases', '"""excel_datasets"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_databases, 'excel_datasets')\n", (8460, 8537), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((8648, 8749), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_databases', '"""csv_datasets"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_databases, 'csv_datasets')\n", (8674, 8749), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((8957, 9065), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_test_reports', '"""unittest_reports"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_test_reports, 'unittest_reports')\n", (8983, 9065), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((9244, 9350), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_test_reports', '"""pytest_reports"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_test_reports, 'pytest_reports')\n", (9270, 9350), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((9526, 9629), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_test_reports', '"""word_report"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_test_reports, 'word_report')\n", (9552, 9629), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((9828, 9927), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_gif_utils', '"""gif_import"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_gif_utils, 'gif_import')\n", (9854, 9927), False, 'from file_and_system.config_utils import ConfigUtils\n'), ((10102, 10201), 'file_and_system.config_utils.ConfigUtils.read_conf_file', 'ConfigUtils.read_conf_file', (['GlobalParam.conf_path', 'GlobalParam.section_gif_utils', '"""gif_export"""'], {}), "(GlobalParam.conf_path, GlobalParam.\n section_gif_utils, 'gif_export')\n", (10128, 10201), False, 'from file_and_system.config_utils import ConfigUtils\n')]
|
#!/usr/bin/env python
#
# Copyright 2009 <NAME>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An implementation of basic combinatorial k-subset operations using
a revolving door (minimal change) ordering.
Note that for our purposes here, sets are represented as lists as
ranking, unranking, and successor functions need a total order on the
elements of the set.
For the base set B, if B is an integer, we assume that our base set is
[0,...,B-1]. Otherwise, assume that B is a pair consisting of:
1. a list representing the base set
2. a reverse lookup dict, mapping elements of the base set to their
position in the total order.
Example: [0,3,4,2], {0:0, 3:1, 4:2, 2:3}
Note that we require B to contain the reverse lookup information to
speed up the algorithms here; otherwise, we would need to call index on
our base set many times, which would increase complexity by a factor of
the length of the base set.
By <NAME>, 2009."""
from builtins import range
from . import combfuncs
def rank(B, K):
"""Return the rank of k-subset K in base set B."""
block = (K if type(B) == int else [B[1][i] for i in K])
k = len(block)
return sum([(1 if i%2 == k%2 else -1) * combfuncs.binom(block[i-1]+1,i) for i in range(k,0,-1)]) + (0 if k%2 == 0 else -1)
def unrank(B, k, rk):
"""Return the k-subset of rank rk in base set B."""
v = (B if type(B) == int else len(B[0]))
K = [0] * k
for i in range(k,0,-1):
while combfuncs.binom(v,i) > rk:
v -= 1
K[i-1] = v
rk = combfuncs.binom(v+1,i) - rk - 1
return (K if type(B) == int else [B[0][i] for i in K])
def succ(B, K):
"""Return the successor of the k-subset K in base set B.
If there is no successor, we return None."""
v = (B if type(B) == int else len(B[0]))
Kn = (K if type(B) == int else [B[1][i] for i in K]) + [v]
k = len(K)
j = 0
while j < k and Kn[j] == j:
j += 1
if k%2 == j%2:
if j == 0:
Kn[0] -= 1
else:
Kn[j-1] = j
Kn[j-2] = j-1
else:
if Kn[j+1] != Kn[j] + 1:
Kn[j-1] = Kn[j]
Kn[j] += 1
else:
Kn[j+1] = Kn[j]
Kn[j] = j
if Kn[:k] == list(range(k)):
return None
return (Kn[:k] if type(B) == int else [B[0][i] for i in Kn[:k]])
def all(B, k):
"""A generator to create all subsets over the specified base set B."""
# Make the base set, creating a copy of B if B is a pair as described in
# the module introduction; thus, if B changes, the iterator does not
# become invalid.
Bn = (B if type(B) == int else (B[0][:], dict(B[1])))
K = (list(range(k)) if type(B) == int else Bn[0][:k])
while K != None:
yield K
K = succ(Bn, K)
|
[
"builtins.range"
] |
[((1939, 1954), 'builtins.range', 'range', (['k', '(0)', '(-1)'], {}), '(k, 0, -1)\n', (1944, 1954), False, 'from builtins import range\n'), ((2754, 2762), 'builtins.range', 'range', (['k'], {}), '(k)\n', (2759, 2762), False, 'from builtins import range\n'), ((3188, 3196), 'builtins.range', 'range', (['k'], {}), '(k)\n', (3193, 3196), False, 'from builtins import range\n'), ((1741, 1756), 'builtins.range', 'range', (['k', '(0)', '(-1)'], {}), '(k, 0, -1)\n', (1746, 1756), False, 'from builtins import range\n')]
|
import os
import sys
import glob
import subprocess
import pytest
try: # Check for local build
sys.path.append('../build')
import MDI_Library as mdi
except ImportError: # Check for installed package
import mdi
build_dir = "../build"
sys.path.append(build_dir)
driver_out_expected_f90 = """ Engine name: MM
NNODES: 2
NODE: @FORCES
NCOMMANDS: 3
COMMAND: >FORCES
NCALLBACKS: 1
CALLBACK: >FORCES
"""
# Output expected from each of the drivers
driver_out_expected_py = """ Engine name: MM
NNODES: 2
NODE: @FORCES
NCOMMANDS: 3
COMMAND: >FORCES
NCALLBACKS: 1
CALLBACK: >FORCES
NATOMS: 10
COORDS: [0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0, 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7, 2.8, 2.9]
FORCES: [0.0, 0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08, 0.09, 0.1, 0.11, 0.12, 0.13, 0.14, 0.15, 0.16, 0.17, 0.18, 0.19, 0.2, 0.21, 0.22, 0.23, 0.24, 0.25, 0.26, 0.27, 0.28, 0.29]
"""
# Includes flags to prevent warning messages
mpiexec_general = "mpiexec "
mpiexec_mca = "mpiexec --mca btl_base_warn_component_unused 0 "
def format_return(input_string):
my_string = input_string.decode('utf-8')
# remove any \r special characters, which sometimes are added on Windows
my_string = my_string.replace('\r','')
return my_string
##########################
# LIBRARY Method #
##########################
def test_cxx_cxx_lib():
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_lib_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method LIB"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_f90_f90_lib():
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_lib_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method LIB"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_py_py_lib():
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/lib_py.py", "-mdi", "-role DRIVER -name driver -method LIB"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected = '''Start of driver
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
NATOMS: 10
'''
assert driver_err == ""
assert driver_out == expected
def test_py_py_lib_mpi():
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","2",sys.executable, "../build/lib_py.py", "-mdi", "-role DRIVER -name driver -method LIB"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected = '''Start of driver
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
NATOMS: 10
NATOMS: 20
'''
assert driver_err == ""
assert driver_out == expected
##########################
# MPI Method #
##########################
def test_cxx_cxx_mpi():
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_out == " Engine name: MM\n"
assert driver_err == ""
def test_cxx_f90_mpi():
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_cxx_py_mpi():
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",sys.executable,"engine_py.py","-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_f90_cxx_mpi():
global driver_out_expected_f90
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_f90_f90_mpi():
global driver_out_expected_f90
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_f90_py_mpi():
global driver_out_expected_f90
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",driver_name, "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",sys.executable,"engine_py.py","-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_py_cxx_mpi():
global driver_out_expected_py
# get the name of the engine code, which includes a .exe extension on Windows
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",sys.executable,"driver_py.py", "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
def test_py_f90_mpi():
global driver_out_expected_py
# get the name of the engine code, which includes a .exe extension on Windows
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",sys.executable,"driver_py.py", "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",engine_name,"-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
def test_py_py_mpi():
global driver_out_expected_py
# run the calculation
driver_proc = subprocess.Popen(["mpiexec","-n","1",sys.executable,"driver_py.py", "-mdi", "-role DRIVER -name driver -method MPI",":",
"-n","1",sys.executable,"engine_py.py","-mdi","-role ENGINE -name MM -method MPI"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
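# The MPI tests above all repeat the same mpiexec launch pattern with a
# different driver/engine pair. A hedged sketch of a shared helper is shown
# below; "run_mpi_pair" is a hypothetical name, and it assumes the same
# mpiexec, build_dir, and format_return conventions used by the tests above.
def run_mpi_pair(driver_cmd, engine_cmd):
    # launch the driver and engine as two ranks of one mpiexec invocation and
    # return the driver's decoded stdout and stderr
    proc = subprocess.Popen(
        ["mpiexec", "-n", "1"] + driver_cmd
        + ["-mdi", "-role DRIVER -name driver -method MPI", ":", "-n", "1"]
        + engine_cmd + ["-mdi", "-role ENGINE -name MM -method MPI"],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
    driver_tup = proc.communicate()
    return format_return(driver_tup[0]), format_return(driver_tup[1])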
##########################
# TCP Method #
##########################
def test_cxx_cxx_tcp():
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_cxx_f90_tcp():
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_cxx_py_tcp():
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([sys.executable, "../build/engine_py.py", "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"],
cwd=build_dir)
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == " Engine name: MM\n"
def test_f90_cxx_tcp():
global driver_out_expected_f90
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_f90_f90_tcp():
global driver_out_expected_f90
# get the names of the driver and engine codes, which include a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_f90_py_tcp():
global driver_out_expected_f90
# get the name of the driver code, which includes a .exe extension on Windows
driver_name = glob.glob("../build/driver_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([driver_name, "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
engine_proc = subprocess.Popen([sys.executable, "../build/engine_py.py", "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"],
cwd=build_dir)
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_f90
def test_py_cxx_tcp():
global driver_out_expected_py
# get the name of the engine code, which includes a .exe extension on Windows
engine_name = glob.glob("../build/engine_cxx*")[0]
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/driver_py.py", "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
def test_py_f90_tcp():
global driver_out_expected_py
# get the name of the engine code, which includes a .exe extension on Windows
engine_name = glob.glob("../build/engine_f90*")[0]
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/driver_py.py", "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
engine_proc = subprocess.Popen([engine_name, "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"])
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
def test_py_py_tcp():
global driver_out_expected_py
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/driver_py.py", "-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
engine_proc = subprocess.Popen([sys.executable, "../build/engine_py.py", "-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"],
cwd=build_dir)
driver_tup = driver_proc.communicate()
engine_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
assert driver_err == ""
assert driver_out == driver_out_expected_py
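# The TCP tests repeat a similar pattern, except that the driver and engine
# run as two separate processes: the driver is started first so it is
# listening on port 8021, and only its output is captured. A hedged sketch of
# a shared helper follows; "run_tcp_pair" is hypothetical, and applying one
# cwd to both processes is a simplification of the cases above.
def run_tcp_pair(driver_cmd, engine_cmd, cwd=None):
    driver = subprocess.Popen(
        driver_cmd + ["-mdi", "-role DRIVER -name driver -method TCP -port 8021"],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=cwd)
    engine = subprocess.Popen(
        engine_cmd + ["-mdi", "-role ENGINE -name MM -method TCP -port 8021 -hostname localhost"],
        cwd=cwd)
    driver_tup = driver.communicate()
    engine.communicate()
    return format_return(driver_tup[0]), format_return(driver_tup[1])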
##########################
# Unit Conversions Tests #
##########################
def test_unit_conversions_py():
# Test all charge conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_charge", "atomic_unit_of_charge") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_charge", "coulomb") == pytest.approx(1.6021766208e-19)
assert mdi.MDI_Conversion_Factor("coulomb", "atomic_unit_of_charge") == pytest.approx(1.0 / 1.6021766208e-19)
assert mdi.MDI_Conversion_Factor("coulomb", "coulomb") == pytest.approx(1.0)
# Test some energy conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "atomic_unit_of_energy") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "calorie") == pytest.approx(1.0420039967034203e-18)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "electron_volt") == pytest.approx(27.211386245988066)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "hartree") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "inverse_meter_energy") == pytest.approx(21947463.136319984)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "joule") == pytest.approx(4.35974465e-18)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kelvin_energy") == pytest.approx(315775.02480406954)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kilocalorie") == pytest.approx(1.0420039967034203e-21)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kilocalorie_per_mol") == pytest.approx(627.5094737775374)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kilojoule") == pytest.approx(4.3597446499999996e-21)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "kilojoule_per_mol") == pytest.approx(2625.4996382852164)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "rydberg") == pytest.approx(2.0)
assert mdi.MDI_Conversion_Factor("calorie", "atomic_unit_of_energy") == pytest.approx(1.0 / 1.0420039967034203e-18)
assert mdi.MDI_Conversion_Factor("electron_volt", "atomic_unit_of_energy") == pytest.approx(1.0 / 27.211386245988066)
assert mdi.MDI_Conversion_Factor("hartree", "atomic_unit_of_energy") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("inverse_meter_energy", "atomic_unit_of_energy") == pytest.approx(1.0 / 21947463.136319984)
assert mdi.MDI_Conversion_Factor("joule", "atomic_unit_of_energy") == pytest.approx(1.0 / 4.35974465e-18)
assert mdi.MDI_Conversion_Factor("kelvin_energy", "atomic_unit_of_energy") == pytest.approx(1.0 / 315775.02480406954)
assert mdi.MDI_Conversion_Factor("kilocalorie", "atomic_unit_of_energy") == pytest.approx(1.0 / 1.0420039967034203e-21)
assert mdi.MDI_Conversion_Factor("kilocalorie_per_mol", "atomic_unit_of_energy") == pytest.approx(1.0 / 627.5094737775374)
assert mdi.MDI_Conversion_Factor("kilojoule", "atomic_unit_of_energy") == pytest.approx(1.0 / 4.3597446499999996e-21)
assert mdi.MDI_Conversion_Factor("kilojoule_per_mol", "atomic_unit_of_energy") == pytest.approx(1.0 / 2625.4996382852164)
assert mdi.MDI_Conversion_Factor("rydberg", "atomic_unit_of_energy") == pytest.approx(0.5)
# Test all force conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_force", "atomic_unit_of_force") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_force", "newton") == pytest.approx(3.753838631429819e-15)
assert mdi.MDI_Conversion_Factor("newton", "atomic_unit_of_force") == pytest.approx(1.0 / 3.753838631429819e-15)
assert mdi.MDI_Conversion_Factor("newton", "newton") == pytest.approx(1.0)
# Test some length conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "angstrom") == pytest.approx(0.52917721067)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "atomic_unit_of_length") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "bohr") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "meter") == pytest.approx(5.29177210903e-11)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "nanometer") == pytest.approx(5.29177210903e-2)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_length", "picometer") == pytest.approx(5.29177210903e+1)
assert mdi.MDI_Conversion_Factor("angstrom", "atomic_unit_of_length") == pytest.approx(1.0 / 0.52917721067)
assert mdi.MDI_Conversion_Factor("bohr", "atomic_unit_of_length") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("meter", "atomic_unit_of_length") == pytest.approx(1.0 / 5.29177210903e-11)
assert mdi.MDI_Conversion_Factor("nanometer", "atomic_unit_of_length") == pytest.approx(1.0 / 5.29177210903e-2)
assert mdi.MDI_Conversion_Factor("picometer", "atomic_unit_of_length") == pytest.approx(1.0 / 5.29177210903e+1)
# Test all mass conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_mass", "atomic_unit_of_mass") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_mass", "kilogram") == pytest.approx(9.10938356e-31)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_mass", "gram") == pytest.approx(9.10938356e-28)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_mass", "atomic_mass_unit") == pytest.approx(0.0005485799093287202)
assert mdi.MDI_Conversion_Factor("kilogram", "atomic_unit_of_mass") == pytest.approx(1.0 / 9.10938356e-31)
assert mdi.MDI_Conversion_Factor("kilogram", "kilogram") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("kilogram", "gram") == pytest.approx(1000.0)
assert mdi.MDI_Conversion_Factor("kilogram", "atomic_mass_unit") == pytest.approx(6.022140858549162e+26)
assert mdi.MDI_Conversion_Factor("gram", "atomic_unit_of_mass") == pytest.approx(1.0 / 9.10938356e-28)
assert mdi.MDI_Conversion_Factor("gram", "kilogram") == pytest.approx(0.001)
assert mdi.MDI_Conversion_Factor("gram", "gram") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("gram", "atomic_mass_unit") == pytest.approx(6.0221408585491626e+23)
assert mdi.MDI_Conversion_Factor("atomic_mass_unit", "atomic_unit_of_mass") == pytest.approx(1.0 / 0.0005485799093287202)
assert mdi.MDI_Conversion_Factor("atomic_mass_unit", "kilogram") == pytest.approx(1.66053904e-27)
assert mdi.MDI_Conversion_Factor("atomic_mass_unit", "gram") == pytest.approx(1.66053904e-24)
assert mdi.MDI_Conversion_Factor("atomic_mass_unit", "atomic_mass_unit") == pytest.approx(1.0)
# Test all time conversions
assert mdi.MDI_Conversion_Factor("atomic_unit_of_time", "atomic_unit_of_time") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_time", "picosecond") == pytest.approx(2.4188843265857007e-05)
assert mdi.MDI_Conversion_Factor("atomic_unit_of_time", "second") == pytest.approx(2.4188843265857007e-17)
assert mdi.MDI_Conversion_Factor("picosecond", "atomic_unit_of_time") == pytest.approx(1.0 / 2.4188843265857007e-05)
assert mdi.MDI_Conversion_Factor("picosecond", "picosecond") == pytest.approx(1.0)
assert mdi.MDI_Conversion_Factor("picosecond", "second") == pytest.approx(1.0e-12)
assert mdi.MDI_Conversion_Factor("second", "atomic_unit_of_time") == pytest.approx(1.0 / 2.4188843265857007e-17)
assert mdi.MDI_Conversion_Factor("second", "picosecond") == pytest.approx(1.0e+12)
assert mdi.MDI_Conversion_Factor("second", "second") == pytest.approx(1.0)
    # Test exceptions for unrecognized units. The calls are made bare inside
    # pytest.raises so that a non-raising falsy return cannot slip through as
    # an AssertionError.
    with pytest.raises(Exception):
        mdi.MDI_Conversion_Factor("fake_unit", "bohr")
    with pytest.raises(Exception):
        mdi.MDI_Conversion_Factor("angstrom", "")
    # Test exceptions for inconsistent unit types
    with pytest.raises(Exception):
        mdi.MDI_Conversion_Factor("atomic_unit_of_energy", "atomic_unit_of_time")
    with pytest.raises(Exception):
        mdi.MDI_Conversion_Factor("meter", "calorie")
##########################
# Error Tests #
##########################
def test_uninitialized():
comm = mdi.MDI_NULL_COMM
# Test exceptions when MDI is not initialized
with pytest.raises(Exception):
mdi.MDI_Accept_Communicator()
with pytest.raises(Exception):
mdi.MDI_Send([1, 2], 2, mdi.MDI_INT, comm)
with pytest.raises(Exception):
mdi.MDI_Recv(2, mdi.MDI_INT, comm)
with pytest.raises(Exception):
mdi.MDI_Send_Command("<VERSION", comm)
with pytest.raises(Exception):
mdi.MDI_Recv_Command(comm)
with pytest.raises(Exception):
mdi.MDI_Register_Node("TESTNODE")
with pytest.raises(Exception):
mdi.MDI_Check_Node_Exists("TESTNODE", comm)
with pytest.raises(Exception):
mdi.MDI_Get_Node(0, comm, "TESTNODE")
with pytest.raises(Exception):
mdi.MDI_Get_NNodes(comm)
with pytest.raises(Exception):
mdi.MDI_Get_Node(0, comm)
with pytest.raises(Exception):
mdi.MDI_Register_Command("TESTNODE", "TESTCOMM")
with pytest.raises(Exception):
mdi.MDI_Check_Command_Exists("TESTNODE", "TESTCOMM", comm)
with pytest.raises(Exception):
mdi.MDI_Get_NCommands("TESTNODE", comm)
with pytest.raises(Exception):
mdi.MDI_Get_Command("TESTNODE", 0, comm)
with pytest.raises(Exception):
mdi.MDI_Register_Callback("TESTNODE", "TESTCALL")
with pytest.raises(Exception):
mdi.MDI_Check_Callback_Exists("TESTNODE", "TESTCALL", comm)
with pytest.raises(Exception):
mdi.MDI_Get_NCallbacks("TESTNODE", comm)
with pytest.raises(Exception):
mdi.MDI_Get_Callback("TESTNODE", 0, comm)
def test_test_method():
# run the calculation
driver_proc = subprocess.Popen([sys.executable, "../build/ut_tmethod.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
# convert the driver's output into a string
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Cannot register node name with length greater than MDI_COMMAND_LENGTH
Node name is greater than MDI_COMMAND_LENGTH
Vector accessed out-of-bounds
MDI_Get_Node unable to find node
Node name is greater than MDI_COMMAND_LENGTH
Cannot chcek command name with length greater than MDI_COMMAND_LENGTH
Could not find the node
Node name is greater than MDI_COMMAND_LENGTH
Could not find the node
MDI_Get_Command could not find the requested node
MDI_Get_Command failed because the command does not exist
Node name is greater than MDI_COMMAND_LENGTH
Cannot check callback name with length greater than MDI_COMMAND_LENGTH
Could not find the node
Node name is greater than MDI_COMMAND_LENGTH
Could not find the node
MDI_Get_Command could not find the requested node
MDI_Get_Command failed because the command does not exist
"""
assert driver_err == expected_err
assert driver_out == ""
def test_init_errors():
    # Test running with no -method option (no error is expected in this case)
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_method.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = ""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -name option
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_name.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -name option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -role option
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_role.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -role option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -port option for a DRIVER using TCP
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_port_d.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -port option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -port option for an ENGINE using TCP
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_port_e.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -port option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with no -hostname option for an ENGINE using TCP
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_no_hostname.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: -hostname option not provided
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with a fake option
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_fake_opt.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Unrecognized option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with a fake method
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_fake_method.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Method not recognized
"""
assert driver_err == expected_err
assert driver_out == ""
# Test running with a fake role
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_fake_role.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Role not recognized
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -role argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_role.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -role option
"""
assert driver_err == expected_err
assert driver_out == ""
    # Test leaving off the -method argument (no error is expected in this case)
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_method.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = ""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -name argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_name.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -name option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -hostname argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_hostname.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -hostname option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -port argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_port.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -port option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -out argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_out.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -out option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -driver_name argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_driver_name.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -driver_name option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test leaving off the -_language argument
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_noarg_language.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """Error in MDI_Init: Argument missing from -_language option
"""
assert driver_err == expected_err
assert driver_out == ""
# Test double initialization
driver_proc = subprocess.Popen([sys.executable, "../build/ut_init_double.py"],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)
driver_tup = driver_proc.communicate()
driver_out = format_return(driver_tup[0])
driver_err = format_return(driver_tup[1])
expected_err = """MDI_Init called after MDI was already initialized
"""
assert driver_err == expected_err
assert driver_out == ""
|
[
"mdi.MDI_Register_Command",
"mdi.MDI_Register_Callback",
"mdi.MDI_Get_NCallbacks",
"mdi.MDI_Accept_Communicator",
"mdi.MDI_Check_Callback_Exists",
"mdi.MDI_Get_Callback",
"mdi.MDI_Register_Node",
"mdi.MDI_Recv",
"glob.glob",
"sys.path.append",
"mdi.MDI_Get_NCommands",
"pytest.raises",
"mdi.MDI_Check_Node_Exists",
"mdi.MDI_Check_Command_Exists",
"subprocess.Popen",
"mdi.MDI_Get_Node",
"mdi.MDI_Get_Command",
"mdi.MDI_Get_NNodes",
"pytest.approx",
"mdi.MDI_Send",
"mdi.MDI_Send_Command",
"mdi.MDI_Recv_Command",
"mdi.MDI_Conversion_Factor"
] |
[((246, 272), 'sys.path.append', 'sys.path.append', (['build_dir'], {}), '(build_dir)\n', (261, 272), False, 'import sys\n'), ((98, 125), 'sys.path.append', 'sys.path.append', (['"""../build"""'], {}), "('../build')\n", (113, 125), False, 'import sys\n'), ((1639, 1775), 'subprocess.Popen', 'subprocess.Popen', (["[driver_name, '-mdi', '-role DRIVER -name driver -method LIB']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "([driver_name, '-mdi',\n '-role DRIVER -name driver -method LIB'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n", (1655, 1775), False, 'import subprocess\n'), ((2277, 2413), 'subprocess.Popen', 'subprocess.Popen', (["[driver_name, '-mdi', '-role DRIVER -name driver -method LIB']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "([driver_name, '-mdi',\n '-role DRIVER -name driver -method LIB'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n", (2293, 2413), False, 'import subprocess\n'), ((2771, 2947), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/lib_py.py', '-mdi',\n '-role DRIVER -name driver -method LIB']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/lib_py.py', '-mdi',\n '-role DRIVER -name driver -method LIB'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, cwd=build_dir)\n", (2787, 2947), False, 'import subprocess\n'), ((3552, 3750), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '2', sys.executable, '../build/lib_py.py', '-mdi',\n '-role DRIVER -name driver -method LIB']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '2', sys.executable,\n '../build/lib_py.py', '-mdi', '-role DRIVER -name driver -method LIB'],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (3568, 3750), False, 'import subprocess\n'), ((4639, 4890), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, cwd=build_dir)\n", (4655, 4890), False, 'import subprocess\n'), ((5475, 5726), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, cwd=build_dir)\n", (5491, 5726), False, 'import subprocess\n'), ((6243, 6514), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', sys.executable,\n 'engine_py.py', '-mdi', '-role ENGINE -name MM -method MPI']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', sys.executable,\n 'engine_py.py', '-mdi', '-role ENGINE -name MM -method 
MPI'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (6259, 6514), False, 'import subprocess\n'), ((7133, 7384), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, cwd=build_dir)\n", (7149, 7384), False, 'import subprocess\n'), ((8008, 8259), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, cwd=build_dir)\n", (8024, 8259), False, 'import subprocess\n'), ((8815, 9086), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', sys.executable,\n 'engine_py.py', '-mdi', '-role ENGINE -name MM -method MPI']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '1', driver_name, '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', sys.executable,\n 'engine_py.py', '-mdi', '-role ENGINE -name MM -method MPI'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (8831, 9086), False, 'import subprocess\n'), ((9639, 9910), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '1', sys.executable, 'driver_py.py', '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '1', sys.executable, 'driver_py.py',\n '-mdi', '-role DRIVER -name driver -method MPI', ':', '-n', '1',\n engine_name, '-mdi', '-role ENGINE -name MM -method MPI'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (9655, 9910), False, 'import subprocess\n'), ((10462, 10733), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '1', sys.executable, 'driver_py.py', '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', engine_name,\n '-mdi', '-role ENGINE -name MM -method MPI']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '1', sys.executable, 'driver_py.py',\n '-mdi', '-role DRIVER -name driver -method MPI', ':', '-n', '1',\n engine_name, '-mdi', '-role ENGINE -name MM -method MPI'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (10478, 10733), False, 'import subprocess\n'), ((11146, 11437), 'subprocess.Popen', 'subprocess.Popen', (["['mpiexec', '-n', '1', sys.executable, 'driver_py.py', '-mdi',\n '-role DRIVER -name driver -method MPI', ':', '-n', '1', sys.executable,\n 'engine_py.py', '-mdi', '-role ENGINE -name MM -method MPI']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "(['mpiexec', '-n', '1', sys.executable, 
'driver_py.py',\n '-mdi', '-role DRIVER -name driver -method MPI', ':', '-n', '1', sys.\n executable, 'engine_py.py', '-mdi', '-role ENGINE -name MM -method MPI'\n ], stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (11162, 11437), False, 'import subprocess\n'), ((12105, 12253), 'subprocess.Popen', 'subprocess.Popen', (["[driver_name, '-mdi', '-role DRIVER -name driver -method TCP -port 8021']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "([driver_name, '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021'], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE)\n", (12121, 12253), False, 'import subprocess\n'), ((12298, 12409), 'subprocess.Popen', 'subprocess.Popen', (["[engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost']"], {}), "([engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost'])\n", (12314, 12409), False, 'import subprocess\n'), ((12969, 13117), 'subprocess.Popen', 'subprocess.Popen', (["[driver_name, '-mdi', '-role DRIVER -name driver -method TCP -port 8021']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "([driver_name, '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021'], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE)\n", (12985, 13117), False, 'import subprocess\n'), ((13162, 13273), 'subprocess.Popen', 'subprocess.Popen', (["[engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost']"], {}), "([engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost'])\n", (13178, 13273), False, 'import subprocess\n'), ((13765, 13913), 'subprocess.Popen', 'subprocess.Popen', (["[driver_name, '-mdi', '-role DRIVER -name driver -method TCP -port 8021']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "([driver_name, '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021'], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE)\n", (13781, 13913), False, 'import subprocess\n'), ((13958, 14116), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/engine_py.py', '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost']"], {'cwd': 'build_dir'}), "([sys.executable, '../build/engine_py.py', '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost'],\n cwd=build_dir)\n", (13974, 14116), False, 'import subprocess\n'), ((14744, 14892), 'subprocess.Popen', 'subprocess.Popen', (["[driver_name, '-mdi', '-role DRIVER -name driver -method TCP -port 8021']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "([driver_name, '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021'], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE)\n", (14760, 14892), False, 'import subprocess\n'), ((14938, 15049), 'subprocess.Popen', 'subprocess.Popen', (["[engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost']"], {}), "([engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost'])\n", (14954, 15049), False, 'import subprocess\n'), ((15648, 15796), 'subprocess.Popen', 'subprocess.Popen', (["[driver_name, '-mdi', '-role DRIVER -name driver -method TCP -port 8021']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "([driver_name, '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021'], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE)\n", (15664, 15796), False, 'import subprocess\n'), ((15841, 15952), 'subprocess.Popen', 'subprocess.Popen', 
(["[engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost']"], {}), "([engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost'])\n", (15857, 15952), False, 'import subprocess\n'), ((16483, 16631), 'subprocess.Popen', 'subprocess.Popen', (["[driver_name, '-mdi', '-role DRIVER -name driver -method TCP -port 8021']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "([driver_name, '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021'], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE)\n", (16499, 16631), False, 'import subprocess\n'), ((16676, 16834), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/engine_py.py', '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost']"], {'cwd': 'build_dir'}), "([sys.executable, '../build/engine_py.py', '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost'],\n cwd=build_dir)\n", (16692, 16834), False, 'import subprocess\n'), ((17395, 17586), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/driver_py.py', '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/driver_py.py', '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021'], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (17411, 17586), False, 'import subprocess\n'), ((17631, 17742), 'subprocess.Popen', 'subprocess.Popen', (["[engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost']"], {}), "([engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost'])\n", (17647, 17742), False, 'import subprocess\n'), ((18271, 18462), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/driver_py.py', '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/driver_py.py', '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021'], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (18287, 18462), False, 'import subprocess\n'), ((18507, 18618), 'subprocess.Popen', 'subprocess.Popen', (["[engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost']"], {}), "([engine_name, '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost'])\n", (18523, 18618), False, 'import subprocess\n'), ((19008, 19199), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/driver_py.py', '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/driver_py.py', '-mdi',\n '-role DRIVER -name driver -method TCP -port 8021'], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (19024, 19199), False, 'import subprocess\n'), ((19244, 19402), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/engine_py.py', '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost']"], {'cwd': 'build_dir'}), "([sys.executable, '../build/engine_py.py', '-mdi',\n '-role ENGINE -name MM -method TCP -port 8021 -hostname localhost'],\n cwd=build_dir)\n", (19260, 19402), False, 'import subprocess\n'), ((29519, 29647), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, 
'../build/ut_tmethod.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_tmethod.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (29535, 29647), False, 'import subprocess\n'), ((30852, 30987), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_no_method.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_no_method.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (30868, 30987), False, 'import subprocess\n'), ((31300, 31433), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_no_name.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_no_name.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (31316, 31433), False, 'import subprocess\n'), ((31795, 31928), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_no_role.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_no_role.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (31811, 31928), False, 'import subprocess\n'), ((32313, 32448), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_no_port_d.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_no_port_d.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (32329, 32448), False, 'import subprocess\n'), ((32834, 32969), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_no_port_e.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_no_port_e.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (32850, 32969), False, 'import subprocess\n'), ((33359, 33495), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_no_hostname.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_no_hostname.py'],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (33375, 33495), False, 'import subprocess\n'), ((33860, 33994), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_fake_opt.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_fake_opt.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (33876, 33994), False, 'import subprocess\n'), ((34348, 34484), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_fake_method.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_fake_method.py'],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (34364, 34484), False, 'import subprocess\n'), ((34839, 34974), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_fake_role.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_fake_role.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (34855, 34974), False, 'import subprocess\n'), ((35332, 
35468), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_noarg_role.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_noarg_role.py'], stdout\n =subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (35348, 35468), False, 'import subprocess\n'), ((35843, 35980), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_noarg_method.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_noarg_method.py'],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (35859, 35980), False, 'import subprocess\n'), ((36296, 36432), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_noarg_name.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_noarg_name.py'], stdout\n =subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (36312, 36432), False, 'import subprocess\n'), ((36809, 36948), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_noarg_hostname.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_noarg_hostname.py'],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (36825, 36948), False, 'import subprocess\n'), ((37326, 37462), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_noarg_port.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_noarg_port.py'], stdout\n =subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (37342, 37462), False, 'import subprocess\n'), ((37834, 37969), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_noarg_out.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_noarg_out.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (37850, 37969), False, 'import subprocess\n'), ((38348, 38490), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_noarg_driver_name.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_noarg_driver_name.py'],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (38364, 38490), False, 'import subprocess\n'), ((38876, 39015), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_noarg_language.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_noarg_language.py'],\n stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (38892, 39015), False, 'import subprocess\n'), ((39385, 39517), 'subprocess.Popen', 'subprocess.Popen', (["[sys.executable, '../build/ut_init_double.py']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'cwd': 'build_dir'}), "([sys.executable, '../build/ut_init_double.py'], stdout=\n subprocess.PIPE, stderr=subprocess.PIPE, cwd=build_dir)\n", (39401, 39517), False, 'import subprocess\n'), ((1553, 1590), 'glob.glob', 'glob.glob', (['"""../build/driver_lib_cxx*"""'], {}), "('../build/driver_lib_cxx*')\n", (1562, 1590), False, 'import glob\n'), ((2191, 2228), 'glob.glob', 'glob.glob', (['"""../build/driver_lib_f90*"""'], {}), 
"('../build/driver_lib_f90*')\n", (2200, 2228), False, 'import glob\n'), ((4502, 4535), 'glob.glob', 'glob.glob', (['"""../build/driver_cxx*"""'], {}), "('../build/driver_cxx*')\n", (4511, 4535), False, 'import glob\n'), ((4557, 4590), 'glob.glob', 'glob.glob', (['"""../build/engine_cxx*"""'], {}), "('../build/engine_cxx*')\n", (4566, 4590), False, 'import glob\n'), ((5338, 5371), 'glob.glob', 'glob.glob', (['"""../build/driver_cxx*"""'], {}), "('../build/driver_cxx*')\n", (5347, 5371), False, 'import glob\n'), ((5393, 5426), 'glob.glob', 'glob.glob', (['"""../build/engine_f90*"""'], {}), "('../build/engine_f90*')\n", (5402, 5426), False, 'import glob\n'), ((6161, 6194), 'glob.glob', 'glob.glob', (['"""../build/driver_cxx*"""'], {}), "('../build/driver_cxx*')\n", (6170, 6194), False, 'import glob\n'), ((6996, 7029), 'glob.glob', 'glob.glob', (['"""../build/driver_f90*"""'], {}), "('../build/driver_f90*')\n", (7005, 7029), False, 'import glob\n'), ((7051, 7084), 'glob.glob', 'glob.glob', (['"""../build/engine_cxx*"""'], {}), "('../build/engine_cxx*')\n", (7060, 7084), False, 'import glob\n'), ((7871, 7904), 'glob.glob', 'glob.glob', (['"""../build/driver_f90*"""'], {}), "('../build/driver_f90*')\n", (7880, 7904), False, 'import glob\n'), ((7926, 7959), 'glob.glob', 'glob.glob', (['"""../build/engine_f90*"""'], {}), "('../build/engine_f90*')\n", (7935, 7959), False, 'import glob\n'), ((8733, 8766), 'glob.glob', 'glob.glob', (['"""../build/driver_f90*"""'], {}), "('../build/driver_f90*')\n", (8742, 8766), False, 'import glob\n'), ((9557, 9590), 'glob.glob', 'glob.glob', (['"""../build/engine_cxx*"""'], {}), "('../build/engine_cxx*')\n", (9566, 9590), False, 'import glob\n'), ((10380, 10413), 'glob.glob', 'glob.glob', (['"""../build/engine_f90*"""'], {}), "('../build/engine_f90*')\n", (10389, 10413), False, 'import glob\n'), ((11968, 12001), 'glob.glob', 'glob.glob', (['"""../build/driver_cxx*"""'], {}), "('../build/driver_cxx*')\n", (11977, 12001), False, 'import glob\n'), ((12023, 12056), 'glob.glob', 'glob.glob', (['"""../build/engine_cxx*"""'], {}), "('../build/engine_cxx*')\n", (12032, 12056), False, 'import glob\n'), ((12832, 12865), 'glob.glob', 'glob.glob', (['"""../build/driver_cxx*"""'], {}), "('../build/driver_cxx*')\n", (12841, 12865), False, 'import glob\n'), ((12887, 12920), 'glob.glob', 'glob.glob', (['"""../build/engine_f90*"""'], {}), "('../build/engine_f90*')\n", (12896, 12920), False, 'import glob\n'), ((13683, 13716), 'glob.glob', 'glob.glob', (['"""../build/driver_cxx*"""'], {}), "('../build/driver_cxx*')\n", (13692, 13716), False, 'import glob\n'), ((14607, 14640), 'glob.glob', 'glob.glob', (['"""../build/driver_f90*"""'], {}), "('../build/driver_f90*')\n", (14616, 14640), False, 'import glob\n'), ((14662, 14695), 'glob.glob', 'glob.glob', (['"""../build/engine_cxx*"""'], {}), "('../build/engine_cxx*')\n", (14671, 14695), False, 'import glob\n'), ((15511, 15544), 'glob.glob', 'glob.glob', (['"""../build/driver_f90*"""'], {}), "('../build/driver_f90*')\n", (15520, 15544), False, 'import glob\n'), ((15566, 15599), 'glob.glob', 'glob.glob', (['"""../build/engine_f90*"""'], {}), "('../build/engine_f90*')\n", (15575, 15599), False, 'import glob\n'), ((16401, 16434), 'glob.glob', 'glob.glob', (['"""../build/driver_f90*"""'], {}), "('../build/driver_f90*')\n", (16410, 16434), False, 'import glob\n'), ((17313, 17346), 'glob.glob', 'glob.glob', (['"""../build/engine_cxx*"""'], {}), "('../build/engine_cxx*')\n", (17322, 17346), False, 'import glob\n'), ((18189, 18222), 'glob.glob', 
'glob.glob', (['"""../build/engine_f90*"""'], {}), "('../build/engine_f90*')\n", (18198, 18222), False, 'import glob\n'), ((19931, 20006), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_charge"""', '"""atomic_unit_of_charge"""'], {}), "('atomic_unit_of_charge', 'atomic_unit_of_charge')\n", (19956, 20006), False, 'import mdi\n'), ((20010, 20028), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (20023, 20028), False, 'import pytest\n'), ((20040, 20101), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_charge"""', '"""coulomb"""'], {}), "('atomic_unit_of_charge', 'coulomb')\n", (20065, 20101), False, 'import mdi\n'), ((20105, 20136), 'pytest.approx', 'pytest.approx', (['(1.6021766208e-19)'], {}), '(1.6021766208e-19)\n', (20118, 20136), False, 'import pytest\n'), ((20148, 20209), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""coulomb"""', '"""atomic_unit_of_charge"""'], {}), "('coulomb', 'atomic_unit_of_charge')\n", (20173, 20209), False, 'import mdi\n'), ((20213, 20250), 'pytest.approx', 'pytest.approx', (['(1.0 / 1.6021766208e-19)'], {}), '(1.0 / 1.6021766208e-19)\n', (20226, 20250), False, 'import pytest\n'), ((20262, 20309), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""coulomb"""', '"""coulomb"""'], {}), "('coulomb', 'coulomb')\n", (20287, 20309), False, 'import mdi\n'), ((20313, 20331), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (20326, 20331), False, 'import pytest\n'), ((20379, 20454), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""atomic_unit_of_energy"""'], {}), "('atomic_unit_of_energy', 'atomic_unit_of_energy')\n", (20404, 20454), False, 'import mdi\n'), ((20458, 20476), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (20471, 20476), False, 'import pytest\n'), ((20488, 20549), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""calorie"""'], {}), "('atomic_unit_of_energy', 'calorie')\n", (20513, 20549), False, 'import mdi\n'), ((20553, 20590), 'pytest.approx', 'pytest.approx', (['(1.0420039967034203e-18)'], {}), '(1.0420039967034203e-18)\n', (20566, 20590), False, 'import pytest\n'), ((20602, 20669), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""electron_volt"""'], {}), "('atomic_unit_of_energy', 'electron_volt')\n", (20627, 20669), False, 'import mdi\n'), ((20673, 20706), 'pytest.approx', 'pytest.approx', (['(27.211386245988066)'], {}), '(27.211386245988066)\n', (20686, 20706), False, 'import pytest\n'), ((20718, 20779), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""hartree"""'], {}), "('atomic_unit_of_energy', 'hartree')\n", (20743, 20779), False, 'import mdi\n'), ((20783, 20801), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (20796, 20801), False, 'import pytest\n'), ((20813, 20887), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""inverse_meter_energy"""'], {}), "('atomic_unit_of_energy', 'inverse_meter_energy')\n", (20838, 20887), False, 'import mdi\n'), ((20891, 20924), 'pytest.approx', 'pytest.approx', (['(21947463.136319984)'], {}), '(21947463.136319984)\n', (20904, 20924), False, 'import pytest\n'), ((20936, 20995), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""joule"""'], {}), "('atomic_unit_of_energy', 'joule')\n", (20961, 20995), 
False, 'import mdi\n'), ((20999, 21028), 'pytest.approx', 'pytest.approx', (['(4.35974465e-18)'], {}), '(4.35974465e-18)\n', (21012, 21028), False, 'import pytest\n'), ((21040, 21107), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""kelvin_energy"""'], {}), "('atomic_unit_of_energy', 'kelvin_energy')\n", (21065, 21107), False, 'import mdi\n'), ((21111, 21144), 'pytest.approx', 'pytest.approx', (['(315775.02480406954)'], {}), '(315775.02480406954)\n', (21124, 21144), False, 'import pytest\n'), ((21156, 21221), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""kilocalorie"""'], {}), "('atomic_unit_of_energy', 'kilocalorie')\n", (21181, 21221), False, 'import mdi\n'), ((21225, 21262), 'pytest.approx', 'pytest.approx', (['(1.0420039967034203e-21)'], {}), '(1.0420039967034203e-21)\n', (21238, 21262), False, 'import pytest\n'), ((21274, 21347), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""kilocalorie_per_mol"""'], {}), "('atomic_unit_of_energy', 'kilocalorie_per_mol')\n", (21299, 21347), False, 'import mdi\n'), ((21351, 21383), 'pytest.approx', 'pytest.approx', (['(627.5094737775374)'], {}), '(627.5094737775374)\n', (21364, 21383), False, 'import pytest\n'), ((21395, 21458), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""kilojoule"""'], {}), "('atomic_unit_of_energy', 'kilojoule')\n", (21420, 21458), False, 'import mdi\n'), ((21462, 21499), 'pytest.approx', 'pytest.approx', (['(4.3597446499999996e-21)'], {}), '(4.3597446499999996e-21)\n', (21475, 21499), False, 'import pytest\n'), ((21511, 21582), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""kilojoule_per_mol"""'], {}), "('atomic_unit_of_energy', 'kilojoule_per_mol')\n", (21536, 21582), False, 'import mdi\n'), ((21586, 21619), 'pytest.approx', 'pytest.approx', (['(2625.4996382852164)'], {}), '(2625.4996382852164)\n', (21599, 21619), False, 'import pytest\n'), ((21631, 21692), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""rydberg"""'], {}), "('atomic_unit_of_energy', 'rydberg')\n", (21656, 21692), False, 'import mdi\n'), ((21696, 21714), 'pytest.approx', 'pytest.approx', (['(2.0)'], {}), '(2.0)\n', (21709, 21714), False, 'import pytest\n'), ((21726, 21787), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""calorie"""', '"""atomic_unit_of_energy"""'], {}), "('calorie', 'atomic_unit_of_energy')\n", (21751, 21787), False, 'import mdi\n'), ((21791, 21834), 'pytest.approx', 'pytest.approx', (['(1.0 / 1.0420039967034203e-18)'], {}), '(1.0 / 1.0420039967034203e-18)\n', (21804, 21834), False, 'import pytest\n'), ((21846, 21913), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""electron_volt"""', '"""atomic_unit_of_energy"""'], {}), "('electron_volt', 'atomic_unit_of_energy')\n", (21871, 21913), False, 'import mdi\n'), ((21917, 21956), 'pytest.approx', 'pytest.approx', (['(1.0 / 27.211386245988066)'], {}), '(1.0 / 27.211386245988066)\n', (21930, 21956), False, 'import pytest\n'), ((21968, 22029), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""hartree"""', '"""atomic_unit_of_energy"""'], {}), "('hartree', 'atomic_unit_of_energy')\n", (21993, 22029), False, 'import mdi\n'), ((22033, 22051), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (22046, 22051), False, 'import pytest\n'), ((22063, 22137), 
'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""inverse_meter_energy"""', '"""atomic_unit_of_energy"""'], {}), "('inverse_meter_energy', 'atomic_unit_of_energy')\n", (22088, 22137), False, 'import mdi\n'), ((22141, 22180), 'pytest.approx', 'pytest.approx', (['(1.0 / 21947463.136319984)'], {}), '(1.0 / 21947463.136319984)\n', (22154, 22180), False, 'import pytest\n'), ((22192, 22251), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""joule"""', '"""atomic_unit_of_energy"""'], {}), "('joule', 'atomic_unit_of_energy')\n", (22217, 22251), False, 'import mdi\n'), ((22255, 22290), 'pytest.approx', 'pytest.approx', (['(1.0 / 4.35974465e-18)'], {}), '(1.0 / 4.35974465e-18)\n', (22268, 22290), False, 'import pytest\n'), ((22302, 22369), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""kelvin_energy"""', '"""atomic_unit_of_energy"""'], {}), "('kelvin_energy', 'atomic_unit_of_energy')\n", (22327, 22369), False, 'import mdi\n'), ((22373, 22412), 'pytest.approx', 'pytest.approx', (['(1.0 / 315775.02480406954)'], {}), '(1.0 / 315775.02480406954)\n', (22386, 22412), False, 'import pytest\n'), ((22424, 22489), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""kilocalorie"""', '"""atomic_unit_of_energy"""'], {}), "('kilocalorie', 'atomic_unit_of_energy')\n", (22449, 22489), False, 'import mdi\n'), ((22493, 22536), 'pytest.approx', 'pytest.approx', (['(1.0 / 1.0420039967034203e-21)'], {}), '(1.0 / 1.0420039967034203e-21)\n', (22506, 22536), False, 'import pytest\n'), ((22548, 22621), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""kilocalorie_per_mol"""', '"""atomic_unit_of_energy"""'], {}), "('kilocalorie_per_mol', 'atomic_unit_of_energy')\n", (22573, 22621), False, 'import mdi\n'), ((22625, 22663), 'pytest.approx', 'pytest.approx', (['(1.0 / 627.5094737775374)'], {}), '(1.0 / 627.5094737775374)\n', (22638, 22663), False, 'import pytest\n'), ((22675, 22738), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""kilojoule"""', '"""atomic_unit_of_energy"""'], {}), "('kilojoule', 'atomic_unit_of_energy')\n", (22700, 22738), False, 'import mdi\n'), ((22742, 22785), 'pytest.approx', 'pytest.approx', (['(1.0 / 4.3597446499999996e-21)'], {}), '(1.0 / 4.3597446499999996e-21)\n', (22755, 22785), False, 'import pytest\n'), ((22797, 22868), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""kilojoule_per_mol"""', '"""atomic_unit_of_energy"""'], {}), "('kilojoule_per_mol', 'atomic_unit_of_energy')\n", (22822, 22868), False, 'import mdi\n'), ((22872, 22911), 'pytest.approx', 'pytest.approx', (['(1.0 / 2625.4996382852164)'], {}), '(1.0 / 2625.4996382852164)\n', (22885, 22911), False, 'import pytest\n'), ((22923, 22984), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""rydberg"""', '"""atomic_unit_of_energy"""'], {}), "('rydberg', 'atomic_unit_of_energy')\n", (22948, 22984), False, 'import mdi\n'), ((22988, 23006), 'pytest.approx', 'pytest.approx', (['(0.5)'], {}), '(0.5)\n', (23001, 23006), False, 'import pytest\n'), ((23052, 23125), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_force"""', '"""atomic_unit_of_force"""'], {}), "('atomic_unit_of_force', 'atomic_unit_of_force')\n", (23077, 23125), False, 'import mdi\n'), ((23129, 23147), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (23142, 23147), False, 'import pytest\n'), ((23159, 23218), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_force"""', '"""newton"""'], {}), 
"('atomic_unit_of_force', 'newton')\n", (23184, 23218), False, 'import mdi\n'), ((23222, 23258), 'pytest.approx', 'pytest.approx', (['(3.753838631429819e-15)'], {}), '(3.753838631429819e-15)\n', (23235, 23258), False, 'import pytest\n'), ((23270, 23329), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""newton"""', '"""atomic_unit_of_force"""'], {}), "('newton', 'atomic_unit_of_force')\n", (23295, 23329), False, 'import mdi\n'), ((23333, 23375), 'pytest.approx', 'pytest.approx', (['(1.0 / 3.753838631429819e-15)'], {}), '(1.0 / 3.753838631429819e-15)\n', (23346, 23375), False, 'import pytest\n'), ((23387, 23432), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""newton"""', '"""newton"""'], {}), "('newton', 'newton')\n", (23412, 23432), False, 'import mdi\n'), ((23436, 23454), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (23449, 23454), False, 'import pytest\n'), ((23502, 23564), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_length"""', '"""angstrom"""'], {}), "('atomic_unit_of_length', 'angstrom')\n", (23527, 23564), False, 'import mdi\n'), ((23568, 23596), 'pytest.approx', 'pytest.approx', (['(0.52917721067)'], {}), '(0.52917721067)\n', (23581, 23596), False, 'import pytest\n'), ((23608, 23683), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_length"""', '"""atomic_unit_of_length"""'], {}), "('atomic_unit_of_length', 'atomic_unit_of_length')\n", (23633, 23683), False, 'import mdi\n'), ((23687, 23705), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (23700, 23705), False, 'import pytest\n'), ((23717, 23775), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_length"""', '"""bohr"""'], {}), "('atomic_unit_of_length', 'bohr')\n", (23742, 23775), False, 'import mdi\n'), ((23779, 23797), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (23792, 23797), False, 'import pytest\n'), ((23809, 23868), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_length"""', '"""meter"""'], {}), "('atomic_unit_of_length', 'meter')\n", (23834, 23868), False, 'import mdi\n'), ((23872, 23904), 'pytest.approx', 'pytest.approx', (['(5.29177210903e-11)'], {}), '(5.29177210903e-11)\n', (23885, 23904), False, 'import pytest\n'), ((23916, 23979), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_length"""', '"""nanometer"""'], {}), "('atomic_unit_of_length', 'nanometer')\n", (23941, 23979), False, 'import mdi\n'), ((23983, 24013), 'pytest.approx', 'pytest.approx', (['(0.0529177210903)'], {}), '(0.0529177210903)\n', (23996, 24013), False, 'import pytest\n'), ((24026, 24089), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_length"""', '"""picometer"""'], {}), "('atomic_unit_of_length', 'picometer')\n", (24051, 24089), False, 'import mdi\n'), ((24093, 24121), 'pytest.approx', 'pytest.approx', (['(52.9177210903)'], {}), '(52.9177210903)\n', (24106, 24121), False, 'import pytest\n'), ((24136, 24198), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""angstrom"""', '"""atomic_unit_of_length"""'], {}), "('angstrom', 'atomic_unit_of_length')\n", (24161, 24198), False, 'import mdi\n'), ((24202, 24236), 'pytest.approx', 'pytest.approx', (['(1.0 / 0.52917721067)'], {}), '(1.0 / 0.52917721067)\n', (24215, 24236), False, 'import pytest\n'), ((24248, 24306), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""bohr"""', '"""atomic_unit_of_length"""'], {}), 
"('bohr', 'atomic_unit_of_length')\n", (24273, 24306), False, 'import mdi\n'), ((24310, 24328), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (24323, 24328), False, 'import pytest\n'), ((24340, 24399), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""meter"""', '"""atomic_unit_of_length"""'], {}), "('meter', 'atomic_unit_of_length')\n", (24365, 24399), False, 'import mdi\n'), ((24403, 24441), 'pytest.approx', 'pytest.approx', (['(1.0 / 5.29177210903e-11)'], {}), '(1.0 / 5.29177210903e-11)\n', (24416, 24441), False, 'import pytest\n'), ((24453, 24516), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""nanometer"""', '"""atomic_unit_of_length"""'], {}), "('nanometer', 'atomic_unit_of_length')\n", (24478, 24516), False, 'import mdi\n'), ((24520, 24556), 'pytest.approx', 'pytest.approx', (['(1.0 / 0.0529177210903)'], {}), '(1.0 / 0.0529177210903)\n', (24533, 24556), False, 'import pytest\n'), ((24569, 24632), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""picometer"""', '"""atomic_unit_of_length"""'], {}), "('picometer', 'atomic_unit_of_length')\n", (24594, 24632), False, 'import mdi\n'), ((24636, 24670), 'pytest.approx', 'pytest.approx', (['(1.0 / 52.9177210903)'], {}), '(1.0 / 52.9177210903)\n', (24649, 24670), False, 'import pytest\n'), ((24718, 24789), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_mass"""', '"""atomic_unit_of_mass"""'], {}), "('atomic_unit_of_mass', 'atomic_unit_of_mass')\n", (24743, 24789), False, 'import mdi\n'), ((24793, 24811), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (24806, 24811), False, 'import pytest\n'), ((24823, 24883), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_mass"""', '"""kilogram"""'], {}), "('atomic_unit_of_mass', 'kilogram')\n", (24848, 24883), False, 'import mdi\n'), ((24887, 24916), 'pytest.approx', 'pytest.approx', (['(9.10938356e-31)'], {}), '(9.10938356e-31)\n', (24900, 24916), False, 'import pytest\n'), ((24928, 24984), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_mass"""', '"""gram"""'], {}), "('atomic_unit_of_mass', 'gram')\n", (24953, 24984), False, 'import mdi\n'), ((24988, 25017), 'pytest.approx', 'pytest.approx', (['(9.10938356e-28)'], {}), '(9.10938356e-28)\n', (25001, 25017), False, 'import pytest\n'), ((25029, 25097), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_mass"""', '"""atomic_mass_unit"""'], {}), "('atomic_unit_of_mass', 'atomic_mass_unit')\n", (25054, 25097), False, 'import mdi\n'), ((25101, 25137), 'pytest.approx', 'pytest.approx', (['(0.0005485799093287202)'], {}), '(0.0005485799093287202)\n', (25114, 25137), False, 'import pytest\n'), ((25149, 25209), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""kilogram"""', '"""atomic_unit_of_mass"""'], {}), "('kilogram', 'atomic_unit_of_mass')\n", (25174, 25209), False, 'import mdi\n'), ((25213, 25248), 'pytest.approx', 'pytest.approx', (['(1.0 / 9.10938356e-31)'], {}), '(1.0 / 9.10938356e-31)\n', (25226, 25248), False, 'import pytest\n'), ((25260, 25309), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""kilogram"""', '"""kilogram"""'], {}), "('kilogram', 'kilogram')\n", (25285, 25309), False, 'import mdi\n'), ((25313, 25331), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (25326, 25331), False, 'import pytest\n'), ((25343, 25388), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""kilogram"""', '"""gram"""'], {}), 
"('kilogram', 'gram')\n", (25368, 25388), False, 'import mdi\n'), ((25392, 25413), 'pytest.approx', 'pytest.approx', (['(1000.0)'], {}), '(1000.0)\n', (25405, 25413), False, 'import pytest\n'), ((25425, 25482), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""kilogram"""', '"""atomic_mass_unit"""'], {}), "('kilogram', 'atomic_mass_unit')\n", (25450, 25482), False, 'import mdi\n'), ((25486, 25522), 'pytest.approx', 'pytest.approx', (['(6.022140858549162e+26)'], {}), '(6.022140858549162e+26)\n', (25499, 25522), False, 'import pytest\n'), ((25534, 25590), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""gram"""', '"""atomic_unit_of_mass"""'], {}), "('gram', 'atomic_unit_of_mass')\n", (25559, 25590), False, 'import mdi\n'), ((25594, 25629), 'pytest.approx', 'pytest.approx', (['(1.0 / 9.10938356e-28)'], {}), '(1.0 / 9.10938356e-28)\n', (25607, 25629), False, 'import pytest\n'), ((25641, 25686), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""gram"""', '"""kilogram"""'], {}), "('gram', 'kilogram')\n", (25666, 25686), False, 'import mdi\n'), ((25690, 25710), 'pytest.approx', 'pytest.approx', (['(0.001)'], {}), '(0.001)\n', (25703, 25710), False, 'import pytest\n'), ((25722, 25763), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""gram"""', '"""gram"""'], {}), "('gram', 'gram')\n", (25747, 25763), False, 'import mdi\n'), ((25767, 25785), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (25780, 25785), False, 'import pytest\n'), ((25797, 25850), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""gram"""', '"""atomic_mass_unit"""'], {}), "('gram', 'atomic_mass_unit')\n", (25822, 25850), False, 'import mdi\n'), ((25854, 25891), 'pytest.approx', 'pytest.approx', (['(6.0221408585491626e+23)'], {}), '(6.0221408585491626e+23)\n', (25867, 25891), False, 'import pytest\n'), ((25903, 25971), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_mass_unit"""', '"""atomic_unit_of_mass"""'], {}), "('atomic_mass_unit', 'atomic_unit_of_mass')\n", (25928, 25971), False, 'import mdi\n'), ((25975, 26017), 'pytest.approx', 'pytest.approx', (['(1.0 / 0.0005485799093287202)'], {}), '(1.0 / 0.0005485799093287202)\n', (25988, 26017), False, 'import pytest\n'), ((26029, 26086), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_mass_unit"""', '"""kilogram"""'], {}), "('atomic_mass_unit', 'kilogram')\n", (26054, 26086), False, 'import mdi\n'), ((26090, 26119), 'pytest.approx', 'pytest.approx', (['(1.66053904e-27)'], {}), '(1.66053904e-27)\n', (26103, 26119), False, 'import pytest\n'), ((26131, 26184), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_mass_unit"""', '"""gram"""'], {}), "('atomic_mass_unit', 'gram')\n", (26156, 26184), False, 'import mdi\n'), ((26188, 26217), 'pytest.approx', 'pytest.approx', (['(1.66053904e-24)'], {}), '(1.66053904e-24)\n', (26201, 26217), False, 'import pytest\n'), ((26229, 26294), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_mass_unit"""', '"""atomic_mass_unit"""'], {}), "('atomic_mass_unit', 'atomic_mass_unit')\n", (26254, 26294), False, 'import mdi\n'), ((26298, 26316), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (26311, 26316), False, 'import pytest\n'), ((26361, 26432), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_time"""', '"""atomic_unit_of_time"""'], {}), "('atomic_unit_of_time', 'atomic_unit_of_time')\n", (26386, 26432), False, 'import mdi\n'), ((26436, 26454), 
'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (26449, 26454), False, 'import pytest\n'), ((26466, 26528), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_time"""', '"""picosecond"""'], {}), "('atomic_unit_of_time', 'picosecond')\n", (26491, 26528), False, 'import mdi\n'), ((26532, 26569), 'pytest.approx', 'pytest.approx', (['(2.4188843265857007e-05)'], {}), '(2.4188843265857007e-05)\n', (26545, 26569), False, 'import pytest\n'), ((26581, 26639), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_time"""', '"""second"""'], {}), "('atomic_unit_of_time', 'second')\n", (26606, 26639), False, 'import mdi\n'), ((26643, 26680), 'pytest.approx', 'pytest.approx', (['(2.4188843265857007e-17)'], {}), '(2.4188843265857007e-17)\n', (26656, 26680), False, 'import pytest\n'), ((26692, 26754), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""picosecond"""', '"""atomic_unit_of_time"""'], {}), "('picosecond', 'atomic_unit_of_time')\n", (26717, 26754), False, 'import mdi\n'), ((26758, 26801), 'pytest.approx', 'pytest.approx', (['(1.0 / 2.4188843265857007e-05)'], {}), '(1.0 / 2.4188843265857007e-05)\n', (26771, 26801), False, 'import pytest\n'), ((26813, 26866), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""picosecond"""', '"""picosecond"""'], {}), "('picosecond', 'picosecond')\n", (26838, 26866), False, 'import mdi\n'), ((26870, 26888), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (26883, 26888), False, 'import pytest\n'), ((26900, 26949), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""picosecond"""', '"""second"""'], {}), "('picosecond', 'second')\n", (26925, 26949), False, 'import mdi\n'), ((26953, 26973), 'pytest.approx', 'pytest.approx', (['(1e-12)'], {}), '(1e-12)\n', (26966, 26973), False, 'import pytest\n'), ((26987, 27045), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""second"""', '"""atomic_unit_of_time"""'], {}), "('second', 'atomic_unit_of_time')\n", (27012, 27045), False, 'import mdi\n'), ((27049, 27092), 'pytest.approx', 'pytest.approx', (['(1.0 / 2.4188843265857007e-17)'], {}), '(1.0 / 2.4188843265857007e-17)\n', (27062, 27092), False, 'import pytest\n'), ((27104, 27153), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""second"""', '"""picosecond"""'], {}), "('second', 'picosecond')\n", (27129, 27153), False, 'import mdi\n'), ((27157, 27187), 'pytest.approx', 'pytest.approx', (['(1000000000000.0)'], {}), '(1000000000000.0)\n', (27170, 27187), False, 'import pytest\n'), ((27191, 27236), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""second"""', '"""second"""'], {}), "('second', 'second')\n", (27216, 27236), False, 'import mdi\n'), ((27240, 27258), 'pytest.approx', 'pytest.approx', (['(1.0)'], {}), '(1.0)\n', (27253, 27258), False, 'import pytest\n'), ((27314, 27338), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (27327, 27338), False, 'import pytest\n'), ((27355, 27401), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""fake_unit"""', '"""bohr"""'], {}), "('fake_unit', 'bohr')\n", (27380, 27401), False, 'import mdi\n'), ((27410, 27434), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (27423, 27434), False, 'import pytest\n'), ((27451, 27492), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""angstrom"""', '""""""'], {}), "('angstrom', '')\n", (27476, 27492), False, 'import mdi\n'), ((27552, 27576), 'pytest.raises', 
'pytest.raises', (['Exception'], {}), '(Exception)\n', (27565, 27576), False, 'import pytest\n'), ((27593, 27666), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""atomic_unit_of_energy"""', '"""atomic_unit_of_time"""'], {}), "('atomic_unit_of_energy', 'atomic_unit_of_time')\n", (27618, 27666), False, 'import mdi\n'), ((27675, 27699), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (27688, 27699), False, 'import pytest\n'), ((27716, 27761), 'mdi.MDI_Conversion_Factor', 'mdi.MDI_Conversion_Factor', (['"""meter"""', '"""calorie"""'], {}), "('meter', 'calorie')\n", (27741, 27761), False, 'import mdi\n'), ((27961, 27985), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (27974, 27985), False, 'import pytest\n'), ((27995, 28024), 'mdi.MDI_Accept_Communicator', 'mdi.MDI_Accept_Communicator', ([], {}), '()\n', (28022, 28024), False, 'import mdi\n'), ((28034, 28058), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28047, 28058), False, 'import pytest\n'), ((28068, 28110), 'mdi.MDI_Send', 'mdi.MDI_Send', (['[1, 2]', '(2)', 'mdi.MDI_INT', 'comm'], {}), '([1, 2], 2, mdi.MDI_INT, comm)\n', (28080, 28110), False, 'import mdi\n'), ((28120, 28144), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28133, 28144), False, 'import pytest\n'), ((28154, 28188), 'mdi.MDI_Recv', 'mdi.MDI_Recv', (['(2)', 'mdi.MDI_INT', 'comm'], {}), '(2, mdi.MDI_INT, comm)\n', (28166, 28188), False, 'import mdi\n'), ((28198, 28222), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28211, 28222), False, 'import pytest\n'), ((28232, 28270), 'mdi.MDI_Send_Command', 'mdi.MDI_Send_Command', (['"""<VERSION"""', 'comm'], {}), "('<VERSION', comm)\n", (28252, 28270), False, 'import mdi\n'), ((28280, 28304), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28293, 28304), False, 'import pytest\n'), ((28314, 28340), 'mdi.MDI_Recv_Command', 'mdi.MDI_Recv_Command', (['comm'], {}), '(comm)\n', (28334, 28340), False, 'import mdi\n'), ((28350, 28374), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28363, 28374), False, 'import pytest\n'), ((28384, 28417), 'mdi.MDI_Register_Node', 'mdi.MDI_Register_Node', (['"""TESTNODE"""'], {}), "('TESTNODE')\n", (28405, 28417), False, 'import mdi\n'), ((28427, 28451), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28440, 28451), False, 'import pytest\n'), ((28461, 28504), 'mdi.MDI_Check_Node_Exists', 'mdi.MDI_Check_Node_Exists', (['"""TESTNODE"""', 'comm'], {}), "('TESTNODE', comm)\n", (28486, 28504), False, 'import mdi\n'), ((28514, 28538), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28527, 28538), False, 'import pytest\n'), ((28548, 28585), 'mdi.MDI_Get_Node', 'mdi.MDI_Get_Node', (['(0)', 'comm', '"""TESTNODE"""'], {}), "(0, comm, 'TESTNODE')\n", (28564, 28585), False, 'import mdi\n'), ((28595, 28619), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28608, 28619), False, 'import pytest\n'), ((28629, 28653), 'mdi.MDI_Get_NNodes', 'mdi.MDI_Get_NNodes', (['comm'], {}), '(comm)\n', (28647, 28653), False, 'import mdi\n'), ((28663, 28687), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28676, 28687), False, 'import pytest\n'), ((28697, 28722), 'mdi.MDI_Get_Node', 'mdi.MDI_Get_Node', (['(0)', 'comm'], {}), '(0, comm)\n', (28713, 28722), False, 'import mdi\n'), ((28732, 28756), 'pytest.raises', 'pytest.raises', (['Exception'], {}), 
'(Exception)\n', (28745, 28756), False, 'import pytest\n'), ((28766, 28814), 'mdi.MDI_Register_Command', 'mdi.MDI_Register_Command', (['"""TESTNODE"""', '"""TESTCOMM"""'], {}), "('TESTNODE', 'TESTCOMM')\n", (28790, 28814), False, 'import mdi\n'), ((28824, 28848), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28837, 28848), False, 'import pytest\n'), ((28858, 28916), 'mdi.MDI_Check_Command_Exists', 'mdi.MDI_Check_Command_Exists', (['"""TESTNODE"""', '"""TESTCOMM"""', 'comm'], {}), "('TESTNODE', 'TESTCOMM', comm)\n", (28886, 28916), False, 'import mdi\n'), ((28926, 28950), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (28939, 28950), False, 'import pytest\n'), ((28960, 28999), 'mdi.MDI_Get_NCommands', 'mdi.MDI_Get_NCommands', (['"""TESTNODE"""', 'comm'], {}), "('TESTNODE', comm)\n", (28981, 28999), False, 'import mdi\n'), ((29009, 29033), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (29022, 29033), False, 'import pytest\n'), ((29043, 29083), 'mdi.MDI_Get_Command', 'mdi.MDI_Get_Command', (['"""TESTNODE"""', '(0)', 'comm'], {}), "('TESTNODE', 0, comm)\n", (29062, 29083), False, 'import mdi\n'), ((29093, 29117), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (29106, 29117), False, 'import pytest\n'), ((29127, 29176), 'mdi.MDI_Register_Callback', 'mdi.MDI_Register_Callback', (['"""TESTNODE"""', '"""TESTCALL"""'], {}), "('TESTNODE', 'TESTCALL')\n", (29152, 29176), False, 'import mdi\n'), ((29186, 29210), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (29199, 29210), False, 'import pytest\n'), ((29220, 29279), 'mdi.MDI_Check_Callback_Exists', 'mdi.MDI_Check_Callback_Exists', (['"""TESTNODE"""', '"""TESTCALL"""', 'comm'], {}), "('TESTNODE', 'TESTCALL', comm)\n", (29249, 29279), False, 'import mdi\n'), ((29289, 29313), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (29302, 29313), False, 'import pytest\n'), ((29323, 29363), 'mdi.MDI_Get_NCallbacks', 'mdi.MDI_Get_NCallbacks', (['"""TESTNODE"""', 'comm'], {}), "('TESTNODE', comm)\n", (29345, 29363), False, 'import mdi\n'), ((29373, 29397), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (29386, 29397), False, 'import pytest\n'), ((29407, 29448), 'mdi.MDI_Get_Callback', 'mdi.MDI_Get_Callback', (['"""TESTNODE"""', '(0)', 'comm'], {}), "('TESTNODE', 0, comm)\n", (29427, 29448), False, 'import mdi\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2018, IBM.
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
"""
Quantum tomography fitter data formatting.
"""
import logging
import itertools as it
import numpy as np
from scipy import linalg as la
from qiskit import QiskitError
from ..data import marginal_counts, count_keys
from ..basis import TomographyBasis, default_basis
# Create logger
logger = logging.getLogger(__name__)
###########################################################################
# Data formats for converting from counts to fitter data
###########################################################################
def fitter_data(tomo_data,
meas_basis='Pauli',
prep_basis='Pauli',
standard_weights=True,
beta=0.5):
"""Generate tomography fitter data from a tomography data dictionary.
Args:
tomo_data (dict): tomography data returned from `tomography_data`
function.
        meas_basis (str or function, optional): a function returning
            measurement operators corresponding to measurement outcomes, or
            the name of a built-in measurement basis. See
            Additional Information (default: 'Pauli')
        prep_basis (str or function, optional): a function returning
            preparation operators, or the name of a built-in preparation
            basis. See Additional Information (default: 'Pauli')
standard_weights (bool, optional): Apply weights to basis matrix
and data based on count probability (default: True)
beta (float): hedging parameter for 0, 1 probabilities (default: 0.5)
Returns:
tuple: (data, basis_matrix, weights) where `data` is a vector of the
probability values, and `basis_matrix` is a matrix of the preparation
        and measurement operators, and `weights` is a vector of weights for the
given probabilities.
Additional Information
----------------------
standard_weights:
        Weights are calculated from the binomial distribution standard
        deviation.
"""
# Load built-in circuit functions
if callable(meas_basis):
measurement = meas_basis
else:
measurement = default_basis(meas_basis)
if isinstance(measurement, TomographyBasis):
if measurement.measurement is not True:
raise QiskitError("Invalid measurement basis")
measurement = measurement.measurement_matrix
if callable(prep_basis):
preparation = prep_basis
else:
preparation = default_basis(prep_basis)
if isinstance(preparation, TomographyBasis):
if preparation.preparation is not True:
raise QiskitError("Invalid preparation basis")
preparation = preparation.preparation_matrix
data = []
basis_blocks = []
if standard_weights:
weights = []
else:
weights = None
# Check if input data is state or process tomography data based
# on the label tuples
label = next(iter(tomo_data))
is_qpt = (isinstance(label, tuple) and len(label) == 2 and
isinstance(label[0], tuple) and isinstance(label[1], tuple))
# Generate counts keys for converting to np array
if is_qpt:
ctkeys = count_keys(len(label[1]))
else:
ctkeys = count_keys(len(label))
for label, cts in tomo_data.items():
# Convert counts dict to numpy array
if isinstance(cts, dict):
cts = np.array([cts.get(key, 0) for key in ctkeys])
# Get probabilities
shots = np.sum(cts)
probs = np.array(cts) / shots
data += list(probs)
# Compute binomial weights
if standard_weights is True:
wts = binomial_weights(cts, beta)
weights += list(wts)
# Get reconstruction basis operators
if is_qpt:
prep_label = label[0]
meas_label = label[1]
else:
prep_label = None
meas_label = label
prep_op = _preparation_op(prep_label, preparation)
meas_ops = _measurement_ops(meas_label, measurement)
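        # For process tomography each block row is built from
        # kron(prep_op.T, meas_op); tracing that operator against the
        # channel's Choi matrix yields the corresponding outcome probability.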
block = _basis_operator_matrix(
[np.kron(prep_op.T, mop) for mop in meas_ops])
basis_blocks.append(block)
return data, np.vstack(basis_blocks), weights
###########################################################################
# Binomial weights for count statistics
###########################################################################
def binomial_weights(counts, beta=0.5):
"""
Compute binomial weights for list or dictionary of counts.
Args:
counts (dict, vector): A set of measurement counts for
all outcomes of a given measurement
configuration.
beta (float >= 0): A hedging parameter used to bias probabilities
computed from input counts away from 0 or 1.
Returns:
A numpy array of binomial weights for the input counts and beta
parameter.
Additional Information:
The weights are determined by
            w[i] = sqrt(shots / (p[i] * (1 - p[i])))
p[i] = (counts[i] + beta) / (shots + K * beta)
where
`shots` is the sum of all counts in the input
`p` is the hedged probability computed for a count
`K` is the total number of possible measurement outcomes.
"""
# Sort counts if input is a dictionary
if isinstance(counts, dict):
mcts = marginal_counts(counts, pad_zeros=True)
ordered_keys = sorted(list(mcts))
counts = np.array([mcts[k] for k in ordered_keys])
# Assume counts are already sorted if a list
else:
counts = np.array(counts)
shots = np.sum(counts)
# If beta is 0 check if we would be dividing by zero
# If so change beta value and log warning.
if beta < 0:
raise ValueError('beta = {} must be non-negative.'.format(beta))
if beta == 0 and (shots in counts or 0 in counts):
beta = 0.5
msg = ("Counts result in probabilities of 0 or 1 in binomial weights "
"calculation. Setting hedging parameter beta={} to prevent "
"dividing by zero.".format(beta))
logger.warning(msg)
K = len(counts) # Number of possible outcomes.
# Compute hedged frequencies which are shifted to never be 0 or 1.
freqs_hedged = (counts + beta) / (shots + K * beta)
    # Return Gaussian weights based on the hedged frequencies.
return np.sqrt(shots / (freqs_hedged * (1 - freqs_hedged)))
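# Worked example (illustrative, not from the original source): for
# counts = [3, 7] and beta = 0.5 we have shots = 10 and K = 2, so the
# hedged frequencies are 3.5/11 ~ 0.318 and 7.5/11 ~ 0.682, and both
# weights equal sqrt(10 / (0.318 * 0.682)) ~ 6.79.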
###########################################################################
# Wizard Method rescaling
###########################################################################
def make_positive_semidefinite(mat, epsilon=0):
"""
    Rescale a Hermitian matrix to the nearest positive semidefinite matrix.
Args:
mat (array like): a hermitian matrix.
epsilon (float >=0, optional): the threshold for setting
            eigenvalues to zero. If epsilon > 0, positive eigenvalues
            below epsilon will also be set to zero (Default 0).
Returns:
The input matrix rescaled to have non-negative eigenvalues.
References:
[1] <NAME>, <NAME>, <NAME>, Phys. Rev. Lett. 108, 070502
(2012). Open access: arXiv:1106.5458 [quant-ph].
"""
if epsilon < 0:
raise ValueError('epsilon must be non-negative.')
# Get the eigenvalues and eigenvectors of rho
# eigenvalues are sorted in increasing order
# v[i] <= v[i+1]
dim = len(mat)
v, w = la.eigh(mat)
for j in range(dim):
if v[j] < epsilon:
tmp = v[j]
v[j] = 0.
# Rescale remaining eigenvalues
x = 0.
for k in range(j + 1, dim):
x += tmp / (dim - (j + 1))
v[k] = v[k] + tmp / (dim - (j + 1))
# Build positive matrix from the rescaled eigenvalues
# and the original eigenvectors
mat_psd = np.zeros([dim, dim], dtype=complex)
for j in range(dim):
mat_psd += v[j] * np.outer(w[:, j], np.conj(w[:, j]))
return mat_psd
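# Worked example (illustrative, not from the original source): input
# eigenvalues [-0.1, 0.1, 1.0] with epsilon = 0 become [0.0, 0.05, 0.95];
# the negative eigenvalue is zeroed and its deficit is spread over the
# remaining ones, so the trace (1.0) is preserved.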
###########################################################################
# Basis projector construction functions
###########################################################################
def _basis_operator_matrix(basis):
"""
Return a basis measurement matrix of the input basis.
Args:
basis (list (array like)): a list of basis matrices.
Returns:
A numpy array of shape (n, col * row) where n is the number
of operators of shape (row, col) in `basis`.
"""
# Dimensions
num_ops = len(basis)
nrows, ncols = basis[0].shape
size = nrows * ncols
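    # Each operator B is flattened column-major ('F' order) and conjugated,
    # so ret @ vec(rho) evaluates Tr[B^dagger rho] row by row.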
ret = np.zeros((num_ops, size), dtype=complex)
for j, b in enumerate(basis):
ret[j] = np.array(b).reshape((1, size), order='F').conj()
return ret
def _preparation_op(label, prep_matrix_fn):
"""
Return the multi-qubit matrix for a state preparation label.
Args:
label (tuple(str)): a preparation configuration label for a
tomography circuit.
prep_matrix_fn (function): a function that returns the matrix
corresponding to a single qubit preparation label.
The functions should have signature
prep_matrix_fn(str) -> np.array
Returns:
    A Numpy array for the multi-qubit preparation operator specified
by label.
Additional Information:
See the Pauli and SIC-POVM preparation functions
`pauli_preparation_matrix` and `sicpovm_preparation_matrix` for
examples.
"""
# Trivial case
if label is None:
return np.eye(1, dtype=complex)
# Construct preparation matrix
op = np.eye(1, dtype=complex)
for l in label:
op = np.kron(prep_matrix_fn(l), op)
return op
def _measurement_ops(label, meas_matrix_fn):
"""
    Return a list of multi-qubit matrices for a measurement label.
Args:
label (tuple(str)): a measurement configuration label for a
tomography circuit.
meas_matrix_fn (function): a function that returns the matrix
corresponding to a single qubit measurement label
for a given outcome. The functions should have
signature meas_matrix_fn(str, int) -> np.array
Returns:
A list of Numpy array for the multi-qubit measurement operators
for all measurement outcomes for the measurement basis specified
    by the label. These are ordered in increasing binary order, e.g. for
    2 qubits the returned matrices correspond to outcomes [00, 01, 10, 11]
Additional Information:
See the Pauli measurement function `pauli_measurement_matrix`
for an example.
"""
num_qubits = len(label)
meas_ops = []
# Construct measurement POVM for all measurement outcomes for a given
# measurement label. This will be a list of 2 ** n operators.
for l in sorted(it.product((0, 1), repeat=num_qubits)):
op = np.eye(1, dtype=complex)
# Reverse label to correspond to QISKit bit ordering
for m, outcome in zip(reversed(label), l):
op = np.kron(op, meas_matrix_fn(m, outcome))
meas_ops.append(op)
return meas_ops
|
[
"numpy.conj",
"numpy.sum",
"qiskit.QiskitError",
"numpy.zeros",
"numpy.vstack",
"numpy.array",
"scipy.linalg.eigh",
"numpy.kron",
"itertools.product",
"numpy.eye",
"logging.getLogger",
"numpy.sqrt"
] |
[((492, 519), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (509, 519), False, 'import logging\n'), ((5869, 5883), 'numpy.sum', 'np.sum', (['counts'], {}), '(counts)\n', (5875, 5883), True, 'import numpy as np\n'), ((6636, 6688), 'numpy.sqrt', 'np.sqrt', (['(shots / (freqs_hedged * (1 - freqs_hedged)))'], {}), '(shots / (freqs_hedged * (1 - freqs_hedged)))\n', (6643, 6688), True, 'import numpy as np\n'), ((7715, 7727), 'scipy.linalg.eigh', 'la.eigh', (['mat'], {}), '(mat)\n', (7722, 7727), True, 'from scipy import linalg as la\n'), ((8133, 8168), 'numpy.zeros', 'np.zeros', (['[dim, dim]'], {'dtype': 'complex'}), '([dim, dim], dtype=complex)\n', (8141, 8168), True, 'import numpy as np\n'), ((8900, 8940), 'numpy.zeros', 'np.zeros', (['(num_ops, size)'], {'dtype': 'complex'}), '((num_ops, size), dtype=complex)\n', (8908, 8940), True, 'import numpy as np\n'), ((9981, 10005), 'numpy.eye', 'np.eye', (['(1)'], {'dtype': 'complex'}), '(1, dtype=complex)\n', (9987, 10005), True, 'import numpy as np\n'), ((3653, 3664), 'numpy.sum', 'np.sum', (['cts'], {}), '(cts)\n', (3659, 3664), True, 'import numpy as np\n'), ((4363, 4386), 'numpy.vstack', 'np.vstack', (['basis_blocks'], {}), '(basis_blocks)\n', (4372, 4386), True, 'import numpy as np\n'), ((5722, 5763), 'numpy.array', 'np.array', (['[mcts[k] for k in ordered_keys]'], {}), '([mcts[k] for k in ordered_keys])\n', (5730, 5763), True, 'import numpy as np\n'), ((5840, 5856), 'numpy.array', 'np.array', (['counts'], {}), '(counts)\n', (5848, 5856), True, 'import numpy as np\n'), ((9911, 9935), 'numpy.eye', 'np.eye', (['(1)'], {'dtype': 'complex'}), '(1, dtype=complex)\n', (9917, 9935), True, 'import numpy as np\n'), ((11268, 11305), 'itertools.product', 'it.product', (['(0, 1)'], {'repeat': 'num_qubits'}), '((0, 1), repeat=num_qubits)\n', (11278, 11305), True, 'import itertools as it\n'), ((11321, 11345), 'numpy.eye', 'np.eye', (['(1)'], {'dtype': 'complex'}), '(1, dtype=complex)\n', (11327, 11345), True, 'import numpy as np\n'), ((3681, 3694), 'numpy.array', 'np.array', (['cts'], {}), '(cts)\n', (3689, 3694), True, 'import numpy as np\n'), ((2435, 2475), 'qiskit.QiskitError', 'QiskitError', (['"""Invalid measurement basis"""'], {}), "('Invalid measurement basis')\n", (2446, 2475), False, 'from qiskit import QiskitError\n'), ((2780, 2820), 'qiskit.QiskitError', 'QiskitError', (['"""Invalid preparation basis"""'], {}), "('Invalid preparation basis')\n", (2791, 2820), False, 'from qiskit import QiskitError\n'), ((4264, 4287), 'numpy.kron', 'np.kron', (['prep_op.T', 'mop'], {}), '(prep_op.T, mop)\n', (4271, 4287), True, 'import numpy as np\n'), ((8238, 8254), 'numpy.conj', 'np.conj', (['w[:, j]'], {}), '(w[:, j])\n', (8245, 8254), True, 'import numpy as np\n'), ((8992, 9003), 'numpy.array', 'np.array', (['b'], {}), '(b)\n', (9000, 9003), True, 'import numpy as np\n')]
|
import unittest
import numpy as np
from texar.torch.run.metric.summary import *
class RegressionMetricTest(unittest.TestCase):
def setUp(self) -> None:
self.n_examples = 100
self.batch_size = 2
self.values = np.random.randn(self.n_examples)
def test_running_average(self):
queue_size = 10
metric = RunningAverage(queue_size)
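        # RunningAverage keeps only the most recent `queue_size` values, so
        # after each batch it should equal the mean of (at most) the last 10
        # entries -- mirrored by the slice in `answer` below.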
for idx in range(0, self.n_examples, self.batch_size):
end_idx = idx + self.batch_size
metric.add(self.values[idx:end_idx], None)
value = metric.value()
answer = self.values[max(0, end_idx - queue_size):end_idx].mean()
self.assertAlmostEqual(value, answer)
|
[
"numpy.random.randn"
] |
[((240, 272), 'numpy.random.randn', 'np.random.randn', (['self.n_examples'], {}), '(self.n_examples)\n', (255, 272), True, 'import numpy as np\n')]
|
# Generated by Django 3.0.2 on 2021-04-13 18:38
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('website', '0018_remove_rule_description'),
]
operations = [
migrations.AddField(
model_name='rule',
name='internal',
field=models.BooleanField(default=False, verbose_name='Intern regel'),
),
]
|
[
"django.db.models.BooleanField"
] |
[((339, 402), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)', 'verbose_name': '"""Intern regel"""'}), "(default=False, verbose_name='Intern regel')\n", (358, 402), False, 'from django.db import migrations, models\n')]
|
import sys, os.path as p
wdir = p.abspath(p.dirname(__file__))
topdir = p.normpath(p.join(wdir, p.pardir, p.pardir))
srcdir = p.join(topdir, 'src')
sys.path.insert(0, p.join(topdir, 'conf'))
from mpiscanner import Scanner
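# Parse the Cython MPI declarations from src/mpi4py/libmpi.pxd, dump a
# matching C header next to this script, and write a libmpi.c.in template
# that includes the lib-mpi compatibility shims.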
scanner = Scanner()
libmpi_pxd = p.join(srcdir, 'mpi4py', 'libmpi.pxd')
scanner.parse_file(libmpi_pxd)
libmpi_h = p.join(wdir, 'libmpi.h')
scanner.dump_header_h(libmpi_h)
#try:
# from cStringIO import StringIO
#except ImportError:
# from io import StringIO
#libmpi_h = StringIO()
#scanner.dump_header_h(libmpi_h)
#print libmpi_h.read()
libmpi_c = p.join(wdir, 'libmpi.c.in')
with open(libmpi_c, 'w') as f:
f.write("""\
#include <mpi.h>
#include "%(srcdir)s/lib-mpi/config.h"
#include "%(srcdir)s/lib-mpi/missing.h"
#include "%(srcdir)s/lib-mpi/fallback.h"
#include "%(srcdir)s/lib-mpi/compat.h"
""" % vars())
|
[
"os.path.dirname",
"os.path.join",
"mpiscanner.Scanner"
] |
[((126, 147), 'os.path.join', 'p.join', (['topdir', '"""src"""'], {}), "(topdir, 'src')\n", (132, 147), True, 'import sys, os.path as p\n'), ((233, 242), 'mpiscanner.Scanner', 'Scanner', ([], {}), '()\n', (240, 242), False, 'from mpiscanner import Scanner\n'), ((256, 294), 'os.path.join', 'p.join', (['srcdir', '"""mpi4py"""', '"""libmpi.pxd"""'], {}), "(srcdir, 'mpi4py', 'libmpi.pxd')\n", (262, 294), True, 'import sys, os.path as p\n'), ((337, 361), 'os.path.join', 'p.join', (['wdir', '"""libmpi.h"""'], {}), "(wdir, 'libmpi.h')\n", (343, 361), True, 'import sys, os.path as p\n'), ((578, 605), 'os.path.join', 'p.join', (['wdir', '"""libmpi.c.in"""'], {}), "(wdir, 'libmpi.c.in')\n", (584, 605), True, 'import sys, os.path as p\n'), ((42, 61), 'os.path.dirname', 'p.dirname', (['__file__'], {}), '(__file__)\n', (51, 61), True, 'import sys, os.path as p\n'), ((83, 115), 'os.path.join', 'p.join', (['wdir', 'p.pardir', 'p.pardir'], {}), '(wdir, p.pardir, p.pardir)\n', (89, 115), True, 'import sys, os.path as p\n'), ((167, 189), 'os.path.join', 'p.join', (['topdir', '"""conf"""'], {}), "(topdir, 'conf')\n", (173, 189), True, 'import sys, os.path as p\n')]
|
import unittest
from python.feb_stats.parsers.feb_livescore_parser import FEBLivescoreParser
class GenericParserTestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(GenericParserTestCase, self).__init__(*args, **kwargs)
self.parser = FEBLivescoreParser()
self.test_file = "test_data/1_livescore.html"
def test_parse_str(self):
test_str = (
" Rebotes D O T "
)
desired_test_str = "Rebotes D O T"
out_str = self.parser.parse_str(test_str)
self.assertEqual(out_str, desired_test_str)
test_str = "\n\t\t\t\t\t\t\n\t\t\t\t\nRebotes\n\t\t\t\t\t\n\t\t\tD\n\t\t\t\t\t\tO\n\t\t\t\t\t\tT\n\t\t\t\t\t\t"
out_str = self.parser.parse_str(test_str)
self.assertEqual(out_str, desired_test_str)
test_str = " 0 0 0 "
desired_test_str = "0 0 0"
out_str = self.parser.parse_str(test_str)
self.assertEqual(out_str, desired_test_str)
test_str = (
"\n\t\t\t\t\t\t\n\t\t\t\t\n0\n\t\t\t\t\t\n\t\t\t0\n\t\t\t\t\t\t0\t\t\t\t\t"
)
out_str = self.parser.parse_str(test_str)
self.assertEqual(out_str, desired_test_str)
test_str = (
" Tapones Fa Co "
)
desired_test_str = "Tapones Fa Co"
out_str = self.parser.parse_str(test_str)
self.assertEqual(out_str, desired_test_str)
test_str = " 0 0 "
desired_test_str = "0 0"
out_str = self.parser.parse_str(test_str)
self.assertEqual(out_str, desired_test_str)
def test_get_elements(self):
doc = self.parser.read_link_file(self.test_file)
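        # The XPath matches both teams' box-score tables: local side first,
        # away side second.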
        table_xpath = '//table[@cellpadding="0"]//tbody'
        table_local, table_away = self.parser.get_elements(doc, table_xpath)
self.assertEqual(len(table_local), 13)
self.assertEqual(len(table_away), 15)
def test_elements_to_df(self):
doc = self.parser.read_link_file(self.test_file)
        table_xpath = '//table[@cellpadding="0"]//tbody'
        table_local, table_away = self.parser.get_elements(doc, table_xpath)
local_df = self.parser.elements_to_df(
table_local, initial_row=2, discard_last=0
)
away_df = self.parser.elements_to_df(table_away, initial_row=2, discard_last=0)
self.assertEqual(local_df.shape, (11, 22))
self.assertEqual(away_df.shape, (13, 22))
for df in (local_df, away_df):
self.assertListEqual(
list(df.columns),
[
"inicial",
"dorsal",
"nombre jugador",
"minutos",
"puntos",
"tiros dos",
"tiros tres",
"tiros campo",
"tiros libres",
"rebotes total",
"rebotes defensivos",
"rebotes ofensivos",
"asistencias",
"recuperaciones",
"perdidas",
"tapones favor",
"tapones contra",
"mates",
"faltas cometidas",
"faltas recibidas",
"valoracion",
"balance",
],
)
def test_parse_boxscores(self):
with open(self.test_file, mode="rb") as f:
boxscores_bytes = f.read()
league = self.parser.parse_boxscores([boxscores_bytes])
self.assertEqual(2, len(league.teams))
self.assertEqual(1, len(league.games))
def test_read_link_bytes(self):
with open(self.test_file, mode="rb") as f:
link_bytes = f.read()
doc = self.parser.read_link_bytes(link_bytes)
self.assertIsNotNone(doc.forms)
self.assertIsNotNone(doc.body)
self.assertIsNotNone(doc.head)
def test_read_link_file(self):
doc = self.parser.read_link_file(self.test_file)
self.assertIsNotNone(doc.forms)
self.assertIsNotNone(doc.body)
self.assertIsNotNone(doc.head)
def test_parse_game_metadata(self):
doc = self.parser.read_link_file(self.test_file)
game_metadata = self.parser.parse_game_metadata(doc)
desired_dict = {
"date": "08/03/2020",
"hour": "18:00",
"league": "LIGA EBA",
"season": "2019/2020",
"home_team": "HERO JAIRIS",
"home_score": "75",
"away_team": "UCAM MURCIA JIFFY",
"away_score": "68",
"main_referee": "-", # "<NAME>. ALBERT",
"second_referee": "-", # "ARAQUE CACERES. MAURO",
}
self.assertDictEqual(game_metadata, desired_dict)
def test_parse_game_stats(self):
doc = self.parser.read_link_file(self.test_file)
game, (local_team, away_team) = self.parser.parse_game_stats(doc)
        # assertTrue(x, msg) only checks truthiness (the second argument is
        # the failure message), so compare with assertEqual where the
        # expected value is known.
        self.assertEqual(game.date, "08/03/2020")
        self.assertEqual(game.hour, "18:00")
        self.assertEqual(game.league, "LIGA EBA")
        self.assertEqual(game.season, "2019/2020")
        self.assertEqual(game.home_score, "75")
        self.assertEqual(game.away_team, "UCAM MURCIA JIFFY")
        self.assertEqual(game.away_score, "68")
        self.assertEqual(away_team.name, "UCAM MURCIA JIFFY")
        # Referee and local-team names are redacted in the fixture, so only
        # check that they were parsed to non-empty values.
        self.assertTrue(game.main_referee)
        self.assertTrue(game.aux_referee)
        self.assertTrue(local_team.name)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"python.feb_stats.parsers.feb_livescore_parser.FEBLivescoreParser"
] |
[((5800, 5815), 'unittest.main', 'unittest.main', ([], {}), '()\n', (5813, 5815), False, 'import unittest\n'), ((275, 295), 'python.feb_stats.parsers.feb_livescore_parser.FEBLivescoreParser', 'FEBLivescoreParser', ([], {}), '()\n', (293, 295), False, 'from python.feb_stats.parsers.feb_livescore_parser import FEBLivescoreParser\n')]
|
#!/usr/bin/env python
#
# Copyright 2012 The Closure Linter Authors. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the error_fixer module."""
# Allow non-Google copyright
# pylint: disable=g-bad-file-header
import unittest as googletest
from closure_linter import error_fixer
from closure_linter import testutil
class ErrorFixerTest(googletest.TestCase):
"""Unit tests for error_fixer."""
def setUp(self):
self.error_fixer = error_fixer.ErrorFixer()
def testDeleteToken(self):
start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCRIPT)
second_token = start_token.next
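    # Deleting the head token should promote the next token to the start of
    # the file's token stream.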
self.error_fixer.HandleFile('test_file', start_token)
self.error_fixer._DeleteToken(start_token)
self.assertEqual(second_token, self.error_fixer._file_token)
def testDeleteTokens(self):
start_token = testutil.TokenizeSourceAndRunEcmaPass(_TEST_SCRIPT)
fourth_token = start_token.next.next.next
self.error_fixer.HandleFile('test_file', start_token)
self.error_fixer._DeleteTokens(start_token, 3)
self.assertEqual(fourth_token, self.error_fixer._file_token)
_TEST_SCRIPT = """\
var x = 3;
"""
if __name__ == '__main__':
googletest.main()
|
[
"unittest.main",
"closure_linter.testutil.TokenizeSourceAndRunEcmaPass",
"closure_linter.error_fixer.ErrorFixer"
] |
[((1701, 1718), 'unittest.main', 'googletest.main', ([], {}), '()\n', (1716, 1718), True, 'import unittest as googletest\n'), ((979, 1003), 'closure_linter.error_fixer.ErrorFixer', 'error_fixer.ErrorFixer', ([], {}), '()\n', (1001, 1003), False, 'from closure_linter import error_fixer\n'), ((1052, 1103), 'closure_linter.testutil.TokenizeSourceAndRunEcmaPass', 'testutil.TokenizeSourceAndRunEcmaPass', (['_TEST_SCRIPT'], {}), '(_TEST_SCRIPT)\n', (1089, 1103), False, 'from closure_linter import testutil\n'), ((1361, 1412), 'closure_linter.testutil.TokenizeSourceAndRunEcmaPass', 'testutil.TokenizeSourceAndRunEcmaPass', (['_TEST_SCRIPT'], {}), '(_TEST_SCRIPT)\n', (1398, 1412), False, 'from closure_linter import testutil\n')]
|
from logging import getLogger
import numpy as np
from imblearn.over_sampling import SMOTE
from sklearn.base import clone
from ..utils import augmented_rvalue, BaseTransformer
class MOS(BaseTransformer):
"""Perform Minimizing Overlapping Selection under SMOTE (MOSS) or under
No-Sampling (MOSNS) algorithm.
Parameters
----------
model : object
The model that should have a fit(X, y) method and a field corresponding
to feature weights. Currently only SGDClassifier should be passed,
other models would not work.
weight_func : callable
The function to extract weights from the model.
loss : str, 'log' or 'hinge'
Loss function to use in the algorithm. 'log' gives a logistic
regression, while 'hinge' gives a support vector machine.
seed : int, optional
Seed for python random.
l1_ratio : float
The value used to balance the L1 and L2 penalties in elastic-net.
    threshold : float
        The threshold value for feature dropout: instead of being compared
        to zero, weights whose absolute value does not exceed the threshold
        are dropped out.
epochs : int
The number of epochs to perform in the algorithm.
alphas : array-like, shape (n_alphas,), optional
The range of lambdas that should form the regularization path.
    sampling : bool
        Bool value that controls whether MOSS (True) or MOSNS (False) should
        be executed.
k_neighbors : int
Amount of nearest neighbors to use in SMOTE if MOSS is used.
Notes
-----
For more details see `this paper
<https://www.sciencedirect.com/science/article/pii/S0169743919306070/>`_.
Examples
--------
>>> from ITMO_FS.embedded import MOS
>>> from sklearn.linear_model import SGDClassifier
>>> import numpy as np
>>> from sklearn.datasets import make_classification
>>> from sklearn.linear_model import LogisticRegression
>>> dataset = make_classification(n_samples=100, n_features=10,
... n_informative=5, n_redundant=0, weights=[0.85, 0.15], random_state=42,
... shuffle=False)
>>> X, y = np.array(dataset[0]), np.array(dataset[1])
>>> m = MOS(model=SGDClassifier(),
... weight_func=lambda model: np.square(model.coef_).sum(axis=0)).fit(X, y)
>>> m.selected_features_
array([1, 3, 4], dtype=int64)
>>> m = MOS(model=SGDClassifier(), sampling=True,
... weight_func=lambda model: np.square(model.coef_).sum(axis=0)).fit(X, y)
>>> m.selected_features_
array([1, 3, 4, 6], dtype=int64)
"""
def __init__(self, model, weight_func, loss='log', seed=42, l1_ratio=0.5,
threshold=1e-3, epochs=1000, alphas=np.arange(0.01, 0.2, 0.01),
sampling=False, k_neighbors=2):
self.model = model
self.weight_func = weight_func
self.loss = loss
self.seed = seed
self.l1_ratio = l1_ratio
self.threshold = threshold
self.epochs = epochs
self.alphas = alphas
self.sampling = sampling
self.k_neighbors = k_neighbors
def _fit(self, X, y):
"""Run the MOS algorithm on the specified dataset.
Parameters
----------
X : array-like, shape (n_samples, n_features)
The input samples.
y : array-like, shape (n_samples,)
The classes for the samples.
Returns
-------
None
"""
if self.loss not in ['hinge', 'log']:
getLogger(__name__).error(
"Loss should be 'hinge' or 'log', %s was passed", self.loss)
raise KeyError(
"Loss should be 'hinge' or 'log', %s was passed" % self.loss)
if self.sampling:
try:
X, y = SMOTE(
random_state=self.seed,
k_neighbors=self.k_neighbors).fit_resample(X, y)
except ValueError:
getLogger(__name__).warning(
"Couldn't perform SMOTE because k_neighbors is bigger "
"than amount of instances in one of the classes; MOSNS "
"would be performed instead")
min_rvalue = 1
min_b = []
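        # Sweep the elastic-net regularization path and keep the weight
        # vector with the smallest augmented R-value, i.e. the least class
        # overlap among the surviving features.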
model = clone(self.model)
        # TODO: do a little more research on the range of lambdas
        for a in self.alphas:
model = model.set_params(
loss=self.loss, random_state=self.seed, penalty='elasticnet',
alpha=a, l1_ratio=self.l1_ratio, max_iter=self.epochs)
model.fit(X, y)
b = self.weight_func(model)
rvalue = augmented_rvalue(
X[:, np.flatnonzero(np.abs(b) > self.threshold)], y)
getLogger(__name__).info(
"For alpha %f: rvalue = %f, weight vector = %s", a, rvalue, b)
if min_rvalue > rvalue:
min_rvalue = rvalue
min_b = b
getLogger(__name__).info("New minimum rvalue: %f", rvalue)
getLogger(__name__).info("New weight vector: %s", b)
self.selected_features_ = np.flatnonzero(np.abs(min_b) > self.threshold)
|
[
"numpy.abs",
"numpy.arange",
"imblearn.over_sampling.SMOTE",
"sklearn.base.clone",
"logging.getLogger"
] |
[((2738, 2764), 'numpy.arange', 'np.arange', (['(0.01)', '(0.2)', '(0.01)'], {}), '(0.01, 0.2, 0.01)\n', (2747, 2764), True, 'import numpy as np\n'), ((4274, 4291), 'sklearn.base.clone', 'clone', (['self.model'], {}), '(self.model)\n', (4279, 4291), False, 'from sklearn.base import clone\n'), ((5166, 5179), 'numpy.abs', 'np.abs', (['min_b'], {}), '(min_b)\n', (5172, 5179), True, 'import numpy as np\n'), ((3539, 3558), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (3548, 3558), False, 'from logging import getLogger\n'), ((4770, 4789), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (4779, 4789), False, 'from logging import getLogger\n'), ((3816, 3875), 'imblearn.over_sampling.SMOTE', 'SMOTE', ([], {'random_state': 'self.seed', 'k_neighbors': 'self.k_neighbors'}), '(random_state=self.seed, k_neighbors=self.k_neighbors)\n', (3821, 3875), False, 'from imblearn.over_sampling import SMOTE\n'), ((4989, 5008), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (4998, 5008), False, 'from logging import getLogger\n'), ((5064, 5083), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (5073, 5083), False, 'from logging import getLogger\n'), ((3983, 4002), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (3992, 4002), False, 'from logging import getLogger\n'), ((4725, 4734), 'numpy.abs', 'np.abs', (['b'], {}), '(b)\n', (4731, 4734), True, 'import numpy as np\n')]
|
import xml.etree.ElementTree as ET
from ..v3_0 import dto, namespaces as ns
from .serialize_element import serialize_text_element
from ois_api_client.xml.get_full_tag import get_full_tag
# def serialize_user(data: dto.UserHeader, password_hash: str, request_signature: str) -> ET.Element:
def serialize_user(data: dto.UserHeader) -> ET.Element:
result = ET.Element(get_full_tag(ns.COMMON, 'user'))
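    # Append the child elements in order; the password hash and request
    # signature each carry their cryptoType as an XML attribute.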
serialize_text_element(result, 'login', data.login, ns.COMMON)
serialize_text_element(result, 'passwordHash', data.password_hash.value, ns.COMMON,
{'cryptoType': data.password_hash.crypto_type})
serialize_text_element(result, 'taxNumber', data.tax_number, ns.COMMON)
serialize_text_element(result, 'requestSignature', data.request_signature.value, ns.COMMON,
{'cryptoType': data.request_signature.crypto_type})
return result
|
[
"ois_api_client.xml.get_full_tag.get_full_tag"
] |
[((372, 403), 'ois_api_client.xml.get_full_tag.get_full_tag', 'get_full_tag', (['ns.COMMON', '"""user"""'], {}), "(ns.COMMON, 'user')\n", (384, 403), False, 'from ois_api_client.xml.get_full_tag import get_full_tag\n')]
|
from matplotlib import pyplot
from statistics import stdev, mean
import os
import glob
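# For each CSV in input/ (caption line, correct counts for white, correct
# counts for black, totals line) this script writes a LaTeX figure/table
# snippet to output/ and saves an accuracy-per-move plot.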
path = 'input/'
XLABEL = "moves"
YLABEL = "accuracy"
OFFSET = 0
CI = True
showT1 = [0, 1, 3, 5, 7]  # epoch indices shown as table columns; the row writers below assume five entries
# table caption ..
Caption = ""
label = "OutputFileName" # table label
parsing = "Outcome" # Threshold, Window, Settings, Clauses, S
epoch = 8
stddev = 0  # 1 to draw standard-deviation bands in the graph, 0 to disable
title_graph = ""
backslash = "\\"  # a single backslash, used to assemble LaTeX markup
for FILENAME in sorted(glob.glob(os.path.join(path, '*.csv'))):
print(FILENAME)
with open(f"{FILENAME}", 'r') as file:
m = []
s = []
m2 = []
s2 = []
perc1 = []
perc2 = []
perc3 = []
c = 0
counter = 0
tab = []
tab2 = []
labelinput = "Correct white"
labelinput2 = "Correct black"
names = "White"
names2 = "Black"
names3 = "Total"
namestab = []
average = 0
#print(FILENAME)
lined = []
Black = 0
White = 0
Total = 0
for line in file.readlines():
print(line)
counter += 1
if counter == 1:
lineds = [str(x) for x in line.strip().split(',')]
Caption = str(lineds[0])
if counter == 2:
lined = [int(x) for x in line.strip().split(',')]
tab.append(lined)
for i in range(len(tab)):
m.append(tab[i])
s.append(tab[i])
if counter == 3:
lined = [int(x) for x in line.strip().split(',')]
tab2.append(lined)
for i in range(len(tab2)):
m2.append(tab2[i])
s2.append(tab2[i])
                # Unwrap the one-row tables into flat per-epoch lists.
                m = m[0]
                m2 = m2[0]
                s = s[0]
                s2 = s2[0]
if counter == 4:
lined = [str(x) for x in line.strip().split(',')]
Black = lined[3]
White = lined[1]
Total = lined[5]
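        # Convert the raw correct counts into accuracy percentages for
        # white, black, and both colours pooled.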
for i in range(epoch):
perc1.append(int(m[i])/int(White)*100)
perc2.append(int(m2[i]) / int(Black) * 100)
perc3.append((int(m[i])+int(m2[i]))/(int(White)+int(Black))*100)
#print(perc1)
#print(perc2)
table = open("output/" + FILENAME[6:-4] + ".txt", 'w')
table.write(backslash + "FloatBarrier\n")
table.write(backslash + "begin{figure}[h!]\n")
table.write(" " + backslash + "centering\n")
table.write(" " + backslash + "includegraphics[scale=.6]{Images/Results/" + FILENAME[6:-4] + ".png}\n")
table.write(" " + backslash + "caption{" + Caption + "}\n")
table.write(" " + backslash + "label{fig:" + FILENAME[6:-4] + "}\n")
table.write(backslash + "end{figure}\n")
table.write(backslash + "FloatBarrier\n")
table.write(backslash + "FloatBarrier\n")
table.write(backslash + "begin{table}[h!]\n")
table.write(backslash + "centering\n")
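        # Five shown indices give five per-move columns; with four, the last
        # column is a highlighted last-10-epoch average instead.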
if len(showT1) == 5:
table.write(backslash + "begin{tabular}{|a|d|d|d|d|d|}\n")
if len(showT1) == 4:
table.write(backslash + "begin{tabular}{|a|d|d|d|d||x|}\n")
table.write(backslash + "hline\n")
table.write(backslash + "rowcolor{Blue}\n")
if counter == 4:
table.write(backslash + "begin{tabular}[c]{@{}l@{}}" + parsing + backslash + "end{tabular}&")
table.write(backslash + "begin{tabular}[c]{@{}l@{}} B Move " + str(showT1[0]) + backslash + "end{tabular}&")
table.write(backslash + "begin{tabular}[c]{@{}l@{}} B Move " + str(showT1[1]) + backslash + "end{tabular}&")
table.write(backslash + "begin{tabular}[c]{@{}l@{}} B Move " + str(showT1[2]) + backslash + "end{tabular}&")
table.write(backslash + "begin{tabular}[c]{@{}l@{}} B Move " + str(showT1[3]) + backslash + "end{tabular}")
ttitle = ""
ctitle = ""
wtitle = ""
stitle = ""
if len(showT1) == 5:
table.write("&")
table.write(
backslash + "begin{tabular}[c]{@{}l@{}} B Move " + str(showT1[4]) + backslash + "end{tabular}")
if len(showT1) == 4:
table.write("&")
table.write(backslash + "begin{tabular}[c]{@{}l@{}} Last 10 Epoch" + backslash + "end{tabular}")
table.write(backslash + backslash + " " + backslash + "hline\n")
#print(names)
backslash = "\ "
backslash = backslash[:1]
table.write(backslash + "begin{tabular}[c]{@{}l@{}} " + labelinput2 + "\end{tabular}\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m2[showT1[0]], 2)) + backslash + "end{tabular}\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m2[showT1[1]], 2)) + backslash+ "end{tabular}\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m2[showT1[2]], 2)) + backslash + "end{tabular}\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m2[showT1[3]], 2)) + backslash + "end{tabular}")
table.write("\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m2[showT1[4]], 2))+ backslash + "end{tabular}")
table.write(backslash + backslash+" " + backslash + "hline\n")
table.write(backslash + "begin{tabular}[c]{@{}l@{}} " + labelinput + "\end{tabular}\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m[showT1[0]], 2)) + backslash + "end{tabular}\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m[showT1[1]], 2)) + backslash+ "end{tabular}\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m[showT1[2]], 2)) + backslash + "end{tabular}\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m[showT1[3]], 2))+ backslash + "end{tabular}")
table.write("\n")
table.write("&" + backslash + "begin{tabular}[c]{@{}l@{}}" + str(round(m[showT1[4]], 2))+ backslash + "end{tabular}")
table.write(backslash + backslash+" " + backslash + "hline\n")
x3 = [i for i in range(len(perc3))]
x2 = [i for i in range(len(perc2))]
x = [i for i in range(len(perc1))]
pyplot.plot(x3, perc3, lw=2, label=f"{names3}")
pyplot.plot(x2, perc2, lw=2, label=f"{names2}")
pyplot.plot(x, perc1, lw=2, label=f"{names}")
table.write(backslash + "end{tabular}\n")
table.write(backslash + "caption{" + Caption + "}\n")
table.write(backslash + "label{tab:" + FILENAME[6:-4] + "}\n")
table.write(backslash + "end{table}\n")
table.write(backslash + "FloatBarrier\n")
pyplot.legend(loc='best')
TITLE = title_graph
pyplot.gca().set_xlim([1, epoch-1])
pyplot.xticks(x)
pyplot.title(Caption)
pyplot.xlabel(XLABEL)
pyplot.ylabel(YLABEL)
pyplot.grid(zorder=0)
pyplot.savefig("output/" + FILENAME[6:-4], dpi=300)
pyplot.show()
#x = [(i+OFFSET) for i in x]
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"os.path.join",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.gca",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xticks",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.xlabel"
] |
[((6498, 6545), 'matplotlib.pyplot.plot', 'pyplot.plot', (['x3', 'perc3'], {'lw': '(2)', 'label': 'f"""{names3}"""'}), "(x3, perc3, lw=2, label=f'{names3}')\n", (6509, 6545), False, 'from matplotlib import pyplot\n'), ((6550, 6597), 'matplotlib.pyplot.plot', 'pyplot.plot', (['x2', 'perc2'], {'lw': '(2)', 'label': 'f"""{names2}"""'}), "(x2, perc2, lw=2, label=f'{names2}')\n", (6561, 6597), False, 'from matplotlib import pyplot\n'), ((6602, 6647), 'matplotlib.pyplot.plot', 'pyplot.plot', (['x', 'perc1'], {'lw': '(2)', 'label': 'f"""{names}"""'}), "(x, perc1, lw=2, label=f'{names}')\n", (6613, 6647), False, 'from matplotlib import pyplot\n'), ((6913, 6938), 'matplotlib.pyplot.legend', 'pyplot.legend', ([], {'loc': '"""best"""'}), "(loc='best')\n", (6926, 6938), False, 'from matplotlib import pyplot\n'), ((7007, 7023), 'matplotlib.pyplot.xticks', 'pyplot.xticks', (['x'], {}), '(x)\n', (7020, 7023), False, 'from matplotlib import pyplot\n'), ((7028, 7049), 'matplotlib.pyplot.title', 'pyplot.title', (['Caption'], {}), '(Caption)\n', (7040, 7049), False, 'from matplotlib import pyplot\n'), ((7054, 7075), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['XLABEL'], {}), '(XLABEL)\n', (7067, 7075), False, 'from matplotlib import pyplot\n'), ((7080, 7101), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['YLABEL'], {}), '(YLABEL)\n', (7093, 7101), False, 'from matplotlib import pyplot\n'), ((7106, 7127), 'matplotlib.pyplot.grid', 'pyplot.grid', ([], {'zorder': '(0)'}), '(zorder=0)\n', (7117, 7127), False, 'from matplotlib import pyplot\n'), ((7132, 7183), 'matplotlib.pyplot.savefig', 'pyplot.savefig', (["('output/' + FILENAME[6:-4])"], {'dpi': '(300)'}), "('output/' + FILENAME[6:-4], dpi=300)\n", (7146, 7183), False, 'from matplotlib import pyplot\n'), ((7188, 7201), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (7199, 7201), False, 'from matplotlib import pyplot\n'), ((520, 547), 'os.path.join', 'os.path.join', (['path', '"""*.csv"""'], {}), "(path, '*.csv')\n", (532, 547), False, 'import os\n'), ((6967, 6979), 'matplotlib.pyplot.gca', 'pyplot.gca', ([], {}), '()\n', (6977, 6979), False, 'from matplotlib import pyplot\n')]
|
"""A shim module for deprecated imports
"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
import sys
import types
from importlib import import_module
from .importstring import import_item
class ShimWarning(Warning):
"""A warning to show when a module has moved, and a shim is in its place."""
class ShimImporter(object):
"""Import hook for a shim.
This ensures that submodule imports return the real target module,
not a clone that will confuse `is` and `isinstance` checks.
"""
def __init__(self, src, mirror):
self.src = src
self.mirror = mirror
def _mirror_name(self, fullname):
"""get the name of the mirrored module"""
return self.mirror + fullname[len(self.src):]
def find_module(self, fullname, path=None):
"""Return self if we should be used to import the module."""
if fullname.startswith(self.src + '.'):
mirror_name = self._mirror_name(fullname)
try:
mod = import_item(mirror_name)
except ImportError:
return
else:
if not isinstance(mod, types.ModuleType):
# not a module
return None
return self
def load_module(self, fullname):
"""Import the mirrored module, and insert it into sys.modules"""
mirror_name = self._mirror_name(fullname)
mod = import_item(mirror_name)
sys.modules[fullname] = mod
return mod
class ShimModule(types.ModuleType):
def __init__(self, *args, **kwargs):
self._mirror = kwargs.pop("mirror")
src = kwargs.pop("src", None)
if src:
kwargs['name'] = src.rsplit('.', 1)[-1]
super(ShimModule, self).__init__(*args, **kwargs)
# add import hook for descendent modules
if src:
sys.meta_path.append(
ShimImporter(src=src, mirror=self._mirror)
)
@property
def __path__(self):
return []
@property
def __spec__(self):
"""Don't produce __spec__ until requested"""
return import_module(self._mirror).__spec__
def __dir__(self):
return dir(import_module(self._mirror))
@property
def __all__(self):
"""Ensure __all__ is always defined"""
mod = import_module(self._mirror)
try:
return mod.__all__
except AttributeError:
return [name for name in dir(mod) if not name.startswith('_')]
def __getattr__(self, key):
# Use the equivalent of import_item(name), see below
name = "%s.%s" % (self._mirror, key)
try:
return import_item(name)
except ImportError:
raise AttributeError(key)
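# --- Illustrative usage (not part of the original module) ---
# A minimal sketch: alias a hypothetical moved package `oldpkg.utils` to its
# new home `newpkg.utils`; both package names are made up for illustration.
if __name__ == "__main__":
    shim = ShimModule(src='oldpkg.utils', mirror='newpkg.utils')
    sys.modules['oldpkg.utils'] = shim
    # `import oldpkg.utils` now resolves attributes from newpkg.utils, and
    # submodule imports are redirected through the ShimImporter meta-path hook.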
|
[
"importlib.import_module"
] |
[((2422, 2449), 'importlib.import_module', 'import_module', (['self._mirror'], {}), '(self._mirror)\n', (2435, 2449), False, 'from importlib import import_module\n'), ((2206, 2233), 'importlib.import_module', 'import_module', (['self._mirror'], {}), '(self._mirror)\n', (2219, 2233), False, 'from importlib import import_module\n'), ((2290, 2317), 'importlib.import_module', 'import_module', (['self._mirror'], {}), '(self._mirror)\n', (2303, 2317), False, 'from importlib import import_module\n')]
|
from flask import Blueprint,render_template,request
from flask_login import login_required, current_user
from . import db
from keras.models import load_model
from keras.preprocessing import image
import numpy as np
import cv2
main=Blueprint('main',__name__)
model = load_model(r'D:\Shubham\Alzhiemer-Prediction\project\model.h5')  # raw string keeps the Windows path backslashes literal
CATEGORIES = ["MildDemented", "ModerateDemented","NonDemented","VeryMildDemented"]
def predict_label(image_name):
image_path='D:/Shubham/Alzhiemer-Prediction/project/static/alzheimer-images/' +image_name
    # cv2.imread takes an IMREAD_* flag, not a color-conversion code
    image = cv2.imread(image_path, cv2.IMREAD_GRAYSCALE)
    image = cv2.resize(image, (224, 224), interpolation=cv2.INTER_AREA)
    image = np.array(image)
    image = image.astype('float32')
    image /= 255  # scale pixel values to [0, 1]
    image = np.stack((image,) * 3, axis=-1)  # replicate the grayscale channel to 3 channels
image = image.reshape(-1, 224, 224, 3)
result=model.predict(image)
return result
@main.route('/')
def home():
return render_template('home.html')
@main.route('/prediction')
# @login_required
def prediction():
    return render_template('prediction.html', name=current_user.name)  # pass the user's name, not the literal string
@main.route('/prediction', methods=['POST'])
def predict():
img=request.files['image']
imgpath='./project/static/alzheimer-images/'+img.filename
img.save(imgpath)
p=predict_label(img.filename)
classes=np.argmax(p)
print("Disease name ", CATEGORIES[classes])
return render_template('prediction.html', result=CATEGORIES[classes], image='alzheimer-images/'+img.filename)
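# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of how this blueprint might be registered; `create_app`
# is a hypothetical factory name, not something the original project is
# known to define.
def create_app():
    from flask import Flask
    flask_app = Flask(__name__)
    flask_app.register_blueprint(main)
    return flask_app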
|
[
"keras.models.load_model",
"numpy.stack",
"flask.Blueprint",
"numpy.argmax",
"cv2.imread",
"numpy.array",
"keras.preprocessing.image.astype",
"keras.preprocessing.image.reshape",
"flask.render_template",
"cv2.resize"
] |
[((259, 286), 'flask.Blueprint', 'Blueprint', (['"""main"""', '__name__'], {}), "('main', __name__)\n", (268, 286), False, 'from flask import Blueprint, render_template, request\n'), ((295, 361), 'keras.models.load_model', 'load_model', (['"""D:\\\\Shubham\\\\Alzhiemer-Prediction\\\\project\\\\model.h5"""'], {}), "('D:\\\\Shubham\\\\Alzhiemer-Prediction\\\\project\\\\model.h5')\n", (305, 361), False, 'from keras.models import load_model\n'), ((586, 628), 'cv2.imread', 'cv2.imread', (['image_path', 'cv2.COLOR_BGR2GRAY'], {}), '(image_path, cv2.COLOR_BGR2GRAY)\n', (596, 628), False, 'import cv2\n'), ((642, 701), 'cv2.resize', 'cv2.resize', (['image', '(224, 224)'], {'interpolation': 'cv2.INTER_AREA'}), '(image, (224, 224), interpolation=cv2.INTER_AREA)\n', (652, 701), False, 'import cv2\n'), ((715, 730), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (723, 730), True, 'import numpy as np\n'), ((744, 767), 'keras.preprocessing.image.astype', 'image.astype', (['"""float32"""'], {}), "('float32')\n", (756, 767), False, 'from keras.preprocessing import image\n'), ((799, 830), 'numpy.stack', 'np.stack', (['((image,) * 3)'], {'axis': '(-1)'}), '((image,) * 3, axis=-1)\n', (807, 830), True, 'import numpy as np\n'), ((844, 874), 'keras.preprocessing.image.reshape', 'image.reshape', (['(-1)', '(224)', '(224)', '(3)'], {}), '(-1, 224, 224, 3)\n', (857, 874), False, 'from keras.preprocessing import image\n'), ((974, 1002), 'flask.render_template', 'render_template', (['"""home.html"""'], {}), "('home.html')\n", (989, 1002), False, 'from flask import Blueprint, render_template, request\n'), ((1083, 1143), 'flask.render_template', 'render_template', (['"""prediction.html"""'], {'name': '"""current_user.name"""'}), "('prediction.html', name='current_user.name')\n", (1098, 1143), False, 'from flask import Blueprint, render_template, request\n'), ((1376, 1388), 'numpy.argmax', 'np.argmax', (['p'], {}), '(p)\n', (1385, 1388), True, 'import numpy as np\n'), ((1452, 1561), 'flask.render_template', 'render_template', (['"""prediction.html"""'], {'result': 'CATEGORIES[classes]', 'image': "('alzheimer-images/' + img.filename)"}), "('prediction.html', result=CATEGORIES[classes], image=\n 'alzheimer-images/' + img.filename)\n", (1467, 1561), False, 'from flask import Blueprint, render_template, request\n')]
|
#!/usr/bin/python3
#
# Copyright (C) 2016 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os,sys,json
from constants_and_utils import *
def usage():
print("""
Merges certain results of the gradle build.
This script recognizes the following arguments in any order:
mergeBuildInfo (merge the buildInfo files (used by jetpad) produced by the most recent (compose and androidx) builds)
mergeExecutionData (merge the coverage execution data (recorded by jacoco) produced by the most recent builds)
mergeSourceJars (merge the source jars (used by jacoco/coverage ui) produced by the most recent builds)
(these args are case sensitive)
DIST_DIR=<a path to a directory> if this is not passed, the default value is out/dist.
Should be absolute or relative to e.g. androidx-master-dev/
Sample usage: time busytown/merge.py mergeSourceJars DIST_DIR=out/dist
""")
exit(1)
def main():
things_to_merge, dist_dir = parse_arguments()
move_to_dist_dir(dist_dir)
if SOURCE_JARS in things_to_merge: mergeSourceJars()
if EXECUTION_DATA in things_to_merge: mergeCoverageExecution()
if BUILD_INFO in things_to_merge:
mergeAggregateBuildInfoFiles()
mergeBuildInfoFolders()
if LIBRARY_METRICS in things_to_merge: mergeLibraryMetrics()
def parse_arguments():
things_to_merge = []
for arg in sys.argv:
if any(arg.endswith(SKIPPED_SUFFIX) for SKIPPED_SUFFIX in SKIPPED_ARG_SUFFIXES): continue
elif arg in MERGE_COMMANDS:
things_to_merge.append(arg)
elif any(help_keyword in arg for help_keyword in HELP_SYNTAX_LIST):
usage()
else:
print("ERROR:", arg, "is an invalid keyword")
usage()
dist_dir = os.environ['DIST_DIR'] if 'DIST_DIR' in os.environ else "out/dist"
return things_to_merge, dist_dir
def move_to_dist_dir(dist_dir):
move_to_base_dir(print_pwd=False)
os.chdir(dist_dir)
print("Currently in", os.getcwd())
def thisRunIsValidForCoverage():
# Assert that AMD did not exclude any projects from being tested
# in either androidx or ui build (thereby invalidating this run for coverage).
# If the AMD ran, that file will exist; if it skipped browser, that log will have:
# "checking whether I should include :browser:browser and my answer is false"
if (os.path.exists("affected_module_detector_log.txt") and\
'false' in open('affected_module_detector_log.txt').read()) or\
(os.path.exists("ui/affected_module_detector_log.txt") and\
'false' in open('ui/affected_module_detector_log.txt').read()):
print("WARNING: not doing coverage merging because the AMD was not a no-op")
return False
return True
def mergeSourceJars():
if not thisRunIsValidForCoverage(): return
# assert that the report jars exist
if not (os.path.exists("jacoco-report-classes.jar") and os.path.exists("ui/jacoco-report-classes.jar")):
print("WARNING: not merging jacoco source jars as the source jars are not present")
return
ziptmp = "ziptmp"
run_command("rm -rf " + ziptmp)
run_command("mkdir " + ziptmp)
# exclude these test/sample app files which are duplicated in the source jars so that `unzip` doesn't fail
# See b/145211240 for more context. A full solution may be blocked on b/143934485.
run_command("unzip -quo jacoco-report-classes.jar -d ziptmp -x \"testapp-debug-androidTest-allclasses*\"")
run_command("unzip -quo ui/jacoco-report-classes.jar -d ziptmp -x \"samples-debug-androidTest-allclasses*\"")
run_command("rm -f jacoco-report-classes-all.jar") # -f to not fail if file doesn't exist
run_command("jar -cf jacoco-report-classes-all.jar -C ziptmp .")
run_command("rm -rf " + ziptmp)
def mergeCoverageExecution():
if not thisRunIsValidForCoverage(): return
# assert that the coverage zips exist
if not (os.path.exists("coverage_ec_files.zip") and os.path.exists("ui/coverage_ec_files.zip")):
print("WARNING: not merging coverage execution data as the coverage zips are not present")
return
ziptmp = "ziptmp"
run_command("rm -rf " + ziptmp)
run_command("mkdir " + ziptmp)
run_command("unzip -quo coverage_ec_files.zip -d ziptmp") # -quo = quiet; keep newer only
run_command("unzip -quo ui/coverage_ec_files.zip -d ziptmp")
run_command("rm -f coverage_ec_files_all.zip") # -f to not fail if file doesn't exist
run_command("zip -rq coverage_ec_files_all.zip ziptmp")
run_command("rm -rf " + ziptmp)
def mergeAggregateBuildInfoFiles() :
print("merging aggregate build info files")
androidx_buildInfo = json.load(open("androidx_aggregate_build_info.txt"))["artifacts"]
compose_buildInfo = json.load(open("ui/androidx_aggregate_build_info.txt"))["artifacts"]
duplicate_checking_dict = {}
for buildinfo in androidx_buildInfo + compose_buildInfo:
artifactId, groupId, sha = buildinfo["artifactId"], buildinfo["groupId"], buildinfo["sha"]
# artifactid and groupid is the unique identifier for libraries
if (artifactId, groupId) not in duplicate_checking_dict:
duplicate_checking_dict[(artifactId, groupId)] = (sha, buildinfo)
else:
expected_hash = duplicate_checking_dict[(artifactId, groupId)][0]
if expected_hash != sha:
raise Exception("Build info specifies having been built from multiple commits: " + expected_hash + " and " + sha + ". Were AndroidX and Compose built from the same commit?")
# don't allow androidx and compose to release two different versions of the same lib
resultJson = {"artifacts":[buildinfo for sha,buildinfo in duplicate_checking_dict.values()]}
with open("androidx_aggregate_build_info.txt", 'w') as outfile:
json.dump(resultJson, outfile, sort_keys=True, indent=4, separators=(',', ': '))
def mergeBuildInfoFolders(): # -a = all in directory. -u = overwrite older (in case androidx build hasn't been run in a while)
run_command("cp -au ui/build-info/. build-info/")
def mergeLibraryMetrics():
run_command("cp -au ui/librarymetrics/. librarymetrics/")
if __name__ == "__main__": main()
|
[
"os.getcwd",
"json.dump",
"os.path.exists",
"os.chdir"
] |
[((2574, 2592), 'os.chdir', 'os.chdir', (['dist_dir'], {}), '(dist_dir)\n', (2582, 2592), False, 'import os, sys, json\n'), ((2619, 2630), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2628, 2630), False, 'import os, sys, json\n'), ((6467, 6552), 'json.dump', 'json.dump', (['resultJson', 'outfile'], {'sort_keys': '(True)', 'indent': '(4)', 'separators': "(',', ': ')"}), "(resultJson, outfile, sort_keys=True, indent=4, separators=(',', ': ')\n )\n", (6476, 6552), False, 'import os, sys, json\n'), ((2996, 3046), 'os.path.exists', 'os.path.exists', (['"""affected_module_detector_log.txt"""'], {}), "('affected_module_detector_log.txt')\n", (3010, 3046), False, 'import os, sys, json\n'), ((3133, 3186), 'os.path.exists', 'os.path.exists', (['"""ui/affected_module_detector_log.txt"""'], {}), "('ui/affected_module_detector_log.txt')\n", (3147, 3186), False, 'import os, sys, json\n'), ((3511, 3554), 'os.path.exists', 'os.path.exists', (['"""jacoco-report-classes.jar"""'], {}), "('jacoco-report-classes.jar')\n", (3525, 3554), False, 'import os, sys, json\n'), ((3559, 3605), 'os.path.exists', 'os.path.exists', (['"""ui/jacoco-report-classes.jar"""'], {}), "('ui/jacoco-report-classes.jar')\n", (3573, 3605), False, 'import os, sys, json\n'), ((4562, 4601), 'os.path.exists', 'os.path.exists', (['"""coverage_ec_files.zip"""'], {}), "('coverage_ec_files.zip')\n", (4576, 4601), False, 'import os, sys, json\n'), ((4606, 4648), 'os.path.exists', 'os.path.exists', (['"""ui/coverage_ec_files.zip"""'], {}), "('ui/coverage_ec_files.zip')\n", (4620, 4648), False, 'import os, sys, json\n')]
|
import random
import time, datetime
import struct
import base64
class Client_UUID(object): #Huge thanks to github user fweak for helping me figure out the mystery of the client_uuid. made some discord "science" notes here: https://docs.google.com/document/d/1b5aDx7S1iLHoeb6B56izZakbXItA84gUjFzK-0OBwy0
def __init__(self, userID, creationTime="now", eventNum=0):
self.userID = int(userID)
num = int(4294967296 * random.random())
self.randomPrefix = num if num<=2147483647 else num-4294967296
self.creationTime = int(time.mktime(datetime.datetime.now().timetuple()) * 1000) if creationTime == "now" else creationTime
self.eventNum = eventNum
self.UUID = ""
def calculate(self, eventNum, userID, increment):
if eventNum == "default":
eventNum = self.eventNum
if userID == "default":
userID = self.userID
else:
userID = int(userID)
buf = bytearray(struct.pack('24x'))
buf[0:4] = struct.pack("<i", userID%4294967296 if userID%4294967296<=2147483647 else userID%4294967296-2147483647)
buf[4:8] = struct.pack("<i", userID>>32)
buf[8:12] = struct.pack("<i", self.randomPrefix)
buf[12:16] = struct.pack("<i", self.creationTime%4294967296 if self.creationTime%4294967296<=2147483647 else self.creationTime%4294967296-2147483647)
buf[16:20] = struct.pack("<i", self.creationTime>>32)
buf[20:24] = struct.pack("<i", eventNum)
if increment:
self.eventNum += 1
self.UUID = base64.b64encode(buf).decode('ascii')
return self.UUID
def refresh(self, resetEventNum=True):
        num = int(4294967296 * random.random())  # regenerate the random prefix
        self.randomPrefix = num if num<=2147483647 else num-4294967296
        self.creationTime = int(time.mktime(datetime.datetime.now().timetuple()) * 1000)
if resetEventNum:
self.eventNum = 0
return self.calculate(eventNum="default", userID="default", increment=True)
@staticmethod
def parse(client_uuid):
decoded_client_uuid = base64.b64decode(client_uuid)
unpacked = []
for i in range(6):
unpacked.append(struct.unpack('<i', decoded_client_uuid[4*i:4*i+4])[0])
UUIDdata = {}
userIDguess = (unpacked[1]<<32) + unpacked[0]
UUIDdata['userID'] = repr(userIDguess if userIDguess%4294967296<=2147483647 else userIDguess+4294967296)
UUIDdata['randomPrefix'] = unpacked[2]
creationTimeGuess = (unpacked[4]<<32) + unpacked[3]
        UUIDdata['creationTime'] = creationTimeGuess if creationTimeGuess%4294967296<=2147483647 else creationTimeGuess+4294967296
UUIDdata['eventNum'] = unpacked[5]
return UUIDdata
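# --- Illustrative usage (not part of the original class) ---
# A minimal sketch showing a generate/parse round-trip; the user ID below is
# an arbitrary made-up value.
if __name__ == "__main__":
    uuid_gen = Client_UUID(userID=123456789)
    token = uuid_gen.calculate(eventNum="default", userID="default", increment=True)
    print(token)                     # base64 of the packed 24-byte buffer
    print(Client_UUID.parse(token))  # recovers userID, randomPrefix, creationTime, eventNum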
|
[
"struct.unpack",
"base64.b64decode",
"struct.pack",
"random.random",
"base64.b64encode",
"datetime.datetime.now"
] |
[((1011, 1128), 'struct.pack', 'struct.pack', (['"""<i"""', '(userID % 4294967296 if userID % 4294967296 <= 2147483647 else userID % \n 4294967296 - 2147483647)'], {}), "('<i', userID % 4294967296 if userID % 4294967296 <= 2147483647 else\n userID % 4294967296 - 2147483647)\n", (1022, 1128), False, 'import struct\n'), ((1134, 1165), 'struct.pack', 'struct.pack', (['"""<i"""', '(userID >> 32)'], {}), "('<i', userID >> 32)\n", (1145, 1165), False, 'import struct\n'), ((1184, 1220), 'struct.pack', 'struct.pack', (['"""<i"""', 'self.randomPrefix'], {}), "('<i', self.randomPrefix)\n", (1195, 1220), False, 'import struct\n'), ((1242, 1393), 'struct.pack', 'struct.pack', (['"""<i"""', '(self.creationTime % 4294967296 if self.creationTime % 4294967296 <= \n 2147483647 else self.creationTime % 4294967296 - 2147483647)'], {}), "('<i', self.creationTime % 4294967296 if self.creationTime % \n 4294967296 <= 2147483647 else self.creationTime % 4294967296 - 2147483647)\n", (1253, 1393), False, 'import struct\n'), ((1400, 1442), 'struct.pack', 'struct.pack', (['"""<i"""', '(self.creationTime >> 32)'], {}), "('<i', self.creationTime >> 32)\n", (1411, 1442), False, 'import struct\n'), ((1462, 1489), 'struct.pack', 'struct.pack', (['"""<i"""', 'eventNum'], {}), "('<i', eventNum)\n", (1473, 1489), False, 'import struct\n'), ((2091, 2120), 'base64.b64decode', 'base64.b64decode', (['client_uuid'], {}), '(client_uuid)\n', (2107, 2120), False, 'import base64\n'), ((972, 990), 'struct.pack', 'struct.pack', (['"""24x"""'], {}), "('24x')\n", (983, 990), False, 'import struct\n'), ((433, 448), 'random.random', 'random.random', ([], {}), '()\n', (446, 448), False, 'import random\n'), ((1564, 1585), 'base64.b64encode', 'base64.b64encode', (['buf'], {}), '(buf)\n', (1580, 1585), False, 'import base64\n'), ((2198, 2255), 'struct.unpack', 'struct.unpack', (['"""<i"""', 'decoded_client_uuid[4 * i:4 * i + 4]'], {}), "('<i', decoded_client_uuid[4 * i:4 * i + 4])\n", (2211, 2255), False, 'import struct\n'), ((565, 588), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (586, 588), False, 'import time, datetime\n'), ((1786, 1809), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1807, 1809), False, 'import time, datetime\n')]
|
# Future
from __future__ import annotations
# Packages
import aiohttp.web
import aiohttp_session
async def logout(request: aiohttp.web.Request) -> aiohttp.web.Response:
session = await aiohttp_session.get_session(request)
session.invalidate()
return aiohttp.web.HTTPFound("/")
def setup(app: aiohttp.web.Application) -> None:
app.add_routes(
[
aiohttp.web.get(r"/api/discord/logout", logout)
]
)
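# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of wiring this route into an application together with the
# session middleware that `aiohttp_session.get_session` relies on; the
# cookie-storage setup follows aiohttp_session's documented pattern and is an
# assumption about how the surrounding project configures it.
def make_app() -> aiohttp.web.Application:
    import base64
    from cryptography import fernet
    from aiohttp_session.cookie_storage import EncryptedCookieStorage
    app = aiohttp.web.Application()
    secret_key = base64.urlsafe_b64decode(fernet.Fernet.generate_key())
    aiohttp_session.setup(app, EncryptedCookieStorage(secret_key))
    setup(app)
    return app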
|
[
"aiohttp_session.get_session"
] |
[((193, 229), 'aiohttp_session.get_session', 'aiohttp_session.get_session', (['request'], {}), '(request)\n', (220, 229), False, 'import aiohttp_session\n')]
|
"""Stage 7: Puzzle 10 of 11
Here's a change to the previous puzzle. How many times should you
repeat to complete the drawing?
"""
import sys
sys.path.append('..')
import codestudio
artist = codestudio.load('s1level44')
artist.speed = 'fastest'
a = artist
for count3 in range(9): # ???
for count2 in range(10):
artist.color = artist.random_colour()
for count in range(4):
artist.move_forward(20)
artist.turn_right(90)
artist.move_forward(20)
artist.turn_right(80)
artist.check()
|
[
"sys.path.append",
"codestudio.load"
] |
[((144, 165), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (159, 165), False, 'import sys\n'), ((193, 221), 'codestudio.load', 'codestudio.load', (['"""s1level44"""'], {}), "('s1level44')\n", (208, 221), False, 'import codestudio\n')]
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from base import BaseObject
from cendalytics.education.bp import EducationAnalysisAPI
from cendalytics.inference.bp import InferenceAPI
from cendalytics.skills.core.bp import SkillsReportAPI
from cendalytics.tfidf.core.bp import VectorSpaceAPI
class CendantAPI(BaseObject):
""" Cendant API """
def __init__(self,
is_debug: bool = False):
"""
Created:
6-Nov-2019
<EMAIL>
"""
BaseObject.__init__(self, __name__)
self._is_debug = is_debug
def inference(self) -> InferenceAPI:
return InferenceAPI(is_debug=self._is_debug)
def skills(self) -> SkillsReportAPI:
return SkillsReportAPI(is_debug=self._is_debug)
def vectorspace(self) -> VectorSpaceAPI:
return VectorSpaceAPI(is_debug=self._is_debug)
def education(self) -> EducationAnalysisAPI:
return EducationAnalysisAPI(is_debug=self._is_debug)
def main():
pass
if __name__ == "__main__":
main()
|
[
"cendalytics.inference.bp.InferenceAPI",
"cendalytics.education.bp.EducationAnalysisAPI",
"base.BaseObject.__init__",
"cendalytics.skills.core.bp.SkillsReportAPI",
"cendalytics.tfidf.core.bp.VectorSpaceAPI"
] |
[((506, 541), 'base.BaseObject.__init__', 'BaseObject.__init__', (['self', '__name__'], {}), '(self, __name__)\n', (525, 541), False, 'from base import BaseObject\n'), ((633, 670), 'cendalytics.inference.bp.InferenceAPI', 'InferenceAPI', ([], {'is_debug': 'self._is_debug'}), '(is_debug=self._is_debug)\n', (645, 670), False, 'from cendalytics.inference.bp import InferenceAPI\n'), ((728, 768), 'cendalytics.skills.core.bp.SkillsReportAPI', 'SkillsReportAPI', ([], {'is_debug': 'self._is_debug'}), '(is_debug=self._is_debug)\n', (743, 768), False, 'from cendalytics.skills.core.bp import SkillsReportAPI\n'), ((830, 869), 'cendalytics.tfidf.core.bp.VectorSpaceAPI', 'VectorSpaceAPI', ([], {'is_debug': 'self._is_debug'}), '(is_debug=self._is_debug)\n', (844, 869), False, 'from cendalytics.tfidf.core.bp import VectorSpaceAPI\n'), ((935, 980), 'cendalytics.education.bp.EducationAnalysisAPI', 'EducationAnalysisAPI', ([], {'is_debug': 'self._is_debug'}), '(is_debug=self._is_debug)\n', (955, 980), False, 'from cendalytics.education.bp import EducationAnalysisAPI\n')]
|
from datetime import timedelta
from django.conf import settings
from django.test import override_settings
from unittest.mock import patch
from orchestra.tests.helpers import OrchestraTestCase
from orchestra.tests.helpers.fixtures import setup_models
from orchestra.tests.helpers.fixtures import StaffBotRequestFactory
from orchestra.tests.helpers.fixtures import StaffingRequestInquiryFactory
from orchestra.tests.helpers.fixtures import StepFactory
from orchestra.tests.helpers.fixtures import TaskFactory
from orchestra.tests.helpers.fixtures import WorkerFactory
from orchestra.bots.errors import SlackUserUnauthorized
from orchestra.bots.staffbot import StaffBot
from orchestra.bots.tests.fixtures import get_mock_slack_data
from orchestra.communication.staffing import send_staffing_requests
from orchestra.communication.mail import html_from_plaintext
from orchestra.models import CommunicationPreference
from orchestra.models import StaffBotRequest
from orchestra.models import StaffingRequestInquiry
from orchestra.models import Task
from orchestra.models import Worker
from orchestra.models import WorkerCertification
from orchestra.utils.task_lifecycle import assign_task
from orchestra.utils.task_lifecycle import is_worker_certified_for_task
def _noop_details(task_details):
return ''
class StaffBotTest(OrchestraTestCase):
def setUp(self):
super().setUp()
setup_models(self)
self.worker = self.workers[0]
self.worker.user.is_superuser = True
self.worker.user.save()
patcher = patch(
('orchestra.bots.staffbot.StaffBot'
'._send_staffing_request_by_slack'))
patcher.start()
self.addCleanup(patcher.stop)
def _get_worker_for_task(self, task, role):
# Get certified reviewer
for worker in Worker.objects.all():
if is_worker_certified_for_task(worker, task, role):
return worker
def _test_staffing_requests(self, worker, task, command,
can_slack=False, can_mail=False):
StaffBotRequest.objects.all().delete()
bot = StaffBot()
communication_type = (CommunicationPreference.CommunicationType
.NEW_TASK_AVAILABLE.value)
communication_preference = CommunicationPreference.objects.get(
worker=worker,
communication_type=communication_type)
communication_preference.methods.slack = can_slack
communication_preference.methods.email = can_mail
communication_preference.save()
data = get_mock_slack_data(
text=command,
user_id=self.worker.slack_user_id)
bot.dispatch(data)
send_staffing_requests(worker_batch_size=20,
frequency=timedelta(minutes=0))
self.assertEqual(StaffingRequestInquiry.objects.filter(
communication_preference__worker_id=worker,
request__task=task).count(), can_slack + can_mail)
def test_assert_validate_error(self):
bot = StaffBot()
with self.assertRaises(SlackUserUnauthorized):
mock_slack_data = get_mock_slack_data(text='staff 5')
bot.dispatch(mock_slack_data)
def test_commands(self):
"""
Ensure that the bot can handle the following commands:
/staffbot staff <task_id>
/staffbot restaff <task_id> <username>
This test only validates that the commands are processed, other
tests verify the functionality of the command execution.
"""
bot = StaffBot()
# Test staff command
mock_slack_data = get_mock_slack_data(
text='staff 5',
user_id=self.worker.slack_user_id)
response = bot.dispatch(mock_slack_data)
self.assertFalse(bot.default_error_text in response.get('text', ''))
# Test the restaff command
mock_slack_data['text'] = 'restaff 5 username'
response = bot.dispatch(mock_slack_data)
self.assertFalse(bot.default_error_text in response.get('text', ''))
# Test we fail gracefully
mock_slack_data['text'] = 'invalid command'
response = bot.dispatch(mock_slack_data)
self.assertTrue(bot.default_error_text in response.get('text', ''))
@patch('orchestra.bots.staffbot.send_mail')
@patch('orchestra.bots.staffbot.message_experts_slack_group')
@patch('orchestra.bots.staffbot.StaffBot._send_staffing_request_by_slack')
def test_staff_command(self, mock_slack, mock_experts_slack, mock_mail):
"""
Test that the staffing logic is properly executed for the
staff command.
"""
task = (Task.objects
.filter(status=Task.Status.AWAITING_PROCESSING)
.first())
# Get certified worker
worker = self._get_worker_for_task(
task, WorkerCertification.Role.ENTRY_LEVEL)
self._test_staffing_requests(worker, task, 'staff {}'.format(task.id),
can_slack=True, can_mail=True)
self._test_staffing_requests(worker, task, 'staff {}'.format(task.id),
can_slack=False, can_mail=False)
# Change the task state to pending review
task = assign_task(worker.id, task.id)
task.status = Task.Status.PENDING_REVIEW
task.save()
StaffingRequestInquiry.objects.all().delete()
worker = self._get_worker_for_task(task,
WorkerCertification.Role.REVIEWER)
self._test_staffing_requests(worker, task, 'staff {}'.format(task.id),
can_slack=False, can_mail=False)
self._test_staffing_requests(worker, task, 'staff {}'.format(task.id),
can_slack=True, can_mail=True)
self.assertTrue(mock_mail.called)
self.assertTrue(mock_experts_slack.called)
self.assertTrue(mock_slack.called)
def test_staff_command_errors(self):
"""
Test that the staffing logic errors are raised during
staff command.
"""
bot = StaffBot()
data = get_mock_slack_data(
text='staff 999999999999',
user_id=self.worker.slack_user_id)
response = bot.dispatch(data)
self.assertEqual(response['attachments'][0]['text'],
bot.task_does_not_exist_error.format('999999999999'))
data['text'] = 'staff'
response = bot.dispatch(data)
self.assertTrue(bot.default_error_text in response.get('text'))
task = TaskFactory(status=Task.Status.COMPLETE)
data['text'] = 'staff {}'.format(task.id)
response = bot.dispatch(data)
self.assertEqual(response['attachments'][0]['text'],
bot.task_assignment_error
.format(task.id,
'Status incompatible with new assignment'))
@patch('orchestra.bots.staffbot.send_mail')
@patch('orchestra.bots.staffbot.message_experts_slack_group')
@patch('orchestra.bots.staffbot.StaffBot._send_staffing_request_by_slack')
def test_restaff_command(self, mock_slack, mock_experts_slack, mock_mail):
"""
Test that the restaffing logic is properly executed for the
restaff command.
"""
task = (Task.objects
.filter(status=Task.Status.AWAITING_PROCESSING)
.first())
# Get certified worker
task = assign_task(self.worker.id, task.id)
command = 'restaff {} {}'.format(task.id, self.worker.user.username)
worker = self.workers[3]
self._test_staffing_requests(worker, task, command,
can_slack=False, can_mail=True)
self.assertTrue(mock_mail.called)
self.assertTrue(mock_experts_slack.called)
self.assertTrue(mock_slack.called)
def test_restaff_command_errors(self):
"""
Test that the staffing logic errors are raised during
staff command.
"""
bot = StaffBot()
command = 'restaff 999999999999 unknown'
data = get_mock_slack_data(
text=command,
user_id=self.worker.slack_user_id)
response = bot.dispatch(data)
self.assertEqual(response.get('text'),
command)
self.assertEqual(response['attachments'][0]['text'],
bot.worker_does_not_exist.format('unknown'))
worker = WorkerFactory(user__username='username')
data['text'] = 'restaff 999999999999 username'
response = bot.dispatch(data)
self.assertEqual(response['attachments'][0]['text'],
bot.task_does_not_exist_error.format('999999999999'))
# making sure it works with slack username as well.
worker.slack_username = 'slackusername'
worker.save()
data['text'] = 'restaff 999999999999 slackusername'
response = bot.dispatch(data)
self.assertEqual(response['attachments'][0]['text'],
bot.task_does_not_exist_error.format('999999999999'))
data['text'] = 'restaff'
response = bot.dispatch(data)
self.assertTrue(bot.default_error_text in response.get('text'))
task = TaskFactory(status=Task.Status.COMPLETE)
command = 'restaff {} {}'.format(task.id, worker.user.username)
data['text'] = command
response = bot.dispatch(data)
self.assertEqual(response['attachments'][0]['text'],
(bot.task_assignment_does_not_exist_error
.format(worker.user.username, task.id)))
@override_settings(ORCHESTRA_MOCK_EMAILS=True)
@patch('orchestra.bots.staffbot.send_mail')
def test_get_staffing_request_messsage(self, mock_mail):
def _task_factory(status, path):
description_no_kwargs = {'path': path}
return TaskFactory(
status=status,
step=StepFactory(
slug='stepslug',
description='the step',
detailed_description_function=description_no_kwargs),
project__workflow_version__workflow__description=(
'the workflow'),
project__short_description='the coolest project'
)
# Test slack without review and with a detailed_description_function
task = _task_factory(
Task.Status.AWAITING_PROCESSING,
'orchestra.tests.helpers.fixtures.get_detailed_description')
staffing_request_inquiry = StaffingRequestInquiryFactory(
communication_preference__worker__user__first_name='test-name',
request__task=task)
message = StaffBot()._get_staffing_request_message(
staffing_request_inquiry,
'communication/new_task_available_slack.txt')
self.assertEqual(message,
'''Hello test-name!
A new task is available for you to work on, if you'd like! Here are the details:
Project: the workflow
Project description: the coolest project
Task: the step
Details: No text given stepslug
<http://127.0.0.1:8000/orchestra/communication/accept_staffing_request_inquiry/{}/|Accept the Task>
<http://127.0.0.1:8000/orchestra/communication/reject_staffing_request_inquiry/{}/|Ignore the Task>
<http://127.0.0.1:8000/orchestra/communication/available_staffing_requests/|View All Available Tasks>
'''.format(staffing_request_inquiry.id, staffing_request_inquiry.id)) # noqa
# Test email with review and no detailed_description_function
task = _task_factory(
Task.Status.PENDING_REVIEW,
'orchestra.bots.tests.test_staffbot._noop_details')
staffing_request_inquiry = StaffingRequestInquiryFactory(
communication_preference__worker__user__first_name='test-name2',
request=StaffBotRequestFactory(
task=task, required_role_counter=1))
message = StaffBot()._get_staffing_request_message(
staffing_request_inquiry,
'communication/new_task_available_email.txt')
self.assertEqual(message,
'''Hello test-name2!
A new task is available for you to work on, if you'd like! Here are the details:
Project: the workflow
Project description: the coolest project
Task: the step [Review]
<a href="http://127.0.0.1:8000/orchestra/communication/accept_staffing_request_inquiry/{}/">Accept the Task</a>
<a href="http://127.0.0.1:8000/orchestra/communication/reject_staffing_request_inquiry/{}/">Ignore the Task</a>
<a href="http://127.0.0.1:8000/orchestra/communication/available_staffing_requests/">View All Available Tasks</a>
'''.format(staffing_request_inquiry.id, staffing_request_inquiry.id)) # noqa
# Test that we markdown things
StaffBot()._send_staffing_request_by_mail('<EMAIL>', message)
mock_mail.assert_called_once_with(
'A new task is available for you',
message,
settings.ORCHESTRA_NOTIFICATIONS_FROM_EMAIL,
['<EMAIL>'],
html_message=html_from_plaintext(message)
)
|
[
"orchestra.utils.task_lifecycle.assign_task",
"orchestra.models.StaffingRequestInquiry.objects.filter",
"orchestra.tests.helpers.fixtures.WorkerFactory",
"orchestra.models.StaffingRequestInquiry.objects.all",
"orchestra.tests.helpers.fixtures.setup_models",
"orchestra.tests.helpers.fixtures.TaskFactory",
"datetime.timedelta",
"orchestra.bots.tests.fixtures.get_mock_slack_data",
"orchestra.models.CommunicationPreference.objects.get",
"unittest.mock.patch",
"orchestra.models.StaffBotRequest.objects.all",
"django.test.override_settings",
"orchestra.models.Worker.objects.all",
"orchestra.communication.mail.html_from_plaintext",
"orchestra.utils.task_lifecycle.is_worker_certified_for_task",
"orchestra.models.Task.objects.filter",
"orchestra.tests.helpers.fixtures.StepFactory",
"orchestra.tests.helpers.fixtures.StaffingRequestInquiryFactory",
"orchestra.bots.staffbot.StaffBot",
"orchestra.tests.helpers.fixtures.StaffBotRequestFactory"
] |
[((4316, 4358), 'unittest.mock.patch', 'patch', (['"""orchestra.bots.staffbot.send_mail"""'], {}), "('orchestra.bots.staffbot.send_mail')\n", (4321, 4358), False, 'from unittest.mock import patch\n'), ((4364, 4424), 'unittest.mock.patch', 'patch', (['"""orchestra.bots.staffbot.message_experts_slack_group"""'], {}), "('orchestra.bots.staffbot.message_experts_slack_group')\n", (4369, 4424), False, 'from unittest.mock import patch\n'), ((4430, 4503), 'unittest.mock.patch', 'patch', (['"""orchestra.bots.staffbot.StaffBot._send_staffing_request_by_slack"""'], {}), "('orchestra.bots.staffbot.StaffBot._send_staffing_request_by_slack')\n", (4435, 4503), False, 'from unittest.mock import patch\n'), ((7026, 7068), 'unittest.mock.patch', 'patch', (['"""orchestra.bots.staffbot.send_mail"""'], {}), "('orchestra.bots.staffbot.send_mail')\n", (7031, 7068), False, 'from unittest.mock import patch\n'), ((7074, 7134), 'unittest.mock.patch', 'patch', (['"""orchestra.bots.staffbot.message_experts_slack_group"""'], {}), "('orchestra.bots.staffbot.message_experts_slack_group')\n", (7079, 7134), False, 'from unittest.mock import patch\n'), ((7140, 7213), 'unittest.mock.patch', 'patch', (['"""orchestra.bots.staffbot.StaffBot._send_staffing_request_by_slack"""'], {}), "('orchestra.bots.staffbot.StaffBot._send_staffing_request_by_slack')\n", (7145, 7213), False, 'from unittest.mock import patch\n'), ((9782, 9827), 'django.test.override_settings', 'override_settings', ([], {'ORCHESTRA_MOCK_EMAILS': '(True)'}), '(ORCHESTRA_MOCK_EMAILS=True)\n', (9799, 9827), False, 'from django.test import override_settings\n'), ((9833, 9875), 'unittest.mock.patch', 'patch', (['"""orchestra.bots.staffbot.send_mail"""'], {}), "('orchestra.bots.staffbot.send_mail')\n", (9838, 9875), False, 'from unittest.mock import patch\n'), ((1399, 1417), 'orchestra.tests.helpers.fixtures.setup_models', 'setup_models', (['self'], {}), '(self)\n', (1411, 1417), False, 'from orchestra.tests.helpers.fixtures import setup_models\n'), ((1551, 1624), 'unittest.mock.patch', 'patch', (['"""orchestra.bots.staffbot.StaffBot._send_staffing_request_by_slack"""'], {}), "('orchestra.bots.staffbot.StaffBot._send_staffing_request_by_slack')\n", (1556, 1624), False, 'from unittest.mock import patch\n'), ((1822, 1842), 'orchestra.models.Worker.objects.all', 'Worker.objects.all', ([], {}), '()\n', (1840, 1842), False, 'from orchestra.models import Worker\n'), ((2128, 2138), 'orchestra.bots.staffbot.StaffBot', 'StaffBot', ([], {}), '()\n', (2136, 2138), False, 'from orchestra.bots.staffbot import StaffBot\n'), ((2303, 2397), 'orchestra.models.CommunicationPreference.objects.get', 'CommunicationPreference.objects.get', ([], {'worker': 'worker', 'communication_type': 'communication_type'}), '(worker=worker, communication_type=\n communication_type)\n', (2338, 2397), False, 'from orchestra.models import CommunicationPreference\n'), ((2590, 2658), 'orchestra.bots.tests.fixtures.get_mock_slack_data', 'get_mock_slack_data', ([], {'text': 'command', 'user_id': 'self.worker.slack_user_id'}), '(text=command, user_id=self.worker.slack_user_id)\n', (2609, 2658), False, 'from orchestra.bots.tests.fixtures import get_mock_slack_data\n'), ((3067, 3077), 'orchestra.bots.staffbot.StaffBot', 'StaffBot', ([], {}), '()\n', (3075, 3077), False, 'from orchestra.bots.staffbot import StaffBot\n'), ((3591, 3601), 'orchestra.bots.staffbot.StaffBot', 'StaffBot', ([], {}), '()\n', (3599, 3601), False, 'from orchestra.bots.staffbot import StaffBot\n'), ((3658, 3728), 'orchestra.bots.tests.fixtures.get_mock_slack_data', 'get_mock_slack_data', ([], {'text': '"""staff 5"""', 'user_id': 'self.worker.slack_user_id'}), "(text='staff 5', user_id=self.worker.slack_user_id)\n", (3677, 3728), False, 'from orchestra.bots.tests.fixtures import get_mock_slack_data\n'), ((5309, 5340), 'orchestra.utils.task_lifecycle.assign_task', 'assign_task', (['worker.id', 'task.id'], {}), '(worker.id, task.id)\n', (5320, 5340), False, 'from orchestra.utils.task_lifecycle import assign_task\n'), ((6190, 6200), 'orchestra.bots.staffbot.StaffBot', 'StaffBot', ([], {}), '()\n', (6198, 6200), False, 'from orchestra.bots.staffbot import StaffBot\n'), ((6216, 6302), 'orchestra.bots.tests.fixtures.get_mock_slack_data', 'get_mock_slack_data', ([], {'text': '"""staff 999999999999"""', 'user_id': 'self.worker.slack_user_id'}), "(text='staff 999999999999', user_id=self.worker.\n slack_user_id)\n", (6235, 6302), False, 'from orchestra.bots.tests.fixtures import get_mock_slack_data\n'), ((6660, 6700), 'orchestra.tests.helpers.fixtures.TaskFactory', 'TaskFactory', ([], {'status': 'Task.Status.COMPLETE'}), '(status=Task.Status.COMPLETE)\n', (6671, 6700), False, 'from orchestra.tests.helpers.fixtures import TaskFactory\n'), ((7576, 7612), 'orchestra.utils.task_lifecycle.assign_task', 'assign_task', (['self.worker.id', 'task.id'], {}), '(self.worker.id, task.id)\n', (7587, 7612), False, 'from orchestra.utils.task_lifecycle import assign_task\n'), ((8156, 8166), 'orchestra.bots.staffbot.StaffBot', 'StaffBot', ([], {}), '()\n', (8164, 8166), False, 'from orchestra.bots.staffbot import StaffBot\n'), ((8231, 8299), 'orchestra.bots.tests.fixtures.get_mock_slack_data', 'get_mock_slack_data', ([], {'text': 'command', 'user_id': 'self.worker.slack_user_id'}), '(text=command, user_id=self.worker.slack_user_id)\n', (8250, 8299), False, 'from orchestra.bots.tests.fixtures import get_mock_slack_data\n'), ((8595, 8635), 'orchestra.tests.helpers.fixtures.WorkerFactory', 'WorkerFactory', ([], {'user__username': '"""username"""'}), "(user__username='username')\n", (8608, 8635), False, 'from orchestra.tests.helpers.fixtures import WorkerFactory\n'), ((9398, 9438), 'orchestra.tests.helpers.fixtures.TaskFactory', 'TaskFactory', ([], {'status': 'Task.Status.COMPLETE'}), '(status=Task.Status.COMPLETE)\n', (9409, 9438), False, 'from orchestra.tests.helpers.fixtures import TaskFactory\n'), ((10725, 10847), 'orchestra.tests.helpers.fixtures.StaffingRequestInquiryFactory', 'StaffingRequestInquiryFactory', ([], {'communication_preference__worker__user__first_name': '"""test-name"""', 'request__task': 'task'}), "(\n communication_preference__worker__user__first_name='test-name',\n request__task=task)\n", (10754, 10847), False, 'from orchestra.tests.helpers.fixtures import StaffingRequestInquiryFactory\n'), ((1859, 1907), 'orchestra.utils.task_lifecycle.is_worker_certified_for_task', 'is_worker_certified_for_task', (['worker', 'task', 'role'], {}), '(worker, task, role)\n', (1887, 1907), False, 'from orchestra.utils.task_lifecycle import is_worker_certified_for_task\n'), ((3163, 3198), 'orchestra.bots.tests.fixtures.get_mock_slack_data', 'get_mock_slack_data', ([], {'text': '"""staff 5"""'}), "(text='staff 5')\n", (3182, 3198), False, 'from orchestra.bots.tests.fixtures import get_mock_slack_data\n'), ((2075, 2104), 'orchestra.models.StaffBotRequest.objects.all', 'StaffBotRequest.objects.all', ([], {}), '()\n', (2102, 2104), False, 'from orchestra.models import StaffBotRequest\n'), ((2805, 2825), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(0)'}), '(minutes=0)\n', (2814, 2825), False, 'from datetime import timedelta\n'), ((4710, 4769), 'orchestra.models.Task.objects.filter', 'Task.objects.filter', ([], {'status': 'Task.Status.AWAITING_PROCESSING'}), '(status=Task.Status.AWAITING_PROCESSING)\n', (4729, 4769), False, 'from orchestra.models import Task\n'), ((5419, 5455), 'orchestra.models.StaffingRequestInquiry.objects.all', 'StaffingRequestInquiry.objects.all', ([], {}), '()\n', (5453, 5455), False, 'from orchestra.models import StaffingRequestInquiry\n'), ((7426, 7485), 'orchestra.models.Task.objects.filter', 'Task.objects.filter', ([], {'status': 'Task.Status.AWAITING_PROCESSING'}), '(status=Task.Status.AWAITING_PROCESSING)\n', (7445, 7485), False, 'from orchestra.models import Task\n'), ((10882, 10892), 'orchestra.bots.staffbot.StaffBot', 'StaffBot', ([], {}), '()\n', (10890, 10892), False, 'from orchestra.bots.staffbot import StaffBot\n'), ((12043, 12101), 'orchestra.tests.helpers.fixtures.StaffBotRequestFactory', 'StaffBotRequestFactory', ([], {'task': 'task', 'required_role_counter': '(1)'}), '(task=task, required_role_counter=1)\n', (12065, 12101), False, 'from orchestra.tests.helpers.fixtures import StaffBotRequestFactory\n'), ((12138, 12148), 'orchestra.bots.staffbot.StaffBot', 'StaffBot', ([], {}), '()\n', (12146, 12148), False, 'from orchestra.bots.staffbot import StaffBot\n'), ((12994, 13004), 'orchestra.bots.staffbot.StaffBot', 'StaffBot', ([], {}), '()\n', (13002, 13004), False, 'from orchestra.bots.staffbot import StaffBot\n'), ((13274, 13302), 'orchestra.communication.mail.html_from_plaintext', 'html_from_plaintext', (['message'], {}), '(message)\n', (13293, 13302), False, 'from orchestra.communication.mail import html_from_plaintext\n'), ((2852, 2958), 'orchestra.models.StaffingRequestInquiry.objects.filter', 'StaffingRequestInquiry.objects.filter', ([], {'communication_preference__worker_id': 'worker', 'request__task': 'task'}), '(communication_preference__worker_id=\n worker, request__task=task)\n', (2889, 2958), False, 'from orchestra.models import StaffingRequestInquiry\n'), ((10113, 10222), 'orchestra.tests.helpers.fixtures.StepFactory', 'StepFactory', ([], {'slug': '"""stepslug"""', 'description': '"""the step"""', 'detailed_description_function': 'description_no_kwargs'}), "(slug='stepslug', description='the step',\n detailed_description_function=description_no_kwargs)\n", (10124, 10222), False, 'from orchestra.tests.helpers.fixtures import StepFactory\n')]
|
import pysm3
import pysm3.units as u
def test_read_map_unit():
m = pysm3.read_map("pysm_2/dust_temp.fits", nside=8, field=0, unit="uK_RJ")
assert u.Unit("uK_RJ") == m.unit
def test_read_map_unit_dimensionless():
m = pysm3.read_map("pysm_2/dust_temp.fits", nside=8, field=0)
assert u.Unit("") == m.unit
|
[
"pysm3.units.Unit",
"pysm3.read_map"
] |
[((73, 144), 'pysm3.read_map', 'pysm3.read_map', (['"""pysm_2/dust_temp.fits"""'], {'nside': '(8)', 'field': '(0)', 'unit': '"""uK_RJ"""'}), "('pysm_2/dust_temp.fits', nside=8, field=0, unit='uK_RJ')\n", (87, 144), False, 'import pysm3\n'), ((232, 289), 'pysm3.read_map', 'pysm3.read_map', (['"""pysm_2/dust_temp.fits"""'], {'nside': '(8)', 'field': '(0)'}), "('pysm_2/dust_temp.fits', nside=8, field=0)\n", (246, 289), False, 'import pysm3\n'), ((156, 171), 'pysm3.units.Unit', 'u.Unit', (['"""uK_RJ"""'], {}), "('uK_RJ')\n", (162, 171), True, 'import pysm3.units as u\n'), ((301, 311), 'pysm3.units.Unit', 'u.Unit', (['""""""'], {}), "('')\n", (307, 311), True, 'import pysm3.units as u\n')]
|
"""
tools to deal with I/O on the shell
"""
import sys
def progressbar(i, m):
''' Draws a progress bar with = signs and percentage.
IN: i (step of the loop), m (max of the loop).'''
sys.stdout.write('\r')
sys.stdout.write("[%-30s] %d%%" %
('=' * int(round(i * 30. / m)), 100. * i / m))
sys.stdout.flush()
if i == m:
sys.stdout.write('\n')
class Getch:
"""
Multi-platform getch
Gets a single character from standard input.
Does not echo to the screen.
"""
def __init__(self):
try:
self.impl = _GetchWindows()
except ImportError:
self.impl = _GetchUnix()
def __call__(self):
return self.impl()
class _GetchUnix:
def __init__(self):
import tty
import sys
def __call__(self):
import sys
import tty
import termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
class _GetchWindows:
def __init__(self):
import msvcrt
def __call__(self):
import msvcrt
return msvcrt.getch()
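# --- Illustrative usage (not part of the original module) ---
# A minimal sketch exercising both helpers; the loop bound is arbitrary.
if __name__ == "__main__":
    for step in range(1, 101):
        progressbar(step, 100)
    getch = Getch()
    print("press any key to quit...")
    getch()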
|
[
"sys.stdout.write",
"sys.stdin.read",
"termios.tcgetattr",
"msvcrt.getch",
"termios.tcsetattr",
"sys.stdout.flush",
"sys.stdin.fileno"
] |
[((200, 222), 'sys.stdout.write', 'sys.stdout.write', (["'\\r'"], {}), "('\\r')\n", (216, 222), False, 'import sys\n'), ((333, 351), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (349, 351), False, 'import sys\n'), ((375, 397), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (391, 397), False, 'import sys\n'), ((911, 929), 'sys.stdin.fileno', 'sys.stdin.fileno', ([], {}), '()\n', (927, 929), False, 'import sys\n'), ((953, 974), 'termios.tcgetattr', 'termios.tcgetattr', (['fd'], {}), '(fd)\n', (970, 974), False, 'import termios\n'), ((1299, 1313), 'msvcrt.getch', 'msvcrt.getch', ([], {}), '()\n', (1311, 1313), False, 'import msvcrt\n'), ((1048, 1065), 'sys.stdin.read', 'sys.stdin.read', (['(1)'], {}), '(1)\n', (1062, 1065), False, 'import sys\n'), ((1095, 1149), 'termios.tcsetattr', 'termios.tcsetattr', (['fd', 'termios.TCSADRAIN', 'old_settings'], {}), '(fd, termios.TCSADRAIN, old_settings)\n', (1112, 1149), False, 'import termios\n'), ((1011, 1029), 'sys.stdin.fileno', 'sys.stdin.fileno', ([], {}), '()\n', (1027, 1029), False, 'import sys\n')]
|
import signal
import functools
from datetime import datetime
def run_time(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
start_time = datetime.now()
_return = func(*args, **kwargs)
end_time = datetime.now()
print(f">> [{func.__name__}] run time: {end_time - start_time}")
return _return
return wrapper
def run_count(func):
count = 0
@functools.wraps(func)
def wrapper(*args, **kwargs):
nonlocal count
count += 1
print(f">> [{func.__name__}] run count: {count}")
return func(*args, **kwargs)
return wrapper
def ctrl_c(func):
signal.signal(signal.SIGINT, signal_handler)
signal.signal(signal.SIGHUP, signal_handler)
signal.signal(signal.SIGTERM, signal_handler)
@functools.wraps(func)
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper
def signal_handler(_a, _b):
while True:
exit_flag = input("Are you sure to quit? yes/no\n>> ")
if exit_flag == "yes":
print(">> exit...")
exit()
elif exit_flag == "no":
break
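# --- Illustrative usage (not part of the original module) ---
# A minimal sketch stacking the timing and counting decorators on a toy
# function; the function and its workload are made up for illustration.
@run_time
@run_count
def busy_work(n):
    return sum(i * i for i in range(n))

if __name__ == "__main__":
    busy_work(1_000_000)
    busy_work(1_000_000)  # the run count keeps incrementing across calls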
|
[
"signal.signal",
"datetime.datetime.now",
"functools.wraps"
] |
[((88, 109), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (103, 109), False, 'import functools\n'), ((413, 434), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (428, 434), False, 'import functools\n'), ((650, 694), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'signal_handler'], {}), '(signal.SIGINT, signal_handler)\n', (663, 694), False, 'import signal\n'), ((699, 743), 'signal.signal', 'signal.signal', (['signal.SIGHUP', 'signal_handler'], {}), '(signal.SIGHUP, signal_handler)\n', (712, 743), False, 'import signal\n'), ((748, 793), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'signal_handler'], {}), '(signal.SIGTERM, signal_handler)\n', (761, 793), False, 'import signal\n'), ((800, 821), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (815, 821), False, 'import functools\n'), ((165, 179), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (177, 179), False, 'from datetime import datetime\n'), ((239, 253), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (251, 253), False, 'from datetime import datetime\n')]
|
#!/usr/bin/env python
import sys,os
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from logparser.LogCluster import LogCluster_iterative
# Hyperparameter tuning for cisco_router.log :
numIterations = 27 # Hyperparameter that needs to be tuned.
rsupports = [23.9, 23, 9, 9, 9, 8, 8, 8, 7, 26.7, 7, 7, 8, 7, 10.1, 7, 5, 9, 5, 7, 5, 8.6, 5, 8, 5, 5] # Hyperparameter that needs to be tuned.
initial_support = 20 # Hyperparameter that needs to be tuned.
file_name = "cisco_router.log"
for iteration in range(1,numIterations+1):
if iteration==1:
input_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))+'/logs/Sample_logs/' # The input directory of log file
output_dir = 'LogCluster_result/financial_transaction_results/Iterative_results' # The output directory of parsing results
log_file = file_name
log_format = '<Date> <Time> <Level> <Router> <Pid>: <Month> <Day> <UTCTime>: <Component>: <Content>' # cisco_router log format
# log_format = '<Date> <Time> <Level> <Module> \[<StatusAndPayThread>\] - <Content>'
rsupport = initial_support # The minimum threshold of relative support, 10 denotes 10%
regex = []
else :
input_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # The input directory of log file
        output_dir = 'LogCluster_result/financial_transaction_results/Iterative_results' # The output directory of parsing results
log_file = 'logcluster_input.log'
log_format = '<Content>'
rsupport = rsupports[iteration-2]
regex = []
print("Iteration : ",iteration)
parser = LogCluster_iterative.LogParser(input_dir, log_format, output_dir, rsupport=rsupport, iteration=iteration, file_name = file_name, initial_support= initial_support)
parser.parse(log_file)
|
[
"os.path.abspath",
"logparser.LogCluster.LogCluster_iterative.LogParser"
] |
[((1715, 1884), 'logparser.LogCluster.LogCluster_iterative.LogParser', 'LogCluster_iterative.LogParser', (['input_dir', 'log_format', 'output_dir'], {'rsupport': 'rsupport', 'iteration': 'iteration', 'file_name': 'file_name', 'initial_support': 'initial_support'}), '(input_dir, log_format, output_dir, rsupport=\n rsupport, iteration=iteration, file_name=file_name, initial_support=\n initial_support)\n', (1745, 1884), False, 'from logparser.LogCluster import LogCluster_iterative\n'), ((84, 109), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (99, 109), False, 'import sys, os\n'), ((1286, 1311), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1301, 1311), False, 'import sys, os\n'), ((628, 653), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (643, 653), False, 'import sys, os\n')]
|
# Copyright (c) 2016, AB Uobis
# All rights reserved.
import os
import io
import uuid
import ast
import csv
import calendar
from collections import OrderedDict
from datetime import datetime, date
from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file
from xac import app, db, forms, models
import sqlalchemy
from sqlalchemy.sql import func
from sqlalchemy.orm import aliased
from xac.accounting.memoranda import process_filestorage
import xac.accounting.ledgers as ledgers
import xac.accounting.rates as rates
import xac.accounting.valuations as valuations
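# Route handlers below are grouped by area: configuration (chart of
# accounts), bookkeeping (memoranda, journals, ledgers), and financial
# statements (income statement, balance sheet, cash flows).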
@app.route('/')
def index():
return render_template("index.html")
@app.route('/Configure')
def configure():
return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts')
def chart_of_accounts():
classificationform = forms.NewClassification()
accountform = forms.NewAccount()
subaccountform = forms.NewSubAccount()
subaccounts = models.Subaccounts.query.all()
return render_template("configure/chart_of_accounts.html",
subaccounts=subaccounts,
classificationform=classificationform,
accountform=accountform,
subaccountform=subaccountform)
@app.route('/Configure/ChartOfAccounts/AddClassification', methods=['POST', 'GET'])
def add_classification():
if request.method == 'POST':
form = request.form.copy().to_dict()
name = form['classification']
parent = form['classificationparent']
parent = models.Elements.query.filter_by(id=parent).one()
parent = parent.name
classification = models.Classifications(name=name, parent=parent)
db.session.add(classification)
db.session.commit()
return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/DeleteClassification/<classification>')
def delete_classification(classification):
classification = models.Classifications \
.query \
.filter_by(name=classification) \
.first()
db.session.delete(classification)
db.session.commit()
return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/AddAccount', methods=['POST', 'GET'])
def add_account():
if request.method == 'POST':
form = request.form.copy().to_dict()
name = form['account']
parent = form['accountparent']
parent = models.Classifications \
.query \
.filter_by(id=parent) \
.one()
parent = parent.name
account = models.Accounts(name=name, parent=parent)
db.session.add(account)
db.session.commit()
return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/DeleteAccount/<account>')
def delete_account(account):
account = models.Accounts.query.filter_by(name=account).first()
db.session.delete(account)
db.session.commit()
return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/AddSubAccount', methods=['POST', 'GET'])
def add_subaccount():
if request.method == 'POST':
form = request.form.copy().to_dict()
name = form['subaccount']
parent = form['subaccountparent']
parent = models.Accounts.query.filter_by(id=parent).one()
parent = parent.name
subaccount = models.Subaccounts(name=name, parent=parent)
db.session.add(subaccount)
db.session.commit()
return redirect(url_for('chart_of_accounts'))
@app.route('/Configure/ChartOfAccounts/DeleteSubAccount/<subaccount>')
def delete_subaccount(subaccount):
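    # NOTE: this filters the Accounts model by the subaccount's name;
    # deleting a subaccount most likely intends models.Subaccounts here.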
subaccount = models.Accounts.query.filter_by(name=subaccount).first()
db.session.delete(subaccount)
db.session.commit()
return redirect(url_for('chart_of_accounts'))
@app.route('/Bookkeeping')
def bookkeeping():
return redirect(url_for('upload_csv'))
@app.route('/Bookkeeping/Memoranda/Upload', methods=['POST', 'GET'])
def upload_csv():
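    # On POST, hand each uploaded file to the memoranda processor and
    # redirect; otherwise list the stored memos, newest first.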
filenames = ''
if request.method == 'POST':
uploaded_files = request.files.getlist("file[]")
for file in uploaded_files:
process_filestorage(file)
return redirect(url_for('upload_csv'))
memos = models.Memoranda \
.query \
.order_by(models.Memoranda.date.desc()) \
.all()
return render_template('bookkeeping/upload.html',
title = 'Upload',
memos=memos)
@app.route('/Bookkeeping/Memoranda/ExchangeRates')
def exchange_rates():
return render_template("bookkeeping/exchange_rates.html")
@app.route('/Bookkeeping/Memoranda/DownloadRates')
def download_rates():
rates.download_rates()
return redirect(url_for('exchange_rates'))
@app.route('/Bookkeeping/Memoranda/ExchangeRates/Summarize')
def summarize_rates():
rates.summarize_rates("xac")
return redirect(url_for('exchange_rates'))
@app.route('/Bookkeeping/Memoranda/ExchangeRates/Import')
def import_rates():
rates.import_rates("xac")
return redirect(url_for('exchange_rates'))
@app.route('/Bookkeeping/Memoranda/ExchangeRates/CalculateGains/<method>')
def calc_gains(method):
valuations.calculate_bitcoin_gains(method)
return redirect(url_for('exchange_rates'))
@app.route('/Bookkeeping/Memoranda/Memos', methods=['POST', 'GET'])
def memoranda():
memos = models.Memoranda \
.query \
.order_by(models.Memoranda.date.desc()) \
.all()
for memo in memos:
transactions = models.MemorandaTransactions \
.query \
.filter_by(memoranda_id=memo.id) \
.all()
memo.count = len(transactions)
return render_template('bookkeeping/memos.html',
title = 'Memoranda',
memos=memos)
@app.route('/Bookkeeping/Memoranda/Memos/Delete/<fileName>')
def delete_memoranda(fileName):
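    # Cascade delete: remove the ledger entries for each journal entry, then
    # the journal entry, then the memoranda transaction, and finally the memo
    # itself, committing after each step.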
memo = models.Memoranda \
.query \
.filter_by(fileName=fileName) \
.first()
transactions = models.MemorandaTransactions \
.query \
.filter_by(memoranda_id=memo.id) \
.all()
for transaction in transactions:
journal_entry = models.JournalEntries \
.query \
.filter_by(memoranda_transactions_id=transaction.id) \
.first()
ledger_entries = models.LedgerEntries \
.query \
.filter_by(journal_entry_id = journal_entry.id) \
.all()
for entry in ledger_entries:
db.session.delete(entry)
db.session.commit()
db.session.delete(journal_entry)
db.session.commit()
db.session.delete(transaction)
db.session.commit()
db.session.delete(memo)
db.session.commit()
return redirect(url_for('upload_csv'))
@app.route('/Bookkeeping/Memoranda/Memos/<fileName>')
def memo_file(fileName):
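    # Re-parse the CSV text stored on the memo into rows for display.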
memo = models.Memoranda.query.filter_by(fileName=fileName).first()
fileText = memo.fileText
document = io.StringIO(fileText)
reader = csv.reader(document)
rows = [pair for pair in reader]
return render_template('bookkeeping/memo_file.html',
title = 'Memo',
rows=rows,
fileName=fileName)
@app.route('/Bookkeeping/Memoranda/Memos/Transactions')
def transactions():
transactions = models.MemorandaTransactions.query.all()
for transaction in transactions:
transaction.details = ast.literal_eval(transaction.details)
journal_entry = models.JournalEntries.query.filter_by(memoranda_transactions_id=transaction.id).first()
transaction.journal_entry_id = journal_entry.id
return render_template('bookkeeping/memo_transactions.html',
title = 'Memo',
transactions=transactions)
@app.route('/Bookkeeping/Memoranda/Memos/<fileName>/Transactions')
def memo_transactions(fileName):
memo = models.Memoranda.query.filter_by(fileName=fileName).first()
transactions = models.MemorandaTransactions.query.filter_by(memoranda_id=memo.id).all()
for transaction in transactions:
transaction.details = ast.literal_eval(transaction.details)
journal_entry = models.JournalEntries.query.filter_by(memoranda_transactions_id=transaction.id).first()
transaction.journal_entry_id = journal_entry.id
return render_template('bookkeeping/memo_transactions.html',
title = 'Memo',
transactions=transactions,
fileName=fileName)
@app.route('/Bookkeeping/GeneralJournal/<currency>')
def general_journal(currency):
journal_entries = db.session \
.query(models.JournalEntries) \
.filter(models.JournalEntries.ledgerentries \
.any(currency=currency)) \
.join(models.LedgerEntries) \
.order_by(models.LedgerEntries.date.desc()) \
.all()
for journal_entry in journal_entries:
journal_entry.ledgerentries = [c for c in journal_entry.ledgerentries if c.currency == currency]
return render_template('bookkeeping/general_journal.html',
title = 'General Journal',
journal_entries=journal_entries,
currency=currency)
@app.route('/Bookkeeping/GeneralJournal/Entry/<id>')
def journal_entry(id):
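    # Show a single journal entry with its ledger lines (debits before
    # credits) plus the source transaction and memo it was built from.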
    journal_entry = models.JournalEntries.query.filter_by(id=id).first()
    ledger_entries = models.LedgerEntries.query.filter_by(journal_entry_id=id).order_by(models.LedgerEntries.date.desc()).order_by(models.LedgerEntries.tside.desc()).all()
transaction = models.MemorandaTransactions.query.filter_by(id=journal_entry.memoranda_transactions_id).first()
memo = models.Memoranda.query.filter_by(id=transaction.memoranda_id).first()
transaction.details = ast.literal_eval(transaction.details)
print(ledger_entries)
return render_template('bookkeeping/journal_entry.html',
title = 'Journal Entry',
journal_entry=journal_entry,
ledger_entries=ledger_entries,
transaction=transaction,
memo=memo)
@app.route('/Bookkeeping/GeneralJournal/<id>/Edit', methods=['POST', 'GET'])
def edit_journal_entry(id):
    journal_entry = models.JournalEntries.query.filter_by(id=id).first()
    ledger_entries = models.LedgerEntries.query.filter_by(journal_entry_id=id).order_by(models.LedgerEntries.date.desc()).order_by(models.LedgerEntries.tside.desc()).all()
transaction = models.MemorandaTransactions.query.filter_by(id=journal_entry.memoranda_transactions_id).first()
memo = models.Memoranda.query.filter_by(id=transaction.memoranda_id).first()
transaction.details = ast.literal_eval(transaction.details)
return render_template('bookkeeping/journal_entry_edit.html',
title = 'Journal Entry',
journal_entry=journal_entry,
ledger_entries=ledger_entries,
transaction=transaction,
memo=memo)
@app.route('/Bookkeeping/GeneralLedger/<currency>')
def general_ledger(currency):
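    # Build a monthly summary ledger for every account posted to in this
    # currency.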
accountsQuery = db.session\
.query(models.LedgerEntries.ledger)\
.group_by(models.LedgerEntries.ledger).all()
accounts = []
for accountResult in accountsQuery:
accountName = accountResult[0]
query = ledgers.query_entries(accountName, 'Monthly', currency)
accounts.append(query)
return render_template('bookkeeping/general_ledger.html',
title = 'General Ledger',
accounts=accounts,
currency=currency)
@app.route('/Bookkeeping/Ledger/<accountName>/<currency>/<groupby>')
def ledger(accountName, currency, groupby):
query = ledgers.query_entries(accountName, groupby, currency)
return render_template('bookkeeping/ledger.html',
title = 'Ledger',
currency=currency,
account=query[0],
ledger_entries=query[1],
groupby = groupby,
accountName=accountName)
@app.route('/Bookkeeping/Ledger/<accountName>/<currency>/<groupby>/<interval>')
def ledger_page(accountName, currency, groupby, interval):
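    # Drill into one Daily or Monthly ledger interval: parse the interval
    # string, filter entries by date parts, and foot the account for the span.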
if groupby == "Daily":
interval = datetime.strptime(interval, "%m-%d-%Y")
year = interval.year
month = interval.month
day = interval.day
ledger_entries = models.LedgerEntries \
.query \
.filter_by(ledger=accountName) \
.filter_by(currency=currency) \
.filter( \
func.date_part('year', models.LedgerEntries.date)==year, \
func.date_part('month', models.LedgerEntries.date)==month, \
func.date_part('day', models.LedgerEntries.date)==day) \
.order_by(models.LedgerEntries.date) \
.order_by(models.LedgerEntries.tside.asc()) \
.all()
account = ledgers.foot_account(accountName, ledger_entries, 'All')
if groupby == "Monthly":
interval = datetime.strptime(interval, "%m-%Y")
year = interval.year
month = interval.month
ledger_entries = models.LedgerEntries\
.query\
.filter_by(ledger=accountName) \
.filter_by(currency=currency) \
.filter( \
func.date_part('year', models.LedgerEntries.date)==year, \
func.date_part('month', models.LedgerEntries.date)==month)\
.order_by(models.LedgerEntries.date) \
.order_by(models.LedgerEntries.tside.desc()) \
.all()
account = ledgers.foot_account(accountName, ledger_entries, 'All')
return render_template('bookkeeping/ledger.html',
title = 'Ledger',
account=account,
ledger_entries=ledger_entries,
groupby2 = groupby,
groupby = 'All',
accountName=accountName,
interval=interval,
currency=currency)
@app.route('/Bookkeeping/TrialBalance/<currency>')
def trial_balance(currency):
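    # Foot every ledger account for the current month and accumulate total
    # debits and credits for the trial balance.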
accountsQuery = db.session \
.query(models.LedgerEntries.ledger) \
.group_by(models.LedgerEntries.ledger) \
.filter(models.LedgerEntries.currency==currency) \
.all()
periods = db.session \
.query(\
            func.date_part('year', models.LedgerEntries.date) + '-' +
func.date_part('month', models.LedgerEntries.date)) \
.filter(models.LedgerEntries.currency==currency) \
.group_by(\
func.date_part('year', models.LedgerEntries.date), \
func.date_part('month', models.LedgerEntries.date)) \
.all()
period = datetime.now()
year = period.year
month = period.month
accounts = []
totalDebits = 0
totalCredits = 0
for accountResult in accountsQuery:
accountName = accountResult[0]
ledger_entries = models.LedgerEntries \
.query \
.filter_by(ledger=accountName)\
.filter_by(currency=currency) \
.filter( \
func.date_part('year', models.LedgerEntries.date)==year,
func.date_part('month', models.LedgerEntries.date)==month) \
.order_by(models.LedgerEntries.date) \
.order_by(models.LedgerEntries.tside.desc()) \
.all()
query = ledgers.foot_account(accountName, ledger_entries, 'All')
totalDebits += query['debitBalance']
totalCredits += query['creditBalance']
accounts.append(query)
return render_template('bookkeeping/trial_balance.html',
currency=currency,
periods=periods,
period=period,
accounts=accounts,
totalDebits=totalDebits,
totalCredits=totalCredits)
@app.route('/Bookkeeping/TrialBalance/<currency>/<groupby>/<period>')
def trial_balance_historical(currency, groupby, period):
accountsQuery = db.session \
.query(models.LedgerEntries.ledger) \
.group_by(models.LedgerEntries.ledger) \
.filter(models.LedgerEntries.currency==currency) \
.all()
periods = db.session \
.query(\
            func.date_part('year', models.LedgerEntries.date) + '-' +
func.date_part('month', models.LedgerEntries.date)) \
.group_by(\
func.date_part('year', models.LedgerEntries.date),\
func.date_part('month', models.LedgerEntries.date)) \
.filter(models.LedgerEntries.currency==currency) \
.all()
period = datetime.strptime(period, "%Y-%m")
year = period.year
month = period.month
day = calendar.monthrange(year, month)[1]
period = datetime(year, month, day, 23, 59, 59)
accounts = []
totalDebits = 0
totalCredits = 0
for accountResult in accountsQuery:
accountName = accountResult[0]
ledger_entries = models.LedgerEntries \
.query \
.filter_by(ledger=accountName) \
.filter_by(currency=currency) \
.filter( \
func.date_part('year', models.LedgerEntries.date)==year, \
func.date_part('month', models.LedgerEntries.date)==month) \
.order_by(models.LedgerEntries.date) \
.order_by(models.LedgerEntries.tside.desc()) \
.all()
query = ledgers.foot_account(accountName, ledger_entries, 'All')
totalDebits += query['debitBalance']
totalCredits += query['creditBalance']
accounts.append(query)
return render_template('bookkeeping/trial_balance.html',
currency=currency,
periods=periods,
period=period,
accounts=accounts,
totalDebits=totalDebits,
totalCredits=totalCredits)
@app.route('/FinancialStatements')
def financial_statements():
return redirect(url_for('income_statement', currency='satoshis'))
@app.route('/FinancialStatements/IncomeStatement/<currency>')
def income_statement(currency):
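    # Walk Elements -> Classifications -> Accounts -> Subaccounts, summing
    # credits less debits over the current month to produce net income.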
periods = db.session \
.query(\
func.date_part('year', models.LedgerEntries.date),\
func.date_part('month', models.LedgerEntries.date)) \
.group_by( \
func.date_part('year', models.LedgerEntries.date),\
func.date_part('month', models.LedgerEntries.date)) \
.all()
periods = sorted([date(int(period[0]), int(period[1]), 1) for period in periods])
period = datetime.now()
period_beg = datetime(period.year, period.month, 1, 0, 0, 0, 0)
period_end = datetime(period.year, period.month, period.day, 23, 59, 59, 999999)
elements = db.session \
.query(models.Elements) \
.join(models.Classifications) \
.filter(models.Classifications.name.in_(['Revenues', 'Expenses', 'Gains', 'Losses']))\
.join(models.Accounts) \
.join(models.Subaccounts) \
.all()
net_income = 0
for element in elements:
element.classifications = [c for c in element.classifications if c.name in ['Revenues', 'Expenses', 'Gains', 'Losses']]
for classification in element.classifications:
for account in classification.accounts:
for subaccount in account.subaccounts:
subaccount.total = 0
subaccount.ledgerentries = [c for c in subaccount.ledgerentries if period_beg <= c.date <= period_end ]
for ledger_entry in subaccount.ledgerentries:
if ledger_entry.currency == currency:
if ledger_entry.tside == 'credit':
subaccount.total += ledger_entry.amount
net_income += ledger_entry.amount
elif ledger_entry.tside == 'debit':
net_income -= ledger_entry.amount
subaccount.total -= ledger_entry.amount
return render_template('financial_statements/income_statement.html',
title = 'Income Statement',
periods = periods,
currency = currency,
elements = elements,
net_income = net_income)
@app.route('/FinancialStatements/IncomeStatement/<currency>/<period>')
def income_statement_historical(currency, period):
periods = db.session \
.query(\
func.date_part('year', models.LedgerEntries.date), \
func.date_part('month', models.LedgerEntries.date)) \
.group_by( \
func.date_part('year', models.LedgerEntries.date), \
func.date_part('month', models.LedgerEntries.date)) \
.all()
periods = sorted([date(int(period[0]), int(period[1]), 1) for period in periods])
period = datetime.strptime(period, "%Y-%m")
lastday = calendar.monthrange(period.year, period.month)[1]
period_beg = datetime(period.year, period.month, 1, 0, 0, 0, 0)
period_end = datetime(period.year, period.month, lastday, 23, 59, 59, 999999)
elements = db.session \
.query(models.Elements) \
.join(models.Classifications) \
.filter(models.Classifications.name.in_(['Revenues', 'Expenses', 'Gains', 'Losses']))\
.join(models.Accounts) \
.join(models.Subaccounts) \
.all()
net_income = 0
for element in elements:
element.classifications = [c for c in element.classifications if c.name in ['Revenues', 'Expenses', 'Gains', 'Losses']]
for classification in element.classifications:
for account in classification.accounts:
for subaccount in account.subaccounts:
subaccount.total = 0
subaccount.ledgerentries = [c for c in subaccount.ledgerentries if period_beg <= c.date <= period_end ]
for ledger_entry in subaccount.ledgerentries:
if ledger_entry.currency == currency:
if ledger_entry.tside == 'credit':
net_income += ledger_entry.amount
subaccount.total += ledger_entry.amount
elif ledger_entry.tside == 'debit':
net_income -= ledger_entry.amount
subaccount.total -= ledger_entry.amount
return render_template('financial_statements/income_statement.html',
title = 'Income Statement',
periods = periods,
currency = currency,
elements = elements,
net_income = net_income)
@app.route('/FinancialStatements/BalanceSheet/<currency>')
def balance_sheet(currency):
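    # Accumulate balances through the end of the current period; the Equity
    # element's balance is negated into retained earnings, and only Assets
    # and Liabilities are rendered directly.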
periods = db.session \
.query(\
func.date_part('year', models.LedgerEntries.date), \
func.date_part('month', models.LedgerEntries.date)) \
.group_by( \
func.date_part('year', models.LedgerEntries.date), \
func.date_part('month', models.LedgerEntries.date)) \
.all()
periods = sorted([date(int(period[0]), int(period[1]), 1) for period in periods])
period = datetime.now()
period_beg = datetime(period.year, period.month, 1, 0, 0, 0, 0)
period_end = datetime(period.year, period.month, period.day, 23, 59, 59, 999999)
elements = db.session \
.query(models.Elements) \
.join(models.Classifications) \
.join(models.Accounts) \
.join(models.Subaccounts) \
.all()
retained_earnings = 0
for element in elements:
element.balance = 0
for classification in element.classifications:
classification.balance = 0
for account in classification.accounts:
account.balance = 0
for subaccount in account.subaccounts:
subaccount.balance = 0
subaccount.ledgerentries = [c for c in subaccount.ledgerentries if c.date <= period_end ]
for ledger_entry in subaccount.ledgerentries:
if ledger_entry.currency == currency:
if ledger_entry.tside == 'credit':
element.balance -= ledger_entry.amount
classification.balance -= ledger_entry.amount
account.balance -= ledger_entry.amount
subaccount.balance -= ledger_entry.amount
elif ledger_entry.tside == 'debit':
element.balance += ledger_entry.amount
classification.balance += ledger_entry.amount
account.balance += ledger_entry.amount
subaccount.balance += ledger_entry.amount
if element.name == 'Equity':
retained_earnings = -element.balance
print(retained_earnings)
elements = [c for c in elements if c.name in ['Assets', 'Liabilities']]
return render_template('financial_statements/balance_sheet.html',
periods=periods,
currency=currency,
elements=elements,
retained_earnings=retained_earnings,
period=period_end)
@app.route('/FinancialStatements/BalanceSheet/<currency>/<period>')
def balance_sheet_historical(currency, period):
periods = db.session \
.query(\
func.date_part('year', models.LedgerEntries.date), \
func.date_part('month', models.LedgerEntries.date)) \
.group_by( \
func.date_part('year', models.LedgerEntries.date), \
func.date_part('month', models.LedgerEntries.date)) \
.all()
periods = sorted([date(int(period[0]), int(period[1]), 1) for period in periods])
period = datetime.strptime(period, "%Y-%m")
lastday = calendar.monthrange(period.year, period.month)[1]
period_beg = datetime(period.year, period.month, 1, 0, 0, 0, 0)
period_end = datetime(period.year, period.month, lastday, 23, 59, 59, 999999)
elements = db.session \
.query(models.Elements) \
.join(models.Classifications) \
.join(models.Accounts) \
.join(models.Subaccounts) \
.all()
retained_earnings = 0
for element in elements:
element.balance = 0
for classification in element.classifications:
classification.balance = 0
for account in classification.accounts:
account.balance = 0
for subaccount in account.subaccounts:
subaccount.balance = 0
subaccount.ledgerentries = [c for c in subaccount.ledgerentries if c.date <= period_end ]
for ledger_entry in subaccount.ledgerentries:
if ledger_entry.currency == currency:
if ledger_entry.tside == 'credit':
element.balance -= ledger_entry.amount
classification.balance -= ledger_entry.amount
account.balance -= ledger_entry.amount
subaccount.balance -= ledger_entry.amount
elif ledger_entry.tside == 'debit':
element.balance += ledger_entry.amount
classification.balance += ledger_entry.amount
account.balance += ledger_entry.amount
subaccount.balance += ledger_entry.amount
if element.name == 'Equity':
retained_earnings = -element.balance
print(retained_earnings)
elements = [c for c in elements if c.name in ['Assets', 'Liabilities']]
return render_template('financial_statements/balance_sheet.html',
periods=periods,
currency=currency,
elements=elements,
retained_earnings=retained_earnings,
period=period_end)
@app.route('/FinancialStatements/StatementOfCashFlows/<currency>/<period>')
def statement_of_cash_flows(currency, period):
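    # 'Current' selects the month to date; otherwise the YYYY-MM period is
    # parsed. Note net_income is initialized but never accumulated here, so
    # the template receives zero.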
periods = db.session \
.query(\
func.date_part('year', models.LedgerEntries.date), \
func.date_part('month', models.LedgerEntries.date)) \
.group_by( \
func.date_part('year', models.LedgerEntries.date), \
func.date_part('month', models.LedgerEntries.date)) \
.all()
periods = sorted([date(int(period[0]), int(period[1]), 1) for period in periods])
if period == 'Current':
period = datetime.now()
lastday = period.day
else:
period = datetime.strptime(period, "%Y-%m")
lastday = calendar.monthrange(period.year, period.month)[1]
period_beg = datetime(period.year, period.month, 1, 0, 0, 0, 0)
period_end = datetime(period.year, period.month, lastday, 23, 59, 59, 999999)
elements = db.session \
.query(models.Elements) \
.join(models.Classifications) \
.filter(models.Classifications.name.in_(['Revenues', 'Expenses', 'Gains', 'Losses']))\
.join(models.Accounts) \
.join(models.Subaccounts) \
.all()
net_income = 0
for element in elements:
element.classifications = [c for c in element.classifications if c.name in ['Revenues', 'Expenses', 'Gains', 'Losses']]
for classification in element.classifications:
classification.balance = 0
for account in classification.accounts:
account.balance = 0
for subaccount in account.subaccounts:
subaccount.balance = 0
subaccount.ledgerentries = [c for c in subaccount.ledgerentries if period_beg <= c.date <= period_end ]
for ledger_entry in subaccount.ledgerentries:
if ledger_entry.currency == currency:
if ledger_entry.tside == 'credit':
classification.balance -= ledger_entry.amount
account.balance -= ledger_entry.amount
subaccount.balance -= ledger_entry.amount
elif ledger_entry.tside == 'debit':
classification.balance += ledger_entry.amount
account.balance += ledger_entry.amount
subaccount.balance += ledger_entry.amount
return render_template('financial_statements/statement_of_cash_flows.html',
period = period,
periods = periods,
currency = currency,
elements = elements,
net_income = net_income)
|
[
"xac.forms.NewSubAccount",
"csv.reader",
"xac.models.Elements.query.filter_by",
"xac.db.session.delete",
"flask.url_for",
"xac.accounting.rates.import_rates",
"flask.request.form.copy",
"xac.models.LedgerEntries.tside.desc",
"xac.accounting.memoranda.process_filestorage",
"xac.models.LedgerEntries.date.desc",
"xac.db.session.commit",
"flask.request.files.getlist",
"xac.models.Memoranda.query.filter_by",
"xac.models.Classifications",
"xac.accounting.ledgers.foot_account",
"flask.render_template",
"xac.forms.NewClassification",
"datetime.datetime.now",
"xac.models.Subaccounts.query.all",
"xac.db.session.add",
"xac.models.Accounts.query.filter_by",
"xac.models.JournalEntries.query.filter_by",
"io.StringIO",
"xac.accounting.ledgers.query_entries",
"xac.models.MemorandaTransactions.query.all",
"xac.models.Subaccounts",
"xac.accounting.valuations.calculate_bitcoin_gains",
"xac.accounting.rates.download_rates",
"xac.models.Classifications.name.in_",
"xac.models.LedgerEntries.tside.asc",
"xac.models.JournalEntries.ledgerentries.any",
"xac.accounting.rates.summarize_rates",
"datetime.datetime",
"datetime.datetime.strptime",
"calendar.monthrange",
"xac.forms.NewAccount",
"sqlalchemy.sql.func.date_part",
"xac.db.session.query",
"xac.models.Memoranda.date.desc",
"xac.models.MemorandaTransactions.query.filter_by",
"xac.app.route",
"xac.models.Accounts",
"ast.literal_eval",
"xac.models.LedgerEntries.query.filter_by",
"xac.models.Classifications.query.filter_by"
] |
[((605, 619), 'xac.app.route', 'app.route', (['"""/"""'], {}), "('/')\n", (614, 619), False, 'from xac import app, db, forms, models\n'), ((676, 699), 'xac.app.route', 'app.route', (['"""/Configure"""'], {}), "('/Configure')\n", (685, 699), False, 'from xac import app, db, forms, models\n'), ((769, 808), 'xac.app.route', 'app.route', (['"""/Configure/ChartOfAccounts"""'], {}), "('/Configure/ChartOfAccounts')\n", (778, 808), False, 'from xac import app, db, forms, models\n'), ((1231, 1317), 'xac.app.route', 'app.route', (['"""/Configure/ChartOfAccounts/AddClassification"""'], {'methods': "['POST', 'GET']"}), "('/Configure/ChartOfAccounts/AddClassification', methods=['POST',\n 'GET'])\n", (1240, 1317), False, 'from xac import app, db, forms, models\n'), ((1789, 1866), 'xac.app.route', 'app.route', (['"""/Configure/ChartOfAccounts/DeleteClassification/<classification>"""'], {}), "('/Configure/ChartOfAccounts/DeleteClassification/<classification>')\n", (1798, 1866), False, 'from xac import app, db, forms, models\n'), ((2146, 2221), 'xac.app.route', 'app.route', (['"""/Configure/ChartOfAccounts/AddAccount"""'], {'methods': "['POST', 'GET']"}), "('/Configure/ChartOfAccounts/AddAccount', methods=['POST', 'GET'])\n", (2155, 2221), False, 'from xac import app, db, forms, models\n'), ((2707, 2770), 'xac.app.route', 'app.route', (['"""/Configure/ChartOfAccounts/DeleteAccount/<account>"""'], {}), "('/Configure/ChartOfAccounts/DeleteAccount/<account>')\n", (2716, 2770), False, 'from xac import app, db, forms, models\n'), ((2975, 3053), 'xac.app.route', 'app.route', (['"""/Configure/ChartOfAccounts/AddSubAccount"""'], {'methods': "['POST', 'GET']"}), "('/Configure/ChartOfAccounts/AddSubAccount', methods=['POST', 'GET'])\n", (2984, 3053), False, 'from xac import app, db, forms, models\n'), ((3505, 3574), 'xac.app.route', 'app.route', (['"""/Configure/ChartOfAccounts/DeleteSubAccount/<subaccount>"""'], {}), "('/Configure/ChartOfAccounts/DeleteSubAccount/<subaccount>')\n", (3514, 3574), False, 'from xac import app, db, forms, models\n'), ((3794, 3819), 'xac.app.route', 'app.route', (['"""/Bookkeeping"""'], {}), "('/Bookkeeping')\n", (3803, 3819), False, 'from xac import app, db, forms, models\n'), ((3884, 3951), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/Upload"""'], {'methods': "['POST', 'GET']"}), "('/Bookkeeping/Memoranda/Upload', methods=['POST', 'GET'])\n", (3893, 3951), False, 'from xac import app, db, forms, models\n'), ((4415, 4464), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/ExchangeRates"""'], {}), "('/Bookkeeping/Memoranda/ExchangeRates')\n", (4424, 4464), False, 'from xac import app, db, forms, models\n'), ((4556, 4605), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/DownloadRates"""'], {}), "('/Bookkeeping/Memoranda/DownloadRates')\n", (4565, 4605), False, 'from xac import app, db, forms, models\n'), ((4706, 4765), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/ExchangeRates/Summarize"""'], {}), "('/Bookkeeping/Memoranda/ExchangeRates/Summarize')\n", (4715, 4765), False, 'from xac import app, db, forms, models\n'), ((4873, 4929), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/ExchangeRates/Import"""'], {}), "('/Bookkeeping/Memoranda/ExchangeRates/Import')\n", (4882, 4929), False, 'from xac import app, db, forms, models\n'), ((5029, 5102), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/ExchangeRates/CalculateGains/<method>"""'], {}), "('/Bookkeeping/Memoranda/ExchangeRates/CalculateGains/<method>')\n", (5038, 5102), 
False, 'from xac import app, db, forms, models\n'), ((5223, 5289), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/Memos"""'], {'methods': "['POST', 'GET']"}), "('/Bookkeeping/Memoranda/Memos', methods=['POST', 'GET'])\n", (5232, 5289), False, 'from xac import app, db, forms, models\n'), ((5728, 5787), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/Memos/Delete/<fileName>"""'], {}), "('/Bookkeeping/Memoranda/Memos/Delete/<fileName>')\n", (5737, 5787), False, 'from xac import app, db, forms, models\n'), ((6732, 6784), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/Memos/<fileName>"""'], {}), "('/Bookkeeping/Memoranda/Memos/<fileName>')\n", (6741, 6784), False, 'from xac import app, db, forms, models\n'), ((7149, 7203), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/Memos/Transactions"""'], {}), "('/Bookkeeping/Memoranda/Memos/Transactions')\n", (7158, 7203), False, 'from xac import app, db, forms, models\n'), ((7684, 7749), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Memoranda/Memos/<fileName>/Transactions"""'], {}), "('/Bookkeeping/Memoranda/Memos/<fileName>/Transactions')\n", (7693, 7749), False, 'from xac import app, db, forms, models\n'), ((8372, 8423), 'xac.app.route', 'app.route', (['"""/Bookkeeping/GeneralJournal/<currency>"""'], {}), "('/Bookkeeping/GeneralJournal/<currency>')\n", (8381, 8423), False, 'from xac import app, db, forms, models\n'), ((9045, 9096), 'xac.app.route', 'app.route', (['"""/Bookkeeping/GeneralJournal/Entry/<id>"""'], {}), "('/Bookkeeping/GeneralJournal/Entry/<id>')\n", (9054, 9096), False, 'from xac import app, db, forms, models\n'), ((9879, 9954), 'xac.app.route', 'app.route', (['"""/Bookkeeping/GeneralJournal/<id>/Edit"""'], {'methods': "['POST', 'GET']"}), "('/Bookkeeping/GeneralJournal/<id>/Edit', methods=['POST', 'GET'])\n", (9888, 9954), False, 'from xac import app, db, forms, models\n'), ((10720, 10770), 'xac.app.route', 'app.route', (['"""/Bookkeeping/GeneralLedger/<currency>"""'], {}), "('/Bookkeeping/GeneralLedger/<currency>')\n", (10729, 10770), False, 'from xac import app, db, forms, models\n'), ((11284, 11351), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Ledger/<accountName>/<currency>/<groupby>"""'], {}), "('/Bookkeeping/Ledger/<accountName>/<currency>/<groupby>')\n", (11293, 11351), False, 'from xac import app, db, forms, models\n'), ((11690, 11768), 'xac.app.route', 'app.route', (['"""/Bookkeeping/Ledger/<accountName>/<currency>/<groupby>/<interval>"""'], {}), "('/Bookkeeping/Ledger/<accountName>/<currency>/<groupby>/<interval>')\n", (11699, 11768), False, 'from xac import app, db, forms, models\n'), ((13575, 13624), 'xac.app.route', 'app.route', (['"""/Bookkeeping/TrialBalance/<currency>"""'], {}), "('/Bookkeeping/TrialBalance/<currency>')\n", (13584, 13624), False, 'from xac import app, db, forms, models\n'), ((15398, 15466), 'xac.app.route', 'app.route', (['"""/Bookkeeping/TrialBalance/<currency>/<groupby>/<period>"""'], {}), "('/Bookkeeping/TrialBalance/<currency>/<groupby>/<period>')\n", (15407, 15466), False, 'from xac import app, db, forms, models\n'), ((17352, 17385), 'xac.app.route', 'app.route', (['"""/FinancialStatements"""'], {}), "('/FinancialStatements')\n", (17361, 17385), False, 'from xac import app, db, forms, models\n'), ((17486, 17546), 'xac.app.route', 'app.route', (['"""/FinancialStatements/IncomeStatement/<currency>"""'], {}), "('/FinancialStatements/IncomeStatement/<currency>')\n", (17495, 17546), False, 'from xac import app, db, forms, models\n'), ((19755, 19824), 
'xac.app.route', 'app.route', (['"""/FinancialStatements/IncomeStatement/<currency>/<period>"""'], {}), "('/FinancialStatements/IncomeStatement/<currency>/<period>')\n", (19764, 19824), False, 'from xac import app, db, forms, models\n'), ((22135, 22192), 'xac.app.route', 'app.route', (['"""/FinancialStatements/BalanceSheet/<currency>"""'], {}), "('/FinancialStatements/BalanceSheet/<currency>')\n", (22144, 22192), False, 'from xac import app, db, forms, models\n'), ((24777, 24843), 'xac.app.route', 'app.route', (['"""/FinancialStatements/BalanceSheet/<currency>/<period>"""'], {}), "('/FinancialStatements/BalanceSheet/<currency>/<period>')\n", (24786, 24843), False, 'from xac import app, db, forms, models\n'), ((27526, 27600), 'xac.app.route', 'app.route', (['"""/FinancialStatements/StatementOfCashFlows/<currency>/<period>"""'], {}), "('/FinancialStatements/StatementOfCashFlows/<currency>/<period>')\n", (27535, 27600), False, 'from xac import app, db, forms, models\n'), ((644, 673), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (659, 673), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((859, 884), 'xac.forms.NewClassification', 'forms.NewClassification', ([], {}), '()\n', (882, 884), False, 'from xac import app, db, forms, models\n'), ((903, 921), 'xac.forms.NewAccount', 'forms.NewAccount', ([], {}), '()\n', (919, 921), False, 'from xac import app, db, forms, models\n'), ((943, 964), 'xac.forms.NewSubAccount', 'forms.NewSubAccount', ([], {}), '()\n', (962, 964), False, 'from xac import app, db, forms, models\n'), ((983, 1013), 'xac.models.Subaccounts.query.all', 'models.Subaccounts.query.all', ([], {}), '()\n', (1011, 1013), False, 'from xac import app, db, forms, models\n'), ((1025, 1204), 'flask.render_template', 'render_template', (['"""configure/chart_of_accounts.html"""'], {'subaccounts': 'subaccounts', 'classificationform': 'classificationform', 'accountform': 'accountform', 'subaccountform': 'subaccountform'}), "('configure/chart_of_accounts.html', subaccounts=subaccounts,\n classificationform=classificationform, accountform=accountform,\n subaccountform=subaccountform)\n", (1040, 1204), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((2036, 2069), 'xac.db.session.delete', 'db.session.delete', (['classification'], {}), '(classification)\n', (2053, 2069), False, 'from xac import app, db, forms, models\n'), ((2074, 2093), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2091, 2093), False, 'from xac import app, db, forms, models\n'), ((2872, 2898), 'xac.db.session.delete', 'db.session.delete', (['account'], {}), '(account)\n', (2889, 2898), False, 'from xac import app, db, forms, models\n'), ((2903, 2922), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2920, 2922), False, 'from xac import app, db, forms, models\n'), ((3688, 3717), 'xac.db.session.delete', 'db.session.delete', (['subaccount'], {}), '(subaccount)\n', (3705, 3717), False, 'from xac import app, db, forms, models\n'), ((3722, 3741), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3739, 3741), False, 'from xac import app, db, forms, models\n'), ((4323, 4394), 'flask.render_template', 'render_template', (['"""bookkeeping/upload.html"""'], {'title': '"""Upload"""', 'memos': 'memos'}), "('bookkeeping/upload.html', title='Upload', memos=memos)\n", (4338, 4394), False, 'from flask import flash, 
render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((4503, 4553), 'flask.render_template', 'render_template', (['"""bookkeeping/exchange_rates.html"""'], {}), "('bookkeeping/exchange_rates.html')\n", (4518, 4553), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((4632, 4654), 'xac.accounting.rates.download_rates', 'rates.download_rates', ([], {}), '()\n', (4652, 4654), True, 'import xac.accounting.rates as rates\n'), ((4793, 4821), 'xac.accounting.rates.summarize_rates', 'rates.summarize_rates', (['"""xac"""'], {}), "('xac')\n", (4814, 4821), True, 'import xac.accounting.rates as rates\n'), ((4954, 4979), 'xac.accounting.rates.import_rates', 'rates.import_rates', (['"""xac"""'], {}), "('xac')\n", (4972, 4979), True, 'import xac.accounting.rates as rates\n'), ((5131, 5173), 'xac.accounting.valuations.calculate_bitcoin_gains', 'valuations.calculate_bitcoin_gains', (['method'], {}), '(method)\n', (5165, 5173), True, 'import xac.accounting.valuations as valuations\n'), ((5634, 5707), 'flask.render_template', 'render_template', (['"""bookkeeping/memos.html"""'], {'title': '"""Memoranda"""', 'memos': 'memos'}), "('bookkeeping/memos.html', title='Memoranda', memos=memos)\n", (5649, 5707), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((6639, 6662), 'xac.db.session.delete', 'db.session.delete', (['memo'], {}), '(memo)\n', (6656, 6662), False, 'from xac import app, db, forms, models\n'), ((6667, 6686), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6684, 6686), False, 'from xac import app, db, forms, models\n'), ((6925, 6946), 'io.StringIO', 'io.StringIO', (['fileText'], {}), '(fileText)\n', (6936, 6946), False, 'import io\n'), ((6960, 6980), 'csv.reader', 'csv.reader', (['document'], {}), '(document)\n', (6970, 6980), False, 'import csv\n'), ((7029, 7122), 'flask.render_template', 'render_template', (['"""bookkeeping/memo_file.html"""'], {'title': '"""Memo"""', 'rows': 'rows', 'fileName': 'fileName'}), "('bookkeeping/memo_file.html', title='Memo', rows=rows,\n fileName=fileName)\n", (7044, 7122), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((7243, 7283), 'xac.models.MemorandaTransactions.query.all', 'models.MemorandaTransactions.query.all', ([], {}), '()\n', (7281, 7283), False, 'from xac import app, db, forms, models\n'), ((7568, 7666), 'flask.render_template', 'render_template', (['"""bookkeeping/memo_transactions.html"""'], {'title': '"""Memo"""', 'transactions': 'transactions'}), "('bookkeeping/memo_transactions.html', title='Memo',\n transactions=transactions)\n", (7583, 7666), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((8230, 8347), 'flask.render_template', 'render_template', (['"""bookkeeping/memo_transactions.html"""'], {'title': '"""Memo"""', 'transactions': 'transactions', 'fileName': 'fileName'}), "('bookkeeping/memo_transactions.html', title='Memo',\n transactions=transactions, fileName=fileName)\n", (8245, 8347), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((8888, 9020), 'flask.render_template', 'render_template', (['"""bookkeeping/general_journal.html"""'], {'title': '"""General Journal"""', 'journal_entries': 'journal_entries', 'currency': 'currency'}), "('bookkeeping/general_journal.html', 
title='General Journal',\n journal_entries=journal_entries, currency=currency)\n", (8903, 9020), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((9591, 9628), 'ast.literal_eval', 'ast.literal_eval', (['transaction.details'], {}), '(transaction.details)\n', (9607, 9628), False, 'import ast\n'), ((9666, 9843), 'flask.render_template', 'render_template', (['"""bookkeeping/journal_entry.html"""'], {'title': '"""Journal Entry"""', 'journal_entry': 'journal_entry', 'ledger_entries': 'ledger_entries', 'transaction': 'transaction', 'memo': 'memo'}), "('bookkeeping/journal_entry.html', title='Journal Entry',\n journal_entry=journal_entry, ledger_entries=ledger_entries, transaction\n =transaction, memo=memo)\n", (9681, 9843), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((10453, 10490), 'ast.literal_eval', 'ast.literal_eval', (['transaction.details'], {}), '(transaction.details)\n', (10469, 10490), False, 'import ast\n'), ((10502, 10685), 'flask.render_template', 'render_template', (['"""bookkeeping/journal_entry_edit.html"""'], {'title': '"""Journal Entry"""', 'journal_entry': 'journal_entry', 'ledger_entries': 'ledger_entries', 'transaction': 'transaction', 'memo': 'memo'}), "('bookkeeping/journal_entry_edit.html', title=\n 'Journal Entry', journal_entry=journal_entry, ledger_entries=\n ledger_entries, transaction=transaction, memo=memo)\n", (10517, 10685), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((11143, 11259), 'flask.render_template', 'render_template', (['"""bookkeeping/general_ledger.html"""'], {'title': '"""General Ledger"""', 'accounts': 'accounts', 'currency': 'currency'}), "('bookkeeping/general_ledger.html', title='General Ledger',\n accounts=accounts, currency=currency)\n", (11158, 11259), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((11408, 11461), 'xac.accounting.ledgers.query_entries', 'ledgers.query_entries', (['accountName', 'groupby', 'currency'], {}), '(accountName, groupby, currency)\n', (11429, 11461), True, 'import xac.accounting.ledgers as ledgers\n'), ((11473, 11644), 'flask.render_template', 'render_template', (['"""bookkeeping/ledger.html"""'], {'title': '"""Ledger"""', 'currency': 'currency', 'account': 'query[0]', 'ledger_entries': 'query[1]', 'groupby': 'groupby', 'accountName': 'accountName'}), "('bookkeeping/ledger.html', title='Ledger', currency=\n currency, account=query[0], ledger_entries=query[1], groupby=groupby,\n accountName=accountName)\n", (11488, 11644), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((13300, 13510), 'flask.render_template', 'render_template', (['"""bookkeeping/ledger.html"""'], {'title': '"""Ledger"""', 'account': 'account', 'ledger_entries': 'ledger_entries', 'groupby2': 'groupby', 'groupby': '"""All"""', 'accountName': 'accountName', 'interval': 'interval', 'currency': 'currency'}), "('bookkeeping/ledger.html', title='Ledger', account=account,\n ledger_entries=ledger_entries, groupby2=groupby, groupby='All',\n accountName=accountName, interval=interval, currency=currency)\n", (13315, 13510), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((14273, 14287), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (14285, 14287), 
False, 'from datetime import datetime, date\n'), ((15176, 15356), 'flask.render_template', 'render_template', (['"""bookkeeping/trial_balance.html"""'], {'currency': 'currency', 'periods': 'periods', 'period': 'period', 'accounts': 'accounts', 'totalDebits': 'totalDebits', 'totalCredits': 'totalCredits'}), "('bookkeeping/trial_balance.html', currency=currency,\n periods=periods, period=period, accounts=accounts, totalDebits=\n totalDebits, totalCredits=totalCredits)\n", (15191, 15356), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((16142, 16176), 'datetime.datetime.strptime', 'datetime.strptime', (['period', '"""%Y-%m"""'], {}), "(period, '%Y-%m')\n", (16159, 16176), False, 'from datetime import datetime, date\n'), ((16284, 16322), 'datetime.datetime', 'datetime', (['year', 'month', 'day', '(23)', '(59)', '(59)'], {}), '(year, month, day, 23, 59, 59)\n', (16292, 16322), False, 'from datetime import datetime, date\n'), ((17130, 17310), 'flask.render_template', 'render_template', (['"""bookkeeping/trial_balance.html"""'], {'currency': 'currency', 'periods': 'periods', 'period': 'period', 'accounts': 'accounts', 'totalDebits': 'totalDebits', 'totalCredits': 'totalCredits'}), "('bookkeeping/trial_balance.html', currency=currency,\n periods=periods, period=period, accounts=accounts, totalDebits=\n totalDebits, totalCredits=totalCredits)\n", (17145, 17310), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((18018, 18032), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (18030, 18032), False, 'from datetime import datetime, date\n'), ((18050, 18100), 'datetime.datetime', 'datetime', (['period.year', 'period.month', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(period.year, period.month, 1, 0, 0, 0, 0)\n', (18058, 18100), False, 'from datetime import datetime, date\n'), ((18118, 18185), 'datetime.datetime', 'datetime', (['period.year', 'period.month', 'period.day', '(23)', '(59)', '(59)', '(999999)'], {}), '(period.year, period.month, period.day, 23, 59, 59, 999999)\n', (18126, 18185), False, 'from datetime import datetime, date\n'), ((19513, 19688), 'flask.render_template', 'render_template', (['"""financial_statements/income_statement.html"""'], {'title': '"""Income Statement"""', 'periods': 'periods', 'currency': 'currency', 'elements': 'elements', 'net_income': 'net_income'}), "('financial_statements/income_statement.html', title=\n 'Income Statement', periods=periods, currency=currency, elements=\n elements, net_income=net_income)\n", (19528, 19688), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((20317, 20351), 'datetime.datetime.strptime', 'datetime.strptime', (['period', '"""%Y-%m"""'], {}), "(period, '%Y-%m')\n", (20334, 20351), False, 'from datetime import datetime, date\n'), ((20433, 20483), 'datetime.datetime', 'datetime', (['period.year', 'period.month', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(period.year, period.month, 1, 0, 0, 0, 0)\n', (20441, 20483), False, 'from datetime import datetime, date\n'), ((20501, 20565), 'datetime.datetime', 'datetime', (['period.year', 'period.month', 'lastday', '(23)', '(59)', '(59)', '(999999)'], {}), '(period.year, period.month, lastday, 23, 59, 59, 999999)\n', (20509, 20565), False, 'from datetime import datetime, date\n'), ((21893, 22068), 'flask.render_template', 'render_template', (['"""financial_statements/income_statement.html"""'], 
{'title': '"""Income Statement"""', 'periods': 'periods', 'currency': 'currency', 'elements': 'elements', 'net_income': 'net_income'}), "('financial_statements/income_statement.html', title=\n 'Income Statement', periods=periods, currency=currency, elements=\n elements, net_income=net_income)\n", (21908, 22068), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((22663, 22677), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (22675, 22677), False, 'from datetime import datetime, date\n'), ((22695, 22745), 'datetime.datetime', 'datetime', (['period.year', 'period.month', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(period.year, period.month, 1, 0, 0, 0, 0)\n', (22703, 22745), False, 'from datetime import datetime, date\n'), ((22763, 22830), 'datetime.datetime', 'datetime', (['period.year', 'period.month', 'period.day', '(23)', '(59)', '(59)', '(999999)'], {}), '(period.year, period.month, period.day, 23, 59, 59, 999999)\n', (22771, 22830), False, 'from datetime import datetime, date\n'), ((24580, 24758), 'flask.render_template', 'render_template', (['"""financial_statements/balance_sheet.html"""'], {'periods': 'periods', 'currency': 'currency', 'elements': 'elements', 'retained_earnings': 'retained_earnings', 'period': 'period_end'}), "('financial_statements/balance_sheet.html', periods=periods,\n currency=currency, elements=elements, retained_earnings=\n retained_earnings, period=period_end)\n", (24595, 24758), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((25333, 25367), 'datetime.datetime.strptime', 'datetime.strptime', (['period', '"""%Y-%m"""'], {}), "(period, '%Y-%m')\n", (25350, 25367), False, 'from datetime import datetime, date\n'), ((25449, 25499), 'datetime.datetime', 'datetime', (['period.year', 'period.month', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(period.year, period.month, 1, 0, 0, 0, 0)\n', (25457, 25499), False, 'from datetime import datetime, date\n'), ((25517, 25581), 'datetime.datetime', 'datetime', (['period.year', 'period.month', 'lastday', '(23)', '(59)', '(59)', '(999999)'], {}), '(period.year, period.month, lastday, 23, 59, 59, 999999)\n', (25525, 25581), False, 'from datetime import datetime, date\n'), ((27327, 27505), 'flask.render_template', 'render_template', (['"""financial_statements/balance_sheet.html"""'], {'periods': 'periods', 'currency': 'currency', 'elements': 'elements', 'retained_earnings': 'retained_earnings', 'period': 'period_end'}), "('financial_statements/balance_sheet.html', periods=periods,\n currency=currency, elements=elements, retained_earnings=\n retained_earnings, period=period_end)\n", (27342, 27505), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((28312, 28362), 'datetime.datetime', 'datetime', (['period.year', 'period.month', '(1)', '(0)', '(0)', '(0)', '(0)'], {}), '(period.year, period.month, 1, 0, 0, 0, 0)\n', (28320, 28362), False, 'from datetime import datetime, date\n'), ((28380, 28444), 'datetime.datetime', 'datetime', (['period.year', 'period.month', 'lastday', '(23)', '(59)', '(59)', '(999999)'], {}), '(period.year, period.month, lastday, 23, 59, 59, 999999)\n', (28388, 28444), False, 'from datetime import datetime, date\n'), ((30048, 30218), 'flask.render_template', 'render_template', (['"""financial_statements/statement_of_cash_flows.html"""'], {'period': 'period', 'periods': 'periods', 'currency': 'currency', 
'elements': 'elements', 'net_income': 'net_income'}), "('financial_statements/statement_of_cash_flows.html', period\n =period, periods=periods, currency=currency, elements=elements,\n net_income=net_income)\n", (30063, 30218), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((737, 765), 'flask.url_for', 'url_for', (['"""chart_of_accounts"""'], {}), "('chart_of_accounts')\n", (744, 765), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((1621, 1669), 'xac.models.Classifications', 'models.Classifications', ([], {'name': 'name', 'parent': 'parent'}), '(name=name, parent=parent)\n', (1643, 1669), False, 'from xac import app, db, forms, models\n'), ((1678, 1708), 'xac.db.session.add', 'db.session.add', (['classification'], {}), '(classification)\n', (1692, 1708), False, 'from xac import app, db, forms, models\n'), ((1717, 1736), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (1734, 1736), False, 'from xac import app, db, forms, models\n'), ((1757, 1785), 'flask.url_for', 'url_for', (['"""chart_of_accounts"""'], {}), "('chart_of_accounts')\n", (1764, 1785), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((2114, 2142), 'flask.url_for', 'url_for', (['"""chart_of_accounts"""'], {}), "('chart_of_accounts')\n", (2121, 2142), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((2553, 2594), 'xac.models.Accounts', 'models.Accounts', ([], {'name': 'name', 'parent': 'parent'}), '(name=name, parent=parent)\n', (2568, 2594), False, 'from xac import app, db, forms, models\n'), ((2603, 2626), 'xac.db.session.add', 'db.session.add', (['account'], {}), '(account)\n', (2617, 2626), False, 'from xac import app, db, forms, models\n'), ((2635, 2654), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (2652, 2654), False, 'from xac import app, db, forms, models\n'), ((2675, 2703), 'flask.url_for', 'url_for', (['"""chart_of_accounts"""'], {}), "('chart_of_accounts')\n", (2682, 2703), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((2943, 2971), 'flask.url_for', 'url_for', (['"""chart_of_accounts"""'], {}), "('chart_of_accounts')\n", (2950, 2971), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((3345, 3389), 'xac.models.Subaccounts', 'models.Subaccounts', ([], {'name': 'name', 'parent': 'parent'}), '(name=name, parent=parent)\n', (3363, 3389), False, 'from xac import app, db, forms, models\n'), ((3398, 3424), 'xac.db.session.add', 'db.session.add', (['subaccount'], {}), '(subaccount)\n', (3412, 3424), False, 'from xac import app, db, forms, models\n'), ((3433, 3452), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3450, 3452), False, 'from xac import app, db, forms, models\n'), ((3473, 3501), 'flask.url_for', 'url_for', (['"""chart_of_accounts"""'], {}), "('chart_of_accounts')\n", (3480, 3501), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((3762, 3790), 'flask.url_for', 'url_for', (['"""chart_of_accounts"""'], {}), "('chart_of_accounts')\n", (3769, 3790), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((3859, 3880), 'flask.url_for', 'url_for', 
(['"""upload_csv"""'], {}), "('upload_csv')\n", (3866, 3880), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((4046, 4077), 'flask.request.files.getlist', 'request.files.getlist', (['"""file[]"""'], {}), "('file[]')\n", (4067, 4077), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((4675, 4700), 'flask.url_for', 'url_for', (['"""exchange_rates"""'], {}), "('exchange_rates')\n", (4682, 4700), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((4842, 4867), 'flask.url_for', 'url_for', (['"""exchange_rates"""'], {}), "('exchange_rates')\n", (4849, 4867), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((5000, 5025), 'flask.url_for', 'url_for', (['"""exchange_rates"""'], {}), "('exchange_rates')\n", (5007, 5025), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((5194, 5219), 'flask.url_for', 'url_for', (['"""exchange_rates"""'], {}), "('exchange_rates')\n", (5201, 5219), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((6507, 6539), 'xac.db.session.delete', 'db.session.delete', (['journal_entry'], {}), '(journal_entry)\n', (6524, 6539), False, 'from xac import app, db, forms, models\n'), ((6548, 6567), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6565, 6567), False, 'from xac import app, db, forms, models\n'), ((6576, 6606), 'xac.db.session.delete', 'db.session.delete', (['transaction'], {}), '(transaction)\n', (6593, 6606), False, 'from xac import app, db, forms, models\n'), ((6615, 6634), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6632, 6634), False, 'from xac import app, db, forms, models\n'), ((6707, 6728), 'flask.url_for', 'url_for', (['"""upload_csv"""'], {}), "('upload_csv')\n", (6714, 6728), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((7351, 7388), 'ast.literal_eval', 'ast.literal_eval', (['transaction.details'], {}), '(transaction.details)\n', (7367, 7388), False, 'import ast\n'), ((8013, 8050), 'ast.literal_eval', 'ast.literal_eval', (['transaction.details'], {}), '(transaction.details)\n', (8029, 8050), False, 'import ast\n'), ((11045, 11100), 'xac.accounting.ledgers.query_entries', 'ledgers.query_entries', (['accountName', '"""Monthly"""', 'currency'], {}), "(accountName, 'Monthly', currency)\n", (11066, 11100), True, 'import xac.accounting.ledgers as ledgers\n'), ((11874, 11913), 'datetime.datetime.strptime', 'datetime.strptime', (['interval', '"""%m-%d-%Y"""'], {}), "(interval, '%m-%d-%Y')\n", (11891, 11913), False, 'from datetime import datetime, date\n'), ((12553, 12609), 'xac.accounting.ledgers.foot_account', 'ledgers.foot_account', (['accountName', 'ledger_entries', '"""All"""'], {}), "(accountName, ledger_entries, 'All')\n", (12573, 12609), True, 'import xac.accounting.ledgers as ledgers\n'), ((12658, 12694), 'datetime.datetime.strptime', 'datetime.strptime', (['interval', '"""%m-%Y"""'], {}), "(interval, '%m-%Y')\n", (12675, 12694), False, 'from datetime import datetime, date\n'), ((13232, 13288), 'xac.accounting.ledgers.foot_account', 'ledgers.foot_account', (['accountName', 'ledger_entries', '"""All"""'], {}), "(accountName, ledger_entries, 'All')\n", (13252, 13288), True, 
'import xac.accounting.ledgers as ledgers\n'), ((14985, 15041), 'xac.accounting.ledgers.foot_account', 'ledgers.foot_account', (['accountName', 'ledger_entries', '"""All"""'], {}), "(accountName, ledger_entries, 'All')\n", (15005, 15041), True, 'import xac.accounting.ledgers as ledgers\n'), ((16235, 16267), 'calendar.monthrange', 'calendar.monthrange', (['year', 'month'], {}), '(year, month)\n', (16254, 16267), False, 'import calendar\n'), ((16939, 16995), 'xac.accounting.ledgers.foot_account', 'ledgers.foot_account', (['accountName', 'ledger_entries', '"""All"""'], {}), "(accountName, ledger_entries, 'All')\n", (16959, 16995), True, 'import xac.accounting.ledgers as ledgers\n'), ((17434, 17482), 'flask.url_for', 'url_for', (['"""income_statement"""'], {'currency': '"""satoshis"""'}), "('income_statement', currency='satoshis')\n", (17441, 17482), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((20366, 20412), 'calendar.monthrange', 'calendar.monthrange', (['period.year', 'period.month'], {}), '(period.year, period.month)\n', (20385, 20412), False, 'import calendar\n'), ((25382, 25428), 'calendar.monthrange', 'calendar.monthrange', (['period.year', 'period.month'], {}), '(period.year, period.month)\n', (25401, 25428), False, 'import calendar\n'), ((28121, 28135), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (28133, 28135), False, 'from datetime import datetime, date\n'), ((28192, 28226), 'datetime.datetime.strptime', 'datetime.strptime', (['period', '"""%Y-%m"""'], {}), "(period, '%Y-%m')\n", (28209, 28226), False, 'from datetime import datetime, date\n'), ((1931, 1990), 'xac.models.Classifications.query.filter_by', 'models.Classifications.query.filter_by', ([], {'name': 'classification'}), '(name=classification)\n', (1969, 1990), False, 'from xac import app, db, forms, models\n'), ((2814, 2859), 'xac.models.Accounts.query.filter_by', 'models.Accounts.query.filter_by', ([], {'name': 'account'}), '(name=account)\n', (2845, 2859), False, 'from xac import app, db, forms, models\n'), ((3627, 3675), 'xac.models.Accounts.query.filter_by', 'models.Accounts.query.filter_by', ([], {'name': 'subaccount'}), '(name=subaccount)\n', (3658, 3675), False, 'from xac import app, db, forms, models\n'), ((4126, 4151), 'xac.accounting.memoranda.process_filestorage', 'process_filestorage', (['file'], {}), '(file)\n', (4145, 4151), False, 'from xac.accounting.memoranda import process_filestorage\n'), ((4176, 4197), 'flask.url_for', 'url_for', (['"""upload_csv"""'], {}), "('upload_csv')\n", (4183, 4197), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((5831, 5882), 'xac.models.Memoranda.query.filter_by', 'models.Memoranda.query.filter_by', ([], {'fileName': 'fileName'}), '(fileName=fileName)\n', (5863, 5882), False, 'from xac import app, db, forms, models\n'), ((5943, 6009), 'xac.models.MemorandaTransactions.query.filter_by', 'models.MemorandaTransactions.query.filter_by', ([], {'memoranda_id': 'memo.id'}), '(memoranda_id=memo.id)\n', (5987, 6009), False, 'from xac import app, db, forms, models\n'), ((6442, 6466), 'xac.db.session.delete', 'db.session.delete', (['entry'], {}), '(entry)\n', (6459, 6466), False, 'from xac import app, db, forms, models\n'), ((6479, 6498), 'xac.db.session.commit', 'db.session.commit', ([], {}), '()\n', (6496, 6498), False, 'from xac import app, db, forms, models\n'), ((6821, 6872), 'xac.models.Memoranda.query.filter_by', 
'models.Memoranda.query.filter_by', ([], {'fileName': 'fileName'}), '(fileName=fileName)\n', (6853, 6872), False, 'from xac import app, db, forms, models\n'), ((7794, 7845), 'xac.models.Memoranda.query.filter_by', 'models.Memoranda.query.filter_by', ([], {'fileName': 'fileName'}), '(fileName=fileName)\n', (7826, 7845), False, 'from xac import app, db, forms, models\n'), ((7873, 7939), 'xac.models.MemorandaTransactions.query.filter_by', 'models.MemorandaTransactions.query.filter_by', ([], {'memoranda_id': 'memo.id'}), '(memoranda_id=memo.id)\n', (7917, 7939), False, 'from xac import app, db, forms, models\n'), ((9140, 9184), 'xac.models.JournalEntries.query.filter_by', 'models.JournalEntries.query.filter_by', ([], {'id': 'id'}), '(id=id)\n', (9177, 9184), False, 'from xac import app, db, forms, models\n'), ((9387, 9480), 'xac.models.MemorandaTransactions.query.filter_by', 'models.MemorandaTransactions.query.filter_by', ([], {'id': 'journal_entry.memoranda_transactions_id'}), '(id=journal_entry.\n memoranda_transactions_id)\n', (9431, 9480), False, 'from xac import app, db, forms, models\n'), ((9495, 9556), 'xac.models.Memoranda.query.filter_by', 'models.Memoranda.query.filter_by', ([], {'id': 'transaction.memoranda_id'}), '(id=transaction.memoranda_id)\n', (9527, 9556), False, 'from xac import app, db, forms, models\n'), ((10002, 10046), 'xac.models.JournalEntries.query.filter_by', 'models.JournalEntries.query.filter_by', ([], {'id': 'id'}), '(id=id)\n', (10039, 10046), False, 'from xac import app, db, forms, models\n'), ((10249, 10342), 'xac.models.MemorandaTransactions.query.filter_by', 'models.MemorandaTransactions.query.filter_by', ([], {'id': 'journal_entry.memoranda_transactions_id'}), '(id=journal_entry.\n memoranda_transactions_id)\n', (10293, 10342), False, 'from xac import app, db, forms, models\n'), ((10357, 10418), 'xac.models.Memoranda.query.filter_by', 'models.Memoranda.query.filter_by', ([], {'id': 'transaction.memoranda_id'}), '(id=transaction.memoranda_id)\n', (10389, 10418), False, 'from xac import app, db, forms, models\n'), ((28245, 28291), 'calendar.monthrange', 'calendar.monthrange', (['period.year', 'period.month'], {}), '(period.year, period.month)\n', (28264, 28291), False, 'import calendar\n'), ((1387, 1406), 'flask.request.form.copy', 'request.form.copy', ([], {}), '()\n', (1404, 1406), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((1518, 1560), 'xac.models.Elements.query.filter_by', 'models.Elements.query.filter_by', ([], {'id': 'parent'}), '(id=parent)\n', (1549, 1560), False, 'from xac import app, db, forms, models\n'), ((2288, 2307), 'flask.request.form.copy', 'request.form.copy', ([], {}), '()\n', (2305, 2307), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((2405, 2454), 'xac.models.Classifications.query.filter_by', 'models.Classifications.query.filter_by', ([], {'id': 'parent'}), '(id=parent)\n', (2443, 2454), False, 'from xac import app, db, forms, models\n'), ((3123, 3142), 'flask.request.form.copy', 'request.form.copy', ([], {}), '()\n', (3140, 3142), False, 'from flask import flash, render_template, request, redirect, url_for, send_from_directory, send_file\n'), ((3246, 3288), 'xac.models.Accounts.query.filter_by', 'models.Accounts.query.filter_by', ([], {'id': 'parent'}), '(id=parent)\n', (3277, 3288), False, 'from xac import app, db, forms, models\n'), ((4265, 4293), 'xac.models.Memoranda.date.desc', 
'models.Memoranda.date.desc', ([], {}), '()\n', (4291, 4293), False, 'from xac import app, db, forms, models\n'), ((5372, 5400), 'xac.models.Memoranda.date.desc', 'models.Memoranda.date.desc', ([], {}), '()\n', (5398, 5400), False, 'from xac import app, db, forms, models\n'), ((5465, 5531), 'xac.models.MemorandaTransactions.query.filter_by', 'models.MemorandaTransactions.query.filter_by', ([], {'memoranda_id': 'memo.id'}), '(memoranda_id=memo.id)\n', (5509, 5531), False, 'from xac import app, db, forms, models\n'), ((6110, 6189), 'xac.models.JournalEntries.query.filter_by', 'models.JournalEntries.query.filter_by', ([], {'memoranda_transactions_id': 'transaction.id'}), '(memoranda_transactions_id=transaction.id)\n', (6147, 6189), False, 'from xac import app, db, forms, models\n'), ((6268, 6339), 'xac.models.LedgerEntries.query.filter_by', 'models.LedgerEntries.query.filter_by', ([], {'journal_entry_id': 'journal_entry.id'}), '(journal_entry_id=journal_entry.id)\n', (6304, 6339), False, 'from xac import app, db, forms, models\n'), ((7413, 7492), 'xac.models.JournalEntries.query.filter_by', 'models.JournalEntries.query.filter_by', ([], {'memoranda_transactions_id': 'transaction.id'}), '(memoranda_transactions_id=transaction.id)\n', (7450, 7492), False, 'from xac import app, db, forms, models\n'), ((8075, 8154), 'xac.models.JournalEntries.query.filter_by', 'models.JournalEntries.query.filter_by', ([], {'memoranda_transactions_id': 'transaction.id'}), '(memoranda_transactions_id=transaction.id)\n', (8112, 8154), False, 'from xac import app, db, forms, models\n'), ((8679, 8711), 'xac.models.LedgerEntries.date.desc', 'models.LedgerEntries.date.desc', ([], {}), '()\n', (8709, 8711), False, 'from xac import app, db, forms, models\n'), ((9328, 9361), 'xac.models.LedgerEntries.tside.desc', 'models.LedgerEntries.tside.desc', ([], {}), '()\n', (9359, 9361), False, 'from xac import app, db, forms, models\n'), ((10190, 10223), 'xac.models.LedgerEntries.tside.desc', 'models.LedgerEntries.tside.desc', ([], {}), '()\n', (10221, 10223), False, 'from xac import app, db, forms, models\n'), ((14126, 14175), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (14140, 14175), False, 'from sqlalchemy.sql import func\n'), ((14191, 14241), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (14205, 14241), False, 'from sqlalchemy.sql import func\n'), ((17786, 17835), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (17800, 17835), False, 'from sqlalchemy.sql import func\n'), ((17850, 17900), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (17864, 17900), False, 'from sqlalchemy.sql import func\n'), ((20084, 20133), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (20098, 20133), False, 'from sqlalchemy.sql import func\n'), ((20149, 20199), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (20163, 20199), False, 'from sqlalchemy.sql import func\n'), ((22430, 22479), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], 
{}), "('year', models.LedgerEntries.date)\n", (22444, 22479), False, 'from sqlalchemy.sql import func\n'), ((22495, 22545), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (22509, 22545), False, 'from sqlalchemy.sql import func\n'), ((25100, 25149), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (25114, 25149), False, 'from sqlalchemy.sql import func\n'), ((25165, 25215), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (25179, 25215), False, 'from sqlalchemy.sql import func\n'), ((27856, 27905), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (27870, 27905), False, 'from sqlalchemy.sql import func\n'), ((27921, 27971), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (27935, 27971), False, 'from sqlalchemy.sql import func\n'), ((10821, 10866), 'xac.db.session.query', 'db.session.query', (['models.LedgerEntries.ledger'], {}), '(models.LedgerEntries.ledger)\n', (10837, 10866), False, 'from xac import app, db, forms, models\n'), ((12480, 12512), 'xac.models.LedgerEntries.tside.asc', 'models.LedgerEntries.tside.asc', ([], {}), '()\n', (12510, 12512), False, 'from xac import app, db, forms, models\n'), ((13158, 13191), 'xac.models.LedgerEntries.tside.desc', 'models.LedgerEntries.tside.desc', ([], {}), '()\n', (13189, 13191), False, 'from xac import app, db, forms, models\n'), ((14909, 14942), 'xac.models.LedgerEntries.tside.desc', 'models.LedgerEntries.tside.desc', ([], {}), '()\n', (14940, 14942), False, 'from xac import app, db, forms, models\n'), ((16867, 16900), 'xac.models.LedgerEntries.tside.desc', 'models.LedgerEntries.tside.desc', ([], {}), '()\n', (16898, 16900), False, 'from xac import app, db, forms, models\n'), ((9285, 9317), 'xac.models.LedgerEntries.date.desc', 'models.LedgerEntries.date.desc', ([], {}), '()\n', (9315, 9317), False, 'from xac import app, db, forms, models\n'), ((10147, 10179), 'xac.models.LedgerEntries.date.desc', 'models.LedgerEntries.date.desc', ([], {}), '()\n', (10177, 10179), False, 'from xac import app, db, forms, models\n'), ((15937, 15986), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (15951, 15986), False, 'from sqlalchemy.sql import func\n'), ((16001, 16051), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (16015, 16051), False, 'from sqlalchemy.sql import func\n'), ((17635, 17684), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (17649, 17684), False, 'from sqlalchemy.sql import func\n'), ((17699, 17749), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (17713, 17749), False, 'from sqlalchemy.sql import func\n'), ((19932, 19981), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (19946, 19981), False, 'from 
sqlalchemy.sql import func\n'), ((19997, 20047), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (20011, 20047), False, 'from sqlalchemy.sql import func\n'), ((22278, 22327), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (22292, 22327), False, 'from sqlalchemy.sql import func\n'), ((22343, 22393), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (22357, 22393), False, 'from sqlalchemy.sql import func\n'), ((24948, 24997), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (24962, 24997), False, 'from sqlalchemy.sql import func\n'), ((25013, 25063), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (25027, 25063), False, 'from sqlalchemy.sql import func\n'), ((27704, 27753), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (27718, 27753), False, 'from sqlalchemy.sql import func\n'), ((27769, 27819), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (27783, 27819), False, 'from sqlalchemy.sql import func\n'), ((9216, 9273), 'xac.models.LedgerEntries.query.filter_by', 'models.LedgerEntries.query.filter_by', ([], {'journal_entry_id': 'id'}), '(journal_entry_id=id)\n', (9252, 9273), False, 'from xac import app, db, forms, models\n'), ((10078, 10135), 'xac.models.LedgerEntries.query.filter_by', 'models.LedgerEntries.query.filter_by', ([], {'journal_entry_id': 'id'}), '(journal_entry_id=id)\n', (10114, 10135), False, 'from xac import app, db, forms, models\n'), ((13674, 13719), 'xac.db.session.query', 'db.session.query', (['models.LedgerEntries.ledger'], {}), '(models.LedgerEntries.ledger)\n', (13690, 13719), False, 'from xac import app, db, forms, models\n'), ((15544, 15589), 'xac.db.session.query', 'db.session.query', (['models.LedgerEntries.ledger'], {}), '(models.LedgerEntries.ledger)\n', (15560, 15589), False, 'from xac import app, db, forms, models\n'), ((8546, 8604), 'xac.models.JournalEntries.ledgerentries.any', 'models.JournalEntries.ledgerentries.any', ([], {'currency': 'currency'}), '(currency=currency)\n', (8585, 8604), False, 'from xac import app, db, forms, models\n'), ((18305, 18381), 'xac.models.Classifications.name.in_', 'models.Classifications.name.in_', (["['Revenues', 'Expenses', 'Gains', 'Losses']"], {}), "(['Revenues', 'Expenses', 'Gains', 'Losses'])\n", (18336, 18381), False, 'from xac import app, db, forms, models\n'), ((20685, 20761), 'xac.models.Classifications.name.in_', 'models.Classifications.name.in_', (["['Revenues', 'Expenses', 'Gains', 'Losses']"], {}), "(['Revenues', 'Expenses', 'Gains', 'Losses'])\n", (20716, 20761), False, 'from xac import app, db, forms, models\n'), ((28564, 28640), 'xac.models.Classifications.name.in_', 'models.Classifications.name.in_', (["['Revenues', 'Expenses', 'Gains', 'Losses']"], {}), "(['Revenues', 'Expenses', 'Gains', 'Losses'])\n", (28595, 28640), False, 'from xac import app, db, forms, models\n'), ((8477, 8516), 'xac.db.session.query', 'db.session.query', 
(['models.JournalEntries'], {}), '(models.JournalEntries)\n', (8493, 8516), False, 'from xac import app, db, forms, models\n'), ((13981, 14031), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (13995, 14031), False, 'from sqlalchemy.sql import func\n'), ((15851, 15901), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (15865, 15901), False, 'from sqlalchemy.sql import func\n'), ((22847, 22880), 'xac.db.session.query', 'db.session.query', (['models.Elements'], {}), '(models.Elements)\n', (22863, 22880), False, 'from xac import app, db, forms, models\n'), ((25598, 25631), 'xac.db.session.query', 'db.session.query', (['models.Elements'], {}), '(models.Elements)\n', (25614, 25631), False, 'from xac import app, db, forms, models\n'), ((12198, 12247), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (12212, 12247), False, 'from sqlalchemy.sql import func\n'), ((12273, 12323), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (12287, 12323), False, 'from sqlalchemy.sql import func\n'), ((12350, 12398), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""day"""', 'models.LedgerEntries.date'], {}), "('day', models.LedgerEntries.date)\n", (12364, 12398), False, 'from sqlalchemy.sql import func\n'), ((12950, 12999), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (12964, 12999), False, 'from sqlalchemy.sql import func\n'), ((13025, 13075), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (13039, 13075), False, 'from sqlalchemy.sql import func\n'), ((13912, 13961), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (13926, 13961), False, 'from sqlalchemy.sql import func\n'), ((14690, 14739), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (14704, 14739), False, 'from sqlalchemy.sql import func\n'), ((14767, 14817), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (14781, 14817), False, 'from sqlalchemy.sql import func\n'), ((15782, 15831), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (15796, 15831), False, 'from sqlalchemy.sql import func\n'), ((16658, 16707), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""year"""', 'models.LedgerEntries.date'], {}), "('year', models.LedgerEntries.date)\n", (16672, 16707), False, 'from sqlalchemy.sql import func\n'), ((16733, 16783), 'sqlalchemy.sql.func.date_part', 'func.date_part', (['"""month"""', 'models.LedgerEntries.date'], {}), "('month', models.LedgerEntries.date)\n", (16747, 16783), False, 'from sqlalchemy.sql import func\n'), ((18202, 18235), 'xac.db.session.query', 'db.session.query', (['models.Elements'], {}), '(models.Elements)\n', (18218, 18235), False, 'from xac import app, db, forms, models\n'), 
((20582, 20615), 'xac.db.session.query', 'db.session.query', (['models.Elements'], {}), '(models.Elements)\n', (20598, 20615), False, 'from xac import app, db, forms, models\n'), ((28461, 28494), 'xac.db.session.query', 'db.session.query', (['models.Elements'], {}), '(models.Elements)\n', (28477, 28494), False, 'from xac import app, db, forms, models\n'), ((12026, 12082), 'xac.models.LedgerEntries.query.filter_by', 'models.LedgerEntries.query.filter_by', ([], {'ledger': 'accountName'}), '(ledger=accountName)\n', (12062, 12082), False, 'from xac import app, db, forms, models\n'), ((12780, 12836), 'xac.models.LedgerEntries.query.filter_by', 'models.LedgerEntries.query.filter_by', ([], {'ledger': 'accountName'}), '(ledger=accountName)\n', (12816, 12836), False, 'from xac import app, db, forms, models\n'), ((14503, 14559), 'xac.models.LedgerEntries.query.filter_by', 'models.LedgerEntries.query.filter_by', ([], {'ledger': 'accountName'}), '(ledger=accountName)\n', (14539, 14559), False, 'from xac import app, db, forms, models\n'), ((16486, 16542), 'xac.models.LedgerEntries.query.filter_by', 'models.LedgerEntries.query.filter_by', ([], {'ledger': 'accountName'}), '(ledger=accountName)\n', (16522, 16542), False, 'from xac import app, db, forms, models\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-10-26 12:22
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('cmsplugin_cascade', '0020_page_icon_font'),
]
operations = [
migrations.AlterModelOptions(
name='cascadepage',
options={'verbose_name': 'Cascade Page Settings', 'verbose_name_plural': 'Cascade Page Settings'},
),
]
|
[
"django.db.migrations.AlterModelOptions"
] |
[((299, 450), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""cascadepage"""', 'options': "{'verbose_name': 'Cascade Page Settings', 'verbose_name_plural':\n 'Cascade Page Settings'}"}), "(name='cascadepage', options={'verbose_name':\n 'Cascade Page Settings', 'verbose_name_plural': 'Cascade Page Settings'})\n", (327, 450), False, 'from django.db import migrations\n')]
|
import requests
from .api import API
from envparse import env
class SafeBrowsingAPI(API):
"""Safe Browsing API
"""
_name = "Safe Browsing"
_KEY = env('SAFE_BROWSING')
_CLIENT = "threatfeed"
_APPVER = '0.1'
_PVER = '3.1'
def dispatch(self, request):
if 'url' in request:
            response = self.query_by_url(request['url'])
else:
response = None
return response
def response(self, r):
if r.status_code == 200:
resp = 'URL type: {}'.format(r.text)
elif r.status_code == 204:
resp = 'URL type: Ok'
else:
            resp = ''
return resp
def query_by_url(self, url):
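        # Lookup API body format (as constructed below): the number of URLs on
        # the first line, followed by one URL per line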
request_body = '1\n'+url
request_url = ('https://sb-ssl.google.com/safebrowsing/api/lookup'
'?client={}&key={}&appver={}&pver={}'
.format(self._CLIENT, self._KEY, self._APPVER, self._PVER))
r = requests.post(request_url, data=request_body)
return self.response(r)
|
[
"envparse.env",
"requests.post"
] |
[((167, 187), 'envparse.env', 'env', (['"""SAFE_BROWSING"""'], {}), "('SAFE_BROWSING')\n", (170, 187), False, 'from envparse import env\n'), ((955, 1000), 'requests.post', 'requests.post', (['request_url'], {'data': 'request_body'}), '(request_url, data=request_body)\n', (968, 1000), False, 'import requests\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from toscaparser.tests.base import TestCase
from toscaparser.topology_template import TopologyTemplate
from toscaparser.tosca_template import ToscaTemplate
import toscaparser.utils.yamlparser
YAML_LOADER = toscaparser.utils.yamlparser.load_yaml
class TopologyTemplateTest(TestCase):
def setUp(self):
TestCase.setUp(self)
'''TOSCA template.'''
self.tosca_tpl_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/topology_template/subsystem.yaml")
self.tpl = YAML_LOADER(self.tosca_tpl_path)
self.topo_tpl = self.tpl.get('topology_template')
self.imports = self.tpl.get('imports')
self.topo = TopologyTemplate(self.topo_tpl,
self._get_all_custom_def())
def _get_custom_def(self, type_definition):
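        # resolve each imported definition file (absolute, or relative to the
        # template) and merge the requested type section into one dict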
custom_defs = {}
for definition in self.imports:
if os.path.isabs(definition):
def_file = definition
else:
tpl_dir = os.path.dirname(os.path.abspath(self.tosca_tpl_path))
def_file = os.path.join(tpl_dir, definition)
custom_type = YAML_LOADER(def_file)
custom_defs.update(custom_type.get(type_definition))
return custom_defs
def _get_all_custom_def(self):
custom_defs = {}
custom_defs.update(self._get_custom_def('node_types'))
custom_defs.update(self._get_custom_def('capability_types'))
return custom_defs
def test_description(self):
expected_desc = 'Template of a database including its hosting stack.'
self.assertEqual(expected_desc, self.topo.description)
def test_inputs(self):
self.assertEqual(
['mq_server_ip', 'my_cpus', 'receiver_port'],
sorted([input.name for input in self.topo.inputs]))
input_name = "receiver_port"
expected_description = "Port to be used for receiving messages."
for input in self.topo.inputs:
if input.name == input_name:
self.assertEqual(expected_description, input.description)
def test_node_tpls(self):
'''Test nodetemplate names.'''
self.assertEqual(
['app', 'server', 'websrv'],
sorted([tpl.name for tpl in self.topo.nodetemplates]))
tpl_name = "app"
expected_type = "example.SomeApp"
expected_properties = ['admin_user', 'pool_size']
expected_capabilities = ['message_receiver']
expected_requirements = [{'host': {'node': 'websrv'}}]
expected_relationshp = ['tosca.relationships.HostedOn']
expected_host = ['websrv']
for tpl in self.topo.nodetemplates:
if tpl_name == tpl.name:
'''Test node type.'''
self.assertEqual(tpl.type, expected_type)
'''Test properties.'''
self.assertEqual(
expected_properties,
sorted(tpl.get_properties().keys()))
'''Test capabilities.'''
self.assertEqual(
expected_capabilities,
sorted(tpl.get_capabilities().keys()))
'''Test requirements.'''
self.assertEqual(
expected_requirements, tpl.requirements)
'''Test relationship.'''
                ''' TODO: skipped temporarily; needs to be fixed
'''
self.assertEqual(
expected_relationshp,
[x.type for x in tpl.relationships.keys()])
self.assertEqual(
expected_host,
[y.name for y in tpl.relationships.values()])
'''Test interfaces.'''
# TODO(hurf) add interface test when new template is available
if tpl.name == 'server':
'''Test property value'''
props = tpl.get_properties()
if props and 'mem_size' in props.keys():
self.assertEqual(props['mem_size'].value, '4096 MB')
'''Test capability'''
caps = tpl.get_capabilities()
self.assertIn('os', caps.keys())
os_props_objs = None
os_props = None
os_type_prop = None
if caps and 'os' in caps.keys():
capability = caps['os']
os_props_objs = capability.get_properties_objects()
os_props = capability.get_properties()
os_type_prop = capability.get_property_value('type')
break
self.assertEqual(
['Linux'],
[p.value for p in os_props_objs if p.name == 'type'])
self.assertEqual(
'Linux',
os_props['type'].value if 'type' in os_props else '')
self.assertEqual('Linux', os_props['type'].value)
self.assertEqual('Linux', os_type_prop)
def test_outputs(self):
self.assertEqual(
['receiver_ip'],
sorted([output.name for output in self.topo.outputs]))
def test_groups(self):
group = self.topo.groups[0]
self.assertEqual('webserver_group', group.name)
self.assertEqual(['websrv', 'server'], group.members)
for node in group.get_member_nodes():
if node.name == 'server':
'''Test property value'''
props = node.get_properties()
if props and 'mem_size' in props.keys():
self.assertEqual(props['mem_size'].value, '4096 MB')
def test_system_template(self):
tpl_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"data/topology_template/system.yaml")
self.assertIsNotNone(ToscaTemplate(tpl_path))
|
[
"os.path.isabs",
"os.path.abspath",
"toscaparser.tosca_template.ToscaTemplate",
"toscaparser.tests.base.TestCase.setUp",
"os.path.join"
] |
[((900, 920), 'toscaparser.tests.base.TestCase.setUp', 'TestCase.setUp', (['self'], {}), '(self)\n', (914, 920), False, 'from toscaparser.tests.base import TestCase\n'), ((1507, 1532), 'os.path.isabs', 'os.path.isabs', (['definition'], {}), '(definition)\n', (1520, 1532), False, 'import os\n'), ((6484, 6507), 'toscaparser.tosca_template.ToscaTemplate', 'ToscaTemplate', (['tpl_path'], {}), '(tpl_path)\n', (6497, 6507), False, 'from toscaparser.tosca_template import ToscaTemplate\n'), ((1023, 1048), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1038, 1048), False, 'import os\n'), ((1697, 1730), 'os.path.join', 'os.path.join', (['tpl_dir', 'definition'], {}), '(tpl_dir, definition)\n', (1709, 1730), False, 'import os\n'), ((6377, 6402), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (6392, 6402), False, 'import os\n'), ((1632, 1668), 'os.path.abspath', 'os.path.abspath', (['self.tosca_tpl_path'], {}), '(self.tosca_tpl_path)\n', (1647, 1668), False, 'import os\n')]
|
import unittest
from itertools import combinations
from flask_validation_extended.types import List, Dict, All, type_check
from flask_validation_extended.exceptions import (
InvalidCustomTypeArgument
)
class TypeTestCase(unittest.TestCase):
def setUp(self) -> None:
self.targets = [List, Dict]
self.types = [
int, str, float, bool,
list, dict, List(), Dict(), All
]
self.invalid_types = [
1, "1", 1.2, True, False,
[1,2,3], {1:2}, {1,2,3}
]
def _test_type(self, target):
"""custom type List validation test"""
# allowed cases
target()
for type_i in self.types:
target(type_i)
for i in range(1, len(self.types)):
for comb in combinations(self.types, i):
target(comb)
# not allowed cases
for type_i in self.invalid_types:
except_check = False
try:
target(type_i)
except InvalidCustomTypeArgument:
except_check = True
self.assertTrue(except_check)
for i in range(1, len(self.invalid_types)):
for comb in combinations(self.invalid_types, i):
except_check = False
try:
target(comb)
except InvalidCustomTypeArgument:
except_check = True
self.assertTrue(except_check)
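        # mixing a single valid type into invalid combinations must still raise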
for valid_type in self.types:
types = self.invalid_types + [valid_type]
for i in range(2, len(types)):
for comb in combinations(types, i):
except_check = False
try:
target(comb)
except InvalidCustomTypeArgument:
except_check = True
self.assertTrue(except_check)
for i in range(1, len(self.types)):
for comb in combinations(self.types, i):
for invalid_type in self.invalid_types:
temp_comb = list(comb) + [invalid_type]
except_check = False
try:
target(temp_comb)
except InvalidCustomTypeArgument:
except_check = True
self.assertTrue(except_check)
def test_types(self):
"""custom type validation test"""
for target in self.targets:
self._test_type(target)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"flask_validation_extended.types.List",
"itertools.combinations",
"flask_validation_extended.types.Dict"
] |
[((2551, 2566), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2564, 2566), False, 'import unittest\n'), ((394, 400), 'flask_validation_extended.types.List', 'List', ([], {}), '()\n', (398, 400), False, 'from flask_validation_extended.types import List, Dict, All, type_check\n'), ((402, 408), 'flask_validation_extended.types.Dict', 'Dict', ([], {}), '()\n', (406, 408), False, 'from flask_validation_extended.types import List, Dict, All, type_check\n'), ((793, 820), 'itertools.combinations', 'combinations', (['self.types', 'i'], {}), '(self.types, i)\n', (805, 820), False, 'from itertools import combinations\n'), ((1204, 1239), 'itertools.combinations', 'combinations', (['self.invalid_types', 'i'], {}), '(self.invalid_types, i)\n', (1216, 1239), False, 'from itertools import combinations\n'), ((1976, 2003), 'itertools.combinations', 'combinations', (['self.types', 'i'], {}), '(self.types, i)\n', (1988, 2003), False, 'from itertools import combinations\n'), ((1632, 1654), 'itertools.combinations', 'combinations', (['types', 'i'], {}), '(types, i)\n', (1644, 1654), False, 'from itertools import combinations\n')]
|
# Computation of transition state optimization and IRC
from pyqchem import get_output_from_qchem, QchemInput
from pyqchem.parsers.parser_frequencies import basic_frequencies
from pyqchem.parsers.parser_optimization import basic_optimization
from pyqchem.parsers.parser_irc import basic_irc
from pyqchem.structure import Structure
from pyqchem.file_io import write_structure_to_xyz
import matplotlib.pyplot as plt
# define molecule
coordinates = [[ 0.6268743917, -0.1366254266, -0.0000000000],
[ 0.3711605704, 1.0377672206, 0.0000000000],
[-0.5903438458, -0.0311449516, 0.0000000000]]
symbols = ['C', 'H', 'N']
molecule = Structure(coordinates=coordinates,
symbols=symbols,
charge=0,
multiplicity=1)
# Transition state optimization
qc_input = QchemInput(molecule,
jobtype='ts',
exchange='hf',
basis='sto-3g',
geom_opt_tol_gradient=300,
geom_opt_tol_energy=100,
geom_opt_tol_displacement=1200,
geom_opt_max_cycles=50, # reduce this number to test not convergence case
)
opt_data, ee = get_output_from_qchem(qc_input,
processors=4,
parser=basic_optimization,
force_recalculation=False,
read_fchk=True,
store_full_output=True)
print('Transition state')
print(opt_data['optimized_molecule'])
# frequencies calculation
qc_input = QchemInput(opt_data['optimized_molecule'],
jobtype='freq',
exchange='hf',
basis='sto-3g',
sym_ignore=True,
symmetry=False,
scf_guess=ee['coefficients'])
freq_data, ee = get_output_from_qchem(qc_input,
processors=4,
force_recalculation=True,
parser=basic_frequencies,
read_fchk=True,
store_full_output=True)
# IRC calculation
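# reuse the converged SCF orbitals and the Hessian from the frequency job
# to start the IRC at the transition state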
qc_input = QchemInput(opt_data['optimized_molecule'],
jobtype='rpath',
exchange='hf',
basis='sto-3g',
rpath_max_cycles=30,
scf_guess=ee['coefficients'],
hessian=ee['hessian']
)
irc_data = get_output_from_qchem(qc_input,
processors=4,
parser=basic_irc,
store_full_output=True
)
# write coordinates into file
write_structure_to_xyz([step['molecule'] for step in irc_data['irc_forward']][::-1] +
[step['molecule'] for step in irc_data['irc_backward']], 'irc.xyz')
# plot SCF energies
energies_f = [step['energy'] for step in irc_data['irc_forward']]
energies_b = [step['energy'] for step in irc_data['irc_backward']]
plt.plot(energies_f[::-1] + energies_b)
plt.xticks([], [])
plt.xlabel('Intrinsic reaction coordinate')
plt.ylabel('Energy (Hartree)')
plt.show()
|
[
"pyqchem.QchemInput",
"pyqchem.file_io.write_structure_to_xyz",
"pyqchem.get_output_from_qchem",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xticks",
"pyqchem.structure.Structure",
"matplotlib.pyplot.xlabel"
] |
[((670, 747), 'pyqchem.structure.Structure', 'Structure', ([], {'coordinates': 'coordinates', 'symbols': 'symbols', 'charge': '(0)', 'multiplicity': '(1)'}), '(coordinates=coordinates, symbols=symbols, charge=0, multiplicity=1)\n', (679, 747), False, 'from pyqchem.structure import Structure\n'), ((855, 1036), 'pyqchem.QchemInput', 'QchemInput', (['molecule'], {'jobtype': '"""ts"""', 'exchange': '"""hf"""', 'basis': '"""sto-3g"""', 'geom_opt_tol_gradient': '(300)', 'geom_opt_tol_energy': '(100)', 'geom_opt_tol_displacement': '(1200)', 'geom_opt_max_cycles': '(50)'}), "(molecule, jobtype='ts', exchange='hf', basis='sto-3g',\n geom_opt_tol_gradient=300, geom_opt_tol_energy=100,\n geom_opt_tol_displacement=1200, geom_opt_max_cycles=50)\n", (865, 1036), False, 'from pyqchem import get_output_from_qchem, QchemInput\n'), ((1274, 1417), 'pyqchem.get_output_from_qchem', 'get_output_from_qchem', (['qc_input'], {'processors': '(4)', 'parser': 'basic_optimization', 'force_recalculation': '(False)', 'read_fchk': '(True)', 'store_full_output': '(True)'}), '(qc_input, processors=4, parser=basic_optimization,\n force_recalculation=False, read_fchk=True, store_full_output=True)\n', (1295, 1417), False, 'from pyqchem import get_output_from_qchem, QchemInput\n'), ((1703, 1864), 'pyqchem.QchemInput', 'QchemInput', (["opt_data['optimized_molecule']"], {'jobtype': '"""freq"""', 'exchange': '"""hf"""', 'basis': '"""sto-3g"""', 'sym_ignore': '(True)', 'symmetry': '(False)', 'scf_guess': "ee['coefficients']"}), "(opt_data['optimized_molecule'], jobtype='freq', exchange='hf',\n basis='sto-3g', sym_ignore=True, symmetry=False, scf_guess=ee[\n 'coefficients'])\n", (1713, 1864), False, 'from pyqchem import get_output_from_qchem, QchemInput\n'), ((2005, 2146), 'pyqchem.get_output_from_qchem', 'get_output_from_qchem', (['qc_input'], {'processors': '(4)', 'force_recalculation': '(True)', 'parser': 'basic_frequencies', 'read_fchk': '(True)', 'store_full_output': '(True)'}), '(qc_input, processors=4, force_recalculation=True,\n parser=basic_frequencies, read_fchk=True, store_full_output=True)\n', (2026, 2146), False, 'from pyqchem import get_output_from_qchem, QchemInput\n'), ((2363, 2535), 'pyqchem.QchemInput', 'QchemInput', (["opt_data['optimized_molecule']"], {'jobtype': '"""rpath"""', 'exchange': '"""hf"""', 'basis': '"""sto-3g"""', 'rpath_max_cycles': '(30)', 'scf_guess': "ee['coefficients']", 'hessian': "ee['hessian']"}), "(opt_data['optimized_molecule'], jobtype='rpath', exchange='hf',\n basis='sto-3g', rpath_max_cycles=30, scf_guess=ee['coefficients'],\n hessian=ee['hessian'])\n", (2373, 2535), False, 'from pyqchem import get_output_from_qchem, QchemInput\n'), ((2695, 2786), 'pyqchem.get_output_from_qchem', 'get_output_from_qchem', (['qc_input'], {'processors': '(4)', 'parser': 'basic_irc', 'store_full_output': '(True)'}), '(qc_input, processors=4, parser=basic_irc,\n store_full_output=True)\n', (2716, 2786), False, 'from pyqchem import get_output_from_qchem, QchemInput\n'), ((2947, 3109), 'pyqchem.file_io.write_structure_to_xyz', 'write_structure_to_xyz', (["([step['molecule'] for step in irc_data['irc_forward']][::-1] + [step[\n 'molecule'] for step in irc_data['irc_backward']])", '"""irc.xyz"""'], {}), "([step['molecule'] for step in irc_data['irc_forward'\n ]][::-1] + [step['molecule'] for step in irc_data['irc_backward']],\n 'irc.xyz')\n", (2969, 3109), False, 'from pyqchem.file_io import write_structure_to_xyz\n'), ((3279, 3318), 'matplotlib.pyplot.plot', 'plt.plot', (['(energies_f[::-1] + energies_b)'], {}), 
'(energies_f[::-1] + energies_b)\n', (3287, 3318), True, 'import matplotlib.pyplot as plt\n'), ((3319, 3337), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[]', '[]'], {}), '([], [])\n', (3329, 3337), True, 'import matplotlib.pyplot as plt\n'), ((3338, 3381), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Intrinsic reaction coordinate"""'], {}), "('Intrinsic reaction coordinate')\n", (3348, 3381), True, 'import matplotlib.pyplot as plt\n'), ((3382, 3412), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Energy (Hartree)"""'], {}), "('Energy (Hartree)')\n", (3392, 3412), True, 'import matplotlib.pyplot as plt\n'), ((3413, 3423), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3421, 3423), True, 'import matplotlib.pyplot as plt\n')]
|
from pysys.constants import *
from apama.basetest import ApamaBaseTest
from apama.correlator import CorrelatorHelper
import re
class PySysTest(ApamaBaseTest):
def execute(self):
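        # start a correlator, inject the CSV plugin EPL and the test monitor,
        # then flush so all injected events are fully processed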
correlator = CorrelatorHelper(self, name='testcorrelator')
correlator.start(logfile='testcorrelator.log')
correlator.injectEPL(filenames=['CSVPlugin.mon'], filedir=PROJECT.APAMA_WORK+'/monitors')
correlator.injectEPL(filenames=['test.mon'])
correlator.flush()
def validate(self):
self.assertGrep('testcorrelator.log', expr='ERROR', contains=False)
self.assertDiff('testcorrelator.out', 'testcorrelator.out')
|
[
"apama.correlator.CorrelatorHelper"
] |
[((195, 240), 'apama.correlator.CorrelatorHelper', 'CorrelatorHelper', (['self'], {'name': '"""testcorrelator"""'}), "(self, name='testcorrelator')\n", (211, 240), False, 'from apama.correlator import CorrelatorHelper\n')]
|
import os
import shutil
from os.path import join, basename, exists
import pytest
from osgeo import gdal
from preprocessing import crop_mask_resample_reproject as crop
from preprocessing.crop_mask_resample_reproject import Options
# The mock files have extents = '900000 -4300000 940000 -4260000'
mock_extents = [920000, -4300000, 930000, -4280000]
@pytest.fixture
def mock_files(uncover):
data_dir = os.path.join(uncover, 'preprocessing', 'mocks')
std2000 = os.path.join(data_dir, 'std2000.tif')
std2000_no_mask = os.path.join(data_dir, 'std2000_no_mask.tif')
mask = os.path.join(data_dir, 'mask.tif')
result = dict(std2000=std2000, mask=mask,
std2000_no_mask=std2000_no_mask)
return result
def test_geotransform_projection_nodata(mock_files, random_filename):
tmp_output = random_filename(ext='.tif')
extents = [str(s) for s in [920000, -4300000, 929000, -4290000]]
# the mock is already geotransformed, so this will have no effect
# to projection and nodata, but will be cropped making the
# geotransform tuple different
crop.crop_reproject_resample(mock_files['std2000_no_mask'], tmp_output,
sampling='bilinear',
extents=extents,
reproject=True)
ds = gdal.Open(tmp_output)
gt = ds.GetGeoTransform()
projection = ds.GetProjection()
nodata = ds.GetRasterBand(1).GetNoDataValue()
ds = None
ds = gdal.Open(mock_files['std2000_no_mask'])
projection_input = ds.GetProjection()
nodata_input = ds.GetRasterBand(1).GetNoDataValue()
ds = None
assert nodata == nodata_input
assert projection == projection_input
assert gt[1] == float(crop.OUTPUT_RES[0])
assert gt[0] == float(extents[0])
assert gt[3] == float(extents[3])
os.remove(tmp_output)
def test_apply_mask(mock_files, random_filename):
output_file = random_filename(ext='.tif')
jpeg = False
tmp_out_file = random_filename(ext='.tif')
shutil.copy(mock_files['std2000_no_mask'], tmp_out_file)
crop.apply_mask(mask_file=mock_files['mask'],
tmp_output_file=tmp_out_file,
output_file=output_file,
jpeg=jpeg)
ds = gdal.Open(output_file)
gt = ds.GetGeoTransform()
projection = ds.GetProjection()
nodata = ds.GetRasterBand(1).GetNoDataValue()
ds = None
ds = gdal.Open(mock_files['std2000'])
projection_input = ds.GetProjection()
nodata_input = ds.GetRasterBand(1).GetNoDataValue()
ds = None
assert nodata == nodata_input
assert projection == projection_input
assert gt[1] == float(crop.OUTPUT_RES[0])
assert gt[0] == mock_extents[0]
assert gt[3] == mock_extents[3]
os.remove(output_file)
def test_do_work(mock_files, random_filename):
# input_file, mask_file, output_file, resampling, extents, jpeg
output_file = random_filename(ext='.tif')
options = Options(resampling='bilinear',
extents=mock_extents,
jpeg=True,
reproject=True)
crop.do_work(input_file=mock_files['std2000_no_mask'],
output_file=output_file,
options=options,
mask_file=mock_files['mask'])
# output file was created
assert exists(output_file)
# assert jpeg was created
assert exists(output_file.rsplit('.')[0] + '.jpg')
os.remove(output_file)
|
[
"preprocessing.crop_mask_resample_reproject.crop_reproject_resample",
"os.remove",
"preprocessing.crop_mask_resample_reproject.apply_mask",
"os.path.exists",
"preprocessing.crop_mask_resample_reproject.Options",
"preprocessing.crop_mask_resample_reproject.do_work",
"osgeo.gdal.Open",
"os.path.join",
"shutil.copy"
] |
[((407, 454), 'os.path.join', 'os.path.join', (['uncover', '"""preprocessing"""', '"""mocks"""'], {}), "(uncover, 'preprocessing', 'mocks')\n", (419, 454), False, 'import os\n'), ((469, 506), 'os.path.join', 'os.path.join', (['data_dir', '"""std2000.tif"""'], {}), "(data_dir, 'std2000.tif')\n", (481, 506), False, 'import os\n'), ((529, 574), 'os.path.join', 'os.path.join', (['data_dir', '"""std2000_no_mask.tif"""'], {}), "(data_dir, 'std2000_no_mask.tif')\n", (541, 574), False, 'import os\n'), ((586, 620), 'os.path.join', 'os.path.join', (['data_dir', '"""mask.tif"""'], {}), "(data_dir, 'mask.tif')\n", (598, 620), False, 'import os\n'), ((1095, 1224), 'preprocessing.crop_mask_resample_reproject.crop_reproject_resample', 'crop.crop_reproject_resample', (["mock_files['std2000_no_mask']", 'tmp_output'], {'sampling': '"""bilinear"""', 'extents': 'extents', 'reproject': '(True)'}), "(mock_files['std2000_no_mask'], tmp_output,\n sampling='bilinear', extents=extents, reproject=True)\n", (1123, 1224), True, 'from preprocessing import crop_mask_resample_reproject as crop\n'), ((1330, 1351), 'osgeo.gdal.Open', 'gdal.Open', (['tmp_output'], {}), '(tmp_output)\n', (1339, 1351), False, 'from osgeo import gdal\n'), ((1492, 1532), 'osgeo.gdal.Open', 'gdal.Open', (["mock_files['std2000_no_mask']"], {}), "(mock_files['std2000_no_mask'])\n", (1501, 1532), False, 'from osgeo import gdal\n'), ((1848, 1869), 'os.remove', 'os.remove', (['tmp_output'], {}), '(tmp_output)\n', (1857, 1869), False, 'import os\n'), ((2036, 2092), 'shutil.copy', 'shutil.copy', (["mock_files['std2000_no_mask']", 'tmp_out_file'], {}), "(mock_files['std2000_no_mask'], tmp_out_file)\n", (2047, 2092), False, 'import shutil\n'), ((2097, 2212), 'preprocessing.crop_mask_resample_reproject.apply_mask', 'crop.apply_mask', ([], {'mask_file': "mock_files['mask']", 'tmp_output_file': 'tmp_out_file', 'output_file': 'output_file', 'jpeg': 'jpeg'}), "(mask_file=mock_files['mask'], tmp_output_file=tmp_out_file,\n output_file=output_file, jpeg=jpeg)\n", (2112, 2212), True, 'from preprocessing import crop_mask_resample_reproject as crop\n'), ((2279, 2301), 'osgeo.gdal.Open', 'gdal.Open', (['output_file'], {}), '(output_file)\n', (2288, 2301), False, 'from osgeo import gdal\n'), ((2442, 2474), 'osgeo.gdal.Open', 'gdal.Open', (["mock_files['std2000']"], {}), "(mock_files['std2000'])\n", (2451, 2474), False, 'from osgeo import gdal\n'), ((2786, 2808), 'os.remove', 'os.remove', (['output_file'], {}), '(output_file)\n', (2795, 2808), False, 'import os\n'), ((2986, 3065), 'preprocessing.crop_mask_resample_reproject.Options', 'Options', ([], {'resampling': '"""bilinear"""', 'extents': 'mock_extents', 'jpeg': '(True)', 'reproject': '(True)'}), "(resampling='bilinear', extents=mock_extents, jpeg=True, reproject=True)\n", (2993, 3065), False, 'from preprocessing.crop_mask_resample_reproject import Options\n'), ((3136, 3267), 'preprocessing.crop_mask_resample_reproject.do_work', 'crop.do_work', ([], {'input_file': "mock_files['std2000_no_mask']", 'output_file': 'output_file', 'options': 'options', 'mask_file': "mock_files['mask']"}), "(input_file=mock_files['std2000_no_mask'], output_file=\n output_file, options=options, mask_file=mock_files['mask'])\n", (3148, 3267), True, 'from preprocessing import crop_mask_resample_reproject as crop\n'), ((3356, 3375), 'os.path.exists', 'exists', (['output_file'], {}), '(output_file)\n', (3362, 3375), False, 'from os.path import join, basename, exists\n'), ((3467, 3489), 'os.remove', 'os.remove', (['output_file'], {}), 
'(output_file)\n', (3476, 3489), False, 'import os\n')]
|
import warnings
from typing import Optional
import pandas as pd
from boadata.core import DataObject
from boadata.core.data_conversion import ChainConversion, IdentityConversion
from .pandas_types import PandasDataFrameBase
@DataObject.register_type()
@IdentityConversion.enable_to("pandas_data_frame")
@ChainConversion.enable_to("numpy_array", through="pandas_data_frame")
class ParquetFile(PandasDataFrameBase):
type_name = "parquet"
@classmethod
def accepts_uri(cls, uri: str) -> bool:
return uri[-8:] == ".parquet" or uri[-11:] == ".parquet.gz"
@classmethod
def from_uri(cls, uri: str, *, source: Optional[DataObject] = None) -> "ParquetFile":
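        # pandas may emit engine/deprecation warnings while reading parquet;
        # suppress them for the duration of the load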
with warnings.catch_warnings():
warnings.simplefilter("ignore")
data = pd.read_parquet(uri)
return cls(inner_data=data, uri=uri, source=source)
|
[
"boadata.core.data_conversion.IdentityConversion.enable_to",
"warnings.simplefilter",
"boadata.core.DataObject.register_type",
"boadata.core.data_conversion.ChainConversion.enable_to",
"warnings.catch_warnings",
"pandas.read_parquet"
] |
[((228, 254), 'boadata.core.DataObject.register_type', 'DataObject.register_type', ([], {}), '()\n', (252, 254), False, 'from boadata.core import DataObject\n'), ((256, 305), 'boadata.core.data_conversion.IdentityConversion.enable_to', 'IdentityConversion.enable_to', (['"""pandas_data_frame"""'], {}), "('pandas_data_frame')\n", (284, 305), False, 'from boadata.core.data_conversion import ChainConversion, IdentityConversion\n'), ((307, 376), 'boadata.core.data_conversion.ChainConversion.enable_to', 'ChainConversion.enable_to', (['"""numpy_array"""'], {'through': '"""pandas_data_frame"""'}), "('numpy_array', through='pandas_data_frame')\n", (332, 376), False, 'from boadata.core.data_conversion import ChainConversion, IdentityConversion\n'), ((694, 719), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (717, 719), False, 'import warnings\n'), ((733, 764), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (754, 764), False, 'import warnings\n'), ((784, 804), 'pandas.read_parquet', 'pd.read_parquet', (['uri'], {}), '(uri)\n', (799, 804), True, 'import pandas as pd\n')]
|
"""
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
import os
import shutil
import click
import jinja2
@click.command()
@click.option("--name",
prompt="Package name", help="Project name")
@click.option("--platform",
prompt="Platform name", help="xRally platform name")
@click.option("--author",
prompt="Author name", help="Author name")
@click.option("--author-url", default="http://", help="Author web site")
@click.option("--description", default="", help="Package descirption")
@click.option("--path", default="./", help="Root directory of new project.")
@click.option("--existing/--no-existing", default=False)
def cli(name, platform, author, author_url, description, path, existing):
skeleton = "existing_platform" if existing else "new_platform"
path = os.path.join(path, "xrally-%s" % name)
shutil.copytree(
os.path.join(os.path.dirname(os.path.abspath(__file__)),
"skeletons", skeleton),
path
)
render_path(path, name)
render_content(path, name, platform, author, author_url, description)
def render_path(path, name):
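    # recursively rename skeleton entries, substituting the project name
    # and stripping the .j2 suffix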
for p in os.listdir(path):
full_path = os.path.join(path, p)
if os.path.isfile(full_path):
shutil.move(
full_path,
full_path.replace("{project}", name).replace(".j2", "")
)
elif os.path.isdir(full_path):
new_path = full_path.replace("{project}", name)
shutil.move(full_path, new_path)
render_path(new_path, name)
def render_content(path, name, platform, author, author_url, description):
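    # render every file in the copied skeleton in place as a Jinja2 template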
env = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.join(path), encoding="utf8"))
for root, dirs, files in os.walk(path):
for file_name in files:
with open(os.path.join(root, file_name), "r+") as f:
template = f.read()
content = env.from_string(template).render(
package="xrally_%s" % name, name=name, platform=platform,
author=author, author_url=author_url,
description=description)
f.seek(0)
f.write(content)
f.truncate()
def main():
cli()
if __name__ == "__main__":
main()
|
[
"os.path.abspath",
"os.path.isdir",
"os.walk",
"click.option",
"click.command",
"os.path.isfile",
"shutil.move",
"os.path.join",
"os.listdir"
] |
[((661, 676), 'click.command', 'click.command', ([], {}), '()\n', (674, 676), False, 'import click\n'), ((678, 744), 'click.option', 'click.option', (['"""--name"""'], {'prompt': '"""Package name"""', 'help': '"""Project name"""'}), "('--name', prompt='Package name', help='Project name')\n", (690, 744), False, 'import click\n'), ((760, 839), 'click.option', 'click.option', (['"""--platform"""'], {'prompt': '"""Platform name"""', 'help': '"""xRally platform name"""'}), "('--platform', prompt='Platform name', help='xRally platform name')\n", (772, 839), False, 'import click\n'), ((855, 921), 'click.option', 'click.option', (['"""--author"""'], {'prompt': '"""Author name"""', 'help': '"""Author name"""'}), "('--author', prompt='Author name', help='Author name')\n", (867, 921), False, 'import click\n'), ((937, 1008), 'click.option', 'click.option', (['"""--author-url"""'], {'default': '"""http://"""', 'help': '"""Author web site"""'}), "('--author-url', default='http://', help='Author web site')\n", (949, 1008), False, 'import click\n'), ((1010, 1079), 'click.option', 'click.option', (['"""--description"""'], {'default': '""""""', 'help': '"""Package descirption"""'}), "('--description', default='', help='Package descirption')\n", (1022, 1079), False, 'import click\n'), ((1081, 1156), 'click.option', 'click.option', (['"""--path"""'], {'default': '"""./"""', 'help': '"""Root directory of new project."""'}), "('--path', default='./', help='Root directory of new project.')\n", (1093, 1156), False, 'import click\n'), ((1158, 1213), 'click.option', 'click.option', (['"""--existing/--no-existing"""'], {'default': '(False)'}), "('--existing/--no-existing', default=False)\n", (1170, 1213), False, 'import click\n'), ((1367, 1405), 'os.path.join', 'os.path.join', (['path', "('xrally-%s' % name)"], {}), "(path, 'xrally-%s' % name)\n", (1379, 1405), False, 'import os\n'), ((1703, 1719), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (1713, 1719), False, 'import os\n'), ((2340, 2353), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (2347, 2353), False, 'import os\n'), ((1741, 1762), 'os.path.join', 'os.path.join', (['path', 'p'], {}), '(path, p)\n', (1753, 1762), False, 'import os\n'), ((1775, 1800), 'os.path.isfile', 'os.path.isfile', (['full_path'], {}), '(full_path)\n', (1789, 1800), False, 'import os\n'), ((1954, 1978), 'os.path.isdir', 'os.path.isdir', (['full_path'], {}), '(full_path)\n', (1967, 1978), False, 'import os\n'), ((1465, 1490), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1480, 1490), False, 'import os\n'), ((2052, 2084), 'shutil.move', 'shutil.move', (['full_path', 'new_path'], {}), '(full_path, new_path)\n', (2063, 2084), False, 'import shutil\n'), ((2272, 2290), 'os.path.join', 'os.path.join', (['path'], {}), '(path)\n', (2284, 2290), False, 'import os\n'), ((2409, 2438), 'os.path.join', 'os.path.join', (['root', 'file_name'], {}), '(root, file_name)\n', (2421, 2438), False, 'import os\n')]
|
from typing import Optional, Dict
from flask import Flask
from config_aws import S3Config
from service import DataCrawlerInterface, PGFN, Bacen
app = Flask(__name__)
s3 = S3Config()
tipos : Dict[str, DataCrawlerInterface] = {
'pgfn': PGFN(),
'bacen': Bacen()
}
@app.route("/download/<string:tipo>", methods=['GET'])
@app.route("/download/<string:tipo>/", methods=['GET'])
def download_route(tipo : Optional[str] = None):
if tipo in ('pgfn', 'bacen'):
cod = tipos[tipo].download()
return {'file': {'cod': cod}}, 200
else:
        return {'msg': 'error in file type for download'}, 404
@app.route("/prepare/<string:tipo>/<uuid:cod_file>", methods=['GET'])
@app.route("/prepare/<string:tipo>/<uuid:cod_file>/", methods=['GET'])
def prepare_route(tipo : Optional[str] = None, cod_file : Optional[str] = None):
if tipo in ('pgfn', 'bacen'):
cod = tipos[tipo].prepare(str(cod_file))
return {'file': {'cod': cod}}, 200
else:
        return {'msg': 'error: invalid file type for prepare'}, 404
@app.route("/send/<string:tipo>/<uuid:cod_prep>", methods=['GET'])
@app.route("/send/<string:tipo>/<uuid:cod_prep>/", methods=['GET'])
def send_route(tipo : Optional[str] = None, cod_prep : Optional[str] = None):
if tipo in ('pgfn', 'bacen'):
cod = tipos[tipo].send(str(cod_prep))
return {'file': {'cod': cod}}, 200
else:
        return {'msg': 'error: invalid file type for send'}, 404
@app.route("/do-it", methods=['GET'])
@app.route("/do-it/", methods=['GET'])
def do_it_route():
ret_cods = []
for tipo in tipos.keys():
cod_file = tipos[tipo].download()
cod_prep = tipos[tipo].prepare(str(cod_file))
cod_send = tipos[tipo].send(str(cod_prep))
ret_cods.append({ tipo: {
            'download': {'cod': cod_file},
'prepare': {'cod': cod_prep},
'send': {'cod': cod_send}
}})
return {i[0]:i[1] for i in [k.popitem() for k in ret_cods]}, 200
@app.route("/info/<uuid:cod>", methods=['GET'])
@app.route("/info/<uuid:cod>/", methods=['GET'])
def info_route(cod: str):
return DataCrawlerInterface.info(str(cod)), 200
@app.route("/")
def index():
return {'msg': 'ok'}, 200
if __name__ == "__main__":
    app.run(host='0.0.0.0', port=8663)
|
[
"service.Bacen",
"service.PGFN",
"flask.Flask",
"config_aws.S3Config"
] |
[((153, 168), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (158, 168), False, 'from flask import Flask\n'), ((174, 184), 'config_aws.S3Config', 'S3Config', ([], {}), '()\n', (182, 184), False, 'from config_aws import S3Config\n'), ((241, 247), 'service.PGFN', 'PGFN', ([], {}), '()\n', (245, 247), False, 'from service import DataCrawlerInterface, PGFN, Bacen\n'), ((262, 269), 'service.Bacen', 'Bacen', ([], {}), '()\n', (267, 269), False, 'from service import DataCrawlerInterface, PGFN, Bacen\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Apr 24 13:02:09 2020
@author: utsav
"""
import numpy as np
import cv2
import base64
import requests
import json
def to_image_string(image_filepath):
    return base64.b64encode(open(image_filepath, 'rb').read())
def from_base64(base64_data):
    # Decode the base64 payload explicitly (Python 3) before turning it into an image;
    # np.frombuffer replaces the deprecated np.fromstring.
    nparr = np.frombuffer(base64.b64decode(base64_data), np.uint8)
    return cv2.imdecode(nparr, cv2.IMREAD_ANYCOLOR)
def hit_api_validate(number):
# prepare headers for http request
content_type = 'application/json'
headers = {'content-type': content_type}
addr = 'http://localhost:9001'
url = addr + '/api/validate'
response = requests.post(url, json={"test_number": number} , headers=headers)
return json.loads(response.text)
def hit_api_extract(img):
#img_bytes = base64.b64encode(img)
#convert byte to string
#encoded_string = img_bytes.decode("utf-8")
# prepare headers for http request
content_type = 'application/json'
headers = {'content-type': content_type}
addr = 'http://localhost:9001'
url = addr + '/api/ocr'
response = requests.post(url, json={"doc_b64": base64.b64encode(img.getvalue()).decode()} , headers=headers)
return json.loads(response.text)
def hit_api_mask_aadhaar(img,number_list):
# img_bytes = to_image_string(filepath)
# #convert byte to string
# encoded_string = img_bytes.decode("utf-8")
# # prepare headers for http request
content_type = 'application/json'
headers = {'content-type': content_type}
addr = 'http://localhost:9001'
url = addr + '/api/mask'
response = requests.post(url, json={"doc_b64": base64.b64encode(img.getvalue()).decode(), 'aadhaar': [str(number_list)]}, headers=headers)
return json.loads(response.text)
def hit_api_brut_mask(img):
# img_bytes = to_image_string(input_name)
# #convert byte to string
# encoded_string = img_bytes.decode("utf-8")
# # prepare headers for http request
content_type = 'application/json'
headers = {'content-type': content_type}
addr = 'http://localhost:9001'
url = addr + '/api/brut_mask'
response = requests.post(url, json={"doc_b64": base64.b64encode(img.getvalue()).decode()}, headers=headers)
return json.loads(response.text)
# r = json.loads(response.text)
# save_name = output_name
# decoded_data = base64.b64decode(r['doc_b64_brut_masked'])
# np_data = np.fromstring(decoded_data,np.uint8)
# img = cv2.imdecode(np_data,cv2.IMREAD_UNCHANGED)
# cv2.imwrite(save_name,img)
# return "masked document saved as "+ save_name
def hit_api_sample_pipe(img,brut = False):
# img_bytes = to_image_string(input_name)
# #convert byte to string
# encoded_string = img_bytes.decode("utf-8")
# # prepare headers for http request
content_type = 'application/json'
headers = {'content-type': content_type}
addr = 'http://localhost:9001'
url = addr + '/api/sample_pipe'
response = requests.post(url, json={"doc_b64": base64.b64encode(img.getvalue()).decode(), "brut" : brut}, headers=headers)
return json.loads(response.text)
# r = json.loads(response.text)
# if r['is_masked']:
# save_name = output_name
# decoded_data = base64.b64decode(r['doc_b64_masked'])
# np_data = np.fromstring(decoded_data,np.uint8)
# img = cv2.imdecode(np_data,cv2.IMREAD_UNCHANGED)
# cv2.imwrite(save_name,img)
# print("Execution Mode =>",r['mode_executed'])
# if r['mode_executed'] == "OCR-MASKING":
# print("Aadhaar List =>",r['aadhaar_list'])
# print("Validated Aadhaar list =>",r['valid_aadhaar_list'])
# return "masked document saved as "+ save_name
# else:
# print("Execution Mode =>",r['mode_executed'])
# print("Error =>",r['error'])
# return "Unable to find given number in the image :/ (try brut mode)"
#
################################################### UI ################################
import streamlit as st
st.title("[PyraDox :page_with_curl:](https://github.com/festivitymishra/PyraDox)")
st.info(
"""
**PyraDox is a python tool which helps in document digitization** by extracting text
information and masking of personal information with the help of Tesseract-ocr.
"""
)
st.sidebar.subheader('PyraDox :page_with_curl:')
Run_Mode = st.sidebar.selectbox(
'Select Document Type',
('Select Doc','Aadhaar Card', 'Driver Licence', 'Passport', 'Pan Card'))
if Run_Mode == 'Select Doc':
image_filepath = 'resources/PyraDox.jpg'
st.image(open(image_filepath, 'rb').read(), caption='', use_column_width=True)
st.write("Supported Documents : \n - Aadhaar Card (UIDAI)"
)
elif Run_Mode == 'Aadhaar Card':
st.write(" Document Type **Aadhaar Card** ")
feature = st.sidebar.radio("What's dow you want to try?",('Mask 1st 8 digits of Aadhaar',
'Validate Aadhaar Number',
'Extract Aadhaar Number',
'Mask Aadhaar Number',
'Brut Mask Numbers'))
if feature == "Validate Aadhaar Number":
st.write('Please enter 12 digit Aadhaar Number')
        number = st.number_input('Insert aadhaar number', min_value=100000000000,
                                max_value=999999999999, value=397788000234,
                                step=1, format='%d')
#st.write('The current number is ', number)
if st.button('Validate'):
if hit_api_validate(number)['validity']:
st.write(" :white_check_mark: Valid Aadhaar Card Sequence Number")
else:
st.write(" :no_entry: Invalid Aadhaar Card Sequence Number")
elif feature == "Extract Aadhaar Number":
uploaded_file = st.file_uploader("Upload an image file", type=['png', 'jpg'])
if uploaded_file is not None:
# image = Image.open(uploaded_file)
st.image(uploaded_file, caption='Uploaded Image.', use_column_width=True) #width = 450)#
st.write("Click on extract to **extract** Aadhar Number from the image!")
if st.button('Extract'):
#st.write(hit_api_extract_new(uploaded_file))
extract_response = hit_api_extract(uploaded_file)['aadhaar_list']
if len(extract_response)>0:
st.write(" :white_check_mark: Extracted Aadhaar Number is **",extract_response[0],"**")
else:
st.write(" :no_entry: Failed to find any Aadhaar Number in the given document")
elif feature == "Mask Aadhaar Number":
uploaded_file_2 = st.file_uploader("Upload an image file", type=['png', 'jpg'])
st.write('Please enter 12 digit Aadhaar Number to be Masked')
        number_2 = st.number_input('Insert aadhaar number', min_value=100000000000,
                      max_value=999999999999, value=397788000234,
                      step=1, format='%d')
if uploaded_file_2 is not None and number_2 is not None:
st.image(uploaded_file_2, caption='Uploaded Image.', width = 400)#
st.write("Click on **Mask** to mask given Aadhar Number from the image!")
if st.button('Mask'):
r = hit_api_mask_aadhaar(uploaded_file_2,number_2)
if r['is_masked']:
decoded_data = base64.b64decode(r['doc_b64_masked'])
st.image(decoded_data, caption='Masked Image.', width = 400)#
st.write(" :white_check_mark: Masked Given Number in the Document")
else:
st.write(" :no_entry: Unable to find given number in the image :/ (try brut mode)")
elif feature == "Mask 1st 8 digits of Aadhaar":
uploaded_file_3 = st.file_uploader("Upload an image file", type=['png', 'jpg'])
if uploaded_file_3 is not None:
st.image(uploaded_file_3, caption='Uploaded Image.', width = 400)#
st.write("Click on **Mask** to Mask first 8 digits Aadhar Number from the image!")
brut_mode = st.checkbox('Use Brut Mode')
# st.write("brut_mode is",brut_mode)
if st.button('Mask'):
r = hit_api_sample_pipe(uploaded_file_3,brut_mode)
if r['is_masked']:
decoded_data_1 = base64.b64decode(r['doc_b64_masked'])
st.image(decoded_data_1, caption='Masked Image.', width = 400)#
st.write(" :white_check_mark: Masked Given Number in the Document")
st.write("Execution Mode =>",r['mode_executed'])
if r['mode_executed'] == "OCR-MASKING":
st.write("Aadhaar List =>**",r['aadhaar_list'][0],"**")
if len(r['valid_aadhaar_list'])>0:
st.write("Validated Aadhaar list =>**",r['valid_aadhaar_list'][0],"**")
else:
st.write("Execution Mode =>",r['mode_executed'])
st.write("Error =>",r['error'])
st.write(" :no_entry: Unable to find given number in the image :/ (try brut mode)")
elif feature == "Brut Mask Numbers":
uploaded_file_4 = st.file_uploader("Upload an image file", type=['png', 'jpg'])
if uploaded_file_4 is not None:
st.image(uploaded_file_4, caption='Uploaded Image.', width = 400)#
st.write("Click on **BRUT Mask** to Brut Mask Numbers from the image!")
# st.write("brut_mode is",brut_mode)
if st.button('BRUT Mask'):
r = hit_api_brut_mask(uploaded_file_4)
decoded_data_2 = base64.b64decode(r['doc_b64_brut_masked'])
st.image(decoded_data_2, caption='Masked Image.', width = 400)#
else:
st.write(" **Yowza! we are yet baking it for you ...** :penguin:")
comingsoon = 'resources/coming-soon.jpg'
st.image(open(comingsoon, 'rb').read(), caption='', use_column_width=True)
|
[
"streamlit.sidebar.subheader",
"json.loads",
"streamlit.image",
"streamlit.checkbox",
"cv2.imdecode",
"streamlit.title",
"streamlit.write",
"streamlit.file_uploader",
"base64.b64decode",
"streamlit.sidebar.selectbox",
"streamlit.info",
"streamlit.button",
"streamlit.sidebar.radio",
"requests.post",
"streamlit.number_input"
] |
[((4029, 4116), 'streamlit.title', 'st.title', (['"""[PyraDox :page_with_curl:](https://github.com/festivitymishra/PyraDox)"""'], {}), "st.title(\n    '[PyraDox :page_with_curl:](https://github.com/festivitymishra/PyraDox)')\n", (4037, 4116), True, 'import streamlit as st\n'), ((4113, 4313), 'streamlit.info', 'st.info', (['"""\n    **PyraDox is a python tool which helps in document digitization** by extracting text \n    information and masking of personal information with the help of Tesseract-ocr.\n"""'], {}), '(\n    """\n    **PyraDox is a python tool which helps in document digitization** by extracting text \n    information and masking of personal information with the help of Tesseract-ocr.\n"""\n    )\n', (4120, 4313), True, 'import streamlit as st\n'), ((4312, 4360), 'streamlit.sidebar.subheader', 'st.sidebar.subheader', (['"""PyraDox :page_with_curl:"""'], {}), "('PyraDox :page_with_curl:')\n", (4332, 4360), True, 'import streamlit as st\n'), ((4373, 4495), 'streamlit.sidebar.selectbox', 'st.sidebar.selectbox', (['"""Select Document Type"""', "('Select Doc', 'Aadhaar Card', 'Driver Licence', 'Passport', 'Pan Card')"], {}), "('Select Document Type', ('Select Doc', 'Aadhaar Card',\n    'Driver Licence', 'Passport', 'Pan Card'))\n", (4393, 4495), True, 'import streamlit as st\n'), ((407, 447), 'cv2.imdecode', 'cv2.imdecode', (['nparr', 'cv2.IMREAD_ANYCOLOR'], {}), '(nparr, cv2.IMREAD_ANYCOLOR)\n', (419, 447), False, 'import cv2\n'), ((684, 749), 'requests.post', 'requests.post', (['url'], {'json': "{'test_number': number}", 'headers': 'headers'}), "(url, json={'test_number': number}, headers=headers)\n", (697, 749), False, 'import requests\n'), ((762, 787), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (772, 787), False, 'import json\n'), ((1239, 1264), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (1249, 1264), False, 'import json\n'), ((1776, 1801), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (1786, 1801), False, 'import json\n'), ((2274, 2299), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (2284, 2299), False, 'import json\n'), ((3114, 3139), 'json.loads', 'json.loads', (['response.text'], {}), '(response.text)\n', (3124, 3139), False, 'import json\n'), ((4674, 4736), 'streamlit.write', 'st.write', (['"""Supported Documents : \n - Aadhaar Card (UIDAI)"""'], {}), '("""Supported Documents : \n - Aadhaar Card (UIDAI)""")\n', (4682, 4736), True, 'import streamlit as st\n'), ((4785, 4829), 'streamlit.write', 'st.write', (['""" Document Type **Aadhaar Card** """'], {}), "(' Document Type **Aadhaar Card** ')\n", (4793, 4829), True, 'import streamlit as st\n'), ((4844, 5031), 'streamlit.sidebar.radio', 'st.sidebar.radio', (['"""What\'s dow you want to try?"""', "('Mask 1st 8 digits of Aadhaar', 'Validate Aadhaar Number',\n    'Extract Aadhaar Number', 'Mask Aadhaar Number', 'Brut Mask Numbers')"], {}), '("What\'s dow you want to try?", (\n    \'Mask 1st 8 digits of Aadhaar\', \'Validate Aadhaar Number\',\n    \'Extract Aadhaar Number\', \'Mask Aadhaar Number\', \'Brut Mask Numbers\'))\n', (4860, 5031), True, 'import streamlit as st\n'), ((10125, 10191), 'streamlit.write', 'st.write', (['""" **Yowza! we are yet baking it for you ...** :penguin:"""'], {}), "st.write(' **Yowza! we are yet baking it for you ...** :penguin:')\n", (10133, 10191), True, 'import streamlit as st\n'), ((5329, 5377), 'streamlit.write', 'st.write', (['"""Please enter 12 digit Aadhaar Number"""'], {}), "('Please enter 12 digit Aadhaar Number')\n", (5337, 5377), True, 'import streamlit as st\n'), ((5395, 5528), 'streamlit.number_input', 'st.number_input', (['"""Insert aadhaar number"""'], {'min_value': '(10000000000)', 'max_value': '(999999999999)', 'value': '(397788000234)', 'step': '(1)', 'format': '"""%d"""'}), "('Insert aadhaar number', min_value=10000000000, max_value=\n    999999999999, value=397788000234, step=1, format='%d')\n", (5410, 5528), True, 'import streamlit as st\n'), ((5658, 5679), 'streamlit.button', 'st.button', (['"""Validate"""'], {}), "('Validate')\n", (5667, 5679), True, 'import streamlit as st\n'), ((5983, 6044), 'streamlit.file_uploader', 'st.file_uploader', (['"""Upload an image file"""'], {'type': "['png', 'jpg']"}), "('Upload an image file', type=['png', 'jpg'])\n", (5999, 6044), True, 'import streamlit as st\n'), ((5750, 5816), 'streamlit.write', 'st.write', (['""" :white_check_mark: Valid Aadhaar Card Sequence Number"""'], {}), "(' :white_check_mark: Valid Aadhaar Card Sequence Number')\n", (5758, 5816), True, 'import streamlit as st\n'), ((5851, 5911), 'streamlit.write', 'st.write', (['""" :no_entry: Invalid Aadhaar Card Sequence Number"""'], {}), "(' :no_entry: Invalid Aadhaar Card Sequence Number')\n", (5859, 5911), True, 'import streamlit as st\n'), ((6151, 6224), 'streamlit.image', 'st.image', (['uploaded_file'], {'caption': '"""Uploaded Image."""', 'use_column_width': '(True)'}), "(uploaded_file, caption='Uploaded Image.', use_column_width=True)\n", (6159, 6224), True, 'import streamlit as st\n'), ((6252, 6325), 'streamlit.write', 'st.write', (['"""Click on extract to **extract** Aadhar Number from the image!"""'], {}), "('Click on extract to **extract** Aadhar Number from the image!')\n", (6260, 6325), True, 'import streamlit as st\n'), ((6341, 6361), 'streamlit.button', 'st.button', (['"""Extract"""'], {}), "('Extract')\n", (6350, 6361), True, 'import streamlit as st\n'), ((6884, 6945), 'streamlit.file_uploader', 'st.file_uploader', (['"""Upload an image file"""'], {'type': "['png', 'jpg']"}), "('Upload an image file', type=['png', 'jpg'])\n", (6900, 6945), True, 'import streamlit as st\n'), ((6954, 7015), 'streamlit.write', 'st.write', (['"""Please enter 12 digit Aadhaar Number to be Masked"""'], {}), "('Please enter 12 digit Aadhaar Number to be Masked')\n", (6962, 7015), True, 'import streamlit as st\n'), ((7035, 7168), 'streamlit.number_input', 'st.number_input', (['"""Insert aadhaar number"""'], {'min_value': '(10000000000)', 'max_value': '(999999999999)', 'value': '(397788000234)', 'step': '(1)', 'format': '"""%d"""'}), "('Insert aadhaar number', min_value=10000000000, max_value=\n    999999999999, value=397788000234, step=1, format='%d')\n", (7050, 7168), True, 'import streamlit as st\n'), ((7321, 7384), 'streamlit.image', 'st.image', (['uploaded_file_2'], {'caption': '"""Uploaded Image."""', 'width': '(400)'}), "(uploaded_file_2, caption='Uploaded Image.', width=400)\n", (7329, 7384), True, 'import streamlit as st\n'), ((7400, 7473), 'streamlit.write', 'st.write', (['"""Click on **Mask** to mask given Aadhar Number from the image!"""'], {}), "('Click on **Mask** to mask given Aadhar Number from the image!')\n", (7408, 7473), True, 'import streamlit as st\n'), ((7502, 7519), 'streamlit.button', 'st.button', (['"""Mask"""'], {}), "('Mask')\n", (7511, 7519), True, 'import streamlit as st\n'), ((8081, 8142), 'streamlit.file_uploader', 'st.file_uploader', (['"""Upload an image file"""'], {'type': "['png', 'jpg']"}), "('Upload an image file', type=['png', 'jpg'])\n", (8097, 8142), True, 'import streamlit as st\n'), ((6605, 6698), 'streamlit.write', 'st.write', (['""" :white_check_mark: Extracted Aadhaar Number is **"""', 'extract_response[0]', '"""**"""'], {}), "(' :white_check_mark: Extracted Aadhaar Number is **',\n    extract_response[0], '**')\n", (6613, 6698), True, 'import streamlit as st\n'), ((6735, 6814), 'streamlit.write', 'st.write', (['""" :no_entry: Failed to find any Aadhaar Number in the given document"""'], {}), "(' :no_entry: Failed to find any Aadhaar Number in the given document')\n", (6743, 6814), True, 'import streamlit as st\n'), ((8204, 8267), 'streamlit.image', 'st.image', (['uploaded_file_3'], {'caption': '"""Uploaded Image."""', 'width': '(400)'}), "(uploaded_file_3, caption='Uploaded Image.', width=400)\n", (8212, 8267), True, 'import streamlit as st\n'), ((8283, 8370), 'streamlit.write', 'st.write', (['"""Click on **Mask** to Mask first 8 digits Aadhar Number from the image!"""'], {}), "(\n    'Click on **Mask** to Mask first 8 digits Aadhar Number from the image!')\n", (8291, 8370), True, 'import streamlit as st\n'), ((8390, 8418), 'streamlit.checkbox', 'st.checkbox', (['"""Use Brut Mode"""'], {}), "('Use Brut Mode')\n", (8401, 8418), True, 'import streamlit as st\n'), ((8482, 8499), 'streamlit.button', 'st.button', (['"""Mask"""'], {}), "('Mask')\n", (8491, 8499), True, 'import streamlit as st\n'), ((9542, 9603), 'streamlit.file_uploader', 'st.file_uploader', (['"""Upload an image file"""'], {'type': "['png', 'jpg']"}), "('Upload an image file', type=['png', 'jpg'])\n", (9558, 9603), True, 'import streamlit as st\n'), ((7658, 7695), 'base64.b64decode', 'base64.b64decode', (["r['doc_b64_masked']"], {}), "(r['doc_b64_masked'])\n", (7674, 7695), False, 'import base64\n'), ((7716, 7774), 'streamlit.image', 'st.image', (['decoded_data'], {'caption': '"""Masked Image."""', 'width': '(400)'}), "(decoded_data, caption='Masked Image.', width=400)\n", (7724, 7774), True, 'import streamlit as st\n'), ((7798, 7865), 'streamlit.write', 'st.write', (['""" :white_check_mark: Masked Given Number in the Document"""'], {}), "(' :white_check_mark: Masked Given Number in the Document')\n", (7806, 7865), True, 'import streamlit as st\n'), ((7908, 7996), 'streamlit.write', 'st.write', (['""" :no_entry: Unable to find given number in the image :/ (try brut mode)"""'], {}), "(\n    ' :no_entry: Unable to find given number in the image :/ (try brut mode)')\n", (7916, 7996), True, 'import streamlit as st\n'), ((9665, 9728), 'streamlit.image', 'st.image', (['uploaded_file_4'], {'caption': '"""Uploaded Image."""', 'width': '(400)'}), "(uploaded_file_4, caption='Uploaded Image.', width=400)\n", (9673, 9728), True, 'import streamlit as st\n'), ((9744, 9815), 'streamlit.write', 'st.write', (['"""Click on **BRUT Mask** to Brut Mask Numbers from the image!"""'], {}), "('Click on **BRUT Mask** to Brut Mask Numbers from the image!')\n", (9752, 9815), True, 'import streamlit as st\n'), ((9879, 9901), 'streamlit.button', 'st.button', (['"""BRUT Mask"""'], {}), "('BRUT Mask')\n", (9888, 9901), True, 'import streamlit as st\n'), ((8640, 8677), 'base64.b64decode', 'base64.b64decode', (["r['doc_b64_masked']"], {}), "(r['doc_b64_masked'])\n", (8656, 8677), False, 'import base64\n'), ((8698, 8758), 'streamlit.image', 'st.image', (['decoded_data_1'], {'caption': '"""Masked Image."""', 'width': '(400)'}), "st.image(decoded_data_1, caption='Masked Image.', width=400)\n", (8706, 8758), True, 'import streamlit as st\n'), ((8782, 8849), 'streamlit.write', 'st.write', (['""" :white_check_mark: Masked Given Number in the Document"""'], {}), "(' :white_check_mark: Masked Given Number in the Document')\n", (8790, 8849), True, 'import streamlit as st\n'), ((8870, 8919), 'streamlit.write', 'st.write', (['"""Execution Mode =>"""', "r['mode_executed']"], {}), "('Execution Mode =>', r['mode_executed'])\n", (8878, 8919), True, 'import streamlit as st\n'), ((9268, 9317), 'streamlit.write', 'st.write', (['"""Execution Mode =>"""', "r['mode_executed']"], {}), "('Execution Mode =>', r['mode_executed'])\n", (9276, 9317), True, 'import streamlit as st\n'), ((9337, 9369), 'streamlit.write', 'st.write', (['"""Error =>"""', "r['error']"], {}), "('Error =>', r['error'])\n", (9345, 9369), True, 'import streamlit as st\n'), ((9389, 9484), 'streamlit.write', 'st.write', (['""" :no_entry: Unable to find given number in the image :/ (try brut mode)"""'], {}), "(\n    ' :no_entry: Unable to find given number in the image :/ (try brut mode)'\n    )\n", (9397, 9484), True, 'import streamlit as st\n'), ((9991, 10033), 'base64.b64decode', 'base64.b64decode', (["r['doc_b64_brut_masked']"], {}), "(r['doc_b64_brut_masked'])\n", (10007, 10033), False, 'import base64\n'), ((10050, 10110), 'streamlit.image', 'st.image', (['decoded_data_2'], {'caption': '"""Masked Image."""', 'width': '(400)'}), "(decoded_data_2, caption='Masked Image.', width=400)\n", (10058, 10110), True, 'import streamlit as st\n'), ((9003, 9060), 'streamlit.write', 'st.write', (['"""Aadhaar List =>**"""', "r['aadhaar_list'][0]", '"""**"""'], {}), "('Aadhaar List =>**', r['aadhaar_list'][0], '**')\n", (9011, 9060), True, 'import streamlit as st\n'), ((9154, 9227), 'streamlit.write', 'st.write', (['"""Validated Aadhaar list =>**"""', "r['valid_aadhaar_list'][0]", '"""**"""'], {}), "('Validated Aadhaar list =>**', r['valid_aadhaar_list'][0], '**')\n", (9162, 9227), True, 'import streamlit as st\n')]
|
# Author: <NAME>
import copy
from kernel.term import Term, Inst
from kernel.thm import Thm
class ItemID():
"""Represents id of an item."""
def __init__(self, id=None):
"""Convert id into tuple form."""
if id is None:
self.id = tuple()
elif isinstance(id, tuple) and all(isinstance(i, int) for i in id):
self.id = id
elif isinstance(id, int):
self.id = (id,)
elif isinstance(id, str):
self.id = tuple(int(s) for s in id.split("."))
elif isinstance(id, ItemID):
self.id = id.id
else:
raise TypeError
def __str__(self):
"""Print id in n1.n2.n3 form."""
return ".".join(str(i) for i in self.id)
def __eq__(self, other):
return self.id == other.id
def incr_id_after(self, start, n):
"""Perform the id adjustment necessary for adding n lines before
start id. The exact logic is as follows:
Suppose start has length k. Find all ids with length at least k,
where the first k-1 numbers agree with start, and the k'th number
is greater than or equal to start. Increment the k'th number by n
and leave the rest unchanged.
"""
k = len(start.id)
if len(self.id) >= k and self.id[:k-1] == start.id[:k-1] and self.id[k-1] >= start.id[k-1]:
return ItemID(self.id[:k-1] + (self.id[k-1] + n,) + self.id[k:])
else:
return self
def incr_id(self, n):
"""Increment the last number in id by n."""
return ItemID(self.id[:-1] + (self.id[-1] + n,))
def decr_id(self, id_remove):
"""Decrement a single id, with the aim of closing the gap at
        id_remove. The logic used is similar to that of incr_id_after.
"""
k = len(id_remove.id)
if len(self.id) >= k and self.id[:k-1] == id_remove.id[:k-1] and self.id[k-1] > id_remove.id[k-1]:
return ItemID(self.id[:k-1] + (self.id[k-1] - 1,) + self.id[k:])
else:
return self
def last(self):
"""Return the last entry of the id."""
return self.id[-1]
def can_depend_on(self, other):
"""Return whether the current id can depend on another id."""
l = len(other.id)
if l > len(self.id):
return False
if other.id[:l-1] != self.id[:l-1]:
return False
return other.id[l-1] < self.id[l-1]
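    # Illustrative (added for clarity): "1.2.3" can depend on "1.2.1" and on
    # "0", but not on itself or on the later sibling "1.2.4".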
class ProofStateException(Exception):
pass
class ProofItem():
"""An item in a proof, consisting of the following data:
- id: an identifier for reference by later proof items.
- rule: derivation rule used to derive the theorem.
- args: arguments to the rule.
- prevs: previous sequents used. Default to [].
- th: optional theorem statement (as a sequent).
- subproof: optional expanded proof of the statement.
"""
def __init__(self, id, rule, *, args=None, prevs=None, th=None):
self.id = ItemID(id)
self.rule = rule
self.args = args
self.prevs = [ItemID(prev) for prev in prevs] if prevs is not None else []
self.th = th
self.subproof = None
def print_str_args(self):
def str_val(val):
if isinstance(val, Inst):
items = sorted(val.items(), key = lambda pair: pair[0])
return "{" + ", ".join(key + ": " + str_val(val) for key, val in items) + "}"
else:
return str(val)
if isinstance(self.args, tuple):
return ", ".join(str_val(val) for val in self.args)
elif self.args:
return str_val(self.args)
else:
return ""
def __str__(self):
"""Print the given proof item."""
str_id = str(self.id)
str_args = " " + self.print_str_args() if self.args else ""
str_prevs = " from " + ", ".join(str(prev) for prev in self.prevs) if self.prevs else ""
str_th = str(self.th) + " by " if self.th else ""
cur_line = str_id + ": " + str_th + self.rule + str_args + str_prevs
if self.subproof:
return cur_line + "\n" + "\n".join(str(item) for item in self.subproof.items)
else:
return cur_line
def __repr__(self):
return str(self)
def __eq__(self, other):
return self.id == other.id and self.rule == other.rule and self.args == other.args \
and self.prevs == other.prevs and self.th == other.th
def __copy__(self):
res = ProofItem(self.id, self.rule, args=self.args, prevs=self.prevs, th=self.th)
if self.subproof:
res.subproof = copy.copy(self.subproof)
return res
def get_sorrys(self):
"""Return the list of gaps in the item (including subproofs)."""
if self.rule == 'sorry':
assert self.subproof is None
return [self.th]
if self.subproof:
return sum([item.get_sorrys() for item in self.subproof.items], [])
else:
return []
def incr_proof_item(self, start, n):
"""Increment all ids in the proof item (including subproofs)."""
self.id = self.id.incr_id_after(start, n)
self.prevs = [id.incr_id_after(start, n) for id in self.prevs]
if self.subproof:
for subitem in self.subproof.items:
subitem.incr_proof_item(start, n)
def decr_proof_item(self, id_remove):
"""Decrement all ids in the proof item (including subproofs)."""
self.id = self.id.decr_id(id_remove)
self.prevs = [id.decr_id(id_remove) for id in self.prevs]
if self.subproof:
for subitem in self.subproof.items:
subitem.decr_proof_item(id_remove)
class Proof():
"""Proof objects represent proofs in the natural deduction format.
Each proof consists of a list of items, where each item contains a
theorem, which is derived from zero or more previous theorems using
one of the deduction rules.
"""
def __init__(self, *assums):
"""Initialization can take a list of n assumptions, and generates
first n steps 0, ..., n-1 using Thm.assume on the assumptions.
"""
self.items = [ProofItem(i, "assume", args=assum) for i, assum in enumerate(assums)]
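    # Illustrative usage (added; hypothetical terms t1, t2): Proof(t1, t2)
    # starts with items "0: assume t1" and "1: assume t2", which later items
    # may reference by id.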
def add_item(self, id, rule, *, args=None, prevs=[], th=None):
"""Add the given item to the end of the proof."""
self.items.append(ProofItem(id, rule, args=args, prevs=prevs, th=th))
def __str__(self):
"""Print the given proof object."""
return '\n'.join(str(item) for item in self.items)
def __repr__(self):
return str(self)
def __copy__(self):
res = Proof()
res.items = [copy.copy(item) for item in self.items]
return res
def find_item(self, id):
"""Find item at the given id."""
try:
item = self.items[id.id[0]]
for i in id.id[1:]:
item = item.subproof.items[i]
return item
except (AttributeError, IndexError):
raise ProofStateException
def get_parent_proof(self, id):
"""Traverse the proof to the subproof containing the given id."""
try:
prf = self
for i in id.id[:-1]:
prf = prf.items[i].subproof
if prf is None:
raise ProofStateException
return prf
except IndexError:
raise ProofStateException
def insert_item(self, item):
"""Insert the item using the id in the item. This item should
be placed exactly after the last position of its subproof.
"""
try:
prf = self
for i in item.id.id[:-1]:
if prf.items[i].subproof is None:
prf.items[i].subproof = Proof()
prf = prf.items[i].subproof
if item.id.id[-1] != len(prf.items):
raise ProofStateException
prf.items.append(item)
except IndexError:
raise ProofStateException
def get_sorrys(self):
"""Return the list of gaps in the proof."""
return sum([item.get_sorrys() for item in self.items], [])
|
[
"copy.copy"
] |
[((4687, 4711), 'copy.copy', 'copy.copy', (['self.subproof'], {}), '(self.subproof)\n', (4696, 4711), False, 'import copy\n'), ((6814, 6829), 'copy.copy', 'copy.copy', (['item'], {}), '(item)\n', (6823, 6829), False, 'import copy\n')]
|
from unittest.mock import patch, create_autospec, PropertyMock
from stack.expectmore import ExpectMore
from stack.switch.x1052 import SwitchDellX1052
# Switch data to mock MAC address table
SWITCH_DATA = """
show mac address-table
show mac address-table
Flags: I - Internal usage VLAN
Aging time is 300 sec
Vlan Mac Address Port Type
------------ --------------------- ---------- ----------
1 00:00:00:00:00:00 gi1/0/10 dynamic
1 f4:8e:38:44:10:15 0 self
console#:
"""
# Intercept expectmore calls
mock_expectmore = patch(target = "stack.switch.x1052.ExpectMore", autospec = True).start()
# Need to set the instance mock returned from calling ExpectMore()
mock_expectmore.return_value = create_autospec(
spec = ExpectMore,
spec_set = True,
instance = True,
)
# Need to set the match_index to the base console prompt so that the switch thinks it is at the
# correct prompt, and won't try to page through output.
type(mock_expectmore.return_value).match_index = PropertyMock(
return_value = SwitchDellX1052.CONSOLE_PROMPTS.index(SwitchDellX1052.CONSOLE_PROMPT)
)
# Return our SWITCH_DATA from ExpectMore().ask()
mock_expectmore.return_value.ask.return_value = SWITCH_DATA.splitlines()
|
[
"unittest.mock.patch",
"unittest.mock.create_autospec",
"stack.switch.x1052.SwitchDellX1052.CONSOLE_PROMPTS.index"
] |
[((767, 829), 'unittest.mock.create_autospec', 'create_autospec', ([], {'spec': 'ExpectMore', 'spec_set': '(True)', 'instance': '(True)'}), '(spec=ExpectMore, spec_set=True, instance=True)\n', (782, 829), False, 'from unittest.mock import patch, create_autospec, PropertyMock\n'), ((596, 656), 'unittest.mock.patch', 'patch', ([], {'target': '"""stack.switch.x1052.ExpectMore"""', 'autospec': '(True)'}), "(target='stack.switch.x1052.ExpectMore', autospec=True)\n", (601, 656), False, 'from unittest.mock import patch, create_autospec, PropertyMock\n'), ((1072, 1141), 'stack.switch.x1052.SwitchDellX1052.CONSOLE_PROMPTS.index', 'SwitchDellX1052.CONSOLE_PROMPTS.index', (['SwitchDellX1052.CONSOLE_PROMPT'], {}), '(SwitchDellX1052.CONSOLE_PROMPT)\n', (1109, 1141), False, 'from stack.switch.x1052 import SwitchDellX1052\n')]
|
# -*- coding: utf-8 -*-
import sys
registers = [1, 0]  # registers[0] = a, registers[1] = b
instructions = []
class Instruction:
def __init__(self, instructionstring):
self.instruction = ''
self.register = 0 # or 1
self.jump = 1 # default move on to next
self.fullinstruction = instructionstring.rstrip()
toks = self.fullinstruction.split(' ')
self.instruction = toks[0]
if (self.instruction == 'hlf'):
if (toks[1] == 'b'):
self.register = 1
elif (self.instruction == 'tpl'):
if (toks[1] == 'b'):
self.register = 1
elif (self.instruction == 'inc'):
if (toks[1] == 'b'):
self.register = 1
elif (self.instruction == 'jmp'):
self.jump = int(toks[1])
elif (self.instruction == 'jie'):
if (toks[1] == 'b,'):
self.register = 1
self.jump = int(toks[2])
elif (self.instruction == 'jio'):
if (toks[1] == 'b,'):
self.register = 1
self.jump = int(toks[2])
else:
print ("HUH? parsing {}".format(instructionstring))
sys.exit()
def executeInstruction(self, registers):
print(" Executing {}".format(self.instruction), end='')
myjump = self.jump
if (self.instruction == 'hlf'):
registers[self.register] = int(registers[self.register] / 2)
elif (self.instruction == 'tpl'):
registers[self.register] = registers[self.register] * 3
elif (self.instruction == 'inc'):
registers[self.register] = registers[self.register] + 1
elif (self.instruction == 'jmp'):
pass # already have self.jump
elif (self.instruction == 'jie'):
if ( registers[self.register] % 2 != 0):
myjump = 1
elif (self.instruction == 'jio'):
if ( registers[self.register] != 1):
myjump = 1
else:
print ("WHAAAHUH? executing {}".format(self.instruction))
sys.exit()
return myjump
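# Illustrative trace (added for clarity, following the semantics above):
# from registers [1, 0], "inc a" yields [2, 0] and "tpl a" then yields
# [6, 0]; "jio a, +2" jumps by 2 only when register a equals 1, otherwise
# it falls through to the next instruction.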
if __name__ == "__main__":
"""Day23: compiler instructions"""
with open('day23.dat') as datafile:
for thisstring in datafile:
thisIns = Instruction(thisstring)
instructions.append(thisIns)
# now we have the instructions loaded
currentinstruction = 0
count = 0
while (count < 10000) and (currentinstruction >= 0) and (currentinstruction < len(instructions)) :
print("Ins {}/{} registers {}".format(currentinstruction,count,registers), end='')
inc = instructions[currentinstruction].executeInstruction(registers)
print(" jumping {}".format(inc))
currentinstruction = currentinstruction + inc
count = count + 1
print("Registers: {}".format(registers))
|
[
"sys.exit"
] |
[((1223, 1233), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1231, 1233), False, 'import sys\n'), ((2197, 2207), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2205, 2207), False, 'import sys\n')]
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from kuryr.lib import utils as lib_utils
from kuryr_libnetwork.tests.fullstack import kuryr_base
from kuryr_libnetwork import utils
class NetworkTest(kuryr_base.KuryrBaseTest):
"""Test Networks operation
Test networks creation/deletion from docker to Neutron
"""
def test_create_delete_network_with_kuryr_driver(self):
"""Create and Delete docker network with Kuryr
This method creates a docker network with Kuryr driver
and tests it was created in Neutron.
It then deletes the docker network and tests that it was
deleted from Neutron.
"""
fake_ipam = {
"Driver": "kuryr",
"Options": {},
"Config": [
{
"Subnet": "10.0.0.0/16",
"IPRange": "10.0.0.0/24",
"Gateway": "10.0.0.1"
}
]
}
net_name = lib_utils.get_random_string(8)
res = self.docker_client.create_network(name=net_name, driver='kuryr',
ipam=fake_ipam)
net_id = res['Id']
network = self.neutron_client.list_networks(
tags=utils.make_net_tags(net_id))
self.assertEqual(1, len(network['networks']))
self.docker_client.remove_network(net_id)
network = self.neutron_client.list_networks(
tags=utils.make_net_tags(net_id))
self.assertEqual(0, len(network['networks']))
def test_create_delete_network_without_kuryr_driver(self):
"""Create and Delete docker network without Kuryr
This method create a docker network with the default
docker driver, It tests that it was created correctly, but
not added to Neutron
"""
net_name = lib_utils.get_random_string(8)
res = self.docker_client.create_network(name=net_name)
net_id = res['Id']
network = self.neutron_client.list_networks(
tags=utils.make_net_tags(net_id))
self.assertEqual(0, len(network['networks']))
docker_networks = self.docker_client.networks()
network_found = False
for docker_net in docker_networks:
if docker_net['Id'] == net_id:
network_found = True
self.assertTrue(network_found)
self.docker_client.remove_network(net_id)
def test_create_network_with_same_name(self):
"""Create docker network with same name
Create two docker networks with same name,
delete them and see that neutron networks are
deleted as well
"""
fake_ipam_1 = {
"Driver": "kuryr",
"Options": {},
"Config": [
{
"Subnet": "10.1.0.0/16",
"IPRange": "10.1.0.0/24",
"Gateway": "10.1.0.1"
}
]
}
fake_ipam_2 = {
"Driver": "kuryr",
"Options": {},
"Config": [
{
"Subnet": "10.2.0.0/16",
"IPRange": "10.2.0.0/24",
"Gateway": "10.2.0.1"
}
]
}
net_name = lib_utils.get_random_string(8)
res = self.docker_client.create_network(name=net_name, driver='kuryr',
ipam=fake_ipam_1)
net_id1 = res['Id']
res = self.docker_client.create_network(name=net_name, driver='kuryr',
ipam=fake_ipam_2)
net_id2 = res['Id']
network = self.neutron_client.list_networks(
tags=utils.make_net_tags(net_id1))
self.assertEqual(1, len(network['networks']))
network = self.neutron_client.list_networks(
tags=utils.make_net_tags(net_id2))
self.assertEqual(1, len(network['networks']))
self.docker_client.remove_network(net_id1)
self.docker_client.remove_network(net_id2)
network = self.neutron_client.list_networks(
tags=utils.make_net_tags(net_id1))
self.assertEqual(0, len(network['networks']))
network = self.neutron_client.list_networks(
tags=utils.make_net_tags(net_id2))
self.assertEqual(0, len(network['networks']))
|
[
"kuryr_libnetwork.utils.make_net_tags",
"kuryr.lib.utils.get_random_string"
] |
[((1484, 1514), 'kuryr.lib.utils.get_random_string', 'lib_utils.get_random_string', (['(8)'], {}), '(8)\n', (1511, 1514), True, 'from kuryr.lib import utils as lib_utils\n'), ((2361, 2391), 'kuryr.lib.utils.get_random_string', 'lib_utils.get_random_string', (['(8)'], {}), '(8)\n', (2388, 2391), True, 'from kuryr.lib import utils as lib_utils\n'), ((3800, 3830), 'kuryr.lib.utils.get_random_string', 'lib_utils.get_random_string', (['(8)'], {}), '(8)\n', (3827, 3830), True, 'from kuryr.lib import utils as lib_utils\n'), ((1755, 1782), 'kuryr_libnetwork.utils.make_net_tags', 'utils.make_net_tags', (['net_id'], {}), '(net_id)\n', (1774, 1782), False, 'from kuryr_libnetwork import utils\n'), ((1958, 1985), 'kuryr_libnetwork.utils.make_net_tags', 'utils.make_net_tags', (['net_id'], {}), '(net_id)\n', (1977, 1985), False, 'from kuryr_libnetwork import utils\n'), ((2552, 2579), 'kuryr_libnetwork.utils.make_net_tags', 'utils.make_net_tags', (['net_id'], {}), '(net_id)\n', (2571, 2579), False, 'from kuryr_libnetwork import utils\n'), ((4248, 4276), 'kuryr_libnetwork.utils.make_net_tags', 'utils.make_net_tags', (['net_id1'], {}), '(net_id1)\n', (4267, 4276), False, 'from kuryr_libnetwork import utils\n'), ((4402, 4430), 'kuryr_libnetwork.utils.make_net_tags', 'utils.make_net_tags', (['net_id2'], {}), '(net_id2)\n', (4421, 4430), False, 'from kuryr_libnetwork import utils\n'), ((4658, 4686), 'kuryr_libnetwork.utils.make_net_tags', 'utils.make_net_tags', (['net_id1'], {}), '(net_id1)\n', (4677, 4686), False, 'from kuryr_libnetwork import utils\n'), ((4812, 4840), 'kuryr_libnetwork.utils.make_net_tags', 'utils.make_net_tags', (['net_id2'], {}), '(net_id2)\n', (4831, 4840), False, 'from kuryr_libnetwork import utils\n')]
|
import os
import subprocess
import sys
import zipfile
if (os.path.exists("build")):
dl=[]
for r,ndl,fl in os.walk("build"):
dl=[os.path.join(r,k) for k in ndl]+dl
for f in fl:
os.remove(os.path.join(r,f))
for k in dl:
os.rmdir(k)
else:
os.mkdir("build")
jfl=[]
for r,_,fl in os.walk("src"):
for f in fl:
if (f[-5:]==".java"):
jfl.append(os.path.join(r,f))
if (subprocess.run(["javac","-cp","src/com/krzem/nn_opencv_object_detection/modules/opencv-420.jar;","-d","build"]+jfl).returncode!=0):
sys.exit(1)
with zipfile.ZipFile("build/nn_opencv_object_detection.jar","w") as zf,zipfile.ZipFile("src/com/krzem/nn_opencv_object_detection/modules/opencv-420.jar","r") as jf:
print("Writing: META-INF/MANIFEST.MF")
zf.write("manifest.mf",arcname="META-INF/MANIFEST.MF")
print("Writing: com/krzem/nn_opencv_object_detection/modules/opencv_java420.dll")
zf.write("src/com/krzem/nn_opencv_object_detection/modules/opencv_java420.dll",arcname="com/krzem/nn_opencv_object_detection/modules/opencv_java420.dll")
for r,_,fl in os.walk("build"):
for f in fl:
if (f[-6:]==".class"):
print(f"Writing: {os.path.join(r,f)[6:].replace(chr(92),'/')}")
zf.write(os.path.join(r,f),os.path.join(r,f)[6:])
for k in jf.namelist():
if (k.upper()!="META-INF/MANIFEST.MF" and k[-6:]==".class"):
dt=jf.read(k)
if (len(k)>0):
print(f"Writing: {k}")
zf.writestr(k,dt)
if ("--run" in sys.argv):
subprocess.run(["java","-jar","build/nn_opencv_object_detection.jar","data/video.mov","data/full.nn-data","out.txt","0.01","0.5","80","40","30"])
|
[
"os.mkdir",
"subprocess.run",
"zipfile.ZipFile",
"os.walk",
"os.path.exists",
"os.rmdir",
"os.path.join",
"sys.exit"
] |
[((61, 84), 'os.path.exists', 'os.path.exists', (['"""build"""'], {}), "('build')\n", (75, 84), False, 'import os\n'), ((291, 305), 'os.walk', 'os.walk', (['"""src"""'], {}), "('src')\n", (298, 305), False, 'import os\n'), ((111, 127), 'os.walk', 'os.walk', (['"""build"""'], {}), "('build')\n", (118, 127), False, 'import os\n'), ((252, 269), 'os.mkdir', 'os.mkdir', (['"""build"""'], {}), "('build')\n", (260, 269), False, 'import os\n'), ((515, 526), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (523, 526), False, 'import sys\n'), ((532, 592), 'zipfile.ZipFile', 'zipfile.ZipFile', (['"""build/nn_opencv_object_detection.jar"""', '"""w"""'], {}), "('build/nn_opencv_object_detection.jar', 'w')\n", (547, 592), False, 'import zipfile\n'), ((598, 690), 'zipfile.ZipFile', 'zipfile.ZipFile', (['"""src/com/krzem/nn_opencv_object_detection/modules/opencv-420.jar"""', '"""r"""'], {}), "(\n 'src/com/krzem/nn_opencv_object_detection/modules/opencv-420.jar', 'r')\n", (613, 690), False, 'import zipfile\n'), ((1041, 1057), 'os.walk', 'os.walk', (['"""build"""'], {}), "('build')\n", (1048, 1057), False, 'import os\n'), ((1421, 1584), 'subprocess.run', 'subprocess.run', (["['java', '-jar', 'build/nn_opencv_object_detection.jar', 'data/video.mov',\n 'data/full.nn-data', 'out.txt', '0.01', '0.5', '80', '40', '30']"], {}), "(['java', '-jar', 'build/nn_opencv_object_detection.jar',\n 'data/video.mov', 'data/full.nn-data', 'out.txt', '0.01', '0.5', '80',\n '40', '30'])\n", (1435, 1584), False, 'import subprocess\n'), ((233, 244), 'os.rmdir', 'os.rmdir', (['k'], {}), '(k)\n', (241, 244), False, 'import os\n'), ((382, 511), 'subprocess.run', 'subprocess.run', (["(['javac', '-cp',\n 'src/com/krzem/nn_opencv_object_detection/modules/opencv-420.jar;',\n '-d', 'build'] + jfl)"], {}), "(['javac', '-cp',\n 'src/com/krzem/nn_opencv_object_detection/modules/opencv-420.jar;',\n '-d', 'build'] + jfl)\n", (396, 511), False, 'import subprocess\n'), ((135, 153), 'os.path.join', 'os.path.join', (['r', 'k'], {}), '(r, k)\n', (147, 153), False, 'import os\n'), ((198, 216), 'os.path.join', 'os.path.join', (['r', 'f'], {}), '(r, f)\n', (210, 216), False, 'import os\n'), ((359, 377), 'os.path.join', 'os.path.join', (['r', 'f'], {}), '(r, f)\n', (371, 377), False, 'import os\n'), ((1181, 1199), 'os.path.join', 'os.path.join', (['r', 'f'], {}), '(r, f)\n', (1193, 1199), False, 'import os\n'), ((1199, 1217), 'os.path.join', 'os.path.join', (['r', 'f'], {}), '(r, f)\n', (1211, 1217), False, 'import os\n'), ((1122, 1140), 'os.path.join', 'os.path.join', (['r', 'f'], {}), '(r, f)\n', (1134, 1140), False, 'import os\n')]
|
"""
Analyze the words of a text.
We consider not only the content of a word as string, but also its position in the text.
A word can appear in multiple locations in the text. (We consider inflections as
different words: "go" and "goes" are considered two different words.)
We define an order of the words. A word ``x`` is smaller than a word ``y`` if the
difference between the first and last occurrence of ``x`` is smaller than the
corresponding difference for ``y``.
Provide a function to extract words from a text.
Provide a function ``top`` to return the largest ``n`` words based on the aforementioned
order.
"""
import re
from typing import List, Dict, cast
from icontract import require, DBC, ensure
WORD_RE = re.compile(r"^[a-z]+(-[a-z])*$") #: Express a normalized word of a text.
class Token(DBC, str):
"""Represent a word as a token of the text."""
@require(lambda text: WORD_RE.match(text))
def __new__(cls, text: str) -> "Token":
"""Enforce the properties on the ``text`` of the word."""
return cast(Token, text)
class WordOccurrence(DBC):
"""Represent a word occurence in the text."""
# fmt: off
@require(
lambda first, last:
first <= last
)
@require(lambda first: first >= 0)
@require(lambda last: last >= 0)
# fmt: on
def __init__(self, first: int, last: int, text: Token) -> None:
"""Initialize with the given values."""
self.first = first #: Index of the first occurrence
self.last = last #: Index of the last occurrence
self.text = text #: Text of the word
def __lt__(self, other: "WordOccurrence") -> bool:
"""
Compare against ``other`` based on the :py:attr:`.first` and :py:attr:`.last`.
"""
return self.last - self.first < other.last - other.first
# ERROR (mristin, 2021-06-09):
# I forgot to specify how ``<=`` works for the top words. This broke down for the
# program input such as:
# Falsifying example: execute(
# kwargs={'limit': 2, 'words': [Word(1, 1, 'a'), Word(1, 1, 'a')]},
# )
#
# I should have written a function like the following one:
# def __le__(self, other: "WordOccurrence") -> bool:
# """
# Compare against ``other`` based on the :py:attr:`.first` and :py:attr:`.last`.
# """
# return self.last - self.first <= other.last - other.first
def __repr__(self) -> str:
"""Represent the word occurrence as string for easier debugging."""
return (
f"{self.__class__.__name__}("
f"{self.first!r}, {self.last!r}, {self.text!r}"
f")"
)
# fmt: off
@ensure(
lambda result:
(
word_texts := [word_occurrence.text for word_occurrence in result],
len(word_texts) == len(set(word_texts))
)[1],
"No duplicate word occurrences"
)
@ensure(
lambda tokens, result:
all(
tokens[word_occurrence.first] == word_occurrence.text
and tokens[word_occurrence.last] == word_occurrence.text
for word_occurrence in result
)
)
@ensure(lambda tokens, result: len(result) <= len(tokens))
@ensure(
lambda tokens, result:
not (len(tokens) > 0) or len(result) > 0
)
# fmt: on
def tokens_to_words(tokens: List[Token]) -> List[WordOccurrence]:
    """Group tokens into word occurrences by first and last position in ``tokens``."""
first_occurrences = dict() # type: Dict[Token, int]
last_occurrences = dict() # type: Dict[Token, int]
for i, token in enumerate(tokens):
if token not in first_occurrences:
assert token not in last_occurrences
first_occurrences[token] = i
last_occurrences[token] = i
else:
last_occurrences[token] = i
assert len(first_occurrences) == len(last_occurrences)
assert set(first_occurrences.keys()) == set(last_occurrences.keys())
word_occurrences = [] # type: List[WordOccurrence]
for token, first_occurrence in first_occurrences.items():
word_occurrences.append(
WordOccurrence(
first=first_occurrence,
last=last_occurrences[token],
text=token,
)
)
return word_occurrences
TOKEN_RE = re.compile("[a-zA-Z]+(-[a-zA-Z])*") #: Express a token of a text.
# fmt: off
@ensure(
lambda text, result:
sum(len(token) for token in result) <= len(text)
)
# fmt: on
def tokenize(text: str) -> List[Token]:
"""Tokenize the text into normalized word tokens ignoring the punctuation."""
result = [] # type: List[Token]
for match in TOKEN_RE.finditer(text):
result.append(Token(match.group().lower()))
return result
# fmt: off
@require(lambda limit: limit > 0)
@ensure(
lambda word_occurrences, result:
(
word_set := set(word_occurrences),
all(
word_occurrence in word_set # pylint: disable=used-before-assignment
for word_occurrence in result
)
)[1]
)
@ensure(
lambda result:
all(
result[i] >= result[i + 1]
for i in range(len(result) - 1)
)
)
@ensure(
lambda word_occurrences, limit, result:
len(result) == min(len(word_occurrences), limit)
)
# fmt: on
def find_top(
word_occurrences: List[WordOccurrence], limit: int
) -> List[WordOccurrence]:
"""Find the ``limit`` top occurrences in ``word_occurrences``."""
sorted_words = sorted(word_occurrences, reverse=True)
return sorted_words[:limit]
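# Illustrative usage (added; hypothetical values): for occurrences spanning
# (first=0, last=5), (first=1, last=2) and (first=3, last=3), find_top(..., 2)
# returns the (0, 5) span first, then the (1, 2) span.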
|
[
"typing.cast",
"icontract.require",
"re.compile"
] |
[((724, 755), 're.compile', 're.compile', (['"""^[a-z]+(-[a-z])*$"""'], {}), "('^[a-z]+(-[a-z])*$')\n", (734, 755), False, 'import re\n'), ((4206, 4241), 're.compile', 're.compile', (['"""[a-zA-Z]+(-[a-zA-Z])*"""'], {}), "('[a-zA-Z]+(-[a-zA-Z])*')\n", (4216, 4241), False, 'import re\n'), ((4671, 4703), 'icontract.require', 'require', (['(lambda limit: limit > 0)'], {}), '(lambda limit: limit > 0)\n', (4678, 4703), False, 'from icontract import require, DBC, ensure\n'), ((1165, 1207), 'icontract.require', 'require', (['(lambda first, last: first <= last)'], {}), '(lambda first, last: first <= last)\n', (1172, 1207), False, 'from icontract import require, DBC, ensure\n'), ((1235, 1268), 'icontract.require', 'require', (['(lambda first: first >= 0)'], {}), '(lambda first: first >= 0)\n', (1242, 1268), False, 'from icontract import require, DBC, ensure\n'), ((1274, 1305), 'icontract.require', 'require', (['(lambda last: last >= 0)'], {}), '(lambda last: last >= 0)\n', (1281, 1305), False, 'from icontract import require, DBC, ensure\n'), ((1047, 1064), 'typing.cast', 'cast', (['Token', 'text'], {}), '(Token, text)\n', (1051, 1064), False, 'from typing import List, Dict, cast\n')]
|
import argparse
import json
from difflib import unified_diff
import sys
from typing import List
from typing import Mapping
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Union
def _get_pretty_format(
contents: str,
indent: str,
ensure_ascii: bool = True,
sort_keys: bool = True,
top_keys: Sequence[str] = ()
) -> str:
def pairs_first(pairs: Sequence[Tuple[str, str]]) -> Mapping[str, str]:
before = [pair for pair in pairs if pair[0] in top_keys]
before = sorted(before, key=lambda x: top_keys.index(x[0]))
after = [pair for pair in pairs if pair[0] not in top_keys]
if sort_keys:
after.sort()
return dict(before + after)
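    # Illustrative (added comment): with top_keys=('name',) and sort_keys=True,
    # pairs [('b', 1), ('name', 2), ('a', 3)] become {'name': 2, 'a': 3, 'b': 1}.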
json_pretty = json.dumps(
json.loads(contents, object_pairs_hook=pairs_first),
indent=indent,
ensure_ascii=ensure_ascii,
)
return f'{json_pretty}\n'
def _autofix(filename: str, new_contents: str) -> None:
print('Fixing file {}'.format(filename))
with open(filename, 'w', encoding='UTF-8') as file_handler:
file_handler.write(new_contents)
def parse_num_to_int(string: str) -> Union[int, str]:
"""Convert string numbers to int, leaving strings as is."""
try:
return int(string)
except ValueError:
return string
def parse_topkeys(string: str) -> List[str]:
return string.split(',')
def get_diff(source: str, target: str, file: str) -> str:
source_lines = source.splitlines(True)
target_lines = target.splitlines(True)
diff = unified_diff(source_lines, target_lines, fromfile=file, tofile=file)
return ''.join(diff)
def main(argv: Optional[Sequence[str]] = None) -> int:
parser = argparse.ArgumentParser()
parser.add_argument(
'--autofix',
action='store_true',
dest='autofix',
help='Automatically fixes encountered not-pretty-formatted files',
)
parser.add_argument(
'--indent',
type=parse_num_to_int,
default='2',
help=(
'The number of indent spaces or a string to be used as delimiter'
' for indentation level e.g. 4 or "\t" (Default: 2)'
),
)
parser.add_argument(
'--no-ensure-ascii',
action='store_true',
dest='no_ensure_ascii',
default=False,
help=('Do NOT convert non-ASCII characters to Unicode escape sequences '
'(\\uXXXX)'),
)
parser.add_argument(
'--no-sort-keys',
action='store_true',
dest='no_sort_keys',
default=False,
help='Keep JSON nodes in the same order',
)
parser.add_argument(
'--top-keys',
type=parse_topkeys,
dest='top_keys',
default=[],
help='Ordered list of keys to keep at the top of JSON hashes',
)
parser.add_argument('filenames', nargs='*', help='Filenames to fix')
args = parser.parse_args(argv)
status = 0
for json_file in args.filenames:
with open(json_file, encoding='UTF-8') as file_handler:
contents = file_handler.read()
try:
pretty_contents = _get_pretty_format(
contents,
args.indent,
ensure_ascii=not args.no_ensure_ascii,
sort_keys=not args.no_sort_keys,
top_keys=args.top_keys,
)
if contents != pretty_contents:
if args.autofix:
_autofix(json_file, pretty_contents)
else:
print(
get_diff(contents, pretty_contents, json_file),
end='',
)
status = 1
except ValueError:
            print(f'Input file {json_file} is not valid JSON, consider using check-json')
return 1
return status
if __name__ == '__main__':
sys.exit(main())
|
[
"argparse.ArgumentParser",
"difflib.unified_diff",
"json.loads"
] |
[((1581, 1649), 'difflib.unified_diff', 'unified_diff', (['source_lines', 'target_lines'], {'fromfile': 'file', 'tofile': 'file'}), '(source_lines, target_lines, fromfile=file, tofile=file)\n', (1593, 1649), False, 'from difflib import unified_diff\n'), ((1745, 1770), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (1768, 1770), False, 'import argparse\n'), ((792, 843), 'json.loads', 'json.loads', (['contents'], {'object_pairs_hook': 'pairs_first'}), '(contents, object_pairs_hook=pairs_first)\n', (802, 843), False, 'import json\n')]
|
import logging
def setup_custom_logger(name):
logger = logging.getLogger(name)
c_handler = logging.StreamHandler()
f_handler = logging.FileHandler('jamstockex.log')
c_handler.setLevel(logging.DEBUG)
f_handler.setLevel(logging.ERROR)
    # Create formatters and add them to the handlers
c_format = logging.Formatter('%(module)s - %(levelname)s - %(message)s')
f_format = logging.Formatter('%(asctime)s - %(module)s - %(levelname)s - %(message)s')
c_handler.setFormatter(c_format)
f_handler.setFormatter(f_format)
# Add handlers to the logger
logger.addHandler(c_handler)
logger.setLevel(logging.DEBUG)
# logger.addHandler(f_handler)
return logger
|
[
"logging.Formatter",
"logging.StreamHandler",
"logging.FileHandler",
"logging.getLogger"
] |
[((61, 84), 'logging.getLogger', 'logging.getLogger', (['name'], {}), '(name)\n', (78, 84), False, 'import logging\n'), ((102, 125), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (123, 125), False, 'import logging\n'), ((142, 179), 'logging.FileHandler', 'logging.FileHandler', (['"""jamstockex.log"""'], {}), "('jamstockex.log')\n", (161, 179), False, 'import logging\n'), ((319, 380), 'logging.Formatter', 'logging.Formatter', (['"""%(module)s - %(levelname)s - %(message)s"""'], {}), "('%(module)s - %(levelname)s - %(message)s')\n", (336, 380), False, 'import logging\n'), ((396, 471), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(module)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(module)s - %(levelname)s - %(message)s')\n", (413, 471), False, 'import logging\n')]
|
# Copyright (C) 2012 <NAME> and The Pepper Developers
# Released under the MIT License. See the file COPYING.txt for details.
from assert_parser_result import assert_parser_result
def test_operator_plus():
assert_parser_result(
r"""
0001:0001 SYMBOL(a)
0001:0002 PLUS(+)
0001:0003 SYMBOL(b)
0001:0004 NEWLINE
""",
r"""
[PLUS:+]
[SYMBOL:a]
[SYMBOL:b]
[EOF:]
""",
r"""
PepPlus(
PepSymbol('a'),
PepSymbol('b')
)
""" )
def test_operator_minus():
assert_parser_result(
r"""
0001:0001 SYMBOL(a)
0001:0002 MINUS(-)
0001:0003 SYMBOL(b)
0001:0004 NEWLINE
""",
r"""
[MINUS:-]
[SYMBOL:a]
[SYMBOL:b]
[EOF:]
""",
r"""
PepMinus(
PepSymbol('a'),
PepSymbol('b')
)
""" )
def test_operator_times():
assert_parser_result(
r"""
0001:0001 SYMBOL(a)
0001:0002 TIMES(*)
0001:0003 SYMBOL(b)
0001:0004 NEWLINE
""",
r"""
[TIMES:*]
[SYMBOL:a]
[SYMBOL:b]
[EOF:]
""",
r"""
PepTimes(
PepSymbol('a'),
PepSymbol('b')
)
""" )
def test_operator_greater_than():
assert_parser_result(
r"""
0001:0001 SYMBOL(a)
0001:0002 GT(>)
0001:0003 SYMBOL(b)
0001:0004 NEWLINE
""",
r"""
[GT:>]
[SYMBOL:a]
[SYMBOL:b]
[EOF:]
""",
r"""
PepGreaterThan(
PepSymbol('a'),
PepSymbol('b')
)
""" )
def test_operator_less_than():
assert_parser_result(
r"""
0001:0001 SYMBOL(a)
0001:0002 LT(<)
0001:0003 SYMBOL(b)
0001:0004 NEWLINE
""",
r"""
[LT:<]
[SYMBOL:a]
[SYMBOL:b]
[EOF:]
""",
r"""
PepLessThan(
PepSymbol('a'),
PepSymbol('b')
)
""" )
def test_plus_in_function_call():
assert_parser_result(
r"""
0001:0001 SYMBOL(print)
0001:0006 LPAREN
0001:0008 INT(3)
0001:0010 PLUS(+)
0001:0012 SYMBOL(b)
0001:0014 RPAREN
0001:0015 NEWLINE
""",
r"""
[LPAREN:]
[SYMBOL:print]
[PLUS:+]
[INT:3]
[SYMBOL:b]
[EOF:]
""",
r"""
PepFunctionCall(
PepSymbol('print'),
(
PepPlus(
PepInt('3'),
PepSymbol('b')
),
)
)
""" )
|
[
"assert_parser_result.assert_parser_result"
] |
[((213, 463), 'assert_parser_result.assert_parser_result', 'assert_parser_result', (['"""\n0001:0001 SYMBOL(a)\n0001:0002 PLUS(+)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""', '"""\n[PLUS:+]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n"""', '"""\nPepPlus(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n"""'], {}), '(\n """\n0001:0001 SYMBOL(a)\n0001:0002 PLUS(+)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""\n , """\n[PLUS:+]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n""",\n """\nPepPlus(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n""")\n', (233, 463), False, 'from assert_parser_result import assert_parser_result\n'), ((511, 763), 'assert_parser_result.assert_parser_result', 'assert_parser_result', (['"""\n0001:0001 SYMBOL(a)\n0001:0002 MINUS(-)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""', '"""\n[MINUS:-]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n"""', '"""\nPepMinus(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n"""'], {}), '(\n """\n0001:0001 SYMBOL(a)\n0001:0002 MINUS(-)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""\n , """\n[MINUS:-]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n""",\n """\nPepMinus(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n""")\n', (531, 763), False, 'from assert_parser_result import assert_parser_result\n'), ((811, 1063), 'assert_parser_result.assert_parser_result', 'assert_parser_result', (['"""\n0001:0001 SYMBOL(a)\n0001:0002 TIMES(*)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""', '"""\n[TIMES:*]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n"""', '"""\nPepTimes(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n"""'], {}), '(\n """\n0001:0001 SYMBOL(a)\n0001:0002 TIMES(*)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""\n , """\n[TIMES:*]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n""",\n """\nPepTimes(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n""")\n', (831, 1063), False, 'from assert_parser_result import assert_parser_result\n'), ((1119, 1374), 'assert_parser_result.assert_parser_result', 'assert_parser_result', (['"""\n0001:0001 SYMBOL(a)\n0001:0002 GT(>)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""', '"""\n[GT:>]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n"""', '"""\nPepGreaterThan(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n"""'], {}), '(\n """\n0001:0001 SYMBOL(a)\n0001:0002 GT(>)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""\n , """\n[GT:>]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n""",\n """\nPepGreaterThan(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n""")\n', (1139, 1374), False, 'from assert_parser_result import assert_parser_result\n'), ((1426, 1678), 'assert_parser_result.assert_parser_result', 'assert_parser_result', (['"""\n0001:0001 SYMBOL(a)\n0001:0002 LT(<)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""', '"""\n[LT:<]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n"""', '"""\nPepLessThan(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n"""'], {}), '(\n """\n0001:0001 SYMBOL(a)\n0001:0002 LT(<)\n0001:0003 SYMBOL(b)\n0001:0004 NEWLINE\n"""\n , """\n[LT:<]\n [SYMBOL:a]\n [SYMBOL:b]\n[EOF:]\n""",\n """\nPepLessThan(\n PepSymbol(\'a\'),\n PepSymbol(\'b\')\n)\n""")\n', (1446, 1678), False, 'from assert_parser_result import assert_parser_result\n'), ((1734, 2191), 'assert_parser_result.assert_parser_result', 'assert_parser_result', (['"""\n0001:0001 SYMBOL(print)\n0001:0006 LPAREN\n0001:0008 INT(3)\n0001:0010 PLUS(+)\n0001:0012 SYMBOL(b)\n0001:0014 RPAREN\n0001:0015 NEWLINE\n"""', '"""\n[LPAREN:]\n [SYMBOL:print]\n [PLUS:+]\n [INT:3]\n [SYMBOL:b]\n[EOF:]\n"""', '"""\nPepFunctionCall(\n PepSymbol(\'print\'),\n (\n PepPlus(\n PepInt(\'3\'),\n PepSymbol(\'b\')\n ),\n )\n)\n"""'], {}), '(\n """\n0001:0001 SYMBOL(print)\n0001:0006 LPAREN\n0001:0008 
INT(3)\n0001:0010 PLUS(+)\n0001:0012 SYMBOL(b)\n0001:0014 RPAREN\n0001:0015 NEWLINE\n"""', '"""\n[LPAREN:]\n    [SYMBOL:print]\n    [PLUS:+]\n        [INT:3]\n        [SYMBOL:b]\n[EOF:]\n"""', '"""\nPepFunctionCall(\n    PepSymbol(\'print\'),\n    (\n        PepPlus(\n            PepInt(\'3\'),\n            PepSymbol(\'b\')\n        ),\n    )\n)\n"""'], {}), '(\n    """\n0001:0001 SYMBOL(print)\n0001:0006 LPAREN\n0001:0008 INT(3)\n0001:0010 PLUS(+)\n0001:0012 SYMBOL(b)\n0001:0014 RPAREN\n0001:0015 NEWLINE\n"""\n    ,\n    """\n[LPAREN:]\n    [SYMBOL:print]\n    [PLUS:+]\n        [INT:3]\n        [SYMBOL:b]\n[EOF:]\n"""\n    ,\n    """\nPepFunctionCall(\n    PepSymbol(\'print\'),\n    (\n        PepPlus(\n            PepInt(\'3\'),\n            PepSymbol(\'b\')\n        ),\n    )\n)\n"""\n    )\n', (1754, 2191), False, 'from assert_parser_result import assert_parser_result\n')]
|
import json, os
from s3ts import filewriter
class FileStore(object):
def exists( self, path ):
"""Returns true if a file with the given file exists"""
raise RuntimeError("Not implemented")
def get( self, path ):
"""Get the value associated with path
Raises a KeyError if the path doesn't exist
"""
raise RuntimeError("Not implemented")
def put( self, path, body ):
"""Store a value associated with path
Overwrites any existing value
"""
raise RuntimeError("Not implemented")
def remove( self, path ):
"""Remove a value with the path, if it exists
"""
raise RuntimeError("Not implemented")
def list( self, pathPrefix ):
"""Return all paths having the specified path prefix"""
raise RuntimeError("Not implemented")
def getFromJson( self, path, jscoder ):
return jscoder.fromJson( json.loads( self.get(path).decode() ) )
def putToJson( self, path, v, jscoder ):
return self.put( path, json.dumps( jscoder.toJson(v) ).encode() )
def url( self, path, expiresInSecs ):
raise RuntimeError("Not implemented")
def joinPath( self, *elements):
raise RuntimeError("Not implemented")
def splitPath( self, path):
raise RuntimeError("Not implemented")
def getMetadata( self, path):
raise RuntimeError("Not implemented")
class FileMetaData:
def __init__(self,size,lastModified):
self.size = size
self.lastModified = lastModified
def __repr__(self):
return self.__str__()
def __str__(self):
return "size:" + str(self.size) + " mtime:" + str(self.lastModified)
class LocalFileStore(FileStore):
"""implements the FileStore interface using the local file system"""
def __init__( self, root ):
self.root = root
def __path( self, path ):
return os.path.join( self.root, path )
def exists( self, path ):
return os.path.exists( self.__path(path) )
def get( self, path ):
try:
with open( self.__path(path), 'rb' ) as f:
return f.read()
except IOError as e:
raise KeyError(e)
def put( self, path, body ):
path = self.__path(path)
dir = os.path.dirname( path )
if not os.path.isdir( dir ):
os.makedirs( dir )
# Do our best to be atomic in our updates here, in case
# another process is simultaneously updating the file
with filewriter.atomicFileWriter(path) as f:
f.write(body)
def remove( self, path ):
path = self.__path(path)
if os.path.exists( path ):
os.unlink(path)
def list( self, pathPrefix ):
results = []
for dir0, dirs, files in os.walk(self.__path(pathPrefix)):
for file in files:
path = os.path.join( dir0, file)
rpath = os.path.relpath( path, os.path.join( self.root, pathPrefix ) )
results.append( rpath )
return results
def joinPath( self, *elements):
return os.path.join(*elements)
def splitPath( self, path):
return path.split(os.sep)
def getMetadata(self, path):
"""Returns the size and update time for the given path
Raises a KeyError if the path doesn't exist
"""
statinfo = os.stat(self.__path(path))
return FileMetaData(statinfo.st_size, statinfo.st_mtime)
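# Illustrative usage sketch (not part of the original module; the root
# directory and file paths below are placeholders):
if __name__ == "__main__":
    store = LocalFileStore("/tmp/filestore-demo")
    store.put("docs/readme.txt", b"hello")
    assert store.exists("docs/readme.txt")
    print(store.get("docs/readme.txt"))          # b'hello'
    print(store.list("docs"))                    # ['readme.txt']
    print(store.getMetadata("docs/readme.txt"))  # size and mtime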
|
[
"os.makedirs",
"os.unlink",
"os.path.isdir",
"os.path.dirname",
"os.path.exists",
"s3ts.filewriter.atomicFileWriter",
"os.path.join"
] |
[((1928, 1957), 'os.path.join', 'os.path.join', (['self.root', 'path'], {}), '(self.root, path)\n', (1940, 1957), False, 'import json, os\n'), ((2311, 2332), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (2326, 2332), False, 'import json, os\n'), ((2684, 2704), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2698, 2704), False, 'import json, os\n'), ((3145, 3168), 'os.path.join', 'os.path.join', (['*elements'], {}), '(*elements)\n', (3157, 3168), False, 'import json, os\n'), ((2350, 2368), 'os.path.isdir', 'os.path.isdir', (['dir'], {}), '(dir)\n', (2363, 2368), False, 'import json, os\n'), ((2384, 2400), 'os.makedirs', 'os.makedirs', (['dir'], {}), '(dir)\n', (2395, 2400), False, 'import json, os\n'), ((2543, 2576), 's3ts.filewriter.atomicFileWriter', 'filewriter.atomicFileWriter', (['path'], {}), '(path)\n', (2570, 2576), False, 'from s3ts import filewriter\n'), ((2720, 2735), 'os.unlink', 'os.unlink', (['path'], {}), '(path)\n', (2729, 2735), False, 'import json, os\n'), ((2913, 2937), 'os.path.join', 'os.path.join', (['dir0', 'file'], {}), '(dir0, file)\n', (2925, 2937), False, 'import json, os\n'), ((2986, 3021), 'os.path.join', 'os.path.join', (['self.root', 'pathPrefix'], {}), '(self.root, pathPrefix)\n', (2998, 3021), False, 'import json, os\n')]
|
'''
Created on Jun 4, 2017
@author: Asus-PC
'''
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
def lookup(d, key):
return d[key]
@register.filter(is_safe=False)
def get_range(value):
return range(value)
@register.filter
def get_word(value):
return value
@register.filter
def get_item(dictionary, key):
return dictionary.get(key)
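# Illustrative template usage (assumes this module lives in an app's
# `templatetags` package under a name such as `dict_filters`):
#   {% load dict_filters %}
#   {{ my_dict|lookup:key }}
#   {% for i in 3|get_range %}...{% endfor %}
#   {{ my_dict|get_item:key }}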
|
[
"django.template.Library"
] |
[((145, 163), 'django.template.Library', 'template.Library', ([], {}), '()\n', (161, 163), False, 'from django import template\n')]
|
from typing import List, Tuple, Dict
import torch as t
from torch.nn import CrossEntropyLoss
from unify_eval.model.layers.layer_base import Layer
from unify_eval.utils.vocab import PAD
from unify_eval.model.layers.preprocessing import SequenceMapperLayer, TokenizerLayer
from unify_eval.model.mixins.sequences.seq2seq import PytorchLanguageModel
from unify_eval.model.mixins.stateful import StatefulLayeredModel
from unify_eval.model.types import Tensor
from unify_eval.training.seq2seq.seq2seq_data import Seq2SeqData
class LayeredLanguageModel(StatefulLayeredModel, PytorchLanguageModel):
"""
Class for stateful layered language models models with pytorch backends.
"""
def __init__(self,
sub_layers: List[Tuple[str, Layer]],
optimizer_factory=None):
"""
:param sub_layers: List of name - layer tuples
:param optimizer_factory: callable with an argument named "params" that returns some pytorch optimizer
"""
StatefulLayeredModel.__init__(self, sub_layers)
PytorchLanguageModel.__init__(self,
tokenizer=sub_layers[0][1]["tokenizer"],
sequence_mapper=sub_layers[0][1]["sequence_mapper"])
self.tail = self[1:]
optimizer_factory = optimizer_factory if optimizer_factory is not None else lambda params: t.optim.Adam(lr=1e-4,
params=params)
self.optimizer = optimizer_factory(params=list(self.get_optimizable_parameters()))
self.vocab = sub_layers[0][1]["sequence_mapper"].get_components()["vocab"]
self.xent = CrossEntropyLoss(
ignore_index=self.vocab.token2id[PAD])
@classmethod
def from_layers(cls,
tokenizer_layer: TokenizerLayer,
sequence_mapper_layer: SequenceMapperLayer,
core_model: StatefulLayeredModel,
token_classifier: Layer,
optimizer_factory=None) -> "LayeredLanguageModel":
preprocessing = Layer() \
+ ("tokenizer", tokenizer_layer) \
+ ("sequence_mapper", sequence_mapper_layer)
combined = LayeredLanguageModel([
("preprocessing", preprocessing),
("core_model", core_model),
("token_classifier", token_classifier)
], optimizer_factory=optimizer_factory).reset()
return combined
def get_loss(self, input_indices: List[List[int]], target_indices: List[List[int]], **kwargs) -> Dict[str, Tensor]:
# indices are already preprocessed, so push only through tail of model
logits = self.tail.push(encoded_texts=input_indices,
padding_value=self.vocab.token2id[PAD],
**kwargs)["logits"]
padded_target = t.nn.utils.rnn.pad_sequence(
sequences=[t.tensor(indices_).long() for indices_ in target_indices],
padding_value=self.vocab.token2id[PAD])
l = self.xent(input=logits.view(-1, logits.shape[-1]), target=padded_target.view(-1, ))
return {"xent": l}
def train(self, data: Seq2SeqData, **kwargs) -> "LayeredLanguageModel":
for input_data, target_data in data:
loss = self.get_loss(input_indices=input_data, target_indices=target_data, **kwargs)["xent"]
loss.backward()
self.optimizer.step()
self.optimizer.zero_grad()
return self
def predict_target_logits(self, indices: List[List[int]]) -> Tensor:
# indices are already preprocessed, so push only through tail of model
return self.tail.push(encoded_texts=indices,
padding_value=self.vocab.token2id[PAD])["logits"]
def _get_cross_entropy_singlebatch(self, input_indices: List[List[int]], target_indices: List[List[int]],
batch_first: bool = False, **kwargs) -> Tensor:
return self.get_loss(input_indices=input_indices, target_indices=target_indices)["xent"]
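# Illustrative wiring sketch (comment only; the constructors of the individual
# layer objects are defined elsewhere, so every instance below is hypothetical):
#   model = LayeredLanguageModel.from_layers(
#       tokenizer_layer=my_tokenizer_layer,
#       sequence_mapper_layer=my_sequence_mapper_layer,
#       core_model=my_core_model,
#       token_classifier=my_token_classifier)
#   model = model.train(data=my_seq2seq_data)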
|
[
"unify_eval.model.layers.layer_base.Layer",
"torch.nn.CrossEntropyLoss",
"unify_eval.model.mixins.sequences.seq2seq.PytorchLanguageModel.__init__",
"torch.optim.Adam",
"unify_eval.model.mixins.stateful.StatefulLayeredModel.__init__",
"torch.tensor"
] |
[((1006, 1053), 'unify_eval.model.mixins.stateful.StatefulLayeredModel.__init__', 'StatefulLayeredModel.__init__', (['self', 'sub_layers'], {}), '(self, sub_layers)\n', (1035, 1053), False, 'from unify_eval.model.mixins.stateful import StatefulLayeredModel\n'), ((1062, 1195), 'unify_eval.model.mixins.sequences.seq2seq.PytorchLanguageModel.__init__', 'PytorchLanguageModel.__init__', (['self'], {'tokenizer': "sub_layers[0][1]['tokenizer']", 'sequence_mapper': "sub_layers[0][1]['sequence_mapper']"}), "(self, tokenizer=sub_layers[0][1]['tokenizer'],\n sequence_mapper=sub_layers[0][1]['sequence_mapper'])\n", (1091, 1195), False, 'from unify_eval.model.mixins.sequences.seq2seq import PytorchLanguageModel\n'), ((1739, 1794), 'torch.nn.CrossEntropyLoss', 'CrossEntropyLoss', ([], {'ignore_index': 'self.vocab.token2id[PAD]'}), '(ignore_index=self.vocab.token2id[PAD])\n', (1755, 1794), False, 'from torch.nn import CrossEntropyLoss\n'), ((1396, 1434), 'torch.optim.Adam', 't.optim.Adam', ([], {'lr': '(0.0001)', 'params': 'params'}), '(lr=0.0001, params=params)\n', (1408, 1434), True, 'import torch as t\n'), ((2162, 2169), 'unify_eval.model.layers.layer_base.Layer', 'Layer', ([], {}), '()\n', (2167, 2169), False, 'from unify_eval.model.layers.layer_base import Layer\n'), ((3021, 3039), 'torch.tensor', 't.tensor', (['indices_'], {}), '(indices_)\n', (3029, 3039), True, 'import torch as t\n')]
|
from sleekxmpp import ClientXMPP
import logging
logger = logging.getLogger(__name__)
class HubBot(ClientXMPP):
HUB = "hub.sotecware.net"
SWITCH = "switch.hub.sotecware.net"
FEED = "feed.hub.sotecware.net"
def __init__(self, localpart, resource, password):
jid = "{0}@{1}".format(localpart, self.HUB)
if resource:
jid += "/" + resource
super().__init__(jid, password)
self.register_plugin("xep_0004") # dataforms
self.register_plugin("xep_0045") # muc
self.register_plugin("xep_0060") # pubsub -- let the fun begin
self.add_event_handler("session_start", self.sessionStart)
self.add_event_handler("session_end", self.sessionEnd)
self.add_event_handler("groupchat_message", self.messageMUC)
self.add_event_handler("message", self.message)
self.muc = None
self.pubsub = None
self.dataforms = self.plugin["xep_0004"]
self._switchHandlers = {}
self._switches = []
def _getSwitchJID(self, switch):
return "{0}@{1}".format(switch, self.SWITCH)
def _joinSwitch(self, switchTuple, wait=False):
logger.debug("joining %s as %s", *switchTuple)
self.muc.joinMUC(*switchTuple, wait=wait)
    def received_roster(self, roster):
pass
def sessionStart(self, event):
self.send_presence()
roster = self.get_roster(block=True)
        self.received_roster(roster)
self.muc = self.plugin["xep_0045"]
self.pubsub = self.plugin["xep_0060"]
for switchTuple in self._switches:
self._joinSwitch(switchTuple)
def sessionEnd(self, event):
pass
def addSwitch(self, switch, nick, handler=None):
if handler is not None:
self.addSwitchHandler(switch, handler)
switchTuple = (self._getSwitchJID(switch), nick)
self._switches.append(switchTuple)
if self.muc is not None:
self._joinSwitch(switchTuple)
return switchTuple
def addSwitchHandler(self, room, handler):
self._switchHandlers.setdefault(self._getSwitchJID(room), []).append(handler)
def messageMUC(self, msg):
muc = str(msg["from"].bare)
handlers = self._switchHandlers.get(muc, [])
for handler in handlers:
handler(msg)
def message(self, msg):
pass
def reply(self, msg, body):
if msg["type"] == "groupchat":
self.send_message(mtype="groupchat", mto=msg["from"].bare, mbody=body)
else:
self.send_message(mto=msg["from"], mbody=body, mtype="chat")
def run(self):
self.connect()
self.process(block=True)
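# Illustrative usage sketch (localpart, resource, password and switch names
# below are placeholders):
if __name__ == "__main__":
    bot = HubBot("mybot", "example", "secret")
    bot.addSwitch("lobby", "mybot", handler=lambda msg: print(msg["body"]))
    bot.run()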
|
[
"logging.getLogger"
] |
[((58, 85), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (75, 85), False, 'import logging\n')]
|
#
# Copyright (c) 2010-2017 Fabric Software Inc. All rights reserved.
#
import json, os
from PySide import QtCore, QtGui
from FabricEngine.FabricUI import Actions as CppActions
from FabricEngine.Canvas.Application.FabricApplicationStates import *
class BaseHotkeyCommand(QtGui.QUndoCommand):
class State:
REDO_IT = 0
UNDO_IT = 1
def __init__(self):
super(BaseHotkeyCommand, self).__init__()
self.state = self.State.UNDO_IT
class SetKeySequenceCommand(BaseHotkeyCommand):
def __init__(self, model, actName, prevKeySeq, keySeq):
super(SetKeySequenceCommand, self).__init__()
        self.successfullyDone = False
self.prevKeySeq = prevKeySeq
self.keySeq = keySeq
self.actName = actName
self.model = model;
self.state = self.State.REDO_IT
self.redo()
def redo(self):
""" Implementation of QtGui.QUndoCommand
"""
if self.state == self.State.REDO_IT:
            self.successfullyDone = self.model.setItemKeySequence(self.actName, self.keySeq, False)
self.state = self.State.UNDO_IT
def undo(self):
""" Implementation of QtGui.QUndoCommand
"""
if self.state == self.State.UNDO_IT:
self.model.setItemKeySequence(self.actName, self.prevKeySeq)
self.state = self.State.REDO_IT
class OpenFileCommand(BaseHotkeyCommand):
def __init__(self, hotkeyEditor):
super(OpenFileCommand, self).__init__()
self.jsonData = None
self.preShortcutList = {}
self.prevWindowTitle = None
self.hotkeyEditor = hotkeyEditor
def doIt(self):
""" Implementation of QtGui.QUndoCommand
"""
lastDir = str(GetAppStates().getSettings().value("hotkeyEditor/lastFolder"))
fname, _ = QtGui.QFileDialog.getOpenFileName(None, "Open Hotkey file", lastDir, "*.json")
# Override the shortcuts from the json matches.
if len(fname) > 0:
with open(fname) as infile:
self.jsonData = json.load(infile)
head, tail = os.path.split(fname)
            self.windowTitle = 'Hotkey Editor ' + tail
self.prevWindowTitle = self.hotkeyEditor.windowTitle()
actRegistry = CppActions.ActionRegistry.GetActionRegistry()
for actName in actRegistry.getActionNameList():
self.preShortcutList[actName] = actRegistry.getShortcuts(actName);
self.state = self.State.REDO_IT
self.redo()
return True
return False
def __setKeySequenceList(self, items, windowTitle):
for actName, shortcutList in items.iteritems():
keySeq = QtGui.QKeySequence()
if shortcutList:
keySeq = QtGui.QKeySequence(shortcutList[0])
self.hotkeyEditor.hotkeyTable.model.setItemKeySequence(actName, keySeq)
self.hotkeyEditor.setWindowTitle(windowTitle)
def redo(self):
""" Implementation of QtGui.QUndoCommand
"""
if self.state == self.State.REDO_IT:
            self.__setKeySequenceList(self.jsonData, self.windowTitle)
self.state = self.State.UNDO_IT
def undo(self):
""" Implementation of QtGui.QUndoCommand
"""
if self.state == self.State.UNDO_IT:
self.__setKeySequenceList(self.preShortcutList, self.prevWindowTitle)
self.state = self.State.REDO_IT
class SaveFileCommand(BaseHotkeyCommand):
def __init__(self):
super(SaveFileCommand, self).__init__()
def saveFile(self):
ext = ".json"
fname = str(GetAppStates().getSettings().value("hotkeyEditor/lastFolder"))
fname, _ = QtGui.QFileDialog.getSaveFileName(None, "Save Hotkey file", fname, str("*" + ext))
if not fname:
return False
        # PySide QFileDialog bug on Linux: the extension is not added by default.
if not fname.endswith(ext):
fname += ext
GetAppStates().getSettings().setValue("hotkeyEditor/lastFolder", os.path.dirname(fname))
actRegistry = CppActions.ActionRegistry.GetActionRegistry()
jsonData = {}
for actName in actRegistry.getActionNameList():
shortcutList = []
for keySeq in actRegistry.getShortcuts(actName):
shortcutList.append(keySeq.toString(QtGui.QKeySequence.NativeText))
jsonData[actName] = shortcutList
with open(fname, 'w') as outfile:
json.dump(jsonData, outfile, ensure_ascii=False, indent=4)
return True
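# Illustrative usage sketch (comment only; assumes the hotkey editor owns a
# QtGui.QUndoStack and a hotkey table model, both placeholders here):
#   stack = QtGui.QUndoStack()
#   cmd = SetKeySequenceCommand(model, "File.Save", prevKeySeq,
#                               QtGui.QKeySequence("Ctrl+S"))
#   if cmd.successfullyDone:
#       stack.push(cmd)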
|
[
"json.dump",
"json.load",
"PySide.QtGui.QFileDialog.getOpenFileName",
"PySide.QtGui.QKeySequence",
"os.path.dirname",
"FabricEngine.FabricUI.Actions.ActionRegistry.GetActionRegistry",
"os.path.split"
] |
[((1835, 1913), 'PySide.QtGui.QFileDialog.getOpenFileName', 'QtGui.QFileDialog.getOpenFileName', (['None', '"""Open Hotkey file"""', 'lastDir', '"""*.json"""'], {}), "(None, 'Open Hotkey file', lastDir, '*.json')\n", (1868, 1913), False, 'from PySide import QtCore, QtGui\n'), ((4162, 4207), 'FabricEngine.FabricUI.Actions.ActionRegistry.GetActionRegistry', 'CppActions.ActionRegistry.GetActionRegistry', ([], {}), '()\n', (4205, 4207), True, 'from FabricEngine.FabricUI import Actions as CppActions\n'), ((2133, 2153), 'os.path.split', 'os.path.split', (['fname'], {}), '(fname)\n', (2146, 2153), False, 'import json, os\n'), ((2304, 2349), 'FabricEngine.FabricUI.Actions.ActionRegistry.GetActionRegistry', 'CppActions.ActionRegistry.GetActionRegistry', ([], {}), '()\n', (2347, 2349), True, 'from FabricEngine.FabricUI import Actions as CppActions\n'), ((2742, 2762), 'PySide.QtGui.QKeySequence', 'QtGui.QKeySequence', ([], {}), '()\n', (2760, 2762), False, 'from PySide import QtCore, QtGui\n'), ((4116, 4138), 'os.path.dirname', 'os.path.dirname', (['fname'], {}), '(fname)\n', (4131, 4138), False, 'import json, os\n'), ((4573, 4631), 'json.dump', 'json.dump', (['jsonData', 'outfile'], {'ensure_ascii': '(False)', 'indent': '(4)'}), '(jsonData, outfile, ensure_ascii=False, indent=4)\n', (4582, 4631), False, 'import json, os\n'), ((2073, 2090), 'json.load', 'json.load', (['infile'], {}), '(infile)\n', (2082, 2090), False, 'import json, os\n'), ((2817, 2852), 'PySide.QtGui.QKeySequence', 'QtGui.QKeySequence', (['shortcutList[0]'], {}), '(shortcutList[0])\n', (2835, 2852), False, 'from PySide import QtCore, QtGui\n')]
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core.decorators import Completer
from .util import get_storage_client
from ._validators import validate_client_parameters
from ._client_factory import cf_sa, cf_blob_container_mgmt
@Completer
def file_path_completer(cmd, prefix, namespace):
from azure.common import AzureMissingResourceHttpError
if not namespace.share_name:
return []
validate_client_parameters(cmd, namespace)
t_file_service = cmd.get_models('file#FileService')
client = get_storage_client(cmd.cli_ctx, t_file_service, namespace)
share_name = namespace.share_name
directory_name = prefix or ''
try:
items = list(client.list_directories_and_files(share_name, directory_name))
except AzureMissingResourceHttpError:
directory_name = directory_name.rsplit('/', 1)[0] if '/' in directory_name else ''
items = list(client.list_directories_and_files(share_name, directory_name))
path_format = '{}{}' if directory_name.endswith('/') or not directory_name else '{}/{}'
names = []
for i in items:
name = path_format.format(directory_name, i.name)
if not hasattr(i.properties, 'content_length'):
name = '{}/'.format(name)
names.append(name)
return sorted(names)
@Completer
def dir_path_completer(cmd, prefix, namespace):
from azure.common import AzureMissingResourceHttpError
if not namespace.share_name:
return []
validate_client_parameters(cmd, namespace)
t_file_service = cmd.get_models('file#FileService')
client = get_storage_client(cmd.cli_ctx, t_file_service, namespace)
share_name = namespace.share_name
directory_name = prefix or ''
try:
items = list(client.list_directories_and_files(share_name, directory_name))
except AzureMissingResourceHttpError:
directory_name = directory_name.rsplit('/', 1)[0] if '/' in directory_name else ''
items = list(client.list_directories_and_files(share_name, directory_name))
dir_list = [x for x in items if not hasattr(x.properties, 'content_length')]
path_format = '{}{}/' if directory_name.endswith('/') or not directory_name else '{}/{}/'
names = []
for d in dir_list:
name = path_format.format(directory_name, d.name)
names.append(name)
return sorted(names)
def get_storage_name_completion_list(service, func, parent=None):
@Completer
def completer(cmd, _, namespace):
validate_client_parameters(cmd, namespace)
client = get_storage_client(cmd.cli_ctx, service, namespace)
if parent:
parent_name = getattr(namespace, parent)
method = getattr(client, func)
items = [x.name for x in method(**{parent: parent_name})]
else:
items = [x.name for x in getattr(client, func)()]
return items
return completer
def get_storage_acl_name_completion_list(service, container_param, func):
@Completer
def completer(cmd, _, namespace):
validate_client_parameters(cmd, namespace)
client = get_storage_client(cmd.cli_ctx, service, namespace)
container_name = getattr(namespace, container_param)
return list(getattr(client, func)(container_name))
return completer
@Completer
def get_container_name_completions(cmd, _, namespace):
if namespace.account_name:
account_client = cf_sa(cmd.cli_ctx, None)
account = next((x for x in account_client.list() if x.name == namespace.account_name), None)
if account:
from msrestazure.tools import parse_resource_id
rg = parse_resource_id(account.id)['resource_group']
container_client = cf_blob_container_mgmt(cmd.cli_ctx, None)
return [container.name for container in container_client.list(rg, account.name).value]
return []
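# Illustrative registration sketch (comment only; `self` would be the command
# loader and the argument context name is a placeholder): completers are
# attached to CLI arguments roughly like this:
#   with self.argument_context('storage container') as c:
#       c.argument('container_name', completer=get_container_name_completions)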
|
[
"msrestazure.tools.parse_resource_id"
] |
[((3951, 3980), 'msrestazure.tools.parse_resource_id', 'parse_resource_id', (['account.id'], {}), '(account.id)\n', (3968, 3980), False, 'from msrestazure.tools import parse_resource_id\n')]
|
from unittest import TestCase
from coin_change import Solution
class TestCoinChange(TestCase):
def test_multiple_of_one_coin(self):
self.assertEqual(Solution().coinChange([1, 2, 5], 11), 3)
def test_zero(self):
self.assertEqual(Solution().coinChange([1, 2, 3], 0), 0)
def test_unreachable_value(self):
self.assertEqual(Solution().coinChange([2], 3), -1)
def test_reachable_value_that_encounters_unreachable_value(self):
self.assertEqual(Solution().coinChange([2], 4), 2)
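# Illustrative DP sketch of the method under test (one possible implementation;
# the real coin_change.Solution is not shown in this file):
#   def coinChange(self, coins, amount):
#       dp = [0] + [float('inf')] * amount
#       for a in range(1, amount + 1):
#           for c in coins:
#               if c <= a:
#                   dp[a] = min(dp[a], dp[a - c] + 1)
#       return -1 if dp[amount] == float('inf') else dp[amount]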
|
[
"coin_change.Solution"
] |
[((164, 174), 'coin_change.Solution', 'Solution', ([], {}), '()\n', (172, 174), False, 'from coin_change import Solution\n'), ((256, 266), 'coin_change.Solution', 'Solution', ([], {}), '()\n', (264, 266), False, 'from coin_change import Solution\n'), ((360, 370), 'coin_change.Solution', 'Solution', ([], {}), '()\n', (368, 370), False, 'from coin_change import Solution\n'), ((491, 501), 'coin_change.Solution', 'Solution', ([], {}), '()\n', (499, 501), False, 'from coin_change import Solution\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/api/label.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name="google/api/label.proto",
package="google.api",
syntax="proto3",
serialized_options=b"\n\016com.google.apiB\nLabelProtoP\001Z5google.golang.org/genproto/googleapis/api/label;label\370\001\001\242\002\004GAPI",
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x16google/api/label.proto\x12\ngoogle.api"\x9c\x01\n\x0fLabelDescriptor\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x39\n\nvalue_type\x18\x02 \x01(\x0e\x32%.google.api.LabelDescriptor.ValueType\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t",\n\tValueType\x12\n\n\x06STRING\x10\x00\x12\x08\n\x04\x42OOL\x10\x01\x12\t\n\x05INT64\x10\x02\x42_\n\x0e\x63om.google.apiB\nLabelProtoP\x01Z5google.golang.org/genproto/googleapis/api/label;label\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3',
)
_LABELDESCRIPTOR_VALUETYPE = _descriptor.EnumDescriptor(
name="ValueType",
full_name="google.api.LabelDescriptor.ValueType",
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name="STRING",
index=0,
number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
name="BOOL",
index=1,
number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.EnumValueDescriptor(
name="INT64",
index=2,
number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key,
),
],
containing_type=None,
serialized_options=None,
serialized_start=151,
serialized_end=195,
)
_sym_db.RegisterEnumDescriptor(_LABELDESCRIPTOR_VALUETYPE)
_LABELDESCRIPTOR = _descriptor.Descriptor(
name="LabelDescriptor",
full_name="google.api.LabelDescriptor",
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name="key",
full_name="google.api.LabelDescriptor.key",
index=0,
number=1,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="value_type",
full_name="google.api.LabelDescriptor.value_type",
index=1,
number=2,
type=14,
cpp_type=8,
label=1,
has_default_value=False,
default_value=0,
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
_descriptor.FieldDescriptor(
name="description",
full_name="google.api.LabelDescriptor.description",
index=2,
number=3,
type=9,
cpp_type=9,
label=1,
has_default_value=False,
default_value=b"".decode("utf-8"),
message_type=None,
enum_type=None,
containing_type=None,
is_extension=False,
extension_scope=None,
serialized_options=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
),
],
extensions=[],
nested_types=[],
enum_types=[_LABELDESCRIPTOR_VALUETYPE],
serialized_options=None,
is_extendable=False,
syntax="proto3",
extension_ranges=[],
oneofs=[],
serialized_start=39,
serialized_end=195,
)
_LABELDESCRIPTOR.fields_by_name["value_type"].enum_type = _LABELDESCRIPTOR_VALUETYPE
_LABELDESCRIPTOR_VALUETYPE.containing_type = _LABELDESCRIPTOR
DESCRIPTOR.message_types_by_name["LabelDescriptor"] = _LABELDESCRIPTOR
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
LabelDescriptor = _reflection.GeneratedProtocolMessageType(
"LabelDescriptor",
(_message.Message,),
{
"DESCRIPTOR": _LABELDESCRIPTOR,
"__module__": "google.api.label_pb2"
# @@protoc_insertion_point(class_scope:google.api.LabelDescriptor)
},
)
_sym_db.RegisterMessage(LabelDescriptor)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
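# Illustrative usage sketch (not part of the generated module; the field
# values below are placeholders):
if __name__ == "__main__":
    label = LabelDescriptor(key="env",
                            value_type=LabelDescriptor.STRING,
                            description="deployment environment")
    print(label)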
|
[
"google.protobuf.descriptor.FieldDescriptor",
"google.protobuf.descriptor.EnumValueDescriptor",
"google.protobuf.symbol_database.Default",
"google.protobuf.reflection.GeneratedProtocolMessageType",
"google.protobuf.descriptor.FileDescriptor"
] |
[((998, 1024), 'google.protobuf.symbol_database.Default', '_symbol_database.Default', ([], {}), '()\n', (1022, 1024), True, 'from google.protobuf import symbol_database as _symbol_database\n'), ((1040, 1820), 'google.protobuf.descriptor.FileDescriptor', '_descriptor.FileDescriptor', ([], {'name': '"""google/api/label.proto"""', 'package': '"""google.api"""', 'syntax': '"""proto3"""', 'serialized_options': "b'\\n\\x0ecom.google.apiB\\nLabelProtoP\\x01Z5google.golang.org/genproto/googleapis/api/label;label\\xf8\\x01\\x01\\xa2\\x02\\x04GAPI'", 'create_key': '_descriptor._internal_create_key', 'serialized_pb': 'b\'\\n\\x16google/api/label.proto\\x12\\ngoogle.api"\\x9c\\x01\\n\\x0fLabelDescriptor\\x12\\x0b\\n\\x03key\\x18\\x01 \\x01(\\t\\x129\\n\\nvalue_type\\x18\\x02 \\x01(\\x0e2%.google.api.LabelDescriptor.ValueType\\x12\\x13\\n\\x0bdescription\\x18\\x03 \\x01(\\t",\\n\\tValueType\\x12\\n\\n\\x06STRING\\x10\\x00\\x12\\x08\\n\\x04BOOL\\x10\\x01\\x12\\t\\n\\x05INT64\\x10\\x02B_\\n\\x0ecom.google.apiB\\nLabelProtoP\\x01Z5google.golang.org/genproto/googleapis/api/label;label\\xf8\\x01\\x01\\xa2\\x02\\x04GAPIb\\x06proto3\''}), '(name=\'google/api/label.proto\', package=\n \'google.api\', syntax=\'proto3\', serialized_options=\n b\'\\n\\x0ecom.google.apiB\\nLabelProtoP\\x01Z5google.golang.org/genproto/googleapis/api/label;label\\xf8\\x01\\x01\\xa2\\x02\\x04GAPI\'\n , create_key=_descriptor._internal_create_key, serialized_pb=\n b\'\\n\\x16google/api/label.proto\\x12\\ngoogle.api"\\x9c\\x01\\n\\x0fLabelDescriptor\\x12\\x0b\\n\\x03key\\x18\\x01 \\x01(\\t\\x129\\n\\nvalue_type\\x18\\x02 \\x01(\\x0e2%.google.api.LabelDescriptor.ValueType\\x12\\x13\\n\\x0bdescription\\x18\\x03 \\x01(\\t",\\n\\tValueType\\x12\\n\\n\\x06STRING\\x10\\x00\\x12\\x08\\n\\x04BOOL\\x10\\x01\\x12\\t\\n\\x05INT64\\x10\\x02B_\\n\\x0ecom.google.apiB\\nLabelProtoP\\x01Z5google.golang.org/genproto/googleapis/api/label;label\\xf8\\x01\\x01\\xa2\\x02\\x04GAPIb\\x06proto3\'\n )\n', (1066, 1820), True, 'from google.protobuf import descriptor as _descriptor\n'), ((5571, 5732), 'google.protobuf.reflection.GeneratedProtocolMessageType', '_reflection.GeneratedProtocolMessageType', (['"""LabelDescriptor"""', '(_message.Message,)', "{'DESCRIPTOR': _LABELDESCRIPTOR, '__module__': 'google.api.label_pb2'}"], {}), "('LabelDescriptor', (_message.\n Message,), {'DESCRIPTOR': _LABELDESCRIPTOR, '__module__':\n 'google.api.label_pb2'})\n", (5611, 5732), True, 'from google.protobuf import reflection as _reflection\n'), ((2089, 2244), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', ([], {'name': '"""STRING"""', 'index': '(0)', 'number': '(0)', 'serialized_options': 'None', 'type': 'None', 'create_key': '_descriptor._internal_create_key'}), "(name='STRING', index=0, number=0,\n serialized_options=None, type=None, create_key=_descriptor.\n _internal_create_key)\n", (2120, 2244), True, 'from google.protobuf import descriptor as _descriptor\n'), ((2328, 2481), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', ([], {'name': '"""BOOL"""', 'index': '(1)', 'number': '(1)', 'serialized_options': 'None', 'type': 'None', 'create_key': '_descriptor._internal_create_key'}), "(name='BOOL', index=1, number=1,\n serialized_options=None, type=None, create_key=_descriptor.\n _internal_create_key)\n", (2359, 2481), True, 'from google.protobuf import descriptor as _descriptor\n'), ((2565, 2719), 'google.protobuf.descriptor.EnumValueDescriptor', '_descriptor.EnumValueDescriptor', ([], {'name': 
'"""INT64"""', 'index': '(2)', 'number': '(2)', 'serialized_options': 'None', 'type': 'None', 'create_key': '_descriptor._internal_create_key'}), "(name='INT64', index=2, number=2,\n serialized_options=None, type=None, create_key=_descriptor.\n _internal_create_key)\n", (2596, 2719), True, 'from google.protobuf import descriptor as _descriptor\n'), ((3823, 4219), 'google.protobuf.descriptor.FieldDescriptor', '_descriptor.FieldDescriptor', ([], {'name': '"""value_type"""', 'full_name': '"""google.api.LabelDescriptor.value_type"""', 'index': '(1)', 'number': '(2)', 'type': '(14)', 'cpp_type': '(8)', 'label': '(1)', 'has_default_value': '(False)', 'default_value': '(0)', 'message_type': 'None', 'enum_type': 'None', 'containing_type': 'None', 'is_extension': '(False)', 'extension_scope': 'None', 'serialized_options': 'None', 'file': 'DESCRIPTOR', 'create_key': '_descriptor._internal_create_key'}), "(name='value_type', full_name=\n 'google.api.LabelDescriptor.value_type', index=1, number=2, type=14,\n cpp_type=8, label=1, has_default_value=False, default_value=0,\n message_type=None, enum_type=None, containing_type=None, is_extension=\n False, extension_scope=None, serialized_options=None, file=DESCRIPTOR,\n create_key=_descriptor._internal_create_key)\n", (3850, 4219), True, 'from google.protobuf import descriptor as _descriptor\n')]
|
#!/usr/bin/env python3
# *******************************************************
# Copyright (c) VMware, Inc. 2020-2021. All Rights Reserved.
# SPDX-License-Identifier: MIT
# *******************************************************
# *
# * DISCLAIMER. THIS PROGRAM IS PROVIDED TO YOU "AS IS" WITHOUT
# * WARRANTIES OR CONDITIONS OF ANY KIND, WHETHER ORAL OR WRITTEN,
# * EXPRESS OR IMPLIED. THE AUTHOR SPECIFICALLY DISCLAIMS ANY IMPLIED
# * WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY,
# * NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE.
"""Model and Query Classes for Platform Alerts and Workflows"""
import time
from cbc_sdk.errors import ApiError, TimeoutError
from cbc_sdk.platform import PlatformModel
from cbc_sdk.base import (BaseQuery,
UnrefreshableModel,
QueryBuilder,
QueryBuilderSupportMixin,
IterableQueryMixin,
CriteriaBuilderSupportMixin)
from cbc_sdk.endpoint_standard.base import EnrichedEvent
from cbc_sdk.platform.devices import DeviceSearchQuery
"""Alert Models"""
MAX_RESULTS_LIMIT = 10000
class BaseAlert(PlatformModel):
"""Represents a basic alert."""
urlobject = "/appservices/v6/orgs/{0}/alerts"
urlobject_single = "/appservices/v6/orgs/{0}/alerts/{1}"
primary_key = "id"
swagger_meta_file = "platform/models/base_alert.yaml"
def __init__(self, cb, model_unique_id, initial_data=None):
"""
Initialize the BaseAlert object.
Args:
cb (BaseAPI): Reference to API object used to communicate with the server.
model_unique_id (str): ID of the alert represented.
initial_data (dict): Initial data used to populate the alert.
"""
super(BaseAlert, self).__init__(cb, model_unique_id, initial_data)
self._workflow = Workflow(cb, initial_data.get("workflow", None) if initial_data else None)
if model_unique_id is not None and initial_data is None:
self._refresh()
@classmethod
def _query_implementation(cls, cb, **kwargs):
"""
Returns the appropriate query object for this alert type.
Args:
cb (BaseAPI): Reference to API object used to communicate with the server.
**kwargs (dict): Not used, retained for compatibility.
Returns:
BaseAlertSearchQuery: The query object for this alert type.
"""
return BaseAlertSearchQuery(cls, cb)
def _refresh(self):
"""
Rereads the alert data from the server.
Returns:
bool: True if refresh was successful, False if not.
"""
url = self.urlobject_single.format(self._cb.credentials.org_key, self._model_unique_id)
resp = self._cb.get_object(url)
self._info = resp
self._workflow = Workflow(self._cb, resp.get("workflow", None))
self._last_refresh_time = time.time()
return True
@property
def workflow_(self):
"""
Returns the workflow associated with this alert.
Returns:
Workflow: The workflow associated with this alert.
"""
return self._workflow
def _update_workflow_status(self, state, remediation, comment):
"""
Updates the workflow status of this alert.
Args:
state (str): The state to set for this alert, either "OPEN" or "DISMISSED".
remediation (str): The remediation status to set for the alert.
comment (str): The comment to set for the alert.
"""
request = {"state": state}
if remediation:
request["remediation_state"] = remediation
if comment:
request["comment"] = comment
url = self.urlobject_single.format(self._cb.credentials.org_key,
self._model_unique_id) + "/workflow"
resp = self._cb.post_object(url, request)
self._workflow = Workflow(self._cb, resp.json())
self._last_refresh_time = time.time()
def dismiss(self, remediation=None, comment=None):
"""
Dismisses this alert.
Args:
remediation (str): The remediation status to set for the alert.
comment (str): The comment to set for the alert.
"""
self._update_workflow_status("DISMISSED", remediation, comment)
def update(self, remediation=None, comment=None):
"""
Updates this alert while leaving it open.
Args:
remediation (str): The remediation status to set for the alert.
comment (str): The comment to set for the alert.
"""
self._update_workflow_status("OPEN", remediation, comment)
def _update_threat_workflow_status(self, state, remediation, comment):
"""
Updates the workflow status of all alerts with the same threat ID, past or future.
Args:
state (str): The state to set for this alert, either "OPEN" or "DISMISSED".
remediation (str): The remediation status to set for the alert.
comment (str): The comment to set for the alert.
"""
request = {"state": state}
if remediation:
request["remediation_state"] = remediation
if comment:
request["comment"] = comment
url = "/appservices/v6/orgs/{0}/threat/{1}/workflow".format(self._cb.credentials.org_key,
self.threat_id)
resp = self._cb.post_object(url, request)
return Workflow(self._cb, resp.json())
def dismiss_threat(self, remediation=None, comment=None):
"""
Dismisses all alerts with the same threat ID, past or future.
Args:
remediation (str): The remediation status to set for the alert.
comment (str): The comment to set for the alert.
"""
return self._update_threat_workflow_status("DISMISSED", remediation, comment)
def update_threat(self, remediation=None, comment=None):
"""
Updates the status of all alerts with the same threat ID, past or future, while leaving them in OPEN state.
Args:
remediation (str): The remediation status to set for the alert.
comment (str): The comment to set for the alert.
"""
return self._update_threat_workflow_status("OPEN", remediation, comment)
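    # Illustrative usage sketch (comment only; the credential profile name is a
    # placeholder):
    #   from cbc_sdk import CBCloudAPI
    #   api = CBCloudAPI(profile="default")
    #   for alert in api.select(BaseAlert).set_minimum_severity(7):
    #       alert.dismiss(remediation="NONE", comment="triaged automatically")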
class WatchlistAlert(BaseAlert):
"""Represents watch list alerts."""
urlobject = "/appservices/v6/orgs/{0}/alerts/watchlist"
@classmethod
def _query_implementation(cls, cb, **kwargs):
"""
Returns the appropriate query object for this alert type.
Args:
cb (BaseAPI): Reference to API object used to communicate with the server.
**kwargs (dict): Not used, retained for compatibility.
Returns:
WatchlistAlertSearchQuery: The query object for this alert type.
"""
return WatchlistAlertSearchQuery(cls, cb)
class CBAnalyticsAlert(BaseAlert):
"""Represents CB Analytics alerts."""
urlobject = "/appservices/v6/orgs/{0}/alerts/cbanalytics"
@classmethod
def _query_implementation(cls, cb, **kwargs):
"""
Returns the appropriate query object for this alert type.
Args:
cb (BaseAPI): Reference to API object used to communicate with the server.
**kwargs (dict): Not used, retained for compatibility.
Returns:
CBAnalyticsAlertSearchQuery: The query object for this alert type.
"""
return CBAnalyticsAlertSearchQuery(cls, cb)
def get_events(self, timeout=0, async_mode=False):
"""Requests enriched events detailed results.
Args:
timeout (int): Event details request timeout in milliseconds.
async_mode (bool): True to request details in an asynchronous manner.
Returns:
list: EnrichedEvents matching the legacy_alert_id
Note:
- When using asynchronous mode, this method returns a python future.
You can call result() on the future object to wait for completion and get the results.
"""
self._details_timeout = timeout
alert_id = self._info.get('legacy_alert_id')
if not alert_id:
raise ApiError("Trying to get event details on an invalid alert_id {}".format(alert_id))
if async_mode:
return self._cb._async_submit(lambda arg, kwarg: self._get_events_detailed_results())
return self._get_events_detailed_results()
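    # Illustrative call pattern (comment only): synchronous use returns the
    # event list directly; with async_mode=True a future is returned instead.
    #   events = alert.get_events()
    #   future = alert.get_events(async_mode=True)
    #   events = future.result()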
    def _get_events_detailed_results(self):
        """Actual search details implementation.

        Flow:
        1. Start the job by providing alert_id
        2. Check the status of the job - wait until "contacted" and "completed" are equal
        3. Retrieve the results - num_found may be 0, because enriched events are only kept
           for a specific period, so return an empty list in that case.
        """
url = "/api/investigate/v2/orgs/{}/enriched_events/detail_jobs".format(self._cb.credentials.org_key)
query_start = self._cb.post_object(url, body={"alert_id": self._info.get('legacy_alert_id')})
job_id = query_start.json().get("job_id")
timed_out = False
submit_time = time.time() * 1000
while True:
status_url = "/api/investigate/v2/orgs/{}/enriched_events/detail_jobs/{}".format(
self._cb.credentials.org_key,
job_id,
)
result = self._cb.get_object(status_url)
searchers_contacted = result.get("contacted", 0)
searchers_completed = result.get("completed", 0)
if searchers_completed == searchers_contacted:
break
if searchers_contacted == 0:
time.sleep(.5)
continue
if searchers_completed < searchers_contacted:
if self._details_timeout != 0 and (time.time() * 1000) - submit_time > self._details_timeout:
timed_out = True
break
time.sleep(.5)
if timed_out:
raise TimeoutError(message="user-specified timeout exceeded while waiting for results")
still_fetching = True
result_url = "/api/investigate/v2/orgs/{}/enriched_events/detail_jobs/{}/results".format(
self._cb.credentials.org_key,
job_id
)
query_parameters = {}
while still_fetching:
result = self._cb.get_object(result_url, query_parameters=query_parameters)
available_results = result.get('num_available', 0)
found_results = result.get('num_found', 0)
# if found is 0, then no enriched events
if found_results == 0:
return []
if available_results != 0:
results = result.get('results', [])
return [EnrichedEvent(self._cb, initial_data=item) for item in results]
class DeviceControlAlert(BaseAlert):
"""Represents Device Control alerts."""
urlobject = "/appservices/v6/orgs/{0}/alerts/devicecontrol"
@classmethod
def _query_implementation(cls, cb, **kwargs):
"""
Returns the appropriate query object for this alert type.
Args:
cb (BaseAPI): Reference to API object used to communicate with the server.
**kwargs (dict): Not used, retained for compatibility.
Returns:
DeviceControlAlertSearchQuery: The query object for this alert type.
"""
return DeviceControlAlertSearchQuery(cls, cb)
class Workflow(UnrefreshableModel):
"""Represents the workflow associated with alerts."""
swagger_meta_file = "platform/models/workflow.yaml"
def __init__(self, cb, initial_data=None):
"""
Initialize the Workflow object.
Args:
cb (BaseAPI): Reference to API object used to communicate with the server.
initial_data (dict): Initial data used to populate the workflow.
"""
super(Workflow, self).__init__(cb, model_unique_id=None, initial_data=initial_data)
class WorkflowStatus(PlatformModel):
"""Represents the current workflow status of a request."""
urlobject_single = "/appservices/v6/orgs/{0}/workflow/status/{1}"
primary_key = "id"
swagger_meta_file = "platform/models/workflow_status.yaml"
def __init__(self, cb, model_unique_id, initial_data=None):
"""
Initialize the BaseAlert object.
Args:
cb (BaseAPI): Reference to API object used to communicate with the server.
model_unique_id (str): ID of the request being processed.
initial_data (dict): Initial data used to populate the status.
"""
super(WorkflowStatus, self).__init__(cb, model_unique_id, initial_data)
self._request_id = model_unique_id
self._workflow = None
if model_unique_id is not None:
self._refresh()
def _refresh(self):
"""
Rereads the request status from the server.
Returns:
bool: True if refresh was successful, False if not.
"""
url = self.urlobject_single.format(self._cb.credentials.org_key, self._request_id)
resp = self._cb.get_object(url)
self._info = resp
self._workflow = Workflow(self._cb, resp.get("workflow", None))
self._last_refresh_time = time.time()
return True
@property
def id_(self):
"""
Returns the request ID of the associated request.
Returns:
str: The request ID of the associated request.
"""
return self._request_id
@property
def workflow_(self):
"""
Returns the current workflow associated with this request.
Returns:
Workflow: The current workflow associated with this request.
"""
return self._workflow
@property
def queued(self):
"""
Returns whether this request has been queued.
Returns:
bool: True if the request is in "queued" state, False if not.
"""
self._refresh()
return self._info.get("status", "") == "QUEUED"
@property
def in_progress(self):
"""
Returns whether this request is currently in progress.
Returns:
bool: True if the request is in "in progress" state, False if not.
"""
self._refresh()
return self._info.get("status", "") == "IN_PROGRESS"
@property
def finished(self):
"""
Returns whether this request has been completed.
Returns:
bool: True if the request is in "finished" state, False if not.
"""
self._refresh()
return self._info.get("status", "") == "FINISHED"
"""Alert Queries"""
class BaseAlertSearchQuery(BaseQuery, QueryBuilderSupportMixin, IterableQueryMixin, CriteriaBuilderSupportMixin):
"""Represents a query that is used to locate BaseAlert objects."""
VALID_CATEGORIES = ["THREAT", "MONITORED"]
VALID_REPUTATIONS = ["KNOWN_MALWARE", "SUSPECT_MALWARE", "PUP", "NOT_LISTED", "ADAPTIVE_WHITE_LIST",
"COMMON_WHITE_LIST", "TRUSTED_WHITE_LIST", "COMPANY_BLACK_LIST"]
VALID_ALERT_TYPES = ["CB_ANALYTICS", "DEVICE_CONTROL", "WATCHLIST"]
VALID_WORKFLOW_VALS = ["OPEN", "DISMISSED"]
VALID_FACET_FIELDS = ["ALERT_TYPE", "CATEGORY", "REPUTATION", "WORKFLOW", "TAG", "POLICY_ID",
"POLICY_NAME", "DEVICE_ID", "DEVICE_NAME", "APPLICATION_HASH",
"APPLICATION_NAME", "STATUS", "RUN_STATE", "POLICY_APPLIED_STATE",
"POLICY_APPLIED", "SENSOR_ACTION"]
def __init__(self, doc_class, cb):
"""
Initialize the BaseAlertSearchQuery.
Args:
doc_class (class): The model class that will be returned by this query.
cb (BaseAPI): Reference to API object used to communicate with the server.
"""
self._doc_class = doc_class
self._cb = cb
self._count_valid = False
super(BaseAlertSearchQuery, self).__init__()
self._query_builder = QueryBuilder()
self._criteria = {}
self._time_filters = {}
self._sortcriteria = {}
self._bulkupdate_url = "/appservices/v6/orgs/{0}/alerts/workflow/_criteria"
self._count_valid = False
self._total_results = 0
def set_categories(self, categories):
"""
Restricts the alerts that this query is performed on to the specified categories.
Args:
            categories (list): List of categories to be restricted to. Valid categories are
                "THREAT" and "MONITORED".
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all((c in BaseAlertSearchQuery.VALID_CATEGORIES) for c in categories):
raise ApiError("One or more invalid category values")
self._update_criteria("category", categories)
return self
def set_create_time(self, *args, **kwargs):
"""
Restricts the alerts that this query is performed on to the specified creation time.
The time may either be specified as a start and end point or as a range.
Args:
*args (list): Not used.
**kwargs (dict): Used to specify start= for start time, end= for end time, and range= for range.
Returns:
BaseAlertSearchQuery: This instance.
"""
if kwargs.get("start", None) and kwargs.get("end", None):
if kwargs.get("range", None):
raise ApiError("cannot specify range= in addition to start= and end=")
stime = kwargs["start"]
if not isinstance(stime, str):
stime = stime.isoformat()
etime = kwargs["end"]
if not isinstance(etime, str):
etime = etime.isoformat()
self._time_filters["create_time"] = {"start": stime, "end": etime}
elif kwargs.get("range", None):
if kwargs.get("start", None) or kwargs.get("end", None):
raise ApiError("cannot specify start= or end= in addition to range=")
self._time_filters["create_time"] = {"range": kwargs["range"]}
else:
raise ApiError("must specify either start= and end= or range=")
return self
def set_device_ids(self, device_ids):
"""
Restricts the alerts that this query is performed on to the specified device IDs.
Args:
device_ids (list): List of integer device IDs.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(device_id, int) for device_id in device_ids):
raise ApiError("One or more invalid device IDs")
self._update_criteria("device_id", device_ids)
return self
def set_device_names(self, device_names):
"""
Restricts the alerts that this query is performed on to the specified device names.
Args:
device_names (list): List of string device names.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in device_names):
raise ApiError("One or more invalid device names")
self._update_criteria("device_name", device_names)
return self
def set_device_os(self, device_os):
"""
Restricts the alerts that this query is performed on to the specified device operating systems.
Args:
device_os (list): List of string operating systems. Valid values are "WINDOWS", "ANDROID",
"MAC", "IOS", "LINUX", and "OTHER."
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all((osval in DeviceSearchQuery.VALID_OS) for osval in device_os):
raise ApiError("One or more invalid operating systems")
self._update_criteria("device_os", device_os)
return self
def set_device_os_versions(self, device_os_versions):
"""
Restricts the alerts that this query is performed on to the specified device operating system versions.
Args:
device_os_versions (list): List of string operating system versions.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in device_os_versions):
raise ApiError("One or more invalid device OS versions")
self._update_criteria("device_os_version", device_os_versions)
return self
def set_device_username(self, users):
"""
Restricts the alerts that this query is performed on to the specified user names.
Args:
users (list): List of string user names.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(u, str) for u in users):
raise ApiError("One or more invalid user names")
self._update_criteria("device_username", users)
return self
def set_group_results(self, do_group):
"""
Specifies whether or not to group the results of the query.
Args:
do_group (bool): True to group the results, False to not do so.
Returns:
BaseAlertSearchQuery: This instance.
"""
self._criteria["group_results"] = True if do_group else False
return self
def set_alert_ids(self, alert_ids):
"""
Restricts the alerts that this query is performed on to the specified alert IDs.
Args:
alert_ids (list): List of string alert IDs.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(v, str) for v in alert_ids):
raise ApiError("One or more invalid alert ID values")
self._update_criteria("id", alert_ids)
return self
def set_legacy_alert_ids(self, alert_ids):
"""
Restricts the alerts that this query is performed on to the specified legacy alert IDs.
Args:
alert_ids (list): List of string legacy alert IDs.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(v, str) for v in alert_ids):
raise ApiError("One or more invalid alert ID values")
self._update_criteria("legacy_alert_id", alert_ids)
return self
def set_minimum_severity(self, severity):
"""
Restricts the alerts that this query is performed on to the specified minimum severity level.
Args:
severity (int): The minimum severity level for alerts.
Returns:
BaseAlertSearchQuery: This instance.
"""
self._criteria["minimum_severity"] = severity
return self
def set_policy_ids(self, policy_ids):
"""
Restricts the alerts that this query is performed on to the specified policy IDs.
Args:
policy_ids (list): List of integer policy IDs.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(policy_id, int) for policy_id in policy_ids):
raise ApiError("One or more invalid policy IDs")
self._update_criteria("policy_id", policy_ids)
return self
def set_policy_names(self, policy_names):
"""
Restricts the alerts that this query is performed on to the specified policy names.
Args:
policy_names (list): List of string policy names.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in policy_names):
raise ApiError("One or more invalid policy names")
self._update_criteria("policy_name", policy_names)
return self
def set_process_names(self, process_names):
"""
Restricts the alerts that this query is performed on to the specified process names.
Args:
process_names (list): List of string process names.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in process_names):
raise ApiError("One or more invalid process names")
self._update_criteria("process_name", process_names)
return self
def set_process_sha256(self, shas):
"""
Restricts the alerts that this query is performed on to the specified process SHA-256 hash values.
Args:
shas (list): List of string process SHA-256 hash values.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in shas):
raise ApiError("One or more invalid SHA256 values")
self._update_criteria("process_sha256", shas)
return self
def set_reputations(self, reps):
"""
Restricts the alerts that this query is performed on to the specified reputation values.
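        Example:
            >>> # illustrative; values come from the list documented below
            >>> query = cb.select(BaseAlert).set_reputations(["KNOWN_MALWARE", "PUP"])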
Args:
reps (list): List of string reputation values. Valid values are "KNOWN_MALWARE", "SUSPECT_MALWARE",
"PUP", "NOT_LISTED", "ADAPTIVE_WHITE_LIST", "COMMON_WHITE_LIST", "TRUSTED_WHITE_LIST",
and "COMPANY_BLACK_LIST".
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all((r in BaseAlertSearchQuery.VALID_REPUTATIONS) for r in reps):
raise ApiError("One or more invalid reputation values")
self._update_criteria("reputation", reps)
return self
def set_tags(self, tags):
"""
Restricts the alerts that this query is performed on to the specified tag values.
Args:
tags (list): List of string tag values.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(tag, str) for tag in tags):
raise ApiError("One or more invalid tags")
self._update_criteria("tag", tags)
return self
def set_target_priorities(self, priorities):
"""
Restricts the alerts that this query is performed on to the specified target priority values.
Args:
priorities (list): List of string target priority values. Valid values are "LOW", "MEDIUM",
"HIGH", and "MISSION_CRITICAL".
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all((prio in DeviceSearchQuery.VALID_PRIORITIES) for prio in priorities):
raise ApiError("One or more invalid priority values")
self._update_criteria("target_value", priorities)
return self
def set_threat_ids(self, threats):
"""
Restricts the alerts that this query is performed on to the specified threat ID values.
Args:
threats (list): List of string threat ID values.
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all(isinstance(t, str) for t in threats):
raise ApiError("One or more invalid threat ID values")
self._update_criteria("threat_id", threats)
return self
def set_time_range(self, key, **kwargs):
"""
Restricts the alerts that this query is performed on to the specified time range.
The time may either be specified as a start and end point or as a range.
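        Example:
            >>> # illustrative; range format assumed to be CBC's relative form, e.g. "-3d"
            >>> query = cb.select(BaseAlert).set_time_range("create_time", range="-3d")
            >>> query = cb.select(BaseAlert).set_time_range("create_time", start="2020-10-20T00:00:00Z", end="2020-10-30T00:00:00Z")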
Args:
            key (str): The key to use for criteria, one of create_time,
                first_event_time, last_event_time, or last_update_time.
**kwargs (dict): Used to specify start= for start time, end= for end time, and range= for range.
Returns:
BaseAlertSearchQuery: This instance.
"""
if key not in ["create_time", "first_event_time", "last_event_time", "last_update_time"]:
raise ApiError("key must be one of create_time, first_event_time, last_event_time, or last_update_time")
if kwargs.get("start", None) and kwargs.get("end", None):
if kwargs.get("range", None):
raise ApiError("cannot specify range= in addition to start= and end=")
stime = kwargs["start"]
if not isinstance(stime, str):
stime = stime.isoformat()
etime = kwargs["end"]
if not isinstance(etime, str):
etime = etime.isoformat()
self._time_filters[key] = {"start": stime, "end": etime}
elif kwargs.get("range", None):
if kwargs.get("start", None) or kwargs.get("end", None):
raise ApiError("cannot specify start= or end= in addition to range=")
self._time_filters[key] = {"range": kwargs["range"]}
else:
raise ApiError("must specify either start= and end= or range=")
return self
def set_types(self, alerttypes):
"""
Restricts the alerts that this query is performed on to the specified alert type values.
Args:
            alerttypes (list): List of string alert type values. Valid values are "CB_ANALYTICS"
                and "WATCHLIST".
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all((t in BaseAlertSearchQuery.VALID_ALERT_TYPES) for t in alerttypes):
raise ApiError("One or more invalid alert type values")
self._update_criteria("type", alerttypes)
return self
def set_workflows(self, workflow_vals):
"""
Restricts the alerts that this query is performed on to the specified workflow status values.
Args:
            workflow_vals (list): List of string workflow status values. Valid values are "OPEN" and "DISMISSED".
Returns:
BaseAlertSearchQuery: This instance.
"""
if not all((t in BaseAlertSearchQuery.VALID_WORKFLOW_VALS) for t in workflow_vals):
raise ApiError("One or more invalid workflow status values")
self._update_criteria("workflow", workflow_vals)
return self
def _build_criteria(self):
"""
Builds the criteria object for use in a query.
Returns:
dict: The criteria object.
"""
mycrit = self._criteria
if self._time_filters:
mycrit.update(self._time_filters)
return mycrit
def sort_by(self, key, direction="ASC"):
"""
Sets the sorting behavior on a query's results.
Example:
>>> cb.select(BaseAlert).sort_by("name")
Args:
key (str): The key in the schema to sort by.
direction (str): The sort order, either "ASC" or "DESC".
Returns:
BaseAlertSearchQuery: This instance.
"""
if direction not in DeviceSearchQuery.VALID_DIRECTIONS:
raise ApiError("invalid sort direction specified")
self._sortcriteria = {"field": key, "order": direction}
return self
def _build_request(self, from_row, max_rows, add_sort=True):
"""
Creates the request body for an API call.
Args:
from_row (int): The row to start the query at.
max_rows (int): The maximum number of rows to be returned.
add_sort (bool): If True(default), the sort criteria will be added as part of the request.
Returns:
dict: The complete request body.
"""
request = {"criteria": self._build_criteria()}
request["query"] = self._query_builder._collapse()
# Fetch 100 rows per page (instead of 10 by default) for better performance
request["rows"] = 100
if from_row > 0:
request["start"] = from_row
if max_rows >= 0:
request["rows"] = max_rows
if add_sort and self._sortcriteria != {}:
request["sort"] = [self._sortcriteria]
return request
def _build_url(self, tail_end):
"""
Creates the URL to be used for an API call.
Args:
tail_end (str): String to be appended to the end of the generated URL.
Returns:
str: The complete URL.
"""
url = self._doc_class.urlobject.format(self._cb.credentials.org_key) + tail_end
return url
def _count(self):
"""
Returns the number of results from the run of this query.
Returns:
int: The number of results from the run of this query.
"""
if self._count_valid:
return self._total_results
url = self._build_url("/_search")
request = self._build_request(0, -1)
resp = self._cb.post_object(url, body=request)
result = resp.json()
self._total_results = result["num_found"]
self._count_valid = True
return self._total_results
def _perform_query(self, from_row=0, max_rows=-1):
"""
Performs the query and returns the results of the query in an iterable fashion.
Args:
from_row (int): The row to start the query at (default 0).
max_rows (int): The maximum number of rows to be returned (default -1, meaning "all").
Returns:
Iterable: The iterated query.
"""
url = self._build_url("/_search")
current = from_row
numrows = 0
still_querying = True
while still_querying:
request = self._build_request(current, max_rows)
resp = self._cb.post_object(url, body=request)
result = resp.json()
self._total_results = result["num_found"]
            # Prevent 500 Internal Server Error from retrieving beyond MAX_RESULTS_LIMIT
if self._total_results > MAX_RESULTS_LIMIT:
self._total_results = MAX_RESULTS_LIMIT
self._count_valid = True
results = result.get("results", [])
for item in results:
yield self._doc_class(self._cb, item["id"], item)
current += 1
numrows += 1
if max_rows > 0 and numrows == max_rows:
still_querying = False
break
from_row = current
if current >= self._total_results:
still_querying = False
break
def facets(self, fieldlist, max_rows=0):
"""
        Return information about the facets for this alert search, using the defined criteria.
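        Example:
            >>> # illustrative; field names must be among the valid names listed below
            >>> facet_data = cb.select(BaseAlert).facets(["REPUTATION", "POLICY_NAME"])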
Args:
fieldlist (list): List of facet field names. Valid names are "ALERT_TYPE", "CATEGORY", "REPUTATION",
"WORKFLOW", "TAG", "POLICY_ID", "POLICY_NAME", "DEVICE_ID", "DEVICE_NAME",
"APPLICATION_HASH", "APPLICATION_NAME", "STATUS", "RUN_STATE", "POLICY_APPLIED_STATE",
"POLICY_APPLIED", and "SENSOR_ACTION".
max_rows (int): The maximum number of rows to return. 0 means return all rows.
Returns:
list: A list of facet information specified as dicts.
"""
if not all((field in BaseAlertSearchQuery.VALID_FACET_FIELDS) for field in fieldlist):
raise ApiError("One or more invalid term field names")
request = self._build_request(0, -1, False)
request["terms"] = {"fields": fieldlist, "rows": max_rows}
url = self._build_url("/_facet")
resp = self._cb.post_object(url, body=request)
result = resp.json()
return result.get("results", [])
def _update_status(self, status, remediation, comment):
"""
Updates the status of all alerts matching the given query.
Args:
status (str): The status to put the alerts into, either "OPEN" or "DISMISSED".
remediation (str): The remediation state to set for all alerts.
comment (str): The comment to set for all alerts.
Returns:
str: The request ID, which may be used to select a WorkflowStatus object.
"""
request = {"state": status, "criteria": self._build_criteria(), "query": self._query_builder._collapse()}
if remediation is not None:
request["remediation_state"] = remediation
if comment is not None:
request["comment"] = comment
resp = self._cb.post_object(self._bulkupdate_url.format(self._cb.credentials.org_key), body=request)
output = resp.json()
return output["request_id"]
def update(self, remediation=None, comment=None):
"""
Update all alerts matching the given query. The alerts will be left in an OPEN state after this request.
Args:
remediation (str): The remediation state to set for all alerts.
comment (str): The comment to set for all alerts.
Returns:
str: The request ID, which may be used to select a WorkflowStatus object.
"""
return self._update_status("OPEN", remediation, comment)
def dismiss(self, remediation=None, comment=None):
"""
Dismiss all alerts matching the given query. The alerts will be left in a DISMISSED state after this request.
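        Example:
            >>> # illustrative; remediation and comment strings are free-form
            >>> request_id = cb.select(BaseAlert).set_device_names(["EXAMPLE-HOST"]).dismiss("FIXED", "triaged")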
Args:
remediation (str): The remediation state to set for all alerts.
comment (str): The comment to set for all alerts.
Returns:
str: The request ID, which may be used to select a WorkflowStatus object.
"""
return self._update_status("DISMISSED", remediation, comment)
class WatchlistAlertSearchQuery(BaseAlertSearchQuery):
"""Represents a query that is used to locate WatchlistAlert objects."""
def __init__(self, doc_class, cb):
"""
Initialize the WatchlistAlertSearchQuery.
Args:
doc_class (class): The model class that will be returned by this query.
cb (BaseAPI): Reference to API object used to communicate with the server.
"""
super().__init__(doc_class, cb)
self._bulkupdate_url = "/appservices/v6/orgs/{0}/alerts/watchlist/workflow/_criteria"
def set_watchlist_ids(self, ids):
"""
Restricts the alerts that this query is performed on to the specified watchlist ID values.
Args:
ids (list): List of string watchlist ID values.
Returns:
WatchlistAlertSearchQuery: This instance.
"""
if not all(isinstance(t, str) for t in ids):
raise ApiError("One or more invalid watchlist IDs")
self._update_criteria("watchlist_id", ids)
return self
def set_watchlist_names(self, names):
"""
Restricts the alerts that this query is performed on to the specified watchlist name values.
Args:
names (list): List of string watchlist name values.
Returns:
WatchlistAlertSearchQuery: This instance.
"""
if not all(isinstance(name, str) for name in names):
raise ApiError("One or more invalid watchlist names")
self._update_criteria("watchlist_name", names)
return self
class CBAnalyticsAlertSearchQuery(BaseAlertSearchQuery):
"""Represents a query that is used to locate CBAnalyticsAlert objects."""
VALID_THREAT_CATEGORIES = ["UNKNOWN", "NON_MALWARE", "NEW_MALWARE", "KNOWN_MALWARE", "RISKY_PROGRAM"]
VALID_LOCATIONS = ["ONSITE", "OFFSITE", "UNKNOWN"]
VALID_KILL_CHAIN_STATUSES = ["RECONNAISSANCE", "WEAPONIZE", "DELIVER_EXPLOIT", "INSTALL_RUN",
"COMMAND_AND_CONTROL", "EXECUTE_GOAL", "BREACH"]
VALID_POLICY_APPLIED = ["APPLIED", "NOT_APPLIED"]
VALID_RUN_STATES = ["DID_NOT_RUN", "RAN", "UNKNOWN"]
VALID_SENSOR_ACTIONS = ["POLICY_NOT_APPLIED", "ALLOW", "ALLOW_AND_LOG", "TERMINATE", "DENY"]
VALID_THREAT_CAUSE_VECTORS = ["EMAIL", "WEB", "GENERIC_SERVER", "GENERIC_CLIENT", "REMOTE_DRIVE",
"REMOVABLE_MEDIA", "UNKNOWN", "APP_STORE", "THIRD_PARTY"]
def __init__(self, doc_class, cb):
"""
Initialize the CBAnalyticsAlertSearchQuery.
Args:
doc_class (class): The model class that will be returned by this query.
cb (BaseAPI): Reference to API object used to communicate with the server.
"""
super().__init__(doc_class, cb)
self._bulkupdate_url = "/appservices/v6/orgs/{0}/alerts/cbanalytics/workflow/_criteria"
def set_blocked_threat_categories(self, categories):
"""
Restricts the alerts that this query is performed on to the specified threat categories that were blocked.
Args:
categories (list): List of threat categories to look for. Valid values are "UNKNOWN",
"NON_MALWARE", "NEW_MALWARE", "KNOWN_MALWARE", and "RISKY_PROGRAM".
Returns:
CBAnalyticsAlertSearchQuery: This instance.
"""
if not all((category in CBAnalyticsAlertSearchQuery.VALID_THREAT_CATEGORIES)
for category in categories):
raise ApiError("One or more invalid threat categories")
self._update_criteria("blocked_threat_category", categories)
return self
def set_device_locations(self, locations):
"""
Restricts the alerts that this query is performed on to the specified device locations.
Args:
locations (list): List of device locations to look for. Valid values are "ONSITE", "OFFSITE",
and "UNKNOWN".
Returns:
CBAnalyticsAlertSearchQuery: This instance.
"""
if not all((location in CBAnalyticsAlertSearchQuery.VALID_LOCATIONS)
for location in locations):
raise ApiError("One or more invalid device locations")
self._update_criteria("device_location", locations)
return self
def set_kill_chain_statuses(self, statuses):
"""
Restricts the alerts that this query is performed on to the specified kill chain statuses.
Args:
statuses (list): List of kill chain statuses to look for. Valid values are "RECONNAISSANCE",
"WEAPONIZE", "DELIVER_EXPLOIT", "INSTALL_RUN","COMMAND_AND_CONTROL", "EXECUTE_GOAL",
and "BREACH".
Returns:
CBAnalyticsAlertSearchQuery: This instance.
"""
if not all((status in CBAnalyticsAlertSearchQuery.VALID_KILL_CHAIN_STATUSES)
for status in statuses):
raise ApiError("One or more invalid kill chain status values")
self._update_criteria("kill_chain_status", statuses)
return self
def set_not_blocked_threat_categories(self, categories):
"""
Restricts the alerts that this query is performed on to the specified threat categories that were NOT blocked.
Args:
categories (list): List of threat categories to look for. Valid values are "UNKNOWN",
"NON_MALWARE", "NEW_MALWARE", "KNOWN_MALWARE", and "RISKY_PROGRAM".
Returns:
CBAnalyticsAlertSearchQuery: This instance.
"""
if not all((category in CBAnalyticsAlertSearchQuery.VALID_THREAT_CATEGORIES)
for category in categories):
raise ApiError("One or more invalid threat categories")
self._update_criteria("not_blocked_threat_category", categories)
return self
def set_policy_applied(self, applied_statuses):
"""
Restricts the alerts that this query is performed on to the specified policy status values.
Args:
applied_statuses (list): List of status values to look for. Valid values are "APPLIED" and "NOT_APPLIED".
Returns:
CBAnalyticsAlertSearchQuery: This instance.
"""
if not all((s in CBAnalyticsAlertSearchQuery.VALID_POLICY_APPLIED)
for s in applied_statuses):
raise ApiError("One or more invalid policy-applied values")
self._update_criteria("policy_applied", applied_statuses)
return self
def set_reason_code(self, reason):
"""
Restricts the alerts that this query is performed on to the specified reason codes (enum values).
Args:
reason (list): List of string reason codes to look for.
Returns:
CBAnalyticsAlertSearchQuery: This instance.
"""
if not all(isinstance(t, str) for t in reason):
raise ApiError("One or more invalid reason code values")
self._update_criteria("reason_code", reason)
return self
def set_run_states(self, states):
"""
Restricts the alerts that this query is performed on to the specified run states.
Args:
states (list): List of run states to look for. Valid values are "DID_NOT_RUN", "RAN", and "UNKNOWN".
Returns:
CBAnalyticsAlertSearchQuery: This instance.
"""
if not all((s in CBAnalyticsAlertSearchQuery.VALID_RUN_STATES)
for s in states):
raise ApiError("One or more invalid run states")
self._update_criteria("run_state", states)
return self
def set_sensor_actions(self, actions):
"""
Restricts the alerts that this query is performed on to the specified sensor actions.
Args:
actions (list): List of sensor actions to look for. Valid values are "POLICY_NOT_APPLIED", "ALLOW",
"ALLOW_AND_LOG", "TERMINATE", and "DENY".
Returns:
CBAnalyticsAlertSearchQuery: This instance.
"""
if not all((action in CBAnalyticsAlertSearchQuery.VALID_SENSOR_ACTIONS)
for action in actions):
raise ApiError("One or more invalid sensor actions")
self._update_criteria("sensor_action", actions)
return self
def set_threat_cause_vectors(self, vectors):
"""
Restricts the alerts that this query is performed on to the specified threat cause vectors.
Args:
vectors (list): List of threat cause vectors to look for. Valid values are "EMAIL", "WEB",
"GENERIC_SERVER", "GENERIC_CLIENT", "REMOTE_DRIVE", "REMOVABLE_MEDIA", "UNKNOWN",
"APP_STORE", and "THIRD_PARTY".
Returns:
CBAnalyticsAlertSearchQuery: This instance.
"""
if not all((vector in CBAnalyticsAlertSearchQuery.VALID_THREAT_CAUSE_VECTORS)
for vector in vectors):
raise ApiError("One or more invalid threat cause vectors")
self._update_criteria("threat_cause_vector", vectors)
return self
class DeviceControlAlertSearchQuery(BaseAlertSearchQuery):
"""Represents a query that is used to locate DeviceControlAlert objects."""
def __init__(self, doc_class, cb):
"""
        Initialize the DeviceControlAlertSearchQuery.
Args:
doc_class (class): The model class that will be returned by this query.
cb (BaseAPI): Reference to API object used to communicate with the server.
"""
super().__init__(doc_class, cb)
        # note: endpoint assumed to follow the workflow pattern of the sibling query classes above
        self._bulkupdate_url = "/appservices/v6/orgs/{0}/alerts/devicecontrol/workflow/_criteria"
def set_external_device_friendly_names(self, names):
"""
Restricts the alerts that this query is performed on to the specified external device friendly names.
Args:
names (list): List of external device friendly names to look for.
Returns:
DeviceControlAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in names):
raise ApiError("One or more invalid device name values")
self._update_criteria("external_device_friendly_name", names)
return self
def set_external_device_ids(self, ids):
"""
Restricts the alerts that this query is performed on to the specified external device IDs.
Args:
ids (list): List of external device IDs to look for.
Returns:
DeviceControlAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in ids):
raise ApiError("One or more invalid device ID values")
self._update_criteria("external_device_id", ids)
return self
def set_product_ids(self, ids):
"""
Restricts the alerts that this query is performed on to the specified product IDs.
Args:
ids (list): List of product IDs to look for.
Returns:
DeviceControlAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in ids):
raise ApiError("One or more invalid product ID values")
self._update_criteria("product_id", ids)
return self
def set_product_names(self, names):
"""
Restricts the alerts that this query is performed on to the specified product names.
Args:
names (list): List of product names to look for.
Returns:
DeviceControlAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in names):
raise ApiError("One or more invalid product name values")
self._update_criteria("product_name", names)
return self
def set_serial_numbers(self, serial_numbers):
"""
Restricts the alerts that this query is performed on to the specified serial numbers.
Args:
serial_numbers (list): List of serial numbers to look for.
Returns:
DeviceControlAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in serial_numbers):
raise ApiError("One or more invalid serial number values")
self._update_criteria("serial_number", serial_numbers)
return self
def set_vendor_ids(self, ids):
"""
Restricts the alerts that this query is performed on to the specified vendor IDs.
Args:
ids (list): List of vendor IDs to look for.
Returns:
DeviceControlAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in ids):
raise ApiError("One or more invalid vendor ID values")
self._update_criteria("vendor_id", ids)
return self
def set_vendor_names(self, names):
"""
Restricts the alerts that this query is performed on to the specified vendor names.
Args:
names (list): List of vendor names to look for.
Returns:
DeviceControlAlertSearchQuery: This instance.
"""
if not all(isinstance(n, str) for n in names):
raise ApiError("One or more invalid vendor name values")
self._update_criteria("vendor_name", names)
return self
|
[
"cbc_sdk.base.QueryBuilder",
"time.sleep",
"time.time",
"cbc_sdk.errors.ApiError",
"cbc_sdk.endpoint_standard.base.EnrichedEvent",
"cbc_sdk.errors.TimeoutError"
] |
[((2982, 2993), 'time.time', 'time.time', ([], {}), '()\n', (2991, 2993), False, 'import time\n'), ((4099, 4110), 'time.time', 'time.time', ([], {}), '()\n', (4108, 4110), False, 'import time\n'), ((13609, 13620), 'time.time', 'time.time', ([], {}), '()\n', (13618, 13620), False, 'import time\n'), ((16395, 16409), 'cbc_sdk.base.QueryBuilder', 'QueryBuilder', ([], {}), '()\n', (16407, 16409), False, 'from cbc_sdk.base import BaseQuery, UnrefreshableModel, QueryBuilder, QueryBuilderSupportMixin, IterableQueryMixin, CriteriaBuilderSupportMixin\n'), ((9432, 9443), 'time.time', 'time.time', ([], {}), '()\n', (9441, 9443), False, 'import time\n'), ((10247, 10262), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (10257, 10262), False, 'import time\n'), ((10303, 10389), 'cbc_sdk.errors.TimeoutError', 'TimeoutError', ([], {'message': '"""user-specified timeout exceeded while waiting for results"""'}), "(message=\n 'user-specified timeout exceeded while waiting for results')\n", (10315, 10389), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((17185, 17232), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid category values"""'], {}), "('One or more invalid category values')\n", (17193, 17232), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((19068, 19110), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid device IDs"""'], {}), "('One or more invalid device IDs')\n", (19076, 19110), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((19573, 19617), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid device names"""'], {}), "('One or more invalid device names')\n", (19581, 19617), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((20218, 20267), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid operating systems"""'], {}), "('One or more invalid operating systems')\n", (20226, 20267), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((20786, 20836), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid device OS versions"""'], {}), "('One or more invalid device OS versions')\n", (20794, 20836), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((21293, 21335), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid user names"""'], {}), "('One or more invalid user names')\n", (21301, 21335), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((22165, 22212), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid alert ID values"""'], {}), "('One or more invalid alert ID values')\n", (22173, 22212), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((22670, 22717), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid alert ID values"""'], {}), "('One or more invalid alert ID values')\n", (22678, 22717), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((23586, 23628), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid policy IDs"""'], {}), "('One or more invalid policy IDs')\n", (23594, 23628), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((24091, 24135), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid policy names"""'], {}), "('One or more invalid policy names')\n", (24099, 24135), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((24608, 24653), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid process names"""'], {}), "('One or more invalid process names')\n", (24616, 24653), False, 'from 
cbc_sdk.errors import ApiError, TimeoutError\n'), ((25130, 25175), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid SHA256 values"""'], {}), "('One or more invalid SHA256 values')\n", (25138, 25175), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((25866, 25915), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid reputation values"""'], {}), "('One or more invalid reputation values')\n", (25874, 25915), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((26341, 26377), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid tags"""'], {}), "('One or more invalid tags')\n", (26349, 26377), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((26974, 27021), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid priority values"""'], {}), "('One or more invalid priority values')\n", (26982, 27021), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((27478, 27526), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid threat ID values"""'], {}), "('One or more invalid threat ID values')\n", (27486, 27526), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((28297, 28405), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""key must be one of create_time, first_event_time, last_event_time, or last_update_time"""'], {}), "(\n 'key must be one of create_time, first_event_time, last_event_time, or last_update_time'\n )\n", (28305, 28405), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((29763, 29812), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid alert type values"""'], {}), "('One or more invalid alert type values')\n", (29771, 29812), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((30356, 30410), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid workflow status values"""'], {}), "('One or more invalid workflow status values')\n", (30364, 30410), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((31274, 31318), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""invalid sort direction specified"""'], {}), "('invalid sort direction specified')\n", (31282, 31318), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((35623, 35671), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid term field names"""'], {}), "('One or more invalid term field names')\n", (35631, 35671), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((38894, 38939), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid watchlist IDs"""'], {}), "('One or more invalid watchlist IDs')\n", (38902, 38939), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((39409, 39456), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid watchlist names"""'], {}), "('One or more invalid watchlist names')\n", (39417, 39456), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((41485, 41534), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid threat categories"""'], {}), "('One or more invalid threat categories')\n", (41493, 41534), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((42174, 42222), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid device locations"""'], {}), "('One or more invalid device locations')\n", (42182, 42222), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((42974, 43030), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid kill chain status values"""'], {}), "('One or more 
invalid kill chain status values')\n", (42982, 43030), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((43755, 43804), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid threat categories"""'], {}), "('One or more invalid threat categories')\n", (43763, 43804), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((44422, 44475), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid policy-applied values"""'], {}), "('One or more invalid policy-applied values')\n", (44430, 44475), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((44963, 45013), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid reason code values"""'], {}), "('One or more invalid reason code values')\n", (44971, 45013), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((45568, 45610), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid run states"""'], {}), "('One or more invalid run states')\n", (45576, 45610), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((46256, 46302), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid sensor actions"""'], {}), "('One or more invalid sensor actions')\n", (46264, 46302), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((47063, 47115), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid threat cause vectors"""'], {}), "('One or more invalid threat cause vectors')\n", (47071, 47115), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((48216, 48266), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid device name values"""'], {}), "('One or more invalid device name values')\n", (48224, 48266), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((48752, 48800), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid device ID values"""'], {}), "('One or more invalid device ID values')\n", (48760, 48800), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((49249, 49298), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid product ID values"""'], {}), "('One or more invalid product ID values')\n", (49257, 49298), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((49751, 49802), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid product name values"""'], {}), "('One or more invalid product name values')\n", (49759, 49802), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((50289, 50341), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid serial number values"""'], {}), "('One or more invalid serial number values')\n", (50297, 50341), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((50793, 50841), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid vendor ID values"""'], {}), "('One or more invalid vendor ID values')\n", (50801, 50841), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((51290, 51340), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""One or more invalid vendor name values"""'], {}), "('One or more invalid vendor name values')\n", (51298, 51340), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((9963, 9978), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (9973, 9978), False, 'import time\n'), ((17912, 17976), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""cannot specify range= in addition to start= and end="""'], {}), "('cannot specify range= in addition to start= and end=')\n", (17920, 17976), False, 'from cbc_sdk.errors import 
ApiError, TimeoutError\n'), ((18598, 18655), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""must specify either start= and end= or range="""'], {}), "('must specify either start= and end= or range=')\n", (18606, 18655), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((28526, 28590), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""cannot specify range= in addition to start= and end="""'], {}), "('cannot specify range= in addition to start= and end=')\n", (28534, 28590), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((29192, 29249), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""must specify either start= and end= or range="""'], {}), "('must specify either start= and end= or range=')\n", (29200, 29249), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((11081, 11123), 'cbc_sdk.endpoint_standard.base.EnrichedEvent', 'EnrichedEvent', (['self._cb'], {'initial_data': 'item'}), '(self._cb, initial_data=item)\n', (11094, 11123), False, 'from cbc_sdk.endpoint_standard.base import EnrichedEvent\n'), ((18427, 18490), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""cannot specify start= or end= in addition to range="""'], {}), "('cannot specify start= or end= in addition to range=')\n", (18435, 18490), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((29031, 29094), 'cbc_sdk.errors.ApiError', 'ApiError', (['"""cannot specify start= or end= in addition to range="""'], {}), "('cannot specify start= or end= in addition to range=')\n", (29039, 29094), False, 'from cbc_sdk.errors import ApiError, TimeoutError\n'), ((10112, 10123), 'time.time', 'time.time', ([], {}), '()\n', (10121, 10123), False, 'import time\n')]
|
#!/usr/bin/env python
#
# Generated Sun Jun 14 12:18:10 2015 by parse_xsd.py version 0.5.
#
import saml2
from saml2 import SamlBase
NAMESPACE = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd'
class TTimestampFault_(SamlBase):
"""The http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd:tTimestampFault element """
c_tag = 'tTimestampFault'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
def t_timestamp_fault__from_string(xml_string):
return saml2.create_class_from_xml_string(TTimestampFault_, xml_string)
class AttributedDateTime_(SamlBase):
"""The http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd:AttributedDateTime element """
c_tag = 'AttributedDateTime'
c_namespace = NAMESPACE
c_value_type = {'base': 'string'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Id'] = ('Id', 'anyURI', False)
def __init__(self,
Id=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.Id=Id
def attributed_date_time__from_string(xml_string):
return saml2.create_class_from_xml_string(AttributedDateTime_, xml_string)
class AttributedURI_(SamlBase):
"""The http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd:AttributedURI element """
c_tag = 'AttributedURI'
c_namespace = NAMESPACE
c_value_type = {'base': 'anyURI'}
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_attributes['Id'] = ('Id', 'anyURI', False)
def __init__(self,
Id=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.Id=Id
def attributed_ur_i__from_string(xml_string):
return saml2.create_class_from_xml_string(AttributedURI_, xml_string)
class Expires(AttributedDateTime_):
"""The http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd:Expires element """
c_tag = 'Expires'
c_namespace = NAMESPACE
c_children = AttributedDateTime_.c_children.copy()
c_attributes = AttributedDateTime_.c_attributes.copy()
c_child_order = AttributedDateTime_.c_child_order[:]
c_cardinality = AttributedDateTime_.c_cardinality.copy()
def expires_from_string(xml_string):
return saml2.create_class_from_xml_string(Expires, xml_string)
class Created(AttributedDateTime_):
"""The http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd:Created element """
c_tag = 'Created'
c_namespace = NAMESPACE
c_children = AttributedDateTime_.c_children.copy()
c_attributes = AttributedDateTime_.c_attributes.copy()
c_child_order = AttributedDateTime_.c_child_order[:]
c_cardinality = AttributedDateTime_.c_cardinality.copy()
def created_from_string(xml_string):
return saml2.create_class_from_xml_string(Created, xml_string)
class TimestampType_(SamlBase):
"""The http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd:TimestampType element """
c_tag = 'TimestampType'
c_namespace = NAMESPACE
c_children = SamlBase.c_children.copy()
c_attributes = SamlBase.c_attributes.copy()
c_child_order = SamlBase.c_child_order[:]
c_cardinality = SamlBase.c_cardinality.copy()
c_children['{http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd}Created'] = ('created', Created)
c_cardinality['created'] = {"min":0, "max":1}
c_children['{http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd}Expires'] = ('expires', Expires)
c_cardinality['expires'] = {"min":0, "max":1}
c_attributes['Id'] = ('Id', 'anyURI', False)
c_child_order.extend(['created', 'expires'])
def __init__(self,
created=None,
expires=None,
Id=None,
text=None,
extension_elements=None,
extension_attributes=None,
):
SamlBase.__init__(self,
text=text,
extension_elements=extension_elements,
extension_attributes=extension_attributes,
)
self.created=created
self.expires=expires
self.Id=Id
def timestamp_type__from_string(xml_string):
return saml2.create_class_from_xml_string(TimestampType_, xml_string)
class Timestamp(TimestampType_):
"""The http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd:Timestamp element """
c_tag = 'Timestamp'
c_namespace = NAMESPACE
c_children = TimestampType_.c_children.copy()
c_attributes = TimestampType_.c_attributes.copy()
c_child_order = TimestampType_.c_child_order[:]
c_cardinality = TimestampType_.c_cardinality.copy()
def timestamp_from_string(xml_string):
return saml2.create_class_from_xml_string(Timestamp, xml_string)
#..................
AG_commonAtts = [
('Id', '', False),
]
ELEMENT_FROM_STRING = {
TTimestampFault_.c_tag: t_timestamp_fault__from_string,
AttributedDateTime_.c_tag: attributed_date_time__from_string,
AttributedURI_.c_tag: attributed_ur_i__from_string,
TimestampType_.c_tag: timestamp_type__from_string,
Timestamp.c_tag: timestamp_from_string,
Expires.c_tag: expires_from_string,
Created.c_tag: created_from_string,
}
ELEMENT_BY_TAG = {
'tTimestampFault': TTimestampFault_,
'AttributedDateTime': AttributedDateTime_,
'AttributedURI': AttributedURI_,
'TimestampType': TimestampType_,
'Timestamp': Timestamp,
'Expires': Expires,
'Created': Created,
}
def factory(tag, **kwargs):
return ELEMENT_BY_TAG[tag](**kwargs)
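# Illustrative usage (not part of the generated module; assumes pysaml2's
# SamlBase.to_string() for serialization):
#   ts = factory('Timestamp', created=Created(text='2020-01-01T00:00:00Z'))
#   xml = ts.to_string()
#   ts2 = timestamp_from_string(xml)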
|
[
"saml2.SamlBase.__init__",
"saml2.create_class_from_xml_string",
"saml2.SamlBase.c_cardinality.copy",
"saml2.SamlBase.c_children.copy",
"saml2.SamlBase.c_attributes.copy"
] |
[((465, 491), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (489, 491), False, 'from saml2 import SamlBase\n'), ((511, 539), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (537, 539), False, 'from saml2 import SamlBase\n'), ((606, 635), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (633, 635), False, 'from saml2 import SamlBase\n'), ((696, 760), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['TTimestampFault_', 'xml_string'], {}), '(TTimestampFault_, xml_string)\n', (730, 760), False, 'import saml2\n'), ((1041, 1067), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (1065, 1067), False, 'from saml2 import SamlBase\n'), ((1087, 1115), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (1113, 1115), False, 'from saml2 import SamlBase\n'), ((1182, 1211), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (1209, 1211), False, 'from saml2 import SamlBase\n'), ((1690, 1757), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AttributedDateTime_', 'xml_string'], {}), '(AttributedDateTime_, xml_string)\n', (1724, 1757), False, 'import saml2\n'), ((2023, 2049), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (2047, 2049), False, 'from saml2 import SamlBase\n'), ((2069, 2097), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (2095, 2097), False, 'from saml2 import SamlBase\n'), ((2164, 2193), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (2191, 2193), False, 'from saml2 import SamlBase\n'), ((2667, 2729), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['AttributedURI_', 'xml_string'], {}), '(AttributedURI_, xml_string)\n', (2701, 2729), False, 'import saml2\n'), ((3213, 3268), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Expires', 'xml_string'], {}), '(Expires, xml_string)\n', (3247, 3268), False, 'import saml2\n'), ((3752, 3807), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Created', 'xml_string'], {}), '(Created, xml_string)\n', (3786, 3807), False, 'import saml2\n'), ((4035, 4061), 'saml2.SamlBase.c_children.copy', 'SamlBase.c_children.copy', ([], {}), '()\n', (4059, 4061), False, 'from saml2 import SamlBase\n'), ((4081, 4109), 'saml2.SamlBase.c_attributes.copy', 'SamlBase.c_attributes.copy', ([], {}), '()\n', (4107, 4109), False, 'from saml2 import SamlBase\n'), ((4176, 4205), 'saml2.SamlBase.c_cardinality.copy', 'SamlBase.c_cardinality.copy', ([], {}), '()\n', (4203, 4205), False, 'from saml2 import SamlBase\n'), ((5203, 5265), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['TimestampType_', 'xml_string'], {}), '(TimestampType_, xml_string)\n', (5237, 5265), False, 'import saml2\n'), ((5732, 5789), 'saml2.create_class_from_xml_string', 'saml2.create_class_from_xml_string', (['Timestamp', 'xml_string'], {}), '(Timestamp, xml_string)\n', (5766, 5789), False, 'import saml2\n'), ((1424, 1544), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (1441, 1544), False, 'from saml2 
import SamlBase\n'), ((2406, 2526), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (2423, 2526), False, 'from saml2 import SamlBase\n'), ((4885, 5005), 'saml2.SamlBase.__init__', 'SamlBase.__init__', (['self'], {'text': 'text', 'extension_elements': 'extension_elements', 'extension_attributes': 'extension_attributes'}), '(self, text=text, extension_elements=extension_elements,\n extension_attributes=extension_attributes)\n', (4902, 5005), False, 'from saml2 import SamlBase\n')]
|
# Generated by Django 2.2.10 on 2020-07-11 19:13
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name="Social_disancing_violations",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("number_of_violations", models.IntegerField()),
("timestamp", models.DateTimeField()),
(
"photo_violation",
models.ImageField(upload_to="social_distancing_violation"),
),
],
),
migrations.CreateModel(
name="Profile",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("first_name", models.CharField(max_length=100)),
("last_name", models.CharField(max_length=100)),
("photo", models.ImageField(upload_to="Photos")),
("address", models.CharField(max_length=256)),
("Is_Manager", models.BooleanField(default=False)),
(
"user_ref",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to=settings.AUTH_USER_MODEL,
),
),
],
),
]
|
[
"django.db.migrations.swappable_dependency",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.AutoField",
"django.db.models.ImageField",
"django.db.models.IntegerField",
"django.db.models.DateTimeField"
] |
[((248, 305), 'django.db.migrations.swappable_dependency', 'migrations.swappable_dependency', (['settings.AUTH_USER_MODEL'], {}), '(settings.AUTH_USER_MODEL)\n', (279, 305), False, 'from django.db import migrations, models\n'), ((498, 591), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (514, 591), False, 'from django.db import migrations, models\n'), ((768, 789), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (787, 789), False, 'from django.db import migrations, models\n'), ((822, 844), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {}), '()\n', (842, 844), False, 'from django.db import migrations, models\n'), ((924, 982), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""social_distancing_violation"""'}), "(upload_to='social_distancing_violation')\n", (941, 982), False, 'from django.db import migrations, models\n'), ((1174, 1267), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1190, 1267), False, 'from django.db import migrations, models\n'), ((1434, 1466), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1450, 1466), False, 'from django.db import migrations, models\n'), ((1499, 1531), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1515, 1531), False, 'from django.db import migrations, models\n'), ((1560, 1597), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""Photos"""'}), "(upload_to='Photos')\n", (1577, 1597), False, 'from django.db import migrations, models\n'), ((1628, 1660), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(256)'}), '(max_length=256)\n', (1644, 1660), False, 'from django.db import migrations, models\n'), ((1694, 1728), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (1713, 1728), False, 'from django.db import migrations, models\n'), ((1801, 1897), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': 'settings.AUTH_USER_MODEL'}), '(on_delete=django.db.models.deletion.CASCADE, to=settings.\n AUTH_USER_MODEL)\n', (1818, 1897), False, 'from django.db import migrations, models\n')]
|
import configparser
import json
import os
import random
import time
import datetime
def iot_simulator(val_serial_nr):
list_values_front = []
list_values_rear = []
# set variables
TEMPERATURE = 20
HUMIDITY = 60
i = 1
while i < 11:
curr_datetime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
temperature = TEMPERATURE + (random.random() * 15)
humidity = HUMIDITY + (random.random() * 20)
dict_item_front = {"time": curr_datetime, "temperature": temperature, "humidity": humidity}
dict_item_rear = {"time": curr_datetime, "temperature": temperature, "humidity": humidity}
list_values_front.append(dict_item_front)
list_values_rear.append(dict_item_rear)
i += 1
time.sleep(1)
    data_dct = {
        "serial_number": val_serial_nr,
        "sensor_details": [
            {"front_sensor": {"sensor_id": "FW909301",
                              "last_rev": datetime.datetime(2020, 1, 18).strftime("%Y-%m-%d"),
                              "data": list_values_front}},
            {"rear_sensor": {"sensor_id": "FW909302",
                             "last_rev": datetime.datetime(2020, 1, 14).strftime("%Y-%m-%d"),
                             "data": list_values_rear}},
        ],
    }
return data_dct
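# Illustrative shape of the returned document (derived from the code above):
# {
#   "serial_number": "TY7894020492",
#   "sensor_details": [
#     {"front_sensor": {"sensor_id": "FW909301", "last_rev": "2020-01-18", "data": [...]}},
#     {"rear_sensor":  {"sensor_id": "FW909302", "last_rev": "2020-01-14", "data": [...]}}
#   ]
# }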
def create_json_file(path_dir,val_serial_nr):
try:
input_json = iot_simulator(val_serial_nr)
json_file_name = 'data_{serial_nr}_{time}.json'.format(serial_nr = val_serial_nr, time = datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
with open(path_dir+json_file_name, 'w+', encoding='utf-8') as f:
json.dump(input_json, f, ensure_ascii=False, indent=4)
except KeyboardInterrupt:
print("process interrupted")
if __name__ == '__main__':
config = configparser.ConfigParser()
config.read('param.ini')
output_dir = config['working_directory']['Output']
    print("IoT - Simulated device")
j = 0
while j < 20:
create_json_file(output_dir,'TY7894020492')
time.sleep(5)
j += 1
|
[
"json.dump",
"time.sleep",
"datetime.datetime",
"random.random",
"configparser.ConfigParser",
"datetime.datetime.now"
] |
[((1706, 1733), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (1731, 1733), False, 'import configparser\n'), ((797, 810), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (807, 810), False, 'import time\n'), ((1955, 1968), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (1965, 1968), False, 'import time\n'), ((1534, 1588), 'json.dump', 'json.dump', (['input_json', 'f'], {'ensure_ascii': '(False)', 'indent': '(4)'}), '(input_json, f, ensure_ascii=False, indent=4)\n', (1543, 1588), False, 'import json\n'), ((302, 325), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (323, 325), False, 'import datetime\n'), ((394, 409), 'random.random', 'random.random', ([], {}), '()\n', (407, 409), False, 'import random\n'), ((448, 463), 'random.random', 'random.random', ([], {}), '()\n', (461, 463), False, 'import random\n'), ((1396, 1419), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1417, 1419), False, 'import datetime\n'), ((935, 965), 'datetime.datetime', 'datetime.datetime', (['(2020)', '(1)', '(18)'], {}), '(2020, 1, 18)\n', (952, 965), False, 'import datetime\n'), ((1076, 1106), 'datetime.datetime', 'datetime.datetime', (['(2020)', '(1)', '(14)'], {}), '(2020, 1, 14)\n', (1093, 1106), False, 'import datetime\n')]
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from fundtotal import FundTotal
from downpage import FundPage
from portfolio import PortFolio
if __name__ == "__main__":
    # Fetch basic data for all funds
    # fund_total = FundTotal()
    # fund_total.sort_all_fund()
    # Download individual fund pages
    # fund_page = FundPage()
    # fund_page.down_pages()
    # Fetch portfolio (holdings) data
portfolio = PortFolio()
portfolio.get_portfolio()
|
[
"portfolio.PortFolio"
] |
[((351, 362), 'portfolio.PortFolio', 'PortFolio', ([], {}), '()\n', (360, 362), False, 'from portfolio import PortFolio\n')]
|
import cfbackup
from setuptools import setup, find_packages
install_requires = [
'cloudflare>=1.5.1',
'argparse>=1.2.1',
]
setup(
name='cfbackup',
version=cfbackup.__version__,
description=cfbackup.__doc__.strip(),
url='https://github.com/nordicdyno/cfbackup',
download_url='https://github.com/nordicdyno/cfbackup',
author=cfbackup.__author__,
author_email='<EMAIL>',
license=cfbackup.__licence__,
packages=find_packages(),
entry_points={
'console_scripts': [
'cfbackup = cfbackup.__main__:main',
],
},
install_requires=install_requires,
)
|
[
"cfbackup.__doc__.strip",
"setuptools.find_packages"
] |
[((211, 235), 'cfbackup.__doc__.strip', 'cfbackup.__doc__.strip', ([], {}), '()\n', (233, 235), False, 'import cfbackup\n'), ((453, 468), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (466, 468), False, 'from setuptools import setup, find_packages\n')]
|
# <Copyright 2019, Argo AI, LLC. Released under the MIT license.>
from typing import List, Optional
import numpy as np
from argoverse.utils import mayavi_wrapper
from argoverse.utils.mesh_grid import get_mesh_grid_as_point_cloud
from argoverse.visualization.mayavi_utils import (
Figure,
draw_mayavi_line_segment,
plot_3d_clipped_bbox_mayavi,
plot_points_3D_mayavi,
)
def populate_frustum_voxels(planes: List[np.ndarray], fig: Figure, axis_pair: str) -> Figure:
"""
Generate grid in xy plane, and then treat it as grid in xz (ground) plane
in camera coordinate system.
Args:
planes: list of length 5. Each list element is a Numpy array
of shape (4,) representing the equation of a plane,
e.g. (a,b,c,d) in ax+by+cz=d
fig: Mayavi figure to draw on
axis_pair: Either "xz" or "yz"
Returns:
Mayavi figure
"""
sparse_xz_voxel_grid = get_mesh_grid_as_point_cloud(-20, 20, 0, 40, downsample_factor=0.1)
sparse_voxel_grid = np.zeros((sparse_xz_voxel_grid.shape[0], 3))
if axis_pair == "xz":
sparse_voxel_grid[:, 0] = sparse_xz_voxel_grid[:, 0]
sparse_voxel_grid[:, 2] = sparse_xz_voxel_grid[:, 1]
elif axis_pair == "yz":
sparse_voxel_grid[:, 1] = sparse_xz_voxel_grid[:, 0]
sparse_voxel_grid[:, 2] = sparse_xz_voxel_grid[:, 1]
# keep only the points that have signed distance > 0 (inside the frustum, plane
# normals also point into the frustum)
for plane in planes:
signed_d = np.matmul(sparse_voxel_grid, plane[:3]) + plane[3]
sparse_voxel_grid = sparse_voxel_grid[np.where(signed_d > 0)]
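    # worked example of the filter above (illustrative): for the plane (0, 0, 1, 0),
    # signed_d equals z, so only grid points with z > 0 survive the filter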
plot_points_3D_mayavi(sparse_voxel_grid, fig, fixed_color=(1, 0, 0))
return fig
def plot_frustum_planes_and_normals(
planes: List[np.ndarray],
cuboid_verts: Optional[np.ndarray] = None,
near_clip_dist: float = 0.5,
) -> None:
"""
Args:
planes: list of length 5. Each list element is a Numpy array
of shape (4,) representing the equation of a plane,
e.g. (a,b,c,d) in ax+by+cz=d
cuboid_verts: Numpy array of shape (N,3) representing
cuboid vertices
Returns:
None
"""
fig = mayavi_wrapper.mlab.figure(bgcolor=(1, 1, 1), size=(2000, 1000)) # type: ignore
if cuboid_verts is not None:
# fig = plot_bbox_3d_mayavi(fig, cuboid_verts)
fig = plot_3d_clipped_bbox_mayavi(fig, planes, cuboid_verts)
P = np.array([0.0, 0.0, 0.0])
for i, plane in enumerate(planes):
(a, b, c, d) = plane
if i == 0:
color = (1, 0, 0) # red left
elif i == 1:
color = (0, 0, 1) # blue right
elif i == 2:
color = (1, 1, 0) # near yellow
P = np.array([0.0, 0.0, near_clip_dist])
elif i == 3:
color = (0, 1, 0) # low is green
elif i == 4:
color = (0, 1, 1) # top is teal
plane_pts = generate_grid_on_plane(a, b, c, d, P)
fig = plot_points_3D_mayavi(plane_pts, fig, color)
# plot the normals at (0,0,0.5) and normal vector (u,v,w) given by (a,b,c)
mayavi_wrapper.mlab.quiver3d( # type: ignore
0,
0,
0.5,
a * 1000,
b * 1000,
c * 1000,
color=color,
figure=fig,
line_width=8,
)
# draw teal line at top below the camera
pt1 = np.array([-5, 0, -5])
pt2 = np.array([5, 0, -5])
color = (0, 1, 1)
draw_mayavi_line_segment(fig, [pt1, pt2], color=color, line_width=8)
# draw blue line in middle
pt1 = np.array([-5, 5, -5])
pt2 = np.array([5, 5, -5])
color = (0, 0, 1)
draw_mayavi_line_segment(fig, [pt1, pt2], color=color, line_width=8)
# draw yellow, lowest line (+y axis is down)
pt1 = np.array([-5, 10, -5])
pt2 = np.array([5, 10, -5])
color = (1, 1, 0)
draw_mayavi_line_segment(fig, [pt1, pt2], color=color, line_width=8)
fig = populate_frustum_voxels(planes, fig, "xz")
fig = populate_frustum_voxels(planes, fig, "yz")
mayavi_wrapper.mlab.view(distance=200) # type: ignore
mayavi_wrapper.mlab.show() # type: ignore
def get_perpendicular(n: np.ndarray) -> np.ndarray:
"""
    Return a vector perpendicular to n: dot(n, get_perpendicular(n)) is zero
    (the orthogonality condition), while keeping the magnitude of the result
    as high as possible. Zeroing the component with the smallest magnitude
    also guarantees that the result is not the (0,0,0) vector, unless that
    is already the input.
Args:
n: Numpy array of shape (3,)
Returns:
result: Numpy array of shape (3,)
"""
    # find the component with the smallest magnitude (abs value, per the docstring)
    i = np.argmin(np.abs(n))
# get the other two indices
a = (i + 1) % 3
b = (i + 2) % 3
result = np.zeros(3)
result[i] = 0.0
result[a] = n[b]
result[b] = -n[a]
return result
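# Quick numeric check (illustrative): for n = [3., 4., 1.] the smallest-magnitude
# component is index 2, so the result is [4., -3., 0.] and np.dot(n, result) == 0.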
def generate_grid_on_plane(a: float, b: float, c: float, d: float, P: np.ndarray, radius: float = 15) -> np.ndarray:
"""
Args:
a,b,c,d: Coefficients of ``ax + by + cz = d`` defining plane
P: Numpy array of shape (3,) representing point on the plane
radius: Radius (default 15)
Returns:
plane_pts: Numpy array of shape (N,3) with points on the input plane
"""
n = np.array([a, b, c]) # a,b,c from your equation
perp = get_perpendicular(n)
u = perp / np.linalg.norm(perp)
v = np.cross(u, n)
N = 100
# delta and epsilon are floats:
delta = radius / N # N is how many points you want max in one direction
epsilon = delta * 0.5
n_pts = int((2 * radius + epsilon) / delta)
pts = np.linspace(-radius, radius + epsilon, n_pts)
plane_pts: List[float] = []
for y in pts:
for x in pts:
# if (x*x+y*y < radius*radius): # only in the circle:
plane_pts += [P + x * u + y * v] # P is the point on the plane
return np.array(plane_pts)
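# Illustrative usage sketch (not part of the original code; the plane and point
# are made up). Sample a grid on the plane z = 2; every returned point keeps
# z == 2 because u and v are both orthogonal to the plane normal n (note that
# the sampling itself is anchored at the supplied point P, not at d):
#
#   P = np.array([0.0, 0.0, 2.0])     # a point known to lie on the plane
#   pts = generate_grid_on_plane(0.0, 0.0, 1.0, 2.0, P, radius=5)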
|
[
"argoverse.utils.mayavi_wrapper.mlab.show",
"argoverse.visualization.mayavi_utils.plot_3d_clipped_bbox_mayavi",
"argoverse.utils.mayavi_wrapper.mlab.view",
"argoverse.utils.mayavi_wrapper.mlab.figure",
"numpy.zeros",
"numpy.cross",
"numpy.argmin",
"numpy.where",
"numpy.array",
"argoverse.visualization.mayavi_utils.plot_points_3D_mayavi",
"argoverse.visualization.mayavi_utils.draw_mayavi_line_segment",
"numpy.linspace",
"argoverse.utils.mayavi_wrapper.mlab.quiver3d",
"argoverse.utils.mesh_grid.get_mesh_grid_as_point_cloud",
"numpy.linalg.norm",
"numpy.matmul"
] |
[((935, 1002), 'argoverse.utils.mesh_grid.get_mesh_grid_as_point_cloud', 'get_mesh_grid_as_point_cloud', (['(-20)', '(20)', '(0)', '(40)'], {'downsample_factor': '(0.1)'}), '(-20, 20, 0, 40, downsample_factor=0.1)\n', (963, 1002), False, 'from argoverse.utils.mesh_grid import get_mesh_grid_as_point_cloud\n'), ((1027, 1071), 'numpy.zeros', 'np.zeros', (['(sparse_xz_voxel_grid.shape[0], 3)'], {}), '((sparse_xz_voxel_grid.shape[0], 3))\n', (1035, 1071), True, 'import numpy as np\n'), ((1669, 1737), 'argoverse.visualization.mayavi_utils.plot_points_3D_mayavi', 'plot_points_3D_mayavi', (['sparse_voxel_grid', 'fig'], {'fixed_color': '(1, 0, 0)'}), '(sparse_voxel_grid, fig, fixed_color=(1, 0, 0))\n', (1690, 1737), False, 'from argoverse.visualization.mayavi_utils import Figure, draw_mayavi_line_segment, plot_3d_clipped_bbox_mayavi, plot_points_3D_mayavi\n'), ((2240, 2304), 'argoverse.utils.mayavi_wrapper.mlab.figure', 'mayavi_wrapper.mlab.figure', ([], {'bgcolor': '(1, 1, 1)', 'size': '(2000, 1000)'}), '(bgcolor=(1, 1, 1), size=(2000, 1000))\n', (2266, 2304), False, 'from argoverse.utils import mayavi_wrapper\n'), ((2488, 2513), 'numpy.array', 'np.array', (['[0.0, 0.0, 0.0]'], {}), '([0.0, 0.0, 0.0])\n', (2496, 2513), True, 'import numpy as np\n'), ((3469, 3490), 'numpy.array', 'np.array', (['[-5, 0, -5]'], {}), '([-5, 0, -5])\n', (3477, 3490), True, 'import numpy as np\n'), ((3501, 3521), 'numpy.array', 'np.array', (['[5, 0, -5]'], {}), '([5, 0, -5])\n', (3509, 3521), True, 'import numpy as np\n'), ((3549, 3617), 'argoverse.visualization.mayavi_utils.draw_mayavi_line_segment', 'draw_mayavi_line_segment', (['fig', '[pt1, pt2]'], {'color': 'color', 'line_width': '(8)'}), '(fig, [pt1, pt2], color=color, line_width=8)\n', (3573, 3617), False, 'from argoverse.visualization.mayavi_utils import Figure, draw_mayavi_line_segment, plot_3d_clipped_bbox_mayavi, plot_points_3D_mayavi\n'), ((3660, 3681), 'numpy.array', 'np.array', (['[-5, 5, -5]'], {}), '([-5, 5, -5])\n', (3668, 3681), True, 'import numpy as np\n'), ((3692, 3712), 'numpy.array', 'np.array', (['[5, 5, -5]'], {}), '([5, 5, -5])\n', (3700, 3712), True, 'import numpy as np\n'), ((3739, 3807), 'argoverse.visualization.mayavi_utils.draw_mayavi_line_segment', 'draw_mayavi_line_segment', (['fig', '[pt1, pt2]'], {'color': 'color', 'line_width': '(8)'}), '(fig, [pt1, pt2], color=color, line_width=8)\n', (3763, 3807), False, 'from argoverse.visualization.mayavi_utils import Figure, draw_mayavi_line_segment, plot_3d_clipped_bbox_mayavi, plot_points_3D_mayavi\n'), ((3868, 3890), 'numpy.array', 'np.array', (['[-5, 10, -5]'], {}), '([-5, 10, -5])\n', (3876, 3890), True, 'import numpy as np\n'), ((3901, 3922), 'numpy.array', 'np.array', (['[5, 10, -5]'], {}), '([5, 10, -5])\n', (3909, 3922), True, 'import numpy as np\n'), ((3949, 4017), 'argoverse.visualization.mayavi_utils.draw_mayavi_line_segment', 'draw_mayavi_line_segment', (['fig', '[pt1, pt2]'], {'color': 'color', 'line_width': '(8)'}), '(fig, [pt1, pt2], color=color, line_width=8)\n', (3973, 4017), False, 'from argoverse.visualization.mayavi_utils import Figure, draw_mayavi_line_segment, plot_3d_clipped_bbox_mayavi, plot_points_3D_mayavi\n'), ((4130, 4168), 'argoverse.utils.mayavi_wrapper.mlab.view', 'mayavi_wrapper.mlab.view', ([], {'distance': '(200)'}), '(distance=200)\n', (4154, 4168), False, 'from argoverse.utils import mayavi_wrapper\n'), ((4189, 4215), 'argoverse.utils.mayavi_wrapper.mlab.show', 'mayavi_wrapper.mlab.show', ([], {}), '()\n', (4213, 4215), False, 'from argoverse.utils import mayavi_wrapper\n'), ((4788, 4800), 'numpy.argmin', 'np.argmin', (['n'], {}), '(n)\n', (4797, 4800), True, 'import numpy as np\n'), ((4888, 4899), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (4896, 4899), True, 'import numpy as np\n'), ((5399, 5418), 'numpy.array', 'np.array', (['[a, b, c]'], {}), '([a, b, c])\n', (5407, 5418), True, 'import numpy as np\n'), ((5523, 5537), 'numpy.cross', 'np.cross', (['u', 'n'], {}), '(u, n)\n', (5531, 5537), True, 'import numpy as np\n'), ((5749, 5794), 'numpy.linspace', 'np.linspace', (['(-radius)', '(radius + epsilon)', 'n_pts'], {}), '(-radius, radius + epsilon, n_pts)\n', (5760, 5794), True, 'import numpy as np\n'), ((6022, 6041), 'numpy.array', 'np.array', (['plane_pts'], {}), '(plane_pts)\n', (6030, 6041), True, 'import numpy as np\n'), ((2424, 2478), 'argoverse.visualization.mayavi_utils.plot_3d_clipped_bbox_mayavi', 'plot_3d_clipped_bbox_mayavi', (['fig', 'planes', 'cuboid_verts'], {}), '(fig, planes, cuboid_verts)\n', (2451, 2478), False, 'from argoverse.visualization.mayavi_utils import Figure, draw_mayavi_line_segment, plot_3d_clipped_bbox_mayavi, plot_points_3D_mayavi\n'), ((3033, 3077), 'argoverse.visualization.mayavi_utils.plot_points_3D_mayavi', 'plot_points_3D_mayavi', (['plane_pts', 'fig', 'color'], {}), '(plane_pts, fig, color)\n', (3054, 3077), False, 'from argoverse.visualization.mayavi_utils import Figure, draw_mayavi_line_segment, plot_3d_clipped_bbox_mayavi, plot_points_3D_mayavi\n'), ((3169, 3282), 'argoverse.utils.mayavi_wrapper.mlab.quiver3d', 'mayavi_wrapper.mlab.quiver3d', (['(0)', '(0)', '(0.5)', '(a * 1000)', '(b * 1000)', '(c * 1000)'], {'color': 'color', 'figure': 'fig', 'line_width': '(8)'}), '(0, 0, 0.5, a * 1000, b * 1000, c * 1000, color\n =color, figure=fig, line_width=8)\n', (3197, 3282), False, 'from argoverse.utils import mayavi_wrapper\n'), ((5494, 5514), 'numpy.linalg.norm', 'np.linalg.norm', (['perp'], {}), '(perp)\n', (5508, 5514), True, 'import numpy as np\n'), ((1543, 1582), 'numpy.matmul', 'np.matmul', (['sparse_voxel_grid', 'plane[:3]'], {}), '(sparse_voxel_grid, plane[:3])\n', (1552, 1582), True, 'import numpy as np\n'), ((1640, 1662), 'numpy.where', 'np.where', (['(signed_d > 0)'], {}), '(signed_d > 0)\n', (1648, 1662), True, 'import numpy as np\n'), ((2790, 2826), 'numpy.array', 'np.array', (['[0.0, 0.0, near_clip_dist]'], {}), '([0.0, 0.0, near_clip_dist])\n', (2798, 2826), True, 'import numpy as np\n')]
|
from django.contrib import admin
# Register your models here.
from .models import Pessoa
admin.site.register(Pessoa)
|
[
"django.contrib.admin.site.register"
] |
[((91, 118), 'django.contrib.admin.site.register', 'admin.site.register', (['Pessoa'], {}), '(Pessoa)\n', (110, 118), False, 'from django.contrib import admin\n')]
|
import sys
import math
def isPrime(num):
if num == 1: return False
if num <= 3: return True
for i in range(2, int(math.sqrt(num)) + 1):
if num % i == 0: return False
return True
M = int(sys.stdin.readline().rstrip())
N = int(sys.stdin.readline().rstrip())
min_prime = 100001  # avoid shadowing the built-in min()
total = 0  # avoid shadowing the built-in sum()
for number in range(M, N + 1):
    if isPrime(number):
        if min_prime > number:
            min_prime = number
        total += number
print(-1 if total == 0 else total)
if total != 0: print(min_prime)
|
[
"sys.stdin.readline",
"math.sqrt"
] |
[((214, 234), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (232, 234), False, 'import sys\n'), ((253, 273), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (271, 273), False, 'import sys\n'), ((128, 142), 'math.sqrt', 'math.sqrt', (['num'], {}), '(num)\n', (137, 142), False, 'import math\n')]
|
from scan_api import ScanAPI
from os import path
def run_scan(apk_src):
print_title("Start Scanning...")
scan_api = ScanAPI(apk_src)
def file_dir_exist(target_path):
exists = path.exists(target_path)
if not exists:
print("\"" + target_path + "\"" + " NOT EXISTS!")
return exists
def print_title(title):
# print break line with title
# ============== title =============
total_len = 100
decoration = "=" * ((total_len - len(title) - 1) // 2)
print(decoration + " " + title + " " + decoration)
def main():
apk_src = "../apks_unzip/Instagram_v173.0.0.39.120_apkpure.com"
if file_dir_exist(apk_src):
run_scan(apk_src)
if __name__ == "__main__":
main()
|
[
"scan_api.ScanAPI",
"os.path.exists"
] |
[((125, 141), 'scan_api.ScanAPI', 'ScanAPI', (['apk_src'], {}), '(apk_src)\n', (132, 141), False, 'from scan_api import ScanAPI\n'), ((189, 213), 'os.path.exists', 'path.exists', (['target_path'], {}), '(target_path)\n', (200, 213), False, 'from os import path\n')]
|
import json
import logging
import os
import shutil
import subprocess
import sys
import time
import uuid
from .SingleNodeDockerCluster import SingleNodeDockerCluster
from .utils import retry_check
class DockerTestCluster(SingleNodeDockerCluster):
def __init__(self):
self.segfault = False
super(DockerTestCluster, self).__init__()
def deploy_flow(self):
super(DockerTestCluster, self).deploy_flow()
def start_flow(self, name):
container = self.containers[name]
container.reload()
logging.info("Status before start: %s", container.status)
if container.status == 'exited':
logging.info("Start container: %s", name)
container.start()
return True
return False
def stop_flow(self, name):
container = self.containers[name]
container.reload()
logging.info("Status before stop: %s", container.status)
if container.status == 'running':
logging.info("Stop container: %s", name)
container.stop(timeout=0)
return True
return False
@staticmethod
def get_stdout_encoding():
        # Fall back to UTF-8 when sys.stdout is missing or its encoding is unset
        # (explicitly piped output and also some CI such as GitHub Actions).
encoding = getattr(sys.stdout, "encoding", None)
if encoding is None:
encoding = "utf8"
return encoding
def get_app_log(self):
for container in self.containers.values():
container = self.client.containers.get(container.id)
if b'Segmentation fault' in container.logs():
logging.warn('Container segfaulted: %s', container.name)
                self.segfault = True
if container.status == 'running':
apps = [("MiNiFi", self.minifi_root + '/logs/minifi-app.log'), ("NiFi", self.nifi_root + '/logs/nifi-app.log'), ("Kafka", self.kafka_broker_root + '/logs/server.log')]
for app in apps:
app_log_status, app_log = container.exec_run('/bin/sh -c \'cat ' + app[1] + '\'')
if app_log_status == 0:
logging.info('%s app logs for container \'%s\':\n', app[0], container.name)
return app_log
break
else:
logging.warning("The container is running, but none of %s logs were found", " or ".join([x[0] for x in apps]))
else:
logging.info(container.status)
logging.info('Could not cat app logs for container \'%s\' because it is not running', container.name)
return None
def wait_for_app_logs(self, log, timeout_seconds, count=1):
wait_start_time = time.perf_counter()
for container_name, container in self.containers.items():
logging.info('Waiting for app-logs `%s` in container `%s`', log, container_name)
while (time.perf_counter() - wait_start_time) < timeout_seconds:
logs = self.get_app_log()
if logs is not None and count <= logs.decode("utf-8").count(log):
return True
if logs is not None:
for line in logs.decode("utf-8").splitlines():
logging.info("App-log: %s", line)
time.sleep(1)
return False
def log_nifi_output(self):
app_log = self.get_app_log()
if app_log is None:
return
for line in app_log.decode("utf-8").splitlines():
logging.info(line)
def check_minifi_container_started(self):
for container in self.containers.values():
container = self.client.containers.get(container.id)
if b'Segmentation fault' in container.logs():
logging.warn('Container segfaulted: %s', container.name)
raise Exception("Container failed to start up.")
def check_http_proxy_access(self, url):
output = subprocess.check_output(["docker", "exec", "http-proxy", "cat", "/var/log/squid/access.log"]).decode(self.get_stdout_encoding())
return url in output and \
((output.count("TCP_DENIED/407") != 0 and \
output.count("TCP_MISS/200") == output.count("TCP_DENIED/407")) or \
output.count("TCP_DENIED/407") == 0 and "TCP_MISS" in output)
@retry_check()
def check_s3_server_object_data(self, test_data):
s3_mock_dir = subprocess.check_output(["docker", "exec", "s3-server", "find", "/tmp/", "-type", "d", "-name", "s3mock*"]).decode(self.get_stdout_encoding()).strip()
file_data = subprocess.check_output(["docker", "exec", "s3-server", "cat", s3_mock_dir + "/test_bucket/test_object_key/fileData"]).decode(self.get_stdout_encoding())
return file_data == test_data
@retry_check()
def check_s3_server_object_metadata(self, content_type="application/octet-stream", metadata=dict()):
s3_mock_dir = subprocess.check_output(["docker", "exec", "s3-server", "find", "/tmp/", "-type", "d", "-name", "s3mock*"]).decode(self.get_stdout_encoding()).strip()
metadata_json = subprocess.check_output(["docker", "exec", "s3-server", "cat", s3_mock_dir + "/test_bucket/test_object_key/metadata"]).decode(self.get_stdout_encoding())
server_metadata = json.loads(metadata_json)
return server_metadata["contentType"] == content_type and metadata == server_metadata["userMetadata"]
@retry_check()
def is_s3_bucket_empty(self):
s3_mock_dir = subprocess.check_output(["docker", "exec", "s3-server", "find", "/tmp/", "-type", "d", "-name", "s3mock*"]).decode(self.get_stdout_encoding()).strip()
ls_result = subprocess.check_output(["docker", "exec", "s3-server", "ls", s3_mock_dir + "/test_bucket/"]).decode(self.get_stdout_encoding())
return not ls_result
def wait_for_container_logs(self, container_name, log, timeout, count=1):
logging.info('Waiting for logs `%s` in container `%s`', log, container_name)
container = self.containers[container_name]
check_count = 0
while check_count <= timeout:
if count <= container.logs().decode("utf-8").count(log):
return True
else:
check_count += 1
time.sleep(1)
return False
def segfault_happened(self):
return self.segfault
|
[
"json.loads",
"logging.warn",
"subprocess.check_output",
"time.perf_counter",
"time.sleep",
"logging.info"
] |
[((545, 602), 'logging.info', 'logging.info', (['"""Status before start: %s"""', 'container.status'], {}), "('Status before start: %s', container.status)\n", (557, 602), False, 'import logging\n'), ((882, 938), 'logging.info', 'logging.info', (['"""Status before stop: %s"""', 'container.status'], {}), "('Status before stop: %s', container.status)\n", (894, 938), False, 'import logging\n'), ((2784, 2803), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2801, 2803), False, 'import time\n'), ((5374, 5399), 'json.loads', 'json.loads', (['metadata_json'], {}), '(metadata_json)\n', (5384, 5399), False, 'import json\n'), ((6002, 6078), 'logging.info', 'logging.info', (['"""Waiting for logs `%s` in container `%s`"""', 'log', 'container_name'], {}), "('Waiting for logs `%s` in container `%s`', log, container_name)\n", (6014, 6078), False, 'import logging\n'), ((656, 697), 'logging.info', 'logging.info', (['"""Start container: %s"""', 'name'], {}), "('Start container: %s', name)\n", (668, 697), False, 'import logging\n'), ((993, 1033), 'logging.info', 'logging.info', (['"""Stop container: %s"""', 'name'], {}), "('Stop container: %s', name)\n", (1005, 1033), False, 'import logging\n'), ((2882, 2967), 'logging.info', 'logging.info', (['"""Waiting for app-logs `%s` in container `%s`"""', 'log', 'container_name'], {}), "('Waiting for app-logs `%s` in container `%s`', log, container_name\n )\n", (2894, 2967), False, 'import logging\n'), ((3595, 3613), 'logging.info', 'logging.info', (['line'], {}), '(line)\n', (3607, 3613), False, 'import logging\n'), ((1667, 1723), 'logging.warn', 'logging.warn', (['"""Container segfaulted: %s"""', 'container.name'], {}), "('Container segfaulted: %s', container.name)\n", (1679, 1723), False, 'import logging\n'), ((2524, 2554), 'logging.info', 'logging.info', (['container.status'], {}), '(container.status)\n', (2536, 2554), False, 'import logging\n'), ((2571, 2679), 'logging.info', 'logging.info', (['"""Could not cat app logs for container \'%s\' because it is not running"""', 'container.name'], {}), '(\n    "Could not cat app logs for container \'%s\' because it is not running",\n    container.name)\n', (2583, 2679), False, 'import logging\n'), ((3374, 3387), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (3384, 3387), False, 'import time\n'), ((3851, 3907), 'logging.warn', 'logging.warn', (['"""Container segfaulted: %s"""', 'container.name'], {}), "('Container segfaulted: %s', container.name)\n", (3863, 3907), False, 'import logging\n'), ((4035, 4132), 'subprocess.check_output', 'subprocess.check_output', (["['docker', 'exec', 'http-proxy', 'cat', '/var/log/squid/access.log']"], {}), "(['docker', 'exec', 'http-proxy', 'cat',\n    '/var/log/squid/access.log'])\n", (4058, 4132), False, 'import subprocess\n'), ((4680, 4802), 'subprocess.check_output', 'subprocess.check_output', (["['docker', 'exec', 's3-server', 'cat', s3_mock_dir +\n    '/test_bucket/test_object_key/fileData']"], {}), "(['docker', 'exec', 's3-server', 'cat', s3_mock_dir +\n    '/test_bucket/test_object_key/fileData'])\n", (4703, 4802), False, 'import subprocess\n'), ((5194, 5316), 'subprocess.check_output', 'subprocess.check_output', (["['docker', 'exec', 's3-server', 'cat', s3_mock_dir +\n    '/test_bucket/test_object_key/metadata']"], {}), "(['docker', 'exec', 's3-server', 'cat', s3_mock_dir +\n    '/test_bucket/test_object_key/metadata'])\n", (5217, 5316), False, 'import subprocess\n'), ((5757, 5854), 'subprocess.check_output', 'subprocess.check_output', (["['docker', 'exec', 's3-server', 'ls', s3_mock_dir + '/test_bucket/']"], {}), "(['docker', 'exec', 's3-server', 'ls', s3_mock_dir +\n    '/test_bucket/'])\n", (5780, 5854), False, 'import subprocess\n'), ((6357, 6370), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (6367, 6370), False, 'import time\n'), ((2982, 3001), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (2999, 3001), False, 'import time\n'), ((2192, 2265), 'logging.info', 'logging.info', (['"""%s app logs for container \'%s\':\n"""', 'app[0]', 'container.name'], {}), '("%s app logs for container \'%s\':\\n", app[0], container.name)\n', (2204, 2265), False, 'import logging\n'), ((3324, 3357), 'logging.info', 'logging.info', (['"""App-log: %s"""', 'line'], {}), "('App-log: %s', line)\n", (3336, 3357), False, 'import logging\n'), ((4509, 4620), 'subprocess.check_output', 'subprocess.check_output', (["['docker', 'exec', 's3-server', 'find', '/tmp/', '-type', 'd', '-name',\n    's3mock*']"], {}), "(['docker', 'exec', 's3-server', 'find', '/tmp/',\n    '-type', 'd', '-name', 's3mock*'])\n", (4532, 4620), False, 'import subprocess\n'), ((5019, 5130), 'subprocess.check_output', 'subprocess.check_output', (["['docker', 'exec', 's3-server', 'find', '/tmp/', '-type', 'd', '-name',\n    's3mock*']"], {}), "(['docker', 'exec', 's3-server', 'find', '/tmp/',\n    '-type', 'd', '-name', 's3mock*'])\n", (5042, 5130), False, 'import subprocess\n'), ((5586, 5697), 'subprocess.check_output', 'subprocess.check_output', (["['docker', 'exec', 's3-server', 'find', '/tmp/', '-type', 'd', '-name',\n    's3mock*']"], {}), "(['docker', 'exec', 's3-server', 'find', '/tmp/',\n    '-type', 'd', '-name', 's3mock*'])\n", (5609, 5697), False, 'import subprocess\n')]
|
# -*- coding: utf-8 -*-
import subprocess as sp
import sys
import os
import ete3
import glob
import pandas as pd
from ftplib import FTP
# Run bash commands
def run_command(cmd, omit=False):
    if omit:
        try:
            process = sp.Popen(cmd, shell=True)
        except Exception:
            print("Error occurred, but you chose to omit it")
            return
        process.communicate("Y\n")
        if process.wait() != 0:
            print("Error occurred, but you chose to omit it")
    else:
        try:
            process = sp.Popen(cmd, shell=True)
        except OSError:
            sys.exit("Error: Execution cmd failed")
        process.communicate("Y\n")
        if process.wait() != 0:
            sys.exit("ERROR: Execution cmd failed")
def run_command_with_return(cmd):
try:
process = sp.Popen(cmd, shell=True, stdout=sp.PIPE).stdout
except:
sys.exit()
return process.readlines()
# Create a folder
def create_folder(name):
if not os.path.exists(name):
try:
os.mkdir(name)
except:
print("Unable to create directory " + name)
def loadPaths(pathsFile, tag):
"""Loads paths from a pathfile based on the tag parameter.
Parameters
----------
pathsFile : str
File created with create_pathfile().
tag : str
Tag of the files of interest
Returns
-------
type
Dictionary of gene:corresponding_file
"""
pathsList = {}
for line in open(pathsFile):
line = line.strip()
dades = line.split("\t")
if dades[0] == tag:
pathsList[dades[1]] = dades[2]
return pathsList
def build_sp2age(sptree, seed):
"""Build a species to age dictionary given a tree and its main seq.
Parameters
----------
sptree : tree
Phylogenetic tree of interest
seed : str
Main seq where the dictionary will start.
Returns
-------
type
Returns a dict with species:age_to_main as key:value
"""
spe2age = {}
node = sptree.get_leaves_by_name(seed)[0]
age = 1
while not node.is_root():
for leaf in node.get_leaves():
if leaf.name not in spe2age:
spe2age[leaf.name] = age
age += 1
node = node.up
for leaf in sptree.get_leaves():
if leaf.name not in spe2age:
spe2age[leaf.name] = age
return spe2age
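# Illustrative usage sketch (not part of the original code; the toy tree and
# species names are made up):
#
#   sptree = ete3.Tree("((HUMAN,MOUSE),DROME);")
#   build_sp2age(sptree, "HUMAN")
#   # -> {'HUMAN': 1, 'MOUSE': 2, 'DROME': 3}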
def load_sequences(contigFile, delimiter):
"""Load sequences into a variable.
Parameters
----------
contigFile : str
File where the seqs are stored.
delimiter : str
Delimiter of sequences.
Returns
-------
type
Variable with sequences stored.
"""
seqs = {}
name = ""
s = []
for line in open(contigFile):
line = line.strip()
if ">" in line:
if name != "":
seqs[name] = "".join(s)
if delimiter == "":
name = line.replace(">", "")
else:
name = line.replace(">", "").split(delimiter)[0]
s = []
else:
s.append(line.upper())
if len(s) != 0:
seqs[name] = "".join(s)
return seqs
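# Illustrative usage sketch (not part of the original code; the file name is
# made up). With delimiter "" the full FASTA header becomes the key; with e.g.
# "|" only the part before the first "|" is kept:
#
#   seqs = load_sequences("proteins.fasta", "")
#   for name, seq in seqs.items():
#       print(name, len(seq))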
def load_species_name(node):
return node.split("_")[1]
def load_species_name_whole(node):
return node
def load_tree(tree):
t = ete3.PhyloTree(tree, sp_naming_function=load_species_name)
return t
def root_tree(t, spe2age=None, midpoint=False):
"""Root a Phylogenetic tree.
Parameters
----------
t : Tree
Tree to root.
spe2age : dict
Species2age dictionary if available.
midpoint : bool
Use midpoint rooting.
Returns
-------
tree
Rooted phylogenetic tree.
"""
if spe2age is not None:
try:
t.set_outgroup(t.get_farthest_oldest_leaf(spe2age))
except:
sys.exit("Something went wrong with sp2age dict!")
elif midpoint:
t.set_outgroup(t.get_midpoint_outgroup())
else:
sys.exit("Something went wrong with rooting!")
return t
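# Illustrative usage sketch (not part of the original code; the newick string
# is made up):
#
#   t = load_tree("(A_HUMAN:1,(B_MOUSE:1,C_DROME:1):1);")
#   rooted = root_tree(t, midpoint=True)
#   # or, with a species-to-age dictionary built from a species tree:
#   # rooted = root_tree(t, spe2age=build_sp2age(sptree, seed))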
def root_species_tree(t, spe2age=None, midpoint=False, out_list=None, force=False):
"""Root a species tree.
Parameters
----------
t : tree
Species tree to root.
spe2age : dict
Species 2 age dictionary if available.
midpoint : bool
Do midpoint rooting.
out_list : list
        List of outgroup species.
    force : bool
        If the furthest species from the species-to-age dictionary do not form a monophyletic outgroup, remove one and try again until a monophyletic outgroup can be set. The furthest species are removed in order of topological distance from the seed (closest are removed first!). May not make much sense, therefore use it with caution!
Returns
-------
tree
Rooted species tree.
"""
p = ete3.PhyloTree(t.write(), sp_naming_function=None)
for n in p.get_leaves():
n.species = n.name
if spe2age is not None:
furthest = [k for k, v in spe2age.items() if v == max(spe2age.values())]
if len(furthest) > 1:
print(
"there are more than one furthest sequence, the root will be the ancestor node that comprise all of them."
)
print(furthest)
if p.get_common_ancestor(furthest) == p.get_tree_root():
if force:
red = sort_furthest_by_dist(spe2age, p)
num = len(furthest)
while num > 0:
red = red[1:]
if p.get_common_ancestor(red) == p.get_tree_root():
num = num - 1
continue
else:
print("Found monophyletic outgroup:")
print(red)
p.set_outgroup(p.get_common_ancestor(red))
break
else:
raise ValueError(
"Could not find a monophyletic clade containing all furthest species, try midppoint rooting or give a list of outgroup"
)
# p.set_outgroup(p.get_midpoint_outgroup())
else:
p.set_outgroup(p.get_common_ancestor(furthest))
else:
print("only one furthest sequence, this will be the outgroup")
print(furthest)
p.set_outgroup(p.get_farthest_oldest_leaf(spe2age))
if midpoint:
p.set_outgroup(p.get_midpoint_outgroup())
if out_list is not None:
if len(out_list) == 1:
p.set_outgroup(out_list[0])
else:
p.set_outgroup(p.get_common_ancestor(out_list))
return p
def sort_furthest_by_dist(s2a, tree):
furthest = [k for k, v in s2a.items() if v == max(s2a.values())]
seed = [k for k, v in s2a.items() if v == min(s2a.values())]
dist_furthest = {}
for el in furthest:
dest = tree & el
arr = tree & seed[0]
dist_furthest[el] = arr.get_distance(dest)
furthest = sorted(furthest, key=dist_furthest.get)
return furthest
def print_sequence(code, sequence, outfile):
outfile.write(">" + code + "\n")
i = 0
if sequence[-1] == "*":
sequence = sequence[:-1]
while i < len(sequence):
outfile.write(sequence[i : i + 60] + "\n")
i += 60
def create_pathfile(pathsFile, dir, tag):
"""Create a file with paths for a file in a specific dir.
Parameters
----------
pathsFile : str
outputfile
dir : str
Directory containing the files of interest
tag : str
Tag for the type of file. For now only "alg_aa" is supported though.
Returns
-------
type
Returns a file with paths for each object of interest.
"""
outfilePath = open(pathsFile, "w")
for firstDir in glob.glob(dir + "/*"):
code = firstDir.split("/")[-1].split(".")[0]
outfilePath.write(tag + "\t" + code + "\t" + firstDir + "\n")
outfilePath.close()
# get speciesrax data
# TODO: making the alignment args optional would be way better than this!
# mapping and family file are not important
def get_generax_data(gene_trees, out_dir, aln_dir=None, keep_model=True):
"""Prepare data for generax or speciesrax.
Parameters
----------
gene_trees : str
Best trees file from phylomeDB.
out_dir : str
output directory.
aln_dir : str
directory where the alns are stored. It can be ignored if the alns are not useful.
keep_model : bool
Keep the best model found in phylomeDB
Returns
-------
type
A directory with the structure and files needed for GeneRax to run.
"""
gene_file = os.path.basename(gene_trees)
gene_noex = os.path.splitext(gene_file)[0]
gene_dir = os.path.dirname(gene_trees)
if not os.path.isdir(out_dir):
os.mkdir(out_dir)
if not os.path.isdir(out_dir + "/trees/"):
os.mkdir(out_dir + "/trees/")
if not os.path.isdir(out_dir + "/mapping/"):
os.mkdir(out_dir + "/mapping/")
out_map = out_dir + "/mapping/mapping"
out_family = out_dir + "/family.txt"
all_leaves = []
with open(out_family, "w") as f:
f.write("[FAMILIES]\n")
with open(gene_trees, "r") as g:
for line in g:
line = line.strip().split()
id = line[0]
tree = ete3.Tree(line[3])
leaves = [leaf.name for leaf in tree.iter_leaves()]
species = list(set(leaf.split("_")[1] for leaf in leaves))
sp_gene_dict = {}
for s in species:
sp_genes = []
for gene in leaves:
nm = gene.split("_")
if nm[1] == s:
sp_genes.append(gene)
sp_gene_dict[s] = sp_genes
out_mapfile = out_map + "_" + id + ".txt"
with open(out_mapfile, "w") as m:
for k in sp_gene_dict.keys():
if len(sp_gene_dict[k]) > 0:
m.write(str(k) + ":" + ";".join(sp_gene_dict[k]) + "\n")
f.write("- " + id + "\n")
id_file = out_dir + "/trees/" + id + ".gene.newick"
tree.write(outfile=id_file)
f.write("starting_gene_tree = " + id_file + "\n")
if keep_model:
model = line[1]
f.write("subst_model = " + model + "\n")
if aln_dir is not None:
aln_file = aln_dir + "/" + id + ".clean.fasta"
aln_file_u = aln_dir + "/" + id + ".clean_noU.fasta"
if os.path.exists(aln_file_u):
f.write("alignment = " + aln_file_u + "\n")
elif os.path.exists(aln_file):
f.write("alignment = " + aln_file + "\n")
else:
# sys.exit("could not find file: " + aln_file)
print("could not find file: " + aln_file)
continue
f.write("mapping = " + out_mapfile + "\n")
def scan_for_Us(aln_dir, what="U", replace=False, with_what="C"):
"""Scan for U (Selenocysteine) characters in alignment. useful
if using generax as alignments containing Us won't be parsed.
Parameters
----------
aln_dir : str
        Directory where the alignments are stored.
what : str
Character to find (U is default).
replace : bool
        If True, create a copy of each alignment with the character replaced, named with _no<what> added.
Returns
-------
type
        Reports which alignments contain the non-canonical amino acid and, if requested, writes a copy of each with those amino acids replaced.
"""
if os.path.exists(aln_dir):
is_u_ever = False
for file in os.listdir(aln_dir):
if file.endswith("clean.fasta"):
toread = os.path.join(aln_dir, file)
is_U = False
cmd = "grep -h -v '>' " + toread + " | grep " + what
files_u = run_command_with_return(cmd)
if len(files_u) > 0:
is_U = True
is_u_ever = True
if replace:
outname = "clean_no" + what
towrite = toread.replace("clean", outname)
cmd = (
"sed '/^>/! {s/"
+ what
+ "/"
+ with_what
+ "/g}' "
+ toread
+ " > "
+ towrite
)
run_command(cmd)
if is_U:
print(file + " Contains " + what)
if not is_u_ever:
print("No files contain " + what)
def get_astral_pro_data(gene_trees, out_file):
"""Obtain data to run astral-pro.
Parameters
----------
gene_trees : str
Best trees file from PhylomeDB.
out_file : str
outputfile.
Returns
-------
file
Write outputfile in specified directory.
"""
with open(out_file, "w") as o:
with open(gene_trees) as t:
for line in t:
line = line.split()
tree = ete3.Tree(line[3])
for leaf in tree.iter_leaves():
leaf.name = leaf.name.split("_")[1]
string = tree.write()
o.write(string + "\n")
def get_all_species(fileTree):
"""Get all mmnemonics code in best trees file in phylomedb.
Parameters
----------
fileTree : str
best tree file from PhylomeDB
Returns
-------
list
List of mnemonics codes found in file.
"""
set_sp = set()
with open(fileTree) as t:
for line in t:
line = line.split()
tree = ete3.Tree(line[3])
for leaf in tree.iter_leaves():
leaf.name = leaf.name.split("_")[1]
set_sp.add(leaf.name)
return list(set_sp)
def get_all_species_counts(fileTree):
"""Get all mmnemonics code and number of trees in which the species is present in best trees file in phylomedb.
Parameters
----------
fileTree : str
best tree file from PhylomeDB
Returns
-------
list
        Dictionary of mnemonic codes found in the file plus the number of trees.
"""
dict_occurence = {}
with open(fileTree) as t:
for line in t:
line = line.split()
tree = ete3.Tree(line[3])
list_sp = [name.split("_")[1] for name in tree.get_leaf_names()]
set_sp = set(list_sp)
for leaf in set_sp:
if dict_occurence.get(leaf) is None:
dict_occurence[leaf] = [0, 0]
# if divided:
dict_occurence[leaf][1] += list_sp.count(leaf)
# else:
dict_occurence[leaf][0] += 1
return dict_occurence
def normalize_counts(counts_df, norm_dict):
df = counts_df[counts_df.index.isin(norm_dict.keys())]
# df = df[~df.index.str.contains("node_")]
df = df.apply(lambda value: value / norm_dict[value.name], axis=1)
return df
def get_all_models_counts(treefile, data=False):
with open(treefile) as t:
models = [(line.split()[0], line.split()[1]) for line in t]
dict_counts = dict((x, models.count(x)) for x in set(models))
if data:
df = []
for k in models:
if "+" in k[1]:
model = k[1][: k[1].find("+")]
else:
model = k[1]
if "+F" in k[1]:
freq = True
else:
freq = False
if "+I" in k[1]:
inv = True
else:
inv = False
if "+G" in k[1]:
gamma = "G"
num_cat = int(k[1][k[1].find("+G") + 2])
elif "+R" in k[1]:
gamma = "R"
num_cat = int(k[1][k[1].find("+R") + 2])
else:
gamma = "None"
num_cat = 0
df.append([k[0], model, freq, inv, gamma, num_cat])
dict_counts = pd.DataFrame(
df, columns=["gene", "model", "freq", "inv", "gamma", "num_cat"]
)
return dict_counts
def get_species_name(node_name_string):
spcode = node_name_string.split("_")[1]
return spcode
def get_ecce_data(
gene_trees, species_tree, out_dir, root=False, midpoint=False, ultrametric=False
):
"""Build data useful for ecceTERA.
Parameters
----------
gene_trees : str
Best trees file from PhylomeDB.
species_tree : str
Species tree
out_dir : str
output directory.
root : bool
Root the gene trees before comparing.
midpoint : bool
        If root=True, root by midpoint; if False, the trees will be rooted with the spe2age dictionary.
ultrametric : bool
Convert the species tree to ultrametric (to run dated analysis)
Returns
-------
type
        Files needed by ecceTERA, written as ecce_* files in out_dir.
"""
gene_file = os.path.basename(gene_trees)
# gene_noex = os.path.splitext(gene_file)[0]
# gene_dir = os.path.dirname(gene_trees)
sp_file = os.path.basename(species_tree)
# sp_noex = os.path.splitext(sp_file)[0]
# sp_dir = os.path.dirname(species_tree)
sp = ete3.Tree(species_tree)
if not os.path.isdir(out_dir):
os.mkdir(out_dir)
if ultrametric:
sp.convert_to_ultrametric()
sp.write(outfile=out_dir + "/ecce_ultra_" + sp_file, format=5)
sp_names = sp.get_leaf_names()
out_gene = out_dir + "/ecce_" + gene_file
if root and not midpoint:
with open(gene_trees) as g:
first_line = g.readline().strip().split()
seed = first_line[0].split("_")[1]
sp2age = build_sp2age(sp, seed)
with open(out_gene, "a") as o:
with open(gene_trees) as g:
for line in g:
line = line.strip().split()
tree = line[3]
if root:
t = ete3.PhyloTree(tree, sp_naming_function=load_species_name)
if midpoint:
t.set_outgroup(t.get_midpoint_outgroup())
else:
t.set_outgroup(t.get_farthest_oldest_leaf(sp2age))
# t.resolve_polytomy()
else:
t = ete3.Tree(tree)
# t.resolve_polytomy()
for spec in sp_names:
num = 1
for leaf in t.iter_leaves():
nm = leaf.name.split("_")
if spec == nm[1]:
leaf.name = nm[1] + "_" + str(num) + "_" + nm[0]
num = num + 1
string = t.write()
o.write(string + "\n")
def get_tax_dict_uniprot(tree, uniprot_df): # add solve species??
"""Get taxonomic code from mnemonic starting from uniprot speclist file.
Parameters
----------
tree : tree
Tree with mnemo code as leaf names.
uniprot_df : str
        File of UniProt speclist. It can be retrieved with: "curl https://www.uniprot.org/docs/speclist.txt -o data/speclist_$(date +'%d_%m_%y').txt"
Returns
-------
dict
Dictionary with mnemos as key and ncbi taxid as values.
"""
ncbi = ete3.NCBITaxa()
sp = [nm for nm in tree.get_leaf_names()]
sp = list(set([s.split("_")[-1] if "_" in s else s for s in sp]))
sp_str = [el for el in sp if not el.isdecimal()]
sp_num = [el for el in sp if el.isdecimal()]
with open(uniprot_df) as f:
sp_line = [line.split() for line in f.readlines()]
sp_line = [line for line in sp_line if line != []]
taxo_dict = {
line[0]: line[2].replace(":", "") for line in sp_line if line[0] in sp_str
}
for el in sp_num:
if len(ncbi.get_taxid_translator([el])) > 0:
taxo_dict[el] = el
absent = set([abs for abs in sp if abs not in taxo_dict.keys()])
if absent:
print("Warning: " + " ".join(absent) + " was not found in the dictionary.")
print("You may want to add it manually")
return taxo_dict
def get_three_consistency(dict_leaves, trees):
# EXAMPLE
# treefile812 = "test_data/best_trees812.nwk"
#
# with open(treefile812) as bt:
# trees812 = [ap.load_tree(line.split()[3]) for line in bt]
#
# dict_cnidaria = {"cnidaria": ["NEMVE"], "ctenophora": ["MNELE"], "porifera": ["AMPQE"]}
#
# cnidaria = get_three_consistency(dict_cnidaria, trees812)
# cnidaria
all = [y for el in [*dict_leaves.values()] for y in el]
three_trees = []
taxa = list(dict_leaves.keys())
for tree in trees:
t = tree.copy()
tokeep = [node for node in t if node.species in all]
if len(tokeep) == 0:
continue
t.prune(tokeep)
num_1 = [n for n in t if n.species in dict_leaves[taxa[0]]]
num_2 = [n for n in t if n.species in dict_leaves[taxa[1]]]
num_3 = [n for n in t if n.species in dict_leaves[taxa[2]]]
for n in num_1:
n.name = taxa[0]
for n in num_2:
n.name = taxa[1]
for n in num_3:
n.name = taxa[2]
t.set_species_naming_function(lambda node: node.name)
t = t.collapse_lineage_specific_expansions()
if len(set([n.species for n in t])) < 3:
continue
for a in t.get_speciation_trees()[2]:
# # only get subtree with at least one from all groups
# num_1 = [n for n in a if n.name in dict_leaves[taxa[0]]]
# sp_1 = [n.name for n in num_1]
# num_2 = [n for n in a if n.name in dict_leaves[taxa[1]]]
# sp_2 = [n.name for n in num_2]
# num_3 = [n for n in a if n.name in dict_leaves[taxa[2]]]
# sp_3 = [n.name for n in num_3]
# if len(num_1) > 0 and len(num_2) > 0 and len(num_3) > 0:
# mono_1 = a.check_monophyly(sp_1, target_attr="name")[0]
# mono_2 = a.check_monophyly(sp_2, target_attr="name")[0]
# mono_3 = a.check_monophyly(sp_3, target_attr="name")[0]
# # only consider trees where each group is monophyletyc
# if mono_1 and mono_2 and mono_3:
if len(a.get_leaf_names()) == 3:
three_trees.append(a)
dict_topo = {}
for t in three_trees:
t.sort_descendants()
topo = t.write(format=9)
dict_topo[topo] = dict_topo.get(topo, 0) + 1
return dict_topo
def get_tax_dict_info(info_txt):
"""Get taxonomic code from phylomedb info file.
Parameters
----------
info_txt : str
info.txt file from phylomeDB ftp server.
Returns
-------
dict
Dictionary with mnemos as key and ncbi taxid as values.
"""
with open(info_txt) as i:
flag = False
lines = []
for line in i:
if "TaxaID" in line:
flag = True
if flag:
lines.append(line)
tax_dict = {el.split()[1].split(".")[0]: el.split()[0] for el in lines[2:]}
return tax_dict
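# Illustrative usage sketch (not part of the original code; the file name is
# made up -- the info file can be downloaded with get_ftp_files):
#
#   tax_dict = get_tax_dict_info("phylome_0004/phylome_info.txt")
#   # -> dict mapping mnemonic codes to NCBI taxids, e.g. {'HUMAN': '9606', ...}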
def get_taxonomic_df(tax_dict, set_cols=False, fill=False):
ncbi = ete3.NCBITaxa()
tax_resolved = ncbi.get_taxid_translator([sp for sp in tax_dict.values()])
tax_dict = {k: v for k, v in tax_dict.items() if int(v) in tax_resolved.keys()}
mnemo_sp = {}
for mnemo in tax_dict:
id = int(tax_dict[mnemo])
try:
species = tax_resolved[id]
except KeyError:
species = mnemo
mnemo_sp[mnemo] = species
whole_tax_dict = {}
for key, value in tax_resolved.items():
# sp_name = "".join(ncbi.get_taxid_translator([key]).values())
lineage = ncbi.get_lineage(key)
names = ncbi.get_taxid_translator(lineage)
rank = ncbi.get_rank(lineage)
ordered_names = [names[taxid] for taxid in lineage]
ordered_clades = [rank[taxid] for taxid in lineage]
taxonomy = list(zip(ordered_names, ordered_clades))
# d = {k:{rank[k]:lineage[k]} for k in lineage.keys()}
whole_tax_dict[value] = taxonomy
if set_cols:
common_clades = [
"species",
"genus",
"family",
"order",
"class",
"phylum",
"kingdom",
"superkingdom",
]
else:
seen = []
for sp in whole_tax_dict:
set_single = set()
for id in whole_tax_dict[sp]:
set_single.add(id[1])
# for clade in whole_tax_dict[sp][id]:
# set_single.add(clade)
seen.append(set_single)
common_clades = list(set.intersection(*seen) - set(["no rank", "clade"]))
for key in whole_tax_dict:
if not set_cols:
new_tuple = [el for el in whole_tax_dict[key] if el[1] in common_clades]
else:
clades_present = [
el[1] for el in whole_tax_dict[key] if el[1] in common_clades
]
memory = ()
new_tuple = []
for clade in common_clades[::-1]:
if clade in clades_present:
toadd = [el for el in whole_tax_dict[key] if el[1] == clade][0]
memory = toadd
elif fill and len(memory) > 0:
toadd = (memory[0], clade)
else:
toadd = ("", clade)
new_tuple.append(toadd)
whole_tax_dict[key] = new_tuple
return whole_tax_dict
def get_dataframe(whole_tax_dict):
data = []
for key in whole_tax_dict:
row = [el[0] for el in whole_tax_dict[key]]
col_names = [el[1] for el in whole_tax_dict[key]][::-1]
data.append(row[::-1])
df = pd.DataFrame(data, columns=col_names)
return df
def annotate_taxonomy(sptree, whole_tax_dict, tax_dict):
out_sptree = sptree.copy("cpickle")
ncbi = ete3.NCBITaxa()
tax_resolved = ncbi.get_taxid_translator([sp for sp in tax_dict.values()])
tax_dict = {k: v for k, v in tax_dict.items() if int(v) in tax_resolved.keys()}
mnemo_sp = {}
for mnemo in tax_dict:
id = int(tax_dict[mnemo])
try:
species = tax_resolved[id]
except KeyError:
species = mnemo
mnemo_sp[mnemo] = species
for node in out_sptree.iter_leaves():
if node.name in mnemo_sp.keys():
node.species = mnemo_sp[node.name]
else:
node.species = node.name
all_phyla = set()
no_species_dict = {}
for key in whole_tax_dict:
no_sp = [el for el in whole_tax_dict[key] if el[1] != "species"]
no_species_dict[key] = no_sp
for key in whole_tax_dict:
for el in no_species_dict[key]:
all_phyla.add(el[0])
color_dict = {}
colors = ete3.random_color(num=len(set(all_phyla)))
color_dict = {el[1]: el[0] for el in list(zip(colors, list(all_phyla)))}
color_dict[""] = "white"
for key in no_species_dict:
new_tuple = [el + (color_dict[el[0]],) for el in no_species_dict[key]]
no_species_dict[key] = new_tuple
num_clades = [len(no_species_dict[key]) for key in no_species_dict][0]
for node in out_sptree.iter_leaves():
if node.name in tax_dict.keys():
sp_df = no_species_dict[node.species][::-1]
else:
sp_df = [("", "", "")] * num_clades
node.add_feature("col_df", sp_df)
return out_sptree
def annotate_boxes(taxo_sptree, whole_tax_dict, target=5):
clade_dict = {}
for tax in whole_tax_dict.values():
for el in tax:
if el[1] not in clade_dict:
clade_dict[el[1]] = set()
clade_dict[el[1]].add(el[0])
num_dict = {el: len(clade_dict[el]) for el in clade_dict}
key, value = min(num_dict.items(), key=lambda kv: abs(kv[1] - target))
out_sptree = taxo_sptree.copy("cpickle")
classes = set()
for node in out_sptree:
for row in node.col_df:
if key in row:
node.tax_class = row[0]
classes.add(node.tax_class)
color_dict = {}
colors = ete3.random_color(num=len(classes))
color_dict = {el[1]: el[0] for el in list(zip(colors, list(classes)))}
for tax_class in list(classes):
nodes = []
for node in out_sptree:
if node.species not in whole_tax_dict:
node.tax_class = ""
elif node.tax_class == tax_class:
nodes.append(node.name)
# nodes = [
# node.name for node in out_sptree node.tax_class == tax_class
# ]
nst = ete3.NodeStyle()
nst["bgcolor"] = color_dict[tax_class]
if len(nodes) > 1:
mrca = out_sptree.get_common_ancestor(nodes)
mrca.set_style(nst)
else:
node = out_sptree & nodes[0]
node.set_style(nst)
return out_sptree, color_dict
# def get_common_ancestor_rank(sptree, tax_dict):
def layout_species_taxon(node):
    width = 100 # try to find a multiplier or something
    # If node is a leaf, add the node's name and its scientific
    # name
node.img_style["size"] = 0
if node.is_leaf():
name_face = ete3.faces.AttrFace("species")
ete3.faces.add_face_to_node(name_face, node, column=0, position="branch-right")
col_idx = 1
for clade in node.col_df:
rect = ete3.faces.RectFace(
width,
20,
bgcolor=clade[2],
fgcolor=clade[2],
label={"text": clade[0], "color": "white", "fontsize": 6},
)
ete3.faces.add_face_to_node(rect, node, column=col_idx, aligned=True)
col_idx += 1
def layout_species_std(node):
node.img_style["size"] = 0
if node.is_leaf():
name_face = ete3.faces.AttrFace("species")
ete3.faces.add_face_to_node(name_face, node, column=0, position="branch-right")
def viz_species_tree(
sptree,
legend=None,
taxonomy=False,
circular=False,
show=True,
render=None,
bs=False,
):
ts = ete3.TreeStyle()
ts.show_leaf_name = False
# ts.allow_face_overlap = True
ts.draw_aligned_faces_as_table = True
if legend is not None:
ts.legend_position = 4
for el in legend.items():
ts.legend.add_face(
ete3.faces.RectFace(width=10, height=10, fgcolor=el[1], bgcolor=el[1]),
column=0,
)
ts.legend.add_face(ete3.faces.TextFace(" " + el[0]), column=1)
if taxonomy:
ts.layout_fn = layout_species_taxon
else:
ts.layout_fn = layout_species_std
if circular:
ts.mode = "c"
if show:
sptree.show(tree_style=ts)
if bs:
ts.show_branch_support = True
if render is not None:
sptree.render(render, tree_style=ts)
def annotate_genetree(genetree, taxo_dict):
"""Annotate genetree in order to visualize the events.
Parameters
----------
genetree : tree
Gene tree.
taxo_dict : dict
Taxonomic dict obtained with get_tax_dict_*
Returns
-------
tree
tree where each leaf has a color based on taxonomic id
"""
colors = ete3.random_color(num=len(set(taxo_dict)))
mnemo_sp = {}
num = 0
for mnemo in taxo_dict:
# id = int(tax_dict[mnemo])
# species = tax_resolved[id]
col = colors[num]
num += 1
mnemo_sp[mnemo] = col
for node in genetree.iter_leaves():
node.mnemo = node.name.split("_")[1]
# node.species = mnemo_sp[node.mnemo][0]
if node.mnemo in taxo_dict.keys():
node.col = mnemo_sp[node.mnemo]
else:
node.col = "black"
return genetree
def layout_grax_nhx(node):
    # If node is a leaf, add the node's name and its scientific
    # name
if node.is_leaf():
nameFace = ete3.faces.TextFace(node.name, fgcolor=node.col)
ete3.faces.add_face_to_node(nameFace, node, column=0)
node.img_style["size"] = 7
node.img_style["shape"] = "circle"
node.img_style["fgcolor"] = node.col
if node.D != "N":
node.img_style["size"] = 15
node.img_style["shape"] = "circle"
node.img_style["fgcolor"] = "darkred"
elif node.H != "N":
node.img_style["size"] = 15
node.img_style["shape"] = "circle"
node.img_style["fgcolor"] = "darkgreen"
HGTFace = ete3.faces.TextFace("-".join(node.H.split("@")[1:]), fsize=5)
ete3.faces.add_face_to_node(HGTFace, node, column=0, position="branch-bottom")
else:
node.img_style["size"] = 0
def viz_grax_tree(genetree, show=True, render=None):
ts = ete3.TreeStyle()
ts.show_leaf_name = False
ts.show_branch_length = True
ts.show_branch_support = True
ts.layout_fn = layout_grax_nhx
if show:
genetree.show(tree_style=ts)
if render is not None:
genetree.render(render, tree_style=ts)
def read_per_sp_counts(events_file):
df = pd.read_csv(events_file, sep=" ")
df["#S"] = df["#S"].str.replace(r"S=", "")
df["#SL"] = df["#SL"].str.replace(r"SL=", "")
df["#D"] = df["#D"].str.replace(r"D=", "")
df["#T"] = df["#T"].str.replace(r"T=", "")
df["#TL"] = df["#TL"].str.replace(r"TL=", "")
df[df.columns] = df[df.columns].apply(pd.to_numeric, errors="coerce")
df.columns = [col.replace("#", "") for col in df.columns]
return df
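# Illustrative usage sketch (not part of the original code; the file name is
# made up). The parser strips the "S="/"SL="/"D="/"T="/"TL=" prefixes from the
# per-species event counts and returns numeric columns:
#
#   df = read_per_sp_counts("per_species_event_counts.txt")
#   print(df[["S", "SL", "D", "T", "TL"]].sum())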
def annotate_sptree_counts(sptree, counts_df):
out_sptree = sptree.copy("cpickle")
for node in out_sptree.traverse():
values = [int(el) for el in counts_df.loc[node.name]]
node.values = values
return out_sptree
def annotate_sptree_counts_both(sptree, counts_ecce, counts_grax, matches):
out_sptree = sptree.copy("cpickle")
for node in out_sptree.traverse():
ecce_values = [int(el) for el in counts_ecce.loc[node.name]]
node.ecce_values = ecce_values
grax_key = matches[node.name]
grax_values = [int(el) for el in counts_grax.loc[grax_key]]
node.grax_values = grax_values
return out_sptree
def layout_both_bars(node):
palette = ["#ed4c67", "#f69e1f", "#1289a7"] + ["#ed4c67", "#f69e1f", "#1289a7"]
values = node.ecce_values + node.grax_values[1:-1]
# values = [j - i for i, j in zip(node.grax_values[1:-1], node.ecce_values)]
B = ete3.faces.BarChartFace(values, colors=palette) # SL,D,T!
    # Add node name to leaf nodes
ete3.faces.add_face_to_node(B, node, 0, position="branch-right")
def layout_pies(node):
# if node.is_leaf():
# values = [int(el) for el in df.loc[node.name][2:-1]]
# norm_vals = [(val / sum(values))*100 for val in values]
# labels = ["D", "T"]
# else:
palette = ["#ed4c67", "#f69e1f", "#1289a7"]
if len(node.values) > 3:
values = node.values[1:-1]
else:
values = node.values
if sum(values) > 0:
norm_vals = [(val / sum(values)) * 100 for val in values]
else:
norm_vals = [0, 0, 0]
# B = faces.BarChartFace(values=values, labels=labels)
P = ete3.faces.PieChartFace(norm_vals, 100, 100, colors=palette) # SL,D,T!
    # Add node name to leaf nodes
# faces.add_face_to_node(B, node, 0)#, position="branch-right")
ete3.faces.add_face_to_node(P, node, 0, position="branch-right")
def viz_pies_tree(sptree, circular=False, show=True, render=None):
palette = ["#ed4c67", "#f69e1f", "#1289a7"]
labels = ["SL", "D", "T"]
legend = list(zip(labels, palette))
ts = ete3.TreeStyle()
ts.legend_position = 4
ts.layout_fn = layout_pies
if circular:
ts.mode = "c"
for el in legend:
ts.legend.add_face(
ete3.faces.RectFace(width=50, height=50, fgcolor=el[1], bgcolor=el[1]),
column=0,
)
ts.legend.add_face(ete3.faces.TextFace(" " + el[0], fsize=20), column=1)
if show:
sptree.show(tree_style=ts)
if render is not None:
sptree.render(render, tree_style=ts)
def viz_both_bars_tree(sptree, circular=False, show=True, render=None):
palette = ["#ed4c67", "#f69e1f", "#1289a7"]
labels = ["L", "D", "T"]
legend = list(zip(labels, palette))
ts = ete3.TreeStyle()
ts.legend_position = 4
ts.layout_fn = layout_both_bars
if circular:
ts.mode = "c"
for el in legend:
ts.legend.add_face(
ete3.faces.RectFace(width=50, height=50, fgcolor=el[1], bgcolor=el[1]),
column=0,
)
ts.legend.add_face(ete3.faces.TextFace(" " + el[0], fsize=20), column=1)
if show:
sptree.show(tree_style=ts)
if render is not None:
sptree.render(render, tree_style=ts)
def annotate_sptree_ancsize(sptree, counts_df):
out_sptree = sptree.copy("cpickle")
# very ugly code!
scale = 0
for node in out_sptree.traverse():
if not node.is_leaf():
            anc_size = sum([int(el) for el in counts_df.loc[node.name][0:2]])  # S + SL (see note below)
if anc_size > scale:
scale = anc_size
for node in out_sptree.traverse():
if not node.is_leaf():
anc_size = (
                sum([int(el) for el in counts_df.loc[node.name][0:2]]) / scale * 10  # S + SL
)
node.anc_size = anc_size
return out_sptree, scale
# Comment 2: the best way to estimate the size of an ancestral genome is to count the number of S and SL events associated with this genome. Do not forget the SL events.
def layout_anc(node):
node.img_style["size"] = 0
if not node.is_leaf():
# norm_vals = [(val / sum(values)) * 100 for val in values]
C = ete3.faces.CircleFace(radius=node.anc_size, color="teal", style="sphere")
# C.opacity = 0.1
        # Add node name to leaf nodes
ete3.faces.add_face_to_node(
C, node, 0, position="float"
) # , position="branch-right")
def viz_ancsize_tree(sptree, scale, circular=False, show=True, render=None):
ts_anc = ete3.TreeStyle()
ts_anc.layout_fn = layout_anc
if circular:
ts_anc.mode = "c"
ts_anc.legend_position = 4
ts_anc.legend.add_face(
ete3.faces.CircleFace(10, color="teal", style="sphere"), column=0
)
ts_anc.legend.add_face(ete3.faces.TextFace(" " + "S+SL: " + str(scale)), column=1)
if show:
sptree.show(tree_style=ts_anc)
if render is not None:
sptree.render(render, tree_style=ts_anc)
def rename_tree_tax(tree, taxo_dict):
for leaf in tree.iter_leaves():
mnemo = leaf.name.split("_")[-1]
if mnemo in taxo_dict.keys():
leaf.name = taxo_dict[mnemo]
else:
leaf.name = mnemo
return tree
def analyze_rooting(spe2age):
# fracs = {}
num = len([k for k, v in spe2age.items() if v == max(spe2age.values())])
den = len(spe2age)
frac = str(num) + "/" + str(den)
dec = num / den
fracs = [frac, dec, num, den]
return fracs
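# Illustrative example (not part of the original code; the dictionary is made
# up). One of three species sits at the maximum age, so:
#
#   analyze_rooting({'HUMAN': 1, 'MOUSE': 2, 'DROME': 3})
#   # -> ['1/3', 0.3333333333333333, 1, 3]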
def get_ftp_stats(phylome_ids=None, all=False):
"""Get files and dimensions for a phylome stored in FTP server.
Parameters
----------
phylome_ids : list
List of phylome ids (if not given all must be set to True).
all : bool
If set to True it will return the name-size dictionary for all phylomes in ftp.
Returns
-------
dict
        A dictionary where filenames are keys and byte sizes are values.
"""
ftp = FTP("ftp.phylomedb.org")
ftp.login()
ftp.cwd("phylomedb")
ftp.cwd("phylomes")
if not all:
subdir = ["phylome_" + str(format(int(num), "04d")) for num in phylome_ids]
else:
subdir = [id for id in ftp.nlst() if "phylome_" in id]
ftp_dict = {}
for dir in subdir:
ftp.cwd(dir)
dim_id = {}
for el in ftp.nlst():
ftp.sendcmd("TYPE i") # Switch to Binary mode
dim = ftp.size(el)
dim_id[el] = dim
ftp_dict[dir] = dim_id
ftp.cwd("../")
ftp.close()
return ftp_dict
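# Illustrative usage sketch (not part of the original code; requires network
# access to ftp.phylomedb.org):
#
#   stats = get_ftp_stats(phylome_ids=[4])
#   for fname, size in stats["phylome_0004"].items():
#       print(fname, size)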
def get_ftp_files(
phylome_id, outdir=None, to_get=["best_trees", "all_algs", "phylome_info"]
):
"""Get files stored in ftp directory for specific phylome.
Parameters
----------
phylome_id : str or int
Phylome id.
outdir : str
Output directory where files will be stored. If not given it will be the same as in ftp server.
to_get : list
List of file names (without extension) to get from ftp. Default is to_get=["best_trees", "all_algs", "phylome_info"].
Returns
-------
type
An outdir with the requested files if present in ftp.
"""
ftp = FTP("ftp.phylomedb.org")
ftp.login()
ftp.cwd("phylomedb")
ftp.cwd("phylomes")
subdir = "phylome_" + str(format(int(phylome_id), "04d"))
ftp.cwd(subdir)
files_in_ftp = ftp.nlst()
if outdir is None:
outdir = subdir
create_folder(outdir)
for file in files_in_ftp:
# you could add warning if a requested file is not in nlst
file_nm = file.split(".")[0]
if file_nm in to_get:
local_filename = os.path.join(outdir, file)
print("file: " + file + " found, will be written in " + local_filename)
if os.path.isfile(local_filename):
print(local_filename + " already present!")
else:
with open(local_filename, "wb") as f:
ftp.retrbinary("RETR " + file, f.write)
ftp.close()
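# Illustrative usage sketch (not part of the original code; requires network
# access). Download the default trio of files for phylome 4 into ./phylome_0004:
#
#   get_ftp_files(4)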
|
[
"os.mkdir",
"ete3.faces.PieChartFace",
"pandas.read_csv",
"ete3.faces.RectFace",
"subprocess.write",
"ete3.faces.CircleFace",
"os.path.isfile",
"ete3.PhyloTree",
"glob.glob",
"os.path.join",
"pandas.DataFrame",
"ete3.TreeStyle",
"os.path.dirname",
"subprocess.convert_to_ultrametric",
"os.path.exists",
"ete3.faces.TextFace",
"ete3.NodeStyle",
"subprocess.Popen",
"os.path.basename",
"ete3.NCBITaxa",
"ete3.faces.add_face_to_node",
"os.listdir",
"sys.exit",
"ete3.faces.AttrFace",
"os.path.isdir",
"subprocess.get_leaf_names",
"ftplib.FTP",
"os.path.splitext",
"ete3.Tree",
"ete3.faces.BarChartFace"
] |
[((3299, 3357), 'ete3.PhyloTree', 'ete3.PhyloTree', (['tree'], {'sp_naming_function': 'load_species_name'}), '(tree, sp_naming_function=load_species_name)\n', (3313, 3357), False, 'import ete3\n'), ((7818, 7839), 'glob.glob', 'glob.glob', (["(dir + '/*')"], {}), "(dir + '/*')\n", (7827, 7839), False, 'import glob\n'), ((8680, 8708), 'os.path.basename', 'os.path.basename', (['gene_trees'], {}), '(gene_trees)\n', (8696, 8708), False, 'import os\n'), ((8771, 8798), 'os.path.dirname', 'os.path.dirname', (['gene_trees'], {}), '(gene_trees)\n', (8786, 8798), False, 'import os\n'), ((11766, 11789), 'os.path.exists', 'os.path.exists', (['aln_dir'], {}), '(aln_dir)\n', (11780, 11789), False, 'import os\n'), ((17403, 17431), 'os.path.basename', 'os.path.basename', (['gene_trees'], {}), '(gene_trees)\n', (17419, 17431), False, 'import os\n'), ((17541, 17571), 'os.path.basename', 'os.path.basename', (['species_tree'], {}), '(species_tree)\n', (17557, 17571), False, 'import os\n'), ((17671, 17694), 'ete3.Tree', 'ete3.Tree', (['species_tree'], {}), '(species_tree)\n', (17680, 17694), False, 'import ete3\n'), ((17901, 17920), 'subprocess.get_leaf_names', 'sp.get_leaf_names', ([], {}), '()\n', (17918, 17920), True, 'import subprocess as sp\n'), ((19734, 19749), 'ete3.NCBITaxa', 'ete3.NCBITaxa', ([], {}), '()\n', (19747, 19749), False, 'import ete3\n'), ((23667, 23682), 'ete3.NCBITaxa', 'ete3.NCBITaxa', ([], {}), '()\n', (23680, 23682), False, 'import ete3\n'), ((26297, 26334), 'pandas.DataFrame', 'pd.DataFrame', (['data'], {'columns': 'col_names'}), '(data, columns=col_names)\n', (26309, 26334), True, 'import pandas as pd\n'), ((26459, 26474), 'ete3.NCBITaxa', 'ete3.NCBITaxa', ([], {}), '()\n', (26472, 26474), False, 'import ete3\n'), ((30675, 30691), 'ete3.TreeStyle', 'ete3.TreeStyle', ([], {}), '()\n', (30689, 30691), False, 'import ete3\n'), ((33305, 33321), 'ete3.TreeStyle', 'ete3.TreeStyle', ([], {}), '()\n', (33319, 33321), False, 'import ete3\n'), ((33626, 33659), 'pandas.read_csv', 'pd.read_csv', (['events_file'], {'sep': '""" """'}), "(events_file, sep=' ')\n", (33637, 33659), True, 'import pandas as pd\n'), ((34982, 35029), 'ete3.faces.BarChartFace', 'ete3.faces.BarChartFace', (['values'], {'colors': 'palette'}), '(values, colors=palette)\n', (35005, 35029), False, 'import ete3\n'), ((35079, 35143), 'ete3.faces.add_face_to_node', 'ete3.faces.add_face_to_node', (['B', 'node', '(0)'], {'position': '"""branch-right"""'}), "(B, node, 0, position='branch-right')\n", (35106, 35143), False, 'import ete3\n'), ((35701, 35761), 'ete3.faces.PieChartFace', 'ete3.faces.PieChartFace', (['norm_vals', '(100)', '(100)'], {'colors': 'palette'}), '(norm_vals, 100, 100, colors=palette)\n', (35724, 35761), False, 'import ete3\n'), ((35879, 35943), 'ete3.faces.add_face_to_node', 'ete3.faces.add_face_to_node', (['P', 'node', '(0)'], {'position': '"""branch-right"""'}), "(P, node, 0, position='branch-right')\n", (35906, 35943), False, 'import ete3\n'), ((36141, 36157), 'ete3.TreeStyle', 'ete3.TreeStyle', ([], {}), '()\n', (36155, 36157), False, 'import ete3\n'), ((36824, 36840), 'ete3.TreeStyle', 'ete3.TreeStyle', ([], {}), '()\n', (36838, 36840), False, 'import ete3\n'), ((38592, 38608), 'ete3.TreeStyle', 'ete3.TreeStyle', ([], {}), '()\n', (38606, 38608), False, 'import ete3\n'), ((40022, 40046), 'ftplib.FTP', 'FTP', (['"""ftp.phylomedb.org"""'], {}), "('ftp.phylomedb.org')\n", (40025, 40046), False, 'from ftplib import FTP\n'), ((41235, 41259), 'ftplib.FTP', 'FTP', (['"""ftp.phylomedb.org"""'], {}), 
"('ftp.phylomedb.org')\n", (41238, 41259), False, 'from ftplib import FTP\n'), ((926, 946), 'os.path.exists', 'os.path.exists', (['name'], {}), '(name)\n', (940, 946), False, 'import os\n'), ((8725, 8752), 'os.path.splitext', 'os.path.splitext', (['gene_file'], {}), '(gene_file)\n', (8741, 8752), False, 'import os\n'), ((8811, 8833), 'os.path.isdir', 'os.path.isdir', (['out_dir'], {}), '(out_dir)\n', (8824, 8833), False, 'import os\n'), ((8843, 8860), 'os.mkdir', 'os.mkdir', (['out_dir'], {}), '(out_dir)\n', (8851, 8860), False, 'import os\n'), ((8872, 8906), 'os.path.isdir', 'os.path.isdir', (["(out_dir + '/trees/')"], {}), "(out_dir + '/trees/')\n", (8885, 8906), False, 'import os\n'), ((8916, 8945), 'os.mkdir', 'os.mkdir', (["(out_dir + '/trees/')"], {}), "(out_dir + '/trees/')\n", (8924, 8945), False, 'import os\n'), ((8957, 8993), 'os.path.isdir', 'os.path.isdir', (["(out_dir + '/mapping/')"], {}), "(out_dir + '/mapping/')\n", (8970, 8993), False, 'import os\n'), ((9003, 9034), 'os.mkdir', 'os.mkdir', (["(out_dir + '/mapping/')"], {}), "(out_dir + '/mapping/')\n", (9011, 9034), False, 'import os\n'), ((11837, 11856), 'os.listdir', 'os.listdir', (['aln_dir'], {}), '(aln_dir)\n', (11847, 11856), False, 'import os\n'), ((17707, 17729), 'os.path.isdir', 'os.path.isdir', (['out_dir'], {}), '(out_dir)\n', (17720, 17729), False, 'import os\n'), ((17739, 17756), 'os.mkdir', 'os.mkdir', (['out_dir'], {}), '(out_dir)\n', (17747, 17756), False, 'import os\n'), ((17786, 17813), 'subprocess.convert_to_ultrametric', 'sp.convert_to_ultrametric', ([], {}), '()\n', (17811, 17813), True, 'import subprocess as sp\n'), ((17822, 17884), 'subprocess.write', 'sp.write', ([], {'outfile': "(out_dir + '/ecce_ultra_' + sp_file)", 'format': '(5)'}), "(outfile=out_dir + '/ecce_ultra_' + sp_file, format=5)\n", (17830, 17884), True, 'import subprocess as sp\n'), ((29182, 29198), 'ete3.NodeStyle', 'ete3.NodeStyle', ([], {}), '()\n', (29196, 29198), False, 'import ete3\n'), ((29779, 29809), 'ete3.faces.AttrFace', 'ete3.faces.AttrFace', (['"""species"""'], {}), "('species')\n", (29798, 29809), False, 'import ete3\n'), ((29818, 29897), 'ete3.faces.add_face_to_node', 'ete3.faces.add_face_to_node', (['name_face', 'node'], {'column': '(0)', 'position': '"""branch-right"""'}), "(name_face, node, column=0, position='branch-right')\n", (29845, 29897), False, 'import ete3\n'), ((30405, 30435), 'ete3.faces.AttrFace', 'ete3.faces.AttrFace', (['"""species"""'], {}), "('species')\n", (30424, 30435), False, 'import ete3\n'), ((30444, 30523), 'ete3.faces.add_face_to_node', 'ete3.faces.add_face_to_node', (['name_face', 'node'], {'column': '(0)', 'position': '"""branch-right"""'}), "(name_face, node, column=0, position='branch-right')\n", (30471, 30523), False, 'import ete3\n'), ((32497, 32545), 'ete3.faces.TextFace', 'ete3.faces.TextFace', (['node.name'], {'fgcolor': 'node.col'}), '(node.name, fgcolor=node.col)\n', (32516, 32545), False, 'import ete3\n'), ((32554, 32607), 'ete3.faces.add_face_to_node', 'ete3.faces.add_face_to_node', (['nameFace', 'node'], {'column': '(0)'}), '(nameFace, node, column=0)\n', (32581, 32607), False, 'import ete3\n'), ((38244, 38317), 'ete3.faces.CircleFace', 'ete3.faces.CircleFace', ([], {'radius': 'node.anc_size', 'color': '"""teal"""', 'style': '"""sphere"""'}), "(radius=node.anc_size, color='teal', style='sphere')\n", (38265, 38317), False, 'import ete3\n'), ((38390, 38447), 'ete3.faces.add_face_to_node', 'ete3.faces.add_face_to_node', (['C', 'node', '(0)'], {'position': '"""float"""'}), "(C, node, 
0, position='float')\n", (38417, 38447), False, 'import ete3\n'), ((38753, 38808), 'ete3.faces.CircleFace', 'ete3.faces.CircleFace', (['(10)'], {'color': '"""teal"""', 'style': '"""sphere"""'}), "(10, color='teal', style='sphere')\n", (38774, 38808), False, 'import ete3\n'), ((241, 266), 'subprocess.Popen', 'sp.Popen', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (249, 266), True, 'import subprocess as sp\n'), ((475, 500), 'subprocess.Popen', 'sp.Popen', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (483, 500), True, 'import subprocess as sp\n'), ((656, 695), 'sys.exit', 'sys.exit', (['"""ERROR: Execution cmd failed"""'], {}), "('ERROR: Execution cmd failed')\n", (664, 695), False, 'import sys\n'), ((759, 800), 'subprocess.Popen', 'sp.Popen', (['cmd'], {'shell': '(True)', 'stdout': 'sp.PIPE'}), '(cmd, shell=True, stdout=sp.PIPE)\n', (767, 800), True, 'import subprocess as sp\n'), ((828, 838), 'sys.exit', 'sys.exit', ([], {}), '()\n', (836, 838), False, 'import sys\n'), ((973, 987), 'os.mkdir', 'os.mkdir', (['name'], {}), '(name)\n', (981, 987), False, 'import os\n'), ((3982, 4028), 'sys.exit', 'sys.exit', (['"""Something went wrong with rooting!"""'], {}), "('Something went wrong with rooting!')\n", (3990, 4028), False, 'import sys\n'), ((13993, 14011), 'ete3.Tree', 'ete3.Tree', (['line[3]'], {}), '(line[3])\n', (14002, 14011), False, 'import ete3\n'), ((14657, 14675), 'ete3.Tree', 'ete3.Tree', (['line[3]'], {}), '(line[3])\n', (14666, 14675), False, 'import ete3\n'), ((16445, 16523), 'pandas.DataFrame', 'pd.DataFrame', (['df'], {'columns': "['gene', 'model', 'freq', 'inv', 'gamma', 'num_cat']"}), "(df, columns=['gene', 'model', 'freq', 'inv', 'gamma', 'num_cat'])\n", (16457, 16523), True, 'import pandas as pd\n'), ((29971, 30101), 'ete3.faces.RectFace', 'ete3.faces.RectFace', (['width', '(20)'], {'bgcolor': 'clade[2]', 'fgcolor': 'clade[2]', 'label': "{'text': clade[0], 'color': 'white', 'fontsize': 6}"}), "(width, 20, bgcolor=clade[2], fgcolor=clade[2], label={\n 'text': clade[0], 'color': 'white', 'fontsize': 6})\n", (29990, 30101), False, 'import ete3\n'), ((30204, 30273), 'ete3.faces.add_face_to_node', 'ete3.faces.add_face_to_node', (['rect', 'node'], {'column': 'col_idx', 'aligned': '(True)'}), '(rect, node, column=col_idx, aligned=True)\n', (30231, 30273), False, 'import ete3\n'), ((33117, 33195), 'ete3.faces.add_face_to_node', 'ete3.faces.add_face_to_node', (['HGTFace', 'node'], {'column': '(0)', 'position': '"""branch-bottom"""'}), "(HGTFace, node, column=0, position='branch-bottom')\n", (33144, 33195), False, 'import ete3\n'), ((36318, 36388), 'ete3.faces.RectFace', 'ete3.faces.RectFace', ([], {'width': '(50)', 'height': '(50)', 'fgcolor': 'el[1]', 'bgcolor': 'el[1]'}), '(width=50, height=50, fgcolor=el[1], bgcolor=el[1])\n', (36337, 36388), False, 'import ete3\n'), ((36449, 36491), 'ete3.faces.TextFace', 'ete3.faces.TextFace', (["(' ' + el[0])"], {'fsize': '(20)'}), "(' ' + el[0], fsize=20)\n", (36468, 36491), False, 'import ete3\n'), ((37006, 37076), 'ete3.faces.RectFace', 'ete3.faces.RectFace', ([], {'width': '(50)', 'height': '(50)', 'fgcolor': 'el[1]', 'bgcolor': 'el[1]'}), '(width=50, height=50, fgcolor=el[1], bgcolor=el[1])\n', (37025, 37076), False, 'import ete3\n'), ((37137, 37179), 'ete3.faces.TextFace', 'ete3.faces.TextFace', (["(' ' + el[0])"], {'fsize': '(20)'}), "(' ' + el[0], fsize=20)\n", (37156, 37179), False, 'import ete3\n'), ((41709, 41735), 'os.path.join', 'os.path.join', (['outdir', 'file'], {}), '(outdir, file)\n', (41721, 41735), False, 
'import os\n'), ((41835, 41865), 'os.path.isfile', 'os.path.isfile', (['local_filename'], {}), '(local_filename)\n', (41849, 41865), False, 'import os\n'), ((537, 576), 'sys.exit', 'sys.exit', (['"""Error: Execution cmd failed"""'], {}), "('Error: Execution cmd failed')\n", (545, 576), False, 'import sys\n'), ((3844, 3894), 'sys.exit', 'sys.exit', (['"""Something went wrong with sp2age dict!"""'], {}), "('Something went wrong with sp2age dict!')\n", (3852, 3894), False, 'import sys\n'), ((9376, 9394), 'ete3.Tree', 'ete3.Tree', (['line[3]'], {}), '(line[3])\n', (9385, 9394), False, 'import ete3\n'), ((11928, 11955), 'os.path.join', 'os.path.join', (['aln_dir', 'file'], {}), '(aln_dir, file)\n', (11940, 11955), False, 'import os\n'), ((13395, 13413), 'ete3.Tree', 'ete3.Tree', (['line[3]'], {}), '(line[3])\n', (13404, 13413), False, 'import ete3\n'), ((30939, 31009), 'ete3.faces.RectFace', 'ete3.faces.RectFace', ([], {'width': '(10)', 'height': '(10)', 'fgcolor': 'el[1]', 'bgcolor': 'el[1]'}), '(width=10, height=10, fgcolor=el[1], bgcolor=el[1])\n', (30958, 31009), False, 'import ete3\n'), ((31082, 31114), 'ete3.faces.TextFace', 'ete3.faces.TextFace', (["(' ' + el[0])"], {}), "(' ' + el[0])\n", (31101, 31114), False, 'import ete3\n'), ((10713, 10739), 'os.path.exists', 'os.path.exists', (['aln_file_u'], {}), '(aln_file_u)\n', (10727, 10739), False, 'import os\n'), ((18402, 18460), 'ete3.PhyloTree', 'ete3.PhyloTree', (['tree'], {'sp_naming_function': 'load_species_name'}), '(tree, sp_naming_function=load_species_name)\n', (18416, 18460), False, 'import ete3\n'), ((18750, 18765), 'ete3.Tree', 'ete3.Tree', (['tree'], {}), '(tree)\n', (18759, 18765), False, 'import ete3\n'), ((10834, 10858), 'os.path.exists', 'os.path.exists', (['aln_file'], {}), '(aln_file)\n', (10848, 10858), False, 'import os\n')]
|
'''Main function to train discrete BGAN.
'''
import logging
import lasagne
import numpy as np
import theano
import theano.tensor as T
from lib.data import load_stream
from lib.log_util import set_stream_logger
from lib.loss import get_losses_discrete
from lib.math import est_log_Z
from lib.train import setup, train
from lib.utils import config, make_argument_parser, print_section, setup_out_dir
from lib.viz import setup as setup_viz
from models import build
logger = logging.getLogger('BGAN')
def main(data_args=None, optimizer_args=None, model_args=None, loss_args=None,
         train_args=None):
    '''Main function for discrete BGAN.
    '''
    print_section('LOADING DATA') ##############################################
    train_stream, training_samples, shape, viz_options = load_stream(
        **data_args)
    train_args['training_samples'] = training_samples
    setup_viz(**viz_options)
    model_args.update(**shape)
    loss_args.update(**shape)
    loss_args['batch_size'] = data_args['batch_size']
    print_section('MODEL') #####################################################
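    # Symbolic Theano inputs: a noise matrix for the generator and a 4D
    # image batch for the discriminator.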
    noise_var = T.matrix('noise')
    input_var = T.tensor4('inputs')
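    # Shared scalar tracking the running estimate of the log partition
    # function (log Z) used by the discrete BGAN loss.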
    log_Z = theano.shared(lasagne.utils.floatX(0.), name='log_Z')
    loss_args['loss_options'] = loss_args.get('loss_options', None) or {}
    loss_args['loss_options']['log_Z'] = log_Z
    logger.info('Building model and compiling GAN functions...')
    logger.info('Model args: {}'.format(model_args))
    generator, discriminator = build(noise_var, input_var, **model_args)
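    # The generator emits logits; get_losses_discrete builds the generator and
    # discriminator losses from them, along with a fresh estimate of log Z.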
    g_output_logit = lasagne.layers.get_output(generator)
    g_results, d_results, log_Z_est = get_losses_discrete(
        discriminator, g_output_logit, optimizer_args=optimizer_args,
        **loss_args)
    g_results.update(**{
        'log Z': log_Z,
        'log Z (est)': log_Z_est.mean()
    })
    print_section('OPTIMIZER') #################################################
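    # Build the training functions for the discriminator (train_d) and the
    # generator (train_g), plus a sampling function (gen).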
    train_d, train_g, gen = setup(input_var, noise_var, log_Z, generator,
                                  discriminator, g_results, d_results,
                                  discrete=True, **optimizer_args)
    print_section('TRAIN') #####################################################
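    # Main training loop; a KeyboardInterrupt (Ctrl-C) ends training cleanly.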
    try:
        train(train_d, train_g, gen, train_stream, **train_args)
    except KeyboardInterrupt:
        logger.info('Training interrupted')
        print_section('DONE') ##################################################
        exit(0)
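# Default hyperparameter groups; the config file and command-line flags
# parsed below may override any of these values.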
_default_args = dict(
    data_args=dict(
        batch_size=64,
        discrete=True,
        downsample_to=(32, 32)
    ),
    optimizer_args=dict(
        optimizer='adam',
        optimizer_options=dict(beta1=0.5),
        learning_rate=1e-4,
    ),
    model_args=dict(
        arch='dcgan_28_pub',
        dim_z=64,
        dim_h=64,
        leak=0.2,
        use_batch_norm=False
    ),
    loss_args=dict(
        loss='binary_bgan',
        n_samples=20
    ),
    train_args=dict(
        epochs=100,
        num_iter_gen=1,
        num_iter_disc=1,
        summary_updates=None,
        archive_every=10
    )
)
if __name__ == '__main__':
    parser = make_argument_parser()
    args = parser.parse_args()
    set_stream_logger(args.verbosity)
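    # Work on copies of the default argument groups so that overrides never
    # mutate _default_args itself.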
    kwargs = {}
    for k, v in _default_args.items():
        kwargs[k] = {}
        kwargs[k].update(**v)
    kwargs['data_args']['source'] = args.source
    if args.architecture is not None:
        kwargs['model_args']['arch'] = args.architecture
    out_paths = setup_out_dir(args.out_path, args.name)
    kwargs['train_args'].update(**out_paths)
    config(config_file=args.config_file, **kwargs)
    kwargs['train_args']['batch_size'] = kwargs['data_args']['batch_size']
    kwargs['train_args']['dim_z'] = kwargs['model_args']['dim_z']
    main(**kwargs)
|
[
"lib.utils.print_section",
"lib.viz.setup",
"theano.tensor.tensor4",
"lib.data.load_stream",
"models.build",
"lib.loss.get_losses_discrete",
"lib.utils.make_argument_parser",
"lib.utils.config",
"lib.log_util.set_stream_logger",
"lasagne.utils.floatX",
"lib.train.train",
"lasagne.layers.get_output",
"lib.train.setup",
"lib.utils.setup_out_dir",
"logging.getLogger",
"theano.tensor.matrix"
] |
[((477, 502), 'logging.getLogger', 'logging.getLogger', (['"""BGAN"""'], {}), "('BGAN')\n", (494, 502), False, 'import logging\n'), ((675, 704), 'lib.utils.print_section', 'print_section', (['"""LOADING DATA"""'], {}), "('LOADING DATA')\n", (688, 704), False, 'from lib.utils import config, make_argument_parser, print_section, setup_out_dir\n'), ((809, 833), 'lib.data.load_stream', 'load_stream', ([], {}), '(**data_args)\n', (820, 833), False, 'from lib.data import load_stream\n'), ((901, 925), 'lib.viz.setup', 'setup_viz', ([], {}), '(**viz_options)\n', (910, 925), True, 'from lib.viz import setup as setup_viz\n'), ((1050, 1072), 'lib.utils.print_section', 'print_section', (['"""MODEL"""'], {}), "('MODEL')\n", (1063, 1072), False, 'from lib.utils import config, make_argument_parser, print_section, setup_out_dir\n'), ((1143, 1160), 'theano.tensor.matrix', 'T.matrix', (['"""noise"""'], {}), "('noise')\n", (1151, 1160), True, 'import theano.tensor as T\n'), ((1177, 1196), 'theano.tensor.tensor4', 'T.tensor4', (['"""inputs"""'], {}), "('inputs')\n", (1186, 1196), True, 'import theano.tensor as T\n'), ((1539, 1580), 'models.build', 'build', (['noise_var', 'input_var'], {}), '(noise_var, input_var, **model_args)\n', (1544, 1580), False, 'from models import build\n'), ((1603, 1639), 'lasagne.layers.get_output', 'lasagne.layers.get_output', (['generator'], {}), '(generator)\n', (1628, 1639), False, 'import lasagne\n'), ((1683, 1782), 'lib.loss.get_losses_discrete', 'get_losses_discrete', (['discriminator', 'g_output_logit'], {'optimizer_args': 'optimizer_args'}), '(discriminator, g_output_logit, optimizer_args=\n optimizer_args, **loss_args)\n', (1702, 1782), False, 'from lib.loss import get_losses_discrete\n'), ((1909, 1935), 'lib.utils.print_section', 'print_section', (['"""OPTIMIZER"""'], {}), "('OPTIMIZER')\n", (1922, 1935), False, 'from lib.utils import config, make_argument_parser, print_section, setup_out_dir\n'), ((2015, 2134), 'lib.train.setup', 'setup', (['input_var', 'noise_var', 'log_Z', 'generator', 'discriminator', 'g_results', 'd_results'], {'discrete': '(True)'}), '(input_var, noise_var, log_Z, generator, discriminator, g_results,\n d_results, discrete=True, **optimizer_args)\n', (2020, 2134), False, 'from lib.train import setup, train\n'), ((2214, 2236), 'lib.utils.print_section', 'print_section', (['"""TRAIN"""'], {}), "('TRAIN')\n", (2227, 2236), False, 'from lib.utils import config, make_argument_parser, print_section, setup_out_dir\n'), ((3204, 3226), 'lib.utils.make_argument_parser', 'make_argument_parser', ([], {}), '()\n', (3224, 3226), False, 'from lib.utils import config, make_argument_parser, print_section, setup_out_dir\n'), ((3262, 3295), 'lib.log_util.set_stream_logger', 'set_stream_logger', (['args.verbosity'], {}), '(args.verbosity)\n', (3279, 3295), False, 'from lib.log_util import set_stream_logger\n'), ((3582, 3621), 'lib.utils.setup_out_dir', 'setup_out_dir', (['args.out_path', 'args.name'], {}), '(args.out_path, args.name)\n', (3595, 3621), False, 'from lib.utils import config, make_argument_parser, print_section, setup_out_dir\n'), ((3671, 3717), 'lib.utils.config', 'config', ([], {'config_file': 'args.config_file'}), '(config_file=args.config_file, **kwargs)\n', (3677, 3717), False, 'from lib.utils import config, make_argument_parser, print_section, setup_out_dir\n'), ((1228, 1253), 'lasagne.utils.floatX', 'lasagne.utils.floatX', (['(0.0)'], {}), '(0.0)\n', (1248, 1253), False, 'import lasagne\n'), ((2308, 2364), 'lib.train.train', 'train', (['train_d', 
'train_g', 'gen', 'train_stream'], {}), '(train_d, train_g, gen, train_stream, **train_args)\n', (2313, 2364), False, 'from lib.train import setup, train\n'), ((2447, 2468), 'lib.utils.print_section', 'print_section', (['"""DONE"""'], {}), "('DONE')\n", (2460, 2468), False, 'from lib.utils import config, make_argument_parser, print_section, setup_out_dir\n')]
|
import numpy as np
from pyloras._common import (
    check_random_state,
    safe_random_state,
)
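# Sanity checks for the RNG-coercion helpers: each should accept both the
# legacy RandomState API and the newer Generator API.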
def test_check_random_state():
    rand = np.random.RandomState(12345)
    assert isinstance(check_random_state(rand), np.random.Generator)
    gen = np.random.default_rng(12345)
    assert isinstance(check_random_state(gen), np.random.Generator)
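# safe_random_state performs the reverse coercion, always returning a
# legacy RandomState.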
def test_safe_random_state():
    rand = np.random.RandomState(12345)
    assert isinstance(safe_random_state(rand), np.random.RandomState)
    gen = np.random.default_rng(12345)
    assert isinstance(safe_random_state(gen), np.random.RandomState)
|
[
"numpy.random.default_rng",
"pyloras._common.check_random_state",
"numpy.random.RandomState",
"pyloras._common.safe_random_state"
] |
[((143, 171), 'numpy.random.RandomState', 'np.random.RandomState', (['(12345)'], {}), '(12345)\n', (164, 171), True, 'import numpy as np\n'), ((251, 279), 'numpy.random.default_rng', 'np.random.default_rng', (['(12345)'], {}), '(12345)\n', (272, 279), True, 'import numpy as np\n'), ((391, 419), 'numpy.random.RandomState', 'np.random.RandomState', (['(12345)'], {}), '(12345)\n', (412, 419), True, 'import numpy as np\n'), ((500, 528), 'numpy.random.default_rng', 'np.random.default_rng', (['(12345)'], {}), '(12345)\n', (521, 528), True, 'import numpy as np\n'), ((194, 218), 'pyloras._common.check_random_state', 'check_random_state', (['rand'], {}), '(rand)\n', (212, 218), False, 'from pyloras._common import check_random_state, safe_random_state\n'), ((302, 325), 'pyloras._common.check_random_state', 'check_random_state', (['gen'], {}), '(gen)\n', (320, 325), False, 'from pyloras._common import check_random_state, safe_random_state\n'), ((442, 465), 'pyloras._common.safe_random_state', 'safe_random_state', (['rand'], {}), '(rand)\n', (459, 465), False, 'from pyloras._common import check_random_state, safe_random_state\n'), ((551, 573), 'pyloras._common.safe_random_state', 'safe_random_state', (['gen'], {}), '(gen)\n', (568, 573), False, 'from pyloras._common import check_random_state, safe_random_state\n')]
|