| code (string, 22 to 1.05M chars) | apis (list, 1 to 3.31k items) | extract_api (string, 75 to 3.25M chars) |
|---|---|---|
from __future__ import absolute_import, division, print_function
import stripe
import pytest
pytestmark = pytest.mark.asyncio
TEST_RESOURCE_ID = "si_123"
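# NOTE: request_mock is assumed to be a pytest fixture supplied by the surrounding
# test suite; these tests only assert that the async bindings issue the expected
# HTTP requests, not that Stripe returns real data.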
class TestSubscriptionItem(object):
async def test_is_listable(self, request_mock):
resources = await stripe.SubscriptionItem.list(subscription="sub_123")
request_mock.assert_requested(
"get", "/v1/subscription_items", {"subscription": "sub_123"}
)
assert isinstance(resources.data, list)
assert isinstance(resources.data[0], stripe.SubscriptionItem)
async def test_is_retrievable(self, request_mock):
resource = await stripe.SubscriptionItem.retrieve(TEST_RESOURCE_ID)
request_mock.assert_requested(
"get", "/v1/subscription_items/%s" % TEST_RESOURCE_ID
)
assert isinstance(resource, stripe.SubscriptionItem)
async def test_is_creatable(self, request_mock):
resource = await stripe.SubscriptionItem.create(
price="price_123", subscription="sub_123"
)
request_mock.assert_requested("post", "/v1/subscription_items")
assert isinstance(resource, stripe.SubscriptionItem)
async def test_is_saveable(self, request_mock):
resource = await stripe.SubscriptionItem.retrieve(TEST_RESOURCE_ID)
resource.price = "price_123"
await resource.save()
request_mock.assert_requested(
"post",
"/v1/subscription_items/%s" % TEST_RESOURCE_ID,
{"price": "price_123"},
)
async def test_is_modifiable(self, request_mock):
resource = await stripe.SubscriptionItem.modify(
TEST_RESOURCE_ID, price="price_123"
)
request_mock.assert_requested(
"post",
"/v1/subscription_items/%s" % TEST_RESOURCE_ID,
{"price": "price_123"},
)
assert isinstance(resource, stripe.SubscriptionItem)
async def test_is_deletable(self, request_mock):
resource = await stripe.SubscriptionItem.retrieve(TEST_RESOURCE_ID)
await resource.delete()
request_mock.assert_requested(
"delete", "/v1/subscription_items/%s" % TEST_RESOURCE_ID
)
assert resource.deleted is True
async def test_can_delete(self, request_mock):
resource = await stripe.SubscriptionItem.delete(TEST_RESOURCE_ID)
request_mock.assert_requested(
"delete", "/v1/subscription_items/%s" % TEST_RESOURCE_ID
)
assert resource.deleted is True
class TestUsageRecords(object):
async def test_is_creatable(self, request_mock):
resource = await stripe.SubscriptionItem.create_usage_record(
TEST_RESOURCE_ID,
quantity=5000,
timestamp=1524182400,
action="increment",
)
request_mock.assert_requested(
"post",
"/v1/subscription_items/%s/usage_records" % TEST_RESOURCE_ID,
)
assert isinstance(resource, stripe.UsageRecord)
class TestUsageRecordSummaries(object):
async def test_is_listable(self, request_mock):
resource = await stripe.SubscriptionItem.list_usage_record_summaries(
TEST_RESOURCE_ID
)
request_mock.assert_requested(
"get",
"/v1/subscription_items/%s/usage_record_summaries"
% TEST_RESOURCE_ID,
)
assert isinstance(resource.data, list)
assert isinstance(resource.data[0], stripe.UsageRecordSummary)
|
[
"stripe.SubscriptionItem.list_usage_record_summaries",
"stripe.SubscriptionItem.create",
"stripe.SubscriptionItem.create_usage_record",
"stripe.SubscriptionItem.retrieve",
"stripe.SubscriptionItem.modify",
"stripe.SubscriptionItem.delete",
"stripe.SubscriptionItem.list"
] |
[((275, 327), 'stripe.SubscriptionItem.list', 'stripe.SubscriptionItem.list', ([], {'subscription': '"""sub_123"""'}), "(subscription='sub_123')\n", (303, 327), False, 'import stripe\n'), ((649, 699), 'stripe.SubscriptionItem.retrieve', 'stripe.SubscriptionItem.retrieve', (['TEST_RESOURCE_ID'], {}), '(TEST_RESOURCE_ID)\n', (681, 699), False, 'import stripe\n'), ((955, 1028), 'stripe.SubscriptionItem.create', 'stripe.SubscriptionItem.create', ([], {'price': '"""price_123"""', 'subscription': '"""sub_123"""'}), "(price='price_123', subscription='sub_123')\n", (985, 1028), False, 'import stripe\n'), ((1262, 1312), 'stripe.SubscriptionItem.retrieve', 'stripe.SubscriptionItem.retrieve', (['TEST_RESOURCE_ID'], {}), '(TEST_RESOURCE_ID)\n', (1294, 1312), False, 'import stripe\n'), ((1625, 1692), 'stripe.SubscriptionItem.modify', 'stripe.SubscriptionItem.modify', (['TEST_RESOURCE_ID'], {'price': '"""price_123"""'}), "(TEST_RESOURCE_ID, price='price_123')\n", (1655, 1692), False, 'import stripe\n'), ((2020, 2070), 'stripe.SubscriptionItem.retrieve', 'stripe.SubscriptionItem.retrieve', (['TEST_RESOURCE_ID'], {}), '(TEST_RESOURCE_ID)\n', (2052, 2070), False, 'import stripe\n'), ((2338, 2386), 'stripe.SubscriptionItem.delete', 'stripe.SubscriptionItem.delete', (['TEST_RESOURCE_ID'], {}), '(TEST_RESOURCE_ID)\n', (2368, 2386), False, 'import stripe\n'), ((2657, 2779), 'stripe.SubscriptionItem.create_usage_record', 'stripe.SubscriptionItem.create_usage_record', (['TEST_RESOURCE_ID'], {'quantity': '(5000)', 'timestamp': '(1524182400)', 'action': '"""increment"""'}), "(TEST_RESOURCE_ID, quantity=5000,\n timestamp=1524182400, action='increment')\n", (2700, 2779), False, 'import stripe\n'), ((3153, 3222), 'stripe.SubscriptionItem.list_usage_record_summaries', 'stripe.SubscriptionItem.list_usage_record_summaries', (['TEST_RESOURCE_ID'], {}), '(TEST_RESOURCE_ID)\n', (3204, 3222), False, 'import stripe\n')]
|
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
import sys
import tensorflow as tf
from tensorflow.python.client import timeline
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
import seq2seq_model
from tensorflow.python.framework import graph_util
flags = tf.flags
logging = tf.logging
logging.set_verbosity(tf.logging.ERROR)
flags.DEFINE_integer("encoder_step", 100, "sequence length")
flags.DEFINE_integer("encoder_layer", 8, "num layer")
flags.DEFINE_integer("decoder_step", 30, "sequence length")
flags.DEFINE_integer("decoder_layer", 4, "num layer")
flags.DEFINE_integer("hidden_size", 128, "hidden size")
flags.DEFINE_integer("batch_size", 1, "mini batch size")
flags.DEFINE_boolean('profile', False, 'profile kernel runtime')
flags.DEFINE_string('backend', 'tf', 'tf or wolong or ngraph')
flags.DEFINE_integer("num_iter", 10, "mini batch size")
flags.DEFINE_integer("warmup", 5, "mini batch size")
flags.DEFINE_boolean('xla', False, 'enable xla')
flags.DEFINE_string('frozen_file', '', 'output path for the frozen pb file')
flags.DEFINE_integer("parallel", 0, "tf.ConfigProto.inter_op_parallelism_threads")
FLAGS = flags.FLAGS
import ctypes
_cudart = ctypes.CDLL('libcudart.so')
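# cudaProfilerStart/cudaProfilerStop from libcudart let an external profiler
# (e.g. nvprof/Nsight launched with profiling initially disabled) capture only
# the region between profile_start() and profile_stop().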
def profile_start():
ret = _cudart.cudaProfilerStart()
if ret != 0:
raise Exception("cudaProfilerStart() returned %d" % ret)
def profile_stop():
ret = _cudart.cudaProfilerStop()
if ret != 0:
raise Exception("cudaProfilerStop() returned %d" % ret)
def main(_):
profile_stop()
session_conf = tf.ConfigProto(
allow_soft_placement=True,
log_device_placement=False,
graph_options=tf.GraphOptions(infer_shapes=True),
inter_op_parallelism_threads=FLAGS.parallel
)
if FLAGS.xla:
session_conf.graph_options.optimizer_options.global_jit_level = tf.OptimizerOptions.ON_1
with tf.Graph().as_default(), tf.Session(config=session_conf) as session:
profile_stop()
batch_size = FLAGS.batch_size
model = seq2seq_model.Seq2SeqModel(
batch_size, FLAGS.hidden_size, FLAGS.encoder_layer, FLAGS.encoder_step, FLAGS.decoder_layer, FLAGS.decoder_step)
eval_inputs = tf.placeholder(
tf.float32, [FLAGS.encoder_step, FLAGS.batch_size, FLAGS.hidden_size], 'eval_input')
eval_inputs_list = tf.split(value=eval_inputs, axis=0, num_or_size_splits=FLAGS.encoder_step)
for i in range(len(eval_inputs_list)):
eval_inputs_list[i] = tf.squeeze(eval_inputs_list[i],axis=[0])
logits = model(eval_inputs_list)
lstm_inputs = np.ones(
(FLAGS.encoder_step, FLAGS.batch_size, FLAGS.hidden_size))
session.run(tf.global_variables_initializer())
if FLAGS.frozen_file != '':
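# Freeze the variables into constants and export the inference graph as a
# standalone GraphDef (.pb) rooted at the logits tensor.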
constant_graph = graph_util.convert_variables_to_constants(session, session.graph_def, [logits.name.split(':')[0]])
with tf.gfile.GFile(FLAGS.frozen_file, "wb") as f:
f.write(constant_graph.SerializeToString())
if not FLAGS.profile:
# warm up
for i in range(FLAGS.warmup):
res = session.run(logits, {
eval_inputs: lstm_inputs})
out_flat = res.flat
if (len(out_flat) > 0):
max_len = min(10, len(out_flat))
print(logits.name)
print(out_flat[:max_len], "...(size=", len(out_flat), "end with", out_flat[-1], ")")
iter_times = []
profile_start()
for i in range(FLAGS.num_iter):
start_time = time.time()
res = session.run(logits, {
eval_inputs: lstm_inputs})
iter_time = (time.time() - start_time) * 1000
iter_times.append(iter_time)
print("Iteration time %f ms" % (iter_time))
profile_stop()
print("Summary: [min, max, mean] = [%f, %f, %f] ms" % (
min(iter_times), max(iter_times), sum(iter_times) / len(iter_times)))
else:
profile_start()
options = tf.RunOptions(trace_level=tf.RunOptions.FULL_TRACE)
run_metadata = tf.RunMetadata()
for i in range(5):
start_time = time.time()
res = session.run(logits, {
eval_inputs: lstm_inputs},
options=options,
run_metadata=run_metadata)
end_time = (time.time() - start_time) * 1000
print("iteration time %f ms" % (end_time))
fetched_timeline = timeline.Timeline(run_metadata.step_stats)
chrome_trace = fetched_timeline.generate_chrome_trace_format()
with open('timelines/timeline_step_%d.json' % i, 'w') as f:
f.write(chrome_trace)
profile_stop()
if __name__ == "__main__":
tf.app.run()
|
[
"tensorflow.GraphOptions",
"tensorflow.global_variables_initializer",
"tensorflow.Session",
"numpy.ones",
"time.time",
"tensorflow.RunOptions",
"tensorflow.placeholder",
"tensorflow.python.client.timeline.Timeline",
"ctypes.CDLL",
"tensorflow.RunMetadata",
"seq2seq_model.Seq2SeqModel",
"tensorflow.Graph",
"tensorflow.squeeze",
"tensorflow.gfile.GFile",
"tensorflow.split",
"tensorflow.app.run"
] |
[((1369, 1396), 'ctypes.CDLL', 'ctypes.CDLL', (['"""libcudart.so"""'], {}), "('libcudart.so')\n", (1380, 1396), False, 'import ctypes\n'), ((5180, 5192), 'tensorflow.app.run', 'tf.app.run', ([], {}), '()\n', (5190, 5192), True, 'import tensorflow as tf\n'), ((2080, 2111), 'tensorflow.Session', 'tf.Session', ([], {'config': 'session_conf'}), '(config=session_conf)\n', (2090, 2111), True, 'import tensorflow as tf\n'), ((2202, 2346), 'seq2seq_model.Seq2SeqModel', 'seq2seq_model.Seq2SeqModel', (['batch_size', 'FLAGS.hidden_size', 'FLAGS.encoder_layer', 'FLAGS.encoder_step', 'FLAGS.decoder_layer', 'FLAGS.decoder_step'], {}), '(batch_size, FLAGS.hidden_size, FLAGS.\n encoder_layer, FLAGS.encoder_step, FLAGS.decoder_layer, FLAGS.decoder_step)\n', (2228, 2346), False, 'import seq2seq_model\n'), ((2378, 2482), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[FLAGS.encoder_step, FLAGS.batch_size, FLAGS.hidden_size]', '"""eval_input"""'], {}), "(tf.float32, [FLAGS.encoder_step, FLAGS.batch_size, FLAGS.\n hidden_size], 'eval_input')\n", (2392, 2482), True, 'import tensorflow as tf\n'), ((2519, 2593), 'tensorflow.split', 'tf.split', ([], {'value': 'eval_inputs', 'axis': '(0)', 'num_or_size_splits': 'FLAGS.encoder_step'}), '(value=eval_inputs, axis=0, num_or_size_splits=FLAGS.encoder_step)\n', (2527, 2593), True, 'import tensorflow as tf\n'), ((2781, 2847), 'numpy.ones', 'np.ones', (['(FLAGS.encoder_step, FLAGS.batch_size, FLAGS.hidden_size)'], {}), '((FLAGS.encoder_step, FLAGS.batch_size, FLAGS.hidden_size))\n', (2788, 2847), True, 'import numpy as np\n'), ((1837, 1871), 'tensorflow.GraphOptions', 'tf.GraphOptions', ([], {'infer_shapes': '(True)'}), '(infer_shapes=True)\n', (1852, 1871), True, 'import tensorflow as tf\n'), ((2675, 2716), 'tensorflow.squeeze', 'tf.squeeze', (['eval_inputs_list[i]'], {'axis': '[0]'}), '(eval_inputs_list[i], axis=[0])\n', (2685, 2716), True, 'import tensorflow as tf\n'), ((2882, 2915), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2913, 2915), True, 'import tensorflow as tf\n'), ((4340, 4391), 'tensorflow.RunOptions', 'tf.RunOptions', ([], {'trace_level': 'tf.RunOptions.FULL_TRACE'}), '(trace_level=tf.RunOptions.FULL_TRACE)\n', (4353, 4391), True, 'import tensorflow as tf\n'), ((4419, 4435), 'tensorflow.RunMetadata', 'tf.RunMetadata', ([], {}), '()\n', (4433, 4435), True, 'import tensorflow as tf\n'), ((2055, 2065), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (2063, 2065), True, 'import tensorflow as tf\n'), ((3099, 3138), 'tensorflow.gfile.GFile', 'tf.gfile.GFile', (['FLAGS.frozen_file', '"""wb"""'], {}), "(FLAGS.frozen_file, 'wb')\n", (3113, 3138), True, 'import tensorflow as tf\n'), ((3809, 3820), 'time.time', 'time.time', ([], {}), '()\n', (3818, 3820), False, 'import time\n'), ((4496, 4507), 'time.time', 'time.time', ([], {}), '()\n', (4505, 4507), False, 'import time\n'), ((4880, 4922), 'tensorflow.python.client.timeline.Timeline', 'timeline.Timeline', (['run_metadata.step_stats'], {}), '(run_metadata.step_stats)\n', (4897, 4922), False, 'from tensorflow.python.client import timeline\n'), ((3955, 3966), 'time.time', 'time.time', ([], {}), '()\n', (3964, 3966), False, 'import time\n'), ((4753, 4764), 'time.time', 'time.time', ([], {}), '()\n', (4762, 4764), False, 'import time\n')]
|
import time
import random
import os
from spirecomm.spire.game import Game
from spirecomm.spire.character import Intent, PlayerClass
import spirecomm.spire.card
from spirecomm.spire.screen import RestOption
from spirecomm.communication.action import *
from spirecomm.ai.priorities import *
from spirecomm.ai.drafter import IroncladDraftModel
import csv
class SimpleAgent:
def __init__(self, chosen_class=PlayerClass.THE_SILENT, use_default_drafter=False, timestamp=None):
self.game = Game()
self.errors = 0
self.choose_good_card = False
self.skipped_cards = False
self.visited_shop = False
self.map_route = []
self.chosen_class = chosen_class
self.priorities = Priority()
self.drafter = IroncladDraftModel()
self.change_class(chosen_class)
self.use_default_drafter = use_default_drafter  # if set to True, uses the built-in drafter from the priorities module
self.timestamp = timestamp
def change_class(self, new_class):
self.chosen_class = new_class
if self.chosen_class == PlayerClass.THE_SILENT:
self.priorities = SilentPriority()
elif self.chosen_class == PlayerClass.IRONCLAD:
self.priorities = IroncladPriority()
elif self.chosen_class == PlayerClass.DEFECT:
self.priorities = DefectPowerPriority()
else:
self.priorities = random.choice(list(PlayerClass))
def handle_error(self, error):
raise Exception(error)
def get_next_action_in_game(self, game_state):
self.game = game_state
#time.sleep(0.07)
if self.game.choice_available or self.game.screen_type == ScreenType.GAME_OVER:
return self.handle_screen()
if self.game.proceed_available:
return ProceedAction()
if self.game.play_available:
if self.game.room_type == "MonsterRoomBoss" and len(self.game.get_real_potions()) > 0:
potion_action = self.use_next_potion()
if potion_action is not None:
return potion_action
return self.get_play_card_action()
if self.game.end_available:
return EndTurnAction()
# TODO: Possible fix for opening the deck view by accident
if self.game.screen_type == None:
return ReturnAction()
if self.game.cancel_available:
return CancelAction()
def get_next_action_out_of_game(self):
return StartGameAction(self.chosen_class)
def is_monster_attacking(self):
for monster in self.game.monsters:
if monster.intent.is_attack() or monster.intent == Intent.NONE:
return True
return False
def get_incoming_damage(self):
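# Estimate the damage the player will take this turn: sum each active
# monster's announced damage times its hit count, and guess 5 * act for
# monsters with an unknown (NONE) intent.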
incoming_damage = 0
for monster in self.game.monsters:
if not monster.is_gone and not monster.half_dead:
if monster.move_adjusted_damage is not None:
incoming_damage += monster.move_adjusted_damage * monster.move_hits
elif monster.intent == Intent.NONE:
incoming_damage += 5 * self.game.act
return incoming_damage
def get_low_hp_target(self):
available_monsters = [monster for monster in self.game.monsters if monster.current_hp > 0 and not monster.half_dead and not monster.is_gone]
best_monster = min(available_monsters, key=lambda x: x.current_hp)
return best_monster
def get_high_hp_target(self):
available_monsters = [monster for monster in self.game.monsters if monster.current_hp > 0 and not monster.half_dead and not monster.is_gone]
best_monster = max(available_monsters, key=lambda x: x.current_hp)
return best_monster
def many_monsters_alive(self):
available_monsters = [monster for monster in self.game.monsters if monster.current_hp > 0 and not monster.half_dead and not monster.is_gone]
return len(available_monsters) > 1
def get_play_card_action(self):
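# Card-selection heuristic: prefer zero-cost non-attacks, then the best
# non-zero-cost card (switching to an AoE attack when several monsters are
# alive), then zero-cost attacks; otherwise end the turn.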
playable_cards = [card for card in self.game.hand if card.is_playable]
zero_cost_cards = [card for card in playable_cards if card.cost == 0]
zero_cost_attacks = [card for card in zero_cost_cards if card.type == spirecomm.spire.card.CardType.ATTACK]
zero_cost_non_attacks = [card for card in zero_cost_cards if card.type != spirecomm.spire.card.CardType.ATTACK]
nonzero_cost_cards = [card for card in playable_cards if card.cost != 0]
aoe_cards = [card for card in playable_cards if self.priorities.is_card_aoe(card)]
if self.game.player.block > self.get_incoming_damage() - (self.game.act + 4):
offensive_cards = [card for card in nonzero_cost_cards if not self.priorities.is_card_defensive(card)]
if len(offensive_cards) > 0:
nonzero_cost_cards = offensive_cards
else:
nonzero_cost_cards = [card for card in nonzero_cost_cards if not card.exhausts]
if len(playable_cards) == 0:
return EndTurnAction()
if len(zero_cost_non_attacks) > 0:
card_to_play = self.priorities.get_best_card_to_play(zero_cost_non_attacks)
elif len(nonzero_cost_cards) > 0:
card_to_play = self.priorities.get_best_card_to_play(nonzero_cost_cards)
if len(aoe_cards) > 0 and self.many_monsters_alive() and card_to_play.type == spirecomm.spire.card.CardType.ATTACK:
card_to_play = self.priorities.get_best_card_to_play(aoe_cards)
elif len(zero_cost_attacks) > 0:
card_to_play = self.priorities.get_best_card_to_play(zero_cost_attacks)
else:
# This shouldn't happen!
return EndTurnAction()
if card_to_play.has_target:
available_monsters = [monster for monster in self.game.monsters if monster.current_hp > 0 and not monster.half_dead and not monster.is_gone]
if len(available_monsters) == 0:
return EndTurnAction()
if card_to_play.type == spirecomm.spire.card.CardType.ATTACK:
target = self.get_low_hp_target()
else:
target = self.get_high_hp_target()
return PlayCardAction(card=card_to_play, target_monster=target)
else:
return PlayCardAction(card=card_to_play)
def use_next_potion(self):
for potion in self.game.get_real_potions():
if potion.can_use:
if potion.requires_target:
return PotionAction(True, potion=potion, target_monster=self.get_low_hp_target())
else:
return PotionAction(True, potion=potion)
def handle_screen(self):
if self.game.screen_type == ScreenType.EVENT:
if self.game.screen.event_id in ["Vampires", "Masked Bandits", "Knowing Skull", "Ghosts", "Liars Game", "Golden Idol", "Drug Dealer", "The Library"]:
return ChooseAction(len(self.game.screen.options) - 1)
else:
# NOTE: This appears to be where Neow's blessing is chosen; the first option is always taken.
return ChooseAction(0)
elif self.game.screen_type == ScreenType.CHEST:
return OpenChestAction()
elif self.game.screen_type == ScreenType.SHOP_ROOM:
if not self.visited_shop:
self.visited_shop = True
return ChooseShopkeeperAction()
else:
self.visited_shop = False
return ProceedAction()
elif self.game.screen_type == ScreenType.SHOP_SCREEN:
if self.visited_shop:
return LeaveAction()
elif self.game.screen_type == ScreenType.REST:
return self.choose_rest_option()
elif self.game.screen_type == ScreenType.CARD_REWARD:
if self.use_default_drafter:
return self.default_choose_card_reward()
else:
return self.choose_card_reward()
elif self.game.screen_type == ScreenType.COMBAT_REWARD:
for reward_item in self.game.screen.rewards:
if reward_item.reward_type == RewardType.POTION and self.game.are_potions_full():
continue
elif reward_item.reward_type == RewardType.CARD and self.skipped_cards:
continue
else:
return CombatRewardAction(reward_item)
self.skipped_cards = False
return ProceedAction()
elif self.game.screen_type == ScreenType.MAP:
return self.make_map_choice()
elif self.game.screen_type == ScreenType.BOSS_REWARD:
relics = self.game.screen.relics
best_boss_relic = self.priorities.get_best_boss_relic(relics)
return BossRewardAction(best_boss_relic)
elif self.game.screen_type == ScreenType.SHOP_SCREEN:
if self.game.screen.purge_available and self.game.gold >= self.game.screen.purge_cost:
# TODO: This just purges the first card in the deck. Possibly hook into the AI? Purity metrics? Purge the card least like the archetype?
return ChooseAction(name="purge")
for card in self.game.screen.cards:
if self.game.gold >= card.price and not self.priorities.should_skip(card):
return BuyCardAction(card)
for relic in self.game.screen.relics:
if self.game.gold >= relic.price:
return BuyRelicAction(relic)
return LeaveAction()
elif self.game.screen_type == ScreenType.GRID:
if not self.game.choice_available:
return ProceedAction()
if self.game.screen.for_upgrade or self.choose_good_card:
available_cards = self.priorities.get_sorted_cards(self.game.screen.cards)
else:
available_cards = self.priorities.get_sorted_cards(self.game.screen.cards, reverse=True)
num_cards = self.game.screen.num_cards
return CardSelectAction(available_cards[:num_cards])
elif self.game.screen_type == ScreenType.HAND_SELECT:
if not self.game.choice_available:
return ProceedAction()
# Usually, we don't want to choose the whole hand for a hand select. 3 seems like a good compromise.
num_cards = min(self.game.screen.num_cards, 3)
return CardSelectAction(self.priorities.get_cards_for_action(self.game.current_action, self.game.screen.cards, num_cards))
elif self.game.screen_type == ScreenType.GAME_OVER:
game_result = dict()
game_result['score'] = self.game.screen.score
if self.game.screen.victory == True:
game_result['score'] += 10000
game_result['floor'] = self.game.floor
game_result['seed'] = self.game.seed
game_result['choices'] = self.drafter.deck_pick
game_result['final_deck'] = self.drafter.deck
game_result['deck_vector'] = self.drafter.vectorize_deck()
game_result['time'] = time.time()
if self.use_default_drafter:
self.write_game_results(f'control_results_{self.timestamp}.csv', game_result)
else:
self.write_game_results(f'game_results_{self.timestamp}.csv', game_result)
return ProceedAction()
elif self.game.screen_type == None:
return ReturnAction()
else:
return ProceedAction()
def write_game_results(self, filepath:str, game_result:dict):
"""
Takes a file path and game results and writes them to a CSV file.
:param filepath: path to the CSV file (written in the SlayTheSpire folder)
:param game_result: dictionary of results
"""
mode = 'a'
if not os.path.exists(os.path.abspath(filepath)):
mode = 'w'
with open(filepath, mode) as file:
writer = csv.DictWriter(file, game_result.keys())
if mode == 'w':
writer.writeheader()
writer.writerow(game_result)
def choose_rest_option(self):
rest_options = self.game.screen.rest_options
if len(rest_options) > 0 and not self.game.screen.has_rested:
if RestOption.REST in rest_options and self.game.current_hp < self.game.max_hp / 2:
return RestAction(RestOption.REST)
elif RestOption.REST in rest_options and self.game.act != 1 and self.game.floor % 17 == 15 and self.game.current_hp < self.game.max_hp * 0.9:
return RestAction(RestOption.REST)
elif RestOption.SMITH in rest_options:
return RestAction(RestOption.SMITH)
elif RestOption.LIFT in rest_options:
return RestAction(RestOption.LIFT)
elif RestOption.DIG in rest_options:
return RestAction(RestOption.DIG)
elif RestOption.REST in rest_options and self.game.current_hp < self.game.max_hp:
return RestAction(RestOption.REST)
else:
return ChooseAction(0)
else:
return ProceedAction()
def count_copies_in_deck(self, card):
count = 0
for deck_card in self.game.deck:
if deck_card.card_id == card.card_id:
count += 1
return count
def default_choose_card_reward(self):
reward_cards = self.game.screen.cards
if self.game.screen.can_skip and not self.game.in_combat:
pickable_cards = [card for card in reward_cards if self.priorities.needs_more_copies(card, self.count_copies_in_deck(card))]
else:
pickable_cards = reward_cards
if len(pickable_cards) > 0:
potential_pick = self.priorities.get_best_card(pickable_cards)
return CardRewardAction(potential_pick)
elif self.game.screen.can_bowl:
return CardRewardAction(bowl=True)
else:
self.skipped_cards = True
return CancelAction()
def choose_card_reward(self):
"""
Function that chooses card rewards using neural net
:return: CardRewardAction with selected card
"""
reward_cards = self.game.screen.cards
self.drafter.update_floor(self.game.floor)
pick = self.drafter.choose_card(reward_cards)
return CardRewardAction(pick)
def generate_map_route(self):
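# Forward dynamic-programming pass over the map rows: best_rewards[y][x] holds
# the best cumulative node-priority score for reaching node (x, y),
# best_parents records the chosen predecessor, and the route is recovered by
# backtracking from the best node in the top row.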
node_rewards = self.priorities.MAP_NODE_PRIORITIES.get(self.game.act)
best_rewards = {0: {node.x: node_rewards[node.symbol] for node in self.game.map.nodes[0].values()}}
best_parents = {0: {node.x: 0 for node in self.game.map.nodes[0].values()}}
min_reward = min(node_rewards.values())
map_height = max(self.game.map.nodes.keys())
for y in range(0, map_height):
best_rewards[y+1] = {node.x: min_reward * 20 for node in self.game.map.nodes[y+1].values()}
best_parents[y+1] = {node.x: -1 for node in self.game.map.nodes[y+1].values()}
for x in best_rewards[y]:
node = self.game.map.get_node(x, y)
best_node_reward = best_rewards[y][x]
for child in node.children:
test_child_reward = best_node_reward + node_rewards[child.symbol]
if test_child_reward > best_rewards[y+1][child.x]:
best_rewards[y+1][child.x] = test_child_reward
best_parents[y+1][child.x] = node.x
best_path = [0] * (map_height + 1)
best_path[map_height] = max(best_rewards[map_height].keys(), key=lambda x: best_rewards[map_height][x])
for y in range(map_height, 0, -1):
best_path[y - 1] = best_parents[y][best_path[y]]
self.map_route = best_path
def make_map_choice(self):
if len(self.game.screen.next_nodes) > 0 and self.game.screen.next_nodes[0].y == 0:
self.generate_map_route()
self.game.screen.current_node.y = -1
if self.game.screen.boss_available:
return ChooseMapBossAction()
chosen_x = self.map_route[self.game.screen.current_node.y + 1]
for choice in self.game.screen.next_nodes:
if choice.x == chosen_x:
return ChooseMapNodeAction(choice)
# This should never happen
return ChooseAction(0)
def reset_drafter(self, filepath=None):
"""
helper to reset drafter to default configuration between runs
:param filepath: filepath to weights.npy
"""
if not filepath:
self.drafter = IroncladDraftModel()
else:
self.drafter = IroncladDraftModel(weights=filepath)
def update_timestamp(self):
"""
Sets the timestamp attribute to the current time.
:return: the new timestamp string
"""
self.timestamp = str(int(time.time()))
return self.timestamp
|
[
"os.path.abspath",
"spirecomm.spire.game.Game",
"spirecomm.ai.drafter.IroncladDraftModel",
"time.time"
] |
[((497, 503), 'spirecomm.spire.game.Game', 'Game', ([], {}), '()\n', (501, 503), False, 'from spirecomm.spire.game import Game\n'), ((764, 784), 'spirecomm.ai.drafter.IroncladDraftModel', 'IroncladDraftModel', ([], {}), '()\n', (782, 784), False, 'from spirecomm.ai.drafter import IroncladDraftModel\n'), ((16695, 16715), 'spirecomm.ai.drafter.IroncladDraftModel', 'IroncladDraftModel', ([], {}), '()\n', (16713, 16715), False, 'from spirecomm.ai.drafter import IroncladDraftModel\n'), ((16757, 16793), 'spirecomm.ai.drafter.IroncladDraftModel', 'IroncladDraftModel', ([], {'weights': 'filepath'}), '(weights=filepath)\n', (16775, 16793), False, 'from spirecomm.ai.drafter import IroncladDraftModel\n'), ((11904, 11929), 'os.path.abspath', 'os.path.abspath', (['filepath'], {}), '(filepath)\n', (11919, 11929), False, 'import os\n'), ((16960, 16971), 'time.time', 'time.time', ([], {}), '()\n', (16969, 16971), False, 'import time\n'), ((11165, 11176), 'time.time', 'time.time', ([], {}), '()\n', (11174, 11176), False, 'import time\n')]
|
#!/usr/bin/env python3
import numpy as np
import sys
def make_instance():
# normal, fire, water, electric, grass, ice, fighting, poison, ground,
# flying, psychic, bug, rock, ghost, dragon, dark, steel, fairy
type_matrix = np.array([[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5, 0.0, 1.0, 1.0, 0.5, 1.0],
[1.0, 0.5, 0.5, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 0.5, 1.0, 0.5, 1.0, 2.0, 1.0],
[1.0, 2.0, 0.5, 1.0, 0.5, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 1.0, 1.0],
[1.0, 1.0, 2.0, 0.5, 0.5, 1.0, 1.0, 1.0, 0.0, 2.0, 1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0],
[1.0, 0.5, 2.0, 1.0, 0.5, 1.0, 1.0, 0.5, 2.0, 0.5, 1.0, 0.5, 2.0, 1.0, 0.5, 1.0, 0.5, 1.0],
[1.0, 0.5, 0.5, 1.0, 2.0, 0.5, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0],
[2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 0.5, 0.5, 0.5, 2.0, 0.0, 1.0, 2.0, 2.0, 0.5],
[1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 0.5, 0.5, 1.0, 1.0, 1.0, 0.5, 0.5, 1.0, 1.0, 0.0, 2.0],
[1.0, 2.0, 1.0, 2.0, 0.5, 1.0, 1.0, 2.0, 1.0, 0.0, 1.0, 0.5, 2.0, 1.0, 1.0, 1.0, 2.0, 1.0],
[1.0, 1.0, 1.0, 0.5, 2.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 0.5, 1.0, 1.0, 1.0, 0.5, 1.0],
[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 1.0, 0.0, 0.5, 1.0],
[1.0, 0.5, 1.0, 1.0, 2.0, 1.0, 0.5, 0.5, 1.0, 0.5, 2.0, 1.0, 1.0, 0.5, 1.0, 2.0, 0.5, 0.5],
[1.0, 2.0, 1.0, 1.0, 1.0, 2.0, 0.5, 1.0, 0.5, 2.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 0.5, 1.0],
[0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 1.0],
[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 0.5, 0.0],
[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 0.5],
[1.0, 0.5, 0.5, 0.5, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 1.0, 0.5, 2.0],
[1.0, 0.5, 1.0, 1.0, 1.0, 1.0, 2.0, 0.5, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 0.5, 1.0]])
# make weak_matrix
weak_matrix = np.where(type_matrix==2.0, 1.0, 0.0)
resist_matrix = np.where(type_matrix<1.0, 1.0, 0.0)
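# weak_matrix marks super-effective (2x) matchups and resist_matrix marks
# resisted or immune (<1x) matchups; the enemy and skill vectors below are
# one-hot over the same 18-type ordering as type_matrix.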
# set enemy & skill
# enemy1
enemy1 = [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
skill1 = [[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
# enemy2
enemy2 = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
skill2 = [[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
# enemy3
enemy3 = [0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
skill3 = [[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]]
# combine enemy into one list
enemy = [enemy1, enemy2, enemy3]
# combine skill into one list
skill = [skill1, skill2, skill3]
return type_matrix, weak_matrix, resist_matrix, enemy, skill
|
[
"numpy.where",
"numpy.array"
] |
[((232, 1995), 'numpy.array', 'np.array', (['[[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5, 0.0, 1.0,\n 1.0, 0.5, 1.0], [1.0, 0.5, 0.5, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 1.0,\n 2.0, 0.5, 1.0, 0.5, 1.0, 2.0, 1.0], [1.0, 2.0, 0.5, 1.0, 0.5, 1.0, 1.0,\n 1.0, 2.0, 1.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 1.0, 1.0], [1.0, 1.0, 2.0,\n 0.5, 0.5, 1.0, 1.0, 1.0, 0.0, 2.0, 1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0, \n 1.0], [1.0, 0.5, 2.0, 1.0, 0.5, 1.0, 1.0, 0.5, 2.0, 0.5, 1.0, 0.5, 2.0,\n 1.0, 0.5, 1.0, 0.5, 1.0], [1.0, 0.5, 0.5, 1.0, 2.0, 0.5, 1.0, 1.0, 2.0,\n 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0], [2.0, 1.0, 1.0, 1.0, 1.0,\n 2.0, 1.0, 0.5, 1.0, 0.5, 0.5, 0.5, 2.0, 0.0, 1.0, 2.0, 2.0, 0.5], [1.0,\n 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 0.5, 0.5, 1.0, 1.0, 1.0, 0.5, 0.5, 1.0, \n 1.0, 0.0, 2.0], [1.0, 2.0, 1.0, 2.0, 0.5, 1.0, 1.0, 2.0, 1.0, 0.0, 1.0,\n 0.5, 2.0, 1.0, 1.0, 1.0, 2.0, 1.0], [1.0, 1.0, 1.0, 0.5, 2.0, 1.0, 2.0,\n 1.0, 1.0, 1.0, 1.0, 2.0, 0.5, 1.0, 1.0, 1.0, 0.5, 1.0], [1.0, 1.0, 1.0,\n 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 1.0, 0.0, 0.5, \n 1.0], [1.0, 0.5, 1.0, 1.0, 2.0, 1.0, 0.5, 0.5, 1.0, 0.5, 2.0, 1.0, 1.0,\n 0.5, 1.0, 2.0, 0.5, 0.5], [1.0, 2.0, 1.0, 1.0, 1.0, 2.0, 0.5, 1.0, 0.5,\n 2.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 0.5, 1.0], [0.0, 1.0, 1.0, 1.0, 1.0,\n 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 1.0], [1.0,\n 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, \n 1.0, 0.5, 0.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 2.0,\n 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 0.5], [1.0, 0.5, 0.5, 0.5, 1.0, 2.0, 1.0,\n 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 1.0, 0.5, 2.0], [1.0, 0.5, 1.0,\n 1.0, 1.0, 1.0, 2.0, 0.5, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 0.5, 1.0]]'], {}), '([[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5,\n 0.0, 1.0, 1.0, 0.5, 1.0], [1.0, 0.5, 0.5, 1.0, 2.0, 2.0, 1.0, 1.0, 1.0,\n 1.0, 1.0, 2.0, 0.5, 1.0, 0.5, 1.0, 2.0, 1.0], [1.0, 2.0, 0.5, 1.0, 0.5,\n 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 1.0, 1.0], [1.0,\n 1.0, 2.0, 0.5, 0.5, 1.0, 1.0, 1.0, 0.0, 2.0, 1.0, 1.0, 1.0, 1.0, 0.5, \n 1.0, 1.0, 1.0], [1.0, 0.5, 2.0, 1.0, 0.5, 1.0, 1.0, 0.5, 2.0, 0.5, 1.0,\n 0.5, 2.0, 1.0, 0.5, 1.0, 0.5, 1.0], [1.0, 0.5, 0.5, 1.0, 2.0, 0.5, 1.0,\n 1.0, 2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0], [2.0, 1.0, 1.0,\n 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 0.5, 0.5, 0.5, 2.0, 0.0, 1.0, 2.0, 2.0, \n 0.5], [1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 0.5, 0.5, 1.0, 1.0, 1.0, 0.5,\n 0.5, 1.0, 1.0, 0.0, 2.0], [1.0, 2.0, 1.0, 2.0, 0.5, 1.0, 1.0, 2.0, 1.0,\n 0.0, 1.0, 0.5, 2.0, 1.0, 1.0, 1.0, 2.0, 1.0], [1.0, 1.0, 1.0, 0.5, 2.0,\n 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 2.0, 0.5, 1.0, 1.0, 1.0, 0.5, 1.0], [1.0,\n 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 2.0, 1.0, 1.0, 0.5, 1.0, 1.0, 1.0, 1.0, \n 0.0, 0.5, 1.0], [1.0, 0.5, 1.0, 1.0, 2.0, 1.0, 0.5, 0.5, 1.0, 0.5, 2.0,\n 1.0, 1.0, 0.5, 1.0, 2.0, 0.5, 0.5], [1.0, 2.0, 1.0, 1.0, 1.0, 2.0, 0.5,\n 1.0, 0.5, 2.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 0.5, 1.0], [0.0, 1.0, 1.0,\n 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, \n 1.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0,\n 1.0, 2.0, 1.0, 0.5, 0.0], [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.5, 1.0, 1.0,\n 1.0, 2.0, 1.0, 1.0, 2.0, 1.0, 0.5, 1.0, 0.5], [1.0, 0.5, 0.5, 0.5, 1.0,\n 2.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, 1.0, 1.0, 1.0, 0.5, 2.0], [1.0,\n 0.5, 1.0, 1.0, 1.0, 1.0, 2.0, 0.5, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 2.0, \n 2.0, 0.5, 1.0]])\n', (240, 1995), True, 'import numpy as np\n'), ((2432, 2470), 'numpy.where', 
'np.where', (['(type_matrix == 2.0)', '(1.0)', '(0.0)'], {}), '(type_matrix == 2.0, 1.0, 0.0)\n', (2440, 2470), True, 'import numpy as np\n'), ((2489, 2526), 'numpy.where', 'np.where', (['(type_matrix < 1.0)', '(1.0)', '(0.0)'], {}), '(type_matrix < 1.0, 1.0, 0.0)\n', (2497, 2526), True, 'import numpy as np\n')]
|
#!/usr/bin/env python3
import argparse
import glob
import os
from pathlib import Path
import subprocess
import sys
from zipfile import ZipFile
def parse_arguments():
parser = argparse.ArgumentParser(
description="Tool for garbling PII in for PPRL purposes in the CODI project"
)
parser.add_argument("sourcefile", help="Source PII CSV file")
parser.add_argument("schemadir", help="Directory of linkage schema")
parser.add_argument("secretfile", help="Location of de-identification secret file")
parser.add_argument(
'-z', '--outputzip', dest='outputzip', default="garbled.zip",
help="Specify an name for the .zip file. Default is garbled.zip"
)
parser.add_argument(
'-o', '--outputdir', dest='outputdir', default="output",
help="Specify an output directory. Default is output/"
)
args = parser.parse_args()
if not Path(args.schemadir).exists():
parser.error("Unable to find directory: " + args.schemadir)
if not Path(args.secretfile).exists():
parser.error("Unable to find secret file: " + args.secretfile)
return args
def validate_secret_file(secret_file):
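# Read the shared de-identification secret and require a minimum length before
# it is passed on to `anonlink hash` below.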
secret = None
with open(secret_file, "r") as secret_text:
secret = secret_text.read()
if len(secret) < 256:
sys.exit("Secret length not long enough to ensure proper de-identification")
return secret
def garble_pii(args):
schema_dir = Path(args.schemadir)
secret_file = Path(args.secretfile)
source_file = args.sourcefile
os.makedirs(args.outputdir, exist_ok=True)
secret = validate_secret_file(secret_file)
clk_files = []
schema = glob.glob(args.schemadir + "/*.json")
for s in schema:
with open(s, "r") as schema_file:
file_contents = schema_file.read()
if "doubleHash" in file_contents:
sys.exit(
"The following schema uses doubleHash, which is insecure: "
+ str(s)
)
output_file = Path(args.outputdir, s.split('/')[-1])
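# Shell out to the anonlink CLI to hash the source PII into CLKs with this
# linkage schema and the shared secret.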
completed_process = subprocess.run(
["anonlink", "hash", source_file, secret, str(s), str(output_file)],
check=True
)
clk_files.append(output_file)
return clk_files
def create_clk_zip(clk_files, args):
with ZipFile(os.path.join(args.outputdir, args.outputzip), "w") as garbled_zip:
for clk_file in clk_files:
garbled_zip.write(clk_file)
print("Zip file created at: " + args.outputdir + '/' + args.outputzip)
def main():
args = parse_arguments()
clk_files = garble_pii(args)
create_clk_zip(clk_files, args)
if __name__ == "__main__":
main()
|
[
"os.makedirs",
"argparse.ArgumentParser",
"pathlib.Path",
"glob.glob",
"os.path.join",
"sys.exit"
] |
[((182, 288), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Tool for garbling PII in for PPRL purposes in the CODI project"""'}), "(description=\n 'Tool for garbling PII in for PPRL purposes in the CODI project')\n", (205, 288), False, 'import argparse\n'), ((1452, 1472), 'pathlib.Path', 'Path', (['args.schemadir'], {}), '(args.schemadir)\n', (1456, 1472), False, 'from pathlib import Path\n'), ((1491, 1512), 'pathlib.Path', 'Path', (['args.secretfile'], {}), '(args.secretfile)\n', (1495, 1512), False, 'from pathlib import Path\n'), ((1551, 1587), 'os.makedirs', 'os.makedirs', (['"""output"""'], {'exist_ok': '(True)'}), "('output', exist_ok=True)\n", (1562, 1587), False, 'import os\n'), ((1667, 1704), 'glob.glob', 'glob.glob', (["(args.schemadir + '/*.json')"], {}), "(args.schemadir + '/*.json')\n", (1676, 1704), False, 'import glob\n'), ((1316, 1392), 'sys.exit', 'sys.exit', (['"""Secret length not long enough to ensure proper de-identification"""'], {}), "('Secret length not long enough to ensure proper de-identification')\n", (1324, 1392), False, 'import sys\n'), ((2348, 2392), 'os.path.join', 'os.path.join', (['args.outputdir', 'args.outputzip'], {}), '(args.outputdir, args.outputzip)\n', (2360, 2392), False, 'import os\n'), ((902, 922), 'pathlib.Path', 'Path', (['args.schemadir'], {}), '(args.schemadir)\n', (906, 922), False, 'from pathlib import Path\n'), ((1012, 1033), 'pathlib.Path', 'Path', (['args.secretfile'], {}), '(args.secretfile)\n', (1016, 1033), False, 'from pathlib import Path\n')]
|
import logging
import matplotlib.pyplot as plt
import numpy as np
import pytest
from shapely.affinity import rotate
from pyroll.core import SquareGroove, Profile
groove = SquareGroove(0, 3, tip_depth=20, tip_angle=91 / 180 * np.pi)
def test_from_groove():
Profile.from_groove(groove, width=45, height=50)
Profile.from_groove(groove, filling=0.9, gap=3)
def test_from_groove_errors():
with pytest.raises(TypeError):
Profile.from_groove(groove, width=55, filling=0.9, height=50, gap=3)
with pytest.raises(TypeError):
Profile.from_groove(groove, width=55, height=50, gap=3)
with pytest.raises(TypeError):
Profile.from_groove(groove, width=55, filling=0.9, height=50)
with pytest.raises(TypeError):
Profile.from_groove(groove, height=50)
with pytest.raises(TypeError):
Profile.from_groove(groove, gap=3)
with pytest.raises(TypeError):
Profile.from_groove(groove, width=55)
with pytest.raises(TypeError):
Profile.from_groove(groove, filling=0.9)
with pytest.raises(ValueError):
Profile.from_groove(groove, height=-1, width=50)
with pytest.raises(ValueError):
Profile.from_groove(groove, gap=-1, width=50)
with pytest.raises(ValueError):
Profile.from_groove(groove, width=-1, height=50)
with pytest.raises(ValueError):
Profile.from_groove(groove, filling=0, height=50)
def test_from_groove_warnings(caplog):
logging.getLogger("pyroll").error("Marker Error")
Profile.from_groove(groove, width=55, height=50)
Profile.from_groove(groove, filling=1.1, gap=3)
if not caplog.records:
pytest.xfail("Expected to fail if ran together with CLI tests, since CLI is modifying logging, so pytest does not capture.")
assert len([r for r in caplog.records if r.levelname == "WARNING" and r.msg.startswith("Encountered")]) > 1
def test_round():
p1 = Profile.round(radius=15)
p2 = Profile.round(diameter=30)
assert p1.cross_section == p2.cross_section
def test_round_errors():
with pytest.raises(ValueError):
Profile.round(radius=-1)
with pytest.raises(ValueError):
Profile.round(diameter=0)
def test_square():
p1 = Profile.square(side=10, corner_radius=1)
p2 = Profile.square(diagonal=10 * np.sqrt(2), corner_radius=1)
assert p1.cross_section == p2.cross_section
p3 = Profile.square(side=10)
p4 = Profile.square(diagonal=10 * np.sqrt(2))
assert p3.cross_section == p4.cross_section
def test_square_errors():
with pytest.raises(TypeError):
Profile.square(side=10, diagonal=10)
with pytest.raises(TypeError):
Profile.square()
with pytest.raises(ValueError):
Profile.square(side=-1)
with pytest.raises(ValueError):
Profile.square(diagonal=0)
with pytest.raises(ValueError):
Profile.square(corner_radius=-1, side=10)
def test_box():
Profile.box(height=10, width=20)
Profile.box(height=10, width=20, corner_radius=1)
def test_box_errors():
with pytest.raises(ValueError):
Profile.box(height=-1, width=5)
with pytest.raises(ValueError):
Profile.box(height=10, width=-1)
with pytest.raises(ValueError):
Profile.box(corner_radius=-1, height=10, width=5)
def test_diamond():
Profile.diamond(height=10, width=20)
Profile.diamond(height=10, width=20, corner_radius=1)
def test_diamond_errors():
with pytest.raises(ValueError):
Profile.diamond(height=-1, width=5)
with pytest.raises(ValueError):
Profile.diamond(height=10, width=-1)
with pytest.raises(ValueError):
Profile.diamond(corner_radius=-1, height=10, width=5)
def test_square_box_equivalence():
p1 = Profile.square(side=10, corner_radius=0)
p2 = Profile.box(height=10, width=10, corner_radius=0)
assert np.isclose(p1.cross_section.symmetric_difference(rotate(p2.cross_section, angle=45, origin=(0, 0))).area, 0)
p1 = Profile.square(side=10, corner_radius=2)
p2 = Profile.box(height=10, width=10, corner_radius=2)
assert np.isclose(p1.cross_section.symmetric_difference(rotate(p2.cross_section, angle=45, origin=(0, 0))).area, 0)
|
[
"pyroll.core.Profile.from_groove",
"pyroll.core.Profile.round",
"pyroll.core.Profile.diamond",
"pytest.raises",
"pytest.xfail",
"shapely.affinity.rotate",
"pyroll.core.Profile.square",
"pyroll.core.SquareGroove",
"pyroll.core.Profile.box",
"logging.getLogger",
"numpy.sqrt"
] |
[((174, 234), 'pyroll.core.SquareGroove', 'SquareGroove', (['(0)', '(3)'], {'tip_depth': '(20)', 'tip_angle': '(91 / 180 * np.pi)'}), '(0, 3, tip_depth=20, tip_angle=91 / 180 * np.pi)\n', (186, 234), False, 'from pyroll.core import SquareGroove, Profile\n'), ((265, 313), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'width': '(45)', 'height': '(50)'}), '(groove, width=45, height=50)\n', (284, 313), False, 'from pyroll.core import SquareGroove, Profile\n'), ((318, 365), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'filling': '(0.9)', 'gap': '(3)'}), '(groove, filling=0.9, gap=3)\n', (337, 365), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1509, 1557), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'width': '(55)', 'height': '(50)'}), '(groove, width=55, height=50)\n', (1528, 1557), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1562, 1609), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'filling': '(1.1)', 'gap': '(3)'}), '(groove, filling=1.1, gap=3)\n', (1581, 1609), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1913, 1937), 'pyroll.core.Profile.round', 'Profile.round', ([], {'radius': '(15)'}), '(radius=15)\n', (1926, 1937), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1947, 1973), 'pyroll.core.Profile.round', 'Profile.round', ([], {'diameter': '(30)'}), '(diameter=30)\n', (1960, 1973), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2219, 2259), 'pyroll.core.Profile.square', 'Profile.square', ([], {'side': '(10)', 'corner_radius': '(1)'}), '(side=10, corner_radius=1)\n', (2233, 2259), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2386, 2409), 'pyroll.core.Profile.square', 'Profile.square', ([], {'side': '(10)'}), '(side=10)\n', (2400, 2409), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2924, 2956), 'pyroll.core.Profile.box', 'Profile.box', ([], {'height': '(10)', 'width': '(20)'}), '(height=10, width=20)\n', (2935, 2956), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2961, 3010), 'pyroll.core.Profile.box', 'Profile.box', ([], {'height': '(10)', 'width': '(20)', 'corner_radius': '(1)'}), '(height=10, width=20, corner_radius=1)\n', (2972, 3010), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3309, 3345), 'pyroll.core.Profile.diamond', 'Profile.diamond', ([], {'height': '(10)', 'width': '(20)'}), '(height=10, width=20)\n', (3324, 3345), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3350, 3403), 'pyroll.core.Profile.diamond', 'Profile.diamond', ([], {'height': '(10)', 'width': '(20)', 'corner_radius': '(1)'}), '(height=10, width=20, corner_radius=1)\n', (3365, 3403), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3738, 3778), 'pyroll.core.Profile.square', 'Profile.square', ([], {'side': '(10)', 'corner_radius': '(0)'}), '(side=10, corner_radius=0)\n', (3752, 3778), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3788, 3837), 'pyroll.core.Profile.box', 'Profile.box', ([], {'height': '(10)', 'width': '(10)', 'corner_radius': '(0)'}), '(height=10, width=10, corner_radius=0)\n', (3799, 3837), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3968, 4008), 'pyroll.core.Profile.square', 'Profile.square', ([], {'side': '(10)', 'corner_radius': '(2)'}), '(side=10, corner_radius=2)\n', (3982, 4008), False, 'from pyroll.core import SquareGroove, Profile\n'), ((4018, 4067), 'pyroll.core.Profile.box', 'Profile.box', 
([], {'height': '(10)', 'width': '(10)', 'corner_radius': '(2)'}), '(height=10, width=10, corner_radius=2)\n', (4029, 4067), False, 'from pyroll.core import SquareGroove, Profile\n'), ((408, 432), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (421, 432), False, 'import pytest\n'), ((442, 510), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'width': '(55)', 'filling': '(0.9)', 'height': '(50)', 'gap': '(3)'}), '(groove, width=55, filling=0.9, height=50, gap=3)\n', (461, 510), False, 'from pyroll.core import SquareGroove, Profile\n'), ((520, 544), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (533, 544), False, 'import pytest\n'), ((554, 609), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'width': '(55)', 'height': '(50)', 'gap': '(3)'}), '(groove, width=55, height=50, gap=3)\n', (573, 609), False, 'from pyroll.core import SquareGroove, Profile\n'), ((619, 643), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (632, 643), False, 'import pytest\n'), ((653, 714), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'width': '(55)', 'filling': '(0.9)', 'height': '(50)'}), '(groove, width=55, filling=0.9, height=50)\n', (672, 714), False, 'from pyroll.core import SquareGroove, Profile\n'), ((724, 748), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (737, 748), False, 'import pytest\n'), ((758, 796), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'height': '(50)'}), '(groove, height=50)\n', (777, 796), False, 'from pyroll.core import SquareGroove, Profile\n'), ((806, 830), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (819, 830), False, 'import pytest\n'), ((840, 874), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'gap': '(3)'}), '(groove, gap=3)\n', (859, 874), False, 'from pyroll.core import SquareGroove, Profile\n'), ((884, 908), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (897, 908), False, 'import pytest\n'), ((918, 955), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'width': '(55)'}), '(groove, width=55)\n', (937, 955), False, 'from pyroll.core import SquareGroove, Profile\n'), ((965, 989), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (978, 989), False, 'import pytest\n'), ((999, 1039), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'filling': '(0.9)'}), '(groove, filling=0.9)\n', (1018, 1039), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1049, 1074), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1062, 1074), False, 'import pytest\n'), ((1084, 1132), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'height': '(-1)', 'width': '(50)'}), '(groove, height=-1, width=50)\n', (1103, 1132), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1142, 1167), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1155, 1167), False, 'import pytest\n'), ((1177, 1222), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'gap': '(-1)', 'width': '(50)'}), '(groove, gap=-1, width=50)\n', (1196, 1222), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1232, 1257), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1245, 1257), False, 'import pytest\n'), ((1267, 1315), 'pyroll.core.Profile.from_groove', 
'Profile.from_groove', (['groove'], {'width': '(-1)', 'height': '(50)'}), '(groove, width=-1, height=50)\n', (1286, 1315), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1325, 1350), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1338, 1350), False, 'import pytest\n'), ((1360, 1409), 'pyroll.core.Profile.from_groove', 'Profile.from_groove', (['groove'], {'filling': '(0)', 'height': '(50)'}), '(groove, filling=0, height=50)\n', (1379, 1409), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1646, 1780), 'pytest.xfail', 'pytest.xfail', (['"""Expected to fail if ran together with CLI tests, since CLI is modifying logging, so pytest does not capture."""'], {}), "(\n 'Expected to fail if ran together with CLI tests, since CLI is modifying logging, so pytest does not capture.'\n )\n", (1658, 1780), False, 'import pytest\n'), ((2059, 2084), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2072, 2084), False, 'import pytest\n'), ((2094, 2118), 'pyroll.core.Profile.round', 'Profile.round', ([], {'radius': '(-1)'}), '(radius=-1)\n', (2107, 2118), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2128, 2153), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2141, 2153), False, 'import pytest\n'), ((2163, 2188), 'pyroll.core.Profile.round', 'Profile.round', ([], {'diameter': '(0)'}), '(diameter=0)\n', (2176, 2188), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2546, 2570), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2559, 2570), False, 'import pytest\n'), ((2580, 2616), 'pyroll.core.Profile.square', 'Profile.square', ([], {'side': '(10)', 'diagonal': '(10)'}), '(side=10, diagonal=10)\n', (2594, 2616), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2626, 2650), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (2639, 2650), False, 'import pytest\n'), ((2660, 2676), 'pyroll.core.Profile.square', 'Profile.square', ([], {}), '()\n', (2674, 2676), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2686, 2711), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2699, 2711), False, 'import pytest\n'), ((2721, 2744), 'pyroll.core.Profile.square', 'Profile.square', ([], {'side': '(-1)'}), '(side=-1)\n', (2735, 2744), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2754, 2779), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2767, 2779), False, 'import pytest\n'), ((2789, 2815), 'pyroll.core.Profile.square', 'Profile.square', ([], {'diagonal': '(0)'}), '(diagonal=0)\n', (2803, 2815), False, 'from pyroll.core import SquareGroove, Profile\n'), ((2825, 2850), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (2838, 2850), False, 'import pytest\n'), ((2860, 2901), 'pyroll.core.Profile.square', 'Profile.square', ([], {'corner_radius': '(-1)', 'side': '(10)'}), '(corner_radius=-1, side=10)\n', (2874, 2901), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3045, 3070), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3058, 3070), False, 'import pytest\n'), ((3080, 3111), 'pyroll.core.Profile.box', 'Profile.box', ([], {'height': '(-1)', 'width': '(5)'}), '(height=-1, width=5)\n', (3091, 3111), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3121, 3146), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3134, 3146), False, 'import pytest\n'), ((3156, 3188), 
'pyroll.core.Profile.box', 'Profile.box', ([], {'height': '(10)', 'width': '(-1)'}), '(height=10, width=-1)\n', (3167, 3188), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3198, 3223), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3211, 3223), False, 'import pytest\n'), ((3233, 3282), 'pyroll.core.Profile.box', 'Profile.box', ([], {'corner_radius': '(-1)', 'height': '(10)', 'width': '(5)'}), '(corner_radius=-1, height=10, width=5)\n', (3244, 3282), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3442, 3467), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3455, 3467), False, 'import pytest\n'), ((3477, 3512), 'pyroll.core.Profile.diamond', 'Profile.diamond', ([], {'height': '(-1)', 'width': '(5)'}), '(height=-1, width=5)\n', (3492, 3512), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3522, 3547), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3535, 3547), False, 'import pytest\n'), ((3557, 3593), 'pyroll.core.Profile.diamond', 'Profile.diamond', ([], {'height': '(10)', 'width': '(-1)'}), '(height=10, width=-1)\n', (3572, 3593), False, 'from pyroll.core import SquareGroove, Profile\n'), ((3603, 3628), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (3616, 3628), False, 'import pytest\n'), ((3638, 3691), 'pyroll.core.Profile.diamond', 'Profile.diamond', ([], {'corner_radius': '(-1)', 'height': '(10)', 'width': '(5)'}), '(corner_radius=-1, height=10, width=5)\n', (3653, 3691), False, 'from pyroll.core import SquareGroove, Profile\n'), ((1454, 1481), 'logging.getLogger', 'logging.getLogger', (['"""pyroll"""'], {}), "('pyroll')\n", (1471, 1481), False, 'import logging\n'), ((2298, 2308), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (2305, 2308), True, 'import numpy as np\n'), ((2448, 2458), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (2455, 2458), True, 'import numpy as np\n'), ((3898, 3947), 'shapely.affinity.rotate', 'rotate', (['p2.cross_section'], {'angle': '(45)', 'origin': '(0, 0)'}), '(p2.cross_section, angle=45, origin=(0, 0))\n', (3904, 3947), False, 'from shapely.affinity import rotate\n'), ((4128, 4177), 'shapely.affinity.rotate', 'rotate', (['p2.cross_section'], {'angle': '(45)', 'origin': '(0, 0)'}), '(p2.cross_section, angle=45, origin=(0, 0))\n', (4134, 4177), False, 'from shapely.affinity import rotate\n')]
|
import numpy as np
import pytest
from fisher.cfisher import pvalue, pvalue_npy
# Computed by ``fisher.test`` in R 3.2.2 and printed with
# ``sprintf("%.16f")``.
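# pvalue() returns an object exposing left_tail, right_tail and two_tail
# attributes, which are compared against the R reference values with a fixed
# epsilon below.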
@pytest.mark.parametrize("table,expected", [
([[100, 2], [1000, 5]],
(0.1300759363430016, 0.9797904453147230, 0.1300759363430016)),
([[2, 100], [5, 1000]],
(0.9797904453147230, 0.1300759363430016, 0.1300759363430016)),
([[2, 7], [8, 2]],
(0.0185217259520665, 0.9990149169715733, 0.0230141375652212)),
([[5, 1], [10, 10]],
(0.9782608695652173, 0.1652173913043478, 0.1973244147157191)),
([[5, 15], [20, 20]],
(0.0562577507439996, 0.9849086665340765, 0.0958044001247763)),
([[5, 16], [20, 25]],
(0.0891382278309642, 0.9723490195633506, 0.1725864953812995)),
([[10, 5], [10, 1]],
(0.1652173913043479, 0.9782608695652174, 0.1973244147157192)),
([[10, 5], [10, 0]],
(0.0565217391304348, 1.0000000000000000, 0.0612648221343874)),
([[5, 0], [1, 4]],
(1.0000000000000000, 0.0238095238095238, 0.0476190476190476)),
([[0, 5], [1, 4]],
(0.5000000000000000, 1.0000000000000000, 1.0000000000000000)),
([[5, 1], [0, 4]],
(1.0000000000000000, 0.0238095238095238, 0.0476190476190476)),
([[0, 1], [3, 2]],
(0.4999999999999999, 1.0000000000000000, 1.0000000000000000))
])
def test_against_r(table, expected):
epsilon = 1e-10
p = pvalue(table[0][0], table[0][1], table[1][0], table[1][1])
assert abs(p.left_tail - expected[0]) < epsilon
assert abs(p.right_tail - expected[1]) < epsilon
assert abs(p.two_tail - expected[2]) < epsilon
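# Illustrative usage sketch (not part of the original test module): the pvalue
# function exercised above can be called directly; the left_tail/right_tail/
# two_tail attribute names come from the assertions in test_against_r.
if __name__ == "__main__":
    p = pvalue(100, 2, 1000, 5)  # contingency table [[100, 2], [1000, 5]]
    print(p.left_tail, p.right_tail, p.two_tail)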
|
[
"pytest.mark.parametrize",
"fisher.cfisher.pvalue"
] |
[((164, 1152), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""table,expected"""', '[([[100, 2], [1000, 5]], (0.1300759363430016, 0.979790445314723, \n 0.1300759363430016)), ([[2, 100], [5, 1000]], (0.979790445314723, \n 0.1300759363430016, 0.1300759363430016)), ([[2, 7], [8, 2]], (\n 0.0185217259520665, 0.9990149169715733, 0.0230141375652212)), ([[5, 1],\n [10, 10]], (0.9782608695652173, 0.1652173913043478, 0.1973244147157191)\n ), ([[5, 15], [20, 20]], (0.0562577507439996, 0.9849086665340765, \n 0.0958044001247763)), ([[5, 16], [20, 25]], (0.0891382278309642, \n 0.9723490195633506, 0.1725864953812995)), ([[10, 5], [10, 1]], (\n 0.1652173913043479, 0.9782608695652174, 0.1973244147157192)), ([[10, 5],\n [10, 0]], (0.0565217391304348, 1.0, 0.0612648221343874)), ([[5, 0], [1,\n 4]], (1.0, 0.0238095238095238, 0.0476190476190476)), ([[0, 5], [1, 4]],\n (0.5, 1.0, 1.0)), ([[5, 1], [0, 4]], (1.0, 0.0238095238095238, \n 0.0476190476190476)), ([[0, 1], [3, 2]], (0.4999999999999999, 1.0, 1.0))]'], {}), "('table,expected', [([[100, 2], [1000, 5]], (\n 0.1300759363430016, 0.979790445314723, 0.1300759363430016)), ([[2, 100],\n [5, 1000]], (0.979790445314723, 0.1300759363430016, 0.1300759363430016)\n ), ([[2, 7], [8, 2]], (0.0185217259520665, 0.9990149169715733, \n 0.0230141375652212)), ([[5, 1], [10, 10]], (0.9782608695652173, \n 0.1652173913043478, 0.1973244147157191)), ([[5, 15], [20, 20]], (\n 0.0562577507439996, 0.9849086665340765, 0.0958044001247763)), ([[5, 16],\n [20, 25]], (0.0891382278309642, 0.9723490195633506, 0.1725864953812995)\n ), ([[10, 5], [10, 1]], (0.1652173913043479, 0.9782608695652174, \n 0.1973244147157192)), ([[10, 5], [10, 0]], (0.0565217391304348, 1.0, \n 0.0612648221343874)), ([[5, 0], [1, 4]], (1.0, 0.0238095238095238, \n 0.0476190476190476)), ([[0, 5], [1, 4]], (0.5, 1.0, 1.0)), ([[5, 1], [0,\n 4]], (1.0, 0.0238095238095238, 0.0476190476190476)), ([[0, 1], [3, 2]],\n (0.4999999999999999, 1.0, 1.0))])\n", (187, 1152), False, 'import pytest\n'), ((1389, 1447), 'fisher.cfisher.pvalue', 'pvalue', (['table[0][0]', 'table[0][1]', 'table[1][0]', 'table[1][1]'], {}), '(table[0][0], table[0][1], table[1][0], table[1][1])\n', (1395, 1447), False, 'from fisher.cfisher import pvalue, pvalue_npy\n')]
|
"""Tests for core module."""
from dataclasses import dataclass
from pathlib import Path
import pytest
from .helpers import append
from .helpers import branch
from .helpers import touch
from .helpers import write
from retrocookie import core
from retrocookie import git
from retrocookie import retrocookie
def in_template(path: Path) -> Path:
"""Prepend the template directory to the path."""
return "{{cookiecutter.project_slug}}" / path
@dataclass
class Example:
"""Example data for the test cases."""
path: Path = Path("README.md")
text: str = "Lorem Ipsum\n"
@pytest.fixture
def example() -> Example:
"""Fixture with example data."""
return Example()
@pytest.mark.parametrize(
"text, expected",
[
("Lorem Ipsum\n", "Lorem Ipsum\n"),
(
"This project is called example.\n",
"This project is called {{cookiecutter.project_slug}}.\n",
),
(
"python-version: ${{ matrix.python-version }}",
'python-version: ${{"{{"}} matrix.python-version {{"}}"}}',
),
],
)
def test_rewrite(
cookiecutter_repository: git.Repository,
cookiecutter_instance_repository: git.Repository,
text: str,
expected: str,
example: Example,
) -> None:
"""It rewrites the file contents as expected."""
cookiecutter, instance = cookiecutter_repository, cookiecutter_instance_repository
with branch(instance, "topic", create=True):
append(instance, example.path, text)
retrocookie(
instance.path,
path=cookiecutter.path,
branch="topic",
create_branch="topic",
)
with branch(cookiecutter, "topic"):
assert expected in cookiecutter.read_text(in_template(example.path))
def test_branch(
cookiecutter_repository: git.Repository,
cookiecutter_instance_repository: git.Repository,
example: Example,
) -> None:
"""It creates the specified branch."""
cookiecutter, instance = cookiecutter_repository, cookiecutter_instance_repository
with branch(instance, "topic", create=True):
append(instance, example.path, example.text)
retrocookie(
instance.path,
path=cookiecutter.path,
branch="topic",
create_branch="just-another-branch",
)
with branch(cookiecutter, "just-another-branch"):
assert example.text in cookiecutter.read_text(in_template(example.path))
def test_upstream(
cookiecutter_repository: git.Repository,
cookiecutter_instance_repository: git.Repository,
example: Example,
) -> None:
"""It does not apply changes from the upstream branch."""
cookiecutter, instance = cookiecutter_repository, cookiecutter_instance_repository
another = Path("file.txt")
with branch(instance, "upstream", create=True):
touch(instance, another)
with branch(instance, "topic", create=True):
append(instance, example.path, example.text)
retrocookie(
instance.path,
path=cookiecutter.path,
upstream="upstream",
branch="topic",
create_branch="topic",
)
with branch(cookiecutter, "topic"):
assert not cookiecutter.exists(another)
assert example.text in cookiecutter.read_text(in_template(example.path))
def test_single_commit(
cookiecutter_repository: git.Repository,
cookiecutter_instance_repository: git.Repository,
example: Example,
) -> None:
"""It cherry-picks the specified commit."""
cookiecutter, instance = cookiecutter_repository, cookiecutter_instance_repository
append(instance, example.path, example.text)
retrocookie(instance.path, ["HEAD"], path=cookiecutter.path)
assert example.text in cookiecutter.read_text(in_template(example.path))
def test_multiple_commits_sequential(
cookiecutter_repository: git.Repository,
cookiecutter_instance_repository: git.Repository,
) -> None:
"""It cherry-picks the specified commits."""
cookiecutter, instance = cookiecutter_repository, cookiecutter_instance_repository
names = "first", "second"
for name in names:
touch(instance, Path(name))
retrocookie(instance.path, ["HEAD~2.."], path=cookiecutter.path)
for name in names:
path = in_template(Path(name))
assert cookiecutter.exists(path)
def test_multiple_commits_parallel(
cookiecutter_repository: git.Repository,
cookiecutter_instance_repository: git.Repository,
) -> None:
"""It cherry-picks the specified commits."""
cookiecutter, instance = cookiecutter_repository, cookiecutter_instance_repository
names = "first", "second"
for name in names:
with branch(instance, name, create=True):
touch(instance, Path(name))
retrocookie(instance.path, names, path=cookiecutter.path)
for name in names:
path = in_template(Path(name))
assert cookiecutter.exists(path)
def test_find_template_directory_fails(tmp_path: Path) -> None:
"""It raises an exception when there is no template directory."""
repository = git.Repository.init(tmp_path)
with pytest.raises(Exception):
core.find_template_directory(repository)
def test_load_context_error(cookiecutter_instance_repository: git.Repository) -> None:
"""It raises an exception when .cookiecutter.json is not JSON dictionary."""
write(cookiecutter_instance_repository, Path(".cookiecutter.json"), "[]")
with pytest.raises(TypeError):
core.load_context(cookiecutter_instance_repository, "HEAD")
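# Illustrative usage sketch (not part of the original test module): the call
# pattern these tests exercise, importing a topic branch from an instance
# repository back into its cookiecutter template. The repository paths below
# are hypothetical placeholders, not values from this project.
#
#   from pathlib import Path
#   from retrocookie import retrocookie
#
#   retrocookie(
#       Path("/path/to/instance-repo"),
#       path=Path("/path/to/cookiecutter-template"),
#       branch="topic",
#       create_branch="topic",
#   )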
|
[
"retrocookie.git.Repository.init",
"retrocookie.core.find_template_directory",
"pytest.raises",
"pathlib.Path",
"retrocookie.core.load_context",
"pytest.mark.parametrize",
"retrocookie.retrocookie"
] |
[((694, 1001), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""text, expected"""', '[(\'Lorem Ipsum\\n\', \'Lorem Ipsum\\n\'), (\'This project is called example.\\n\',\n """This project is called {{cookiecutter.project_slug}}.\n"""), (\n \'python-version: ${{ matrix.python-version }}\',\n \'python-version: ${{"{{"}} matrix.python-version {{"}}"}}\')]'], {}), '(\'text, expected\', [(\'Lorem Ipsum\\n\',\n \'Lorem Ipsum\\n\'), ("""This project is called example.\n""",\n \'This project is called {{cookiecutter.project_slug}}.\\n\'), (\n \'python-version: ${{ matrix.python-version }}\',\n \'python-version: ${{"{{"}} matrix.python-version {{"}}"}}\')])\n', (717, 1001), False, 'import pytest\n'), ((539, 556), 'pathlib.Path', 'Path', (['"""README.md"""'], {}), "('README.md')\n", (543, 556), False, 'from pathlib import Path\n'), ((1518, 1611), 'retrocookie.retrocookie', 'retrocookie', (['instance.path'], {'path': 'cookiecutter.path', 'branch': '"""topic"""', 'create_branch': '"""topic"""'}), "(instance.path, path=cookiecutter.path, branch='topic',\n create_branch='topic')\n", (1529, 1611), False, 'from retrocookie import retrocookie\n'), ((2154, 2261), 'retrocookie.retrocookie', 'retrocookie', (['instance.path'], {'path': 'cookiecutter.path', 'branch': '"""topic"""', 'create_branch': '"""just-another-branch"""'}), "(instance.path, path=cookiecutter.path, branch='topic',\n create_branch='just-another-branch')\n", (2165, 2261), False, 'from retrocookie import retrocookie\n'), ((2750, 2766), 'pathlib.Path', 'Path', (['"""file.txt"""'], {}), "('file.txt')\n", (2754, 2766), False, 'from pathlib import Path\n'), ((2968, 3082), 'retrocookie.retrocookie', 'retrocookie', (['instance.path'], {'path': 'cookiecutter.path', 'upstream': '"""upstream"""', 'branch': '"""topic"""', 'create_branch': '"""topic"""'}), "(instance.path, path=cookiecutter.path, upstream='upstream',\n branch='topic', create_branch='topic')\n", (2979, 3082), False, 'from retrocookie import retrocookie\n'), ((3643, 3703), 'retrocookie.retrocookie', 'retrocookie', (['instance.path', "['HEAD']"], {'path': 'cookiecutter.path'}), "(instance.path, ['HEAD'], path=cookiecutter.path)\n", (3654, 3703), False, 'from retrocookie import retrocookie\n'), ((4163, 4227), 'retrocookie.retrocookie', 'retrocookie', (['instance.path', "['HEAD~2..']"], {'path': 'cookiecutter.path'}), "(instance.path, ['HEAD~2..'], path=cookiecutter.path)\n", (4174, 4227), False, 'from retrocookie import retrocookie\n'), ((4765, 4822), 'retrocookie.retrocookie', 'retrocookie', (['instance.path', 'names'], {'path': 'cookiecutter.path'}), '(instance.path, names, path=cookiecutter.path)\n', (4776, 4822), False, 'from retrocookie import retrocookie\n'), ((5080, 5109), 'retrocookie.git.Repository.init', 'git.Repository.init', (['tmp_path'], {}), '(tmp_path)\n', (5099, 5109), False, 'from retrocookie import git\n'), ((5119, 5143), 'pytest.raises', 'pytest.raises', (['Exception'], {}), '(Exception)\n', (5132, 5143), False, 'import pytest\n'), ((5153, 5193), 'retrocookie.core.find_template_directory', 'core.find_template_directory', (['repository'], {}), '(repository)\n', (5181, 5193), False, 'from retrocookie import core\n'), ((5408, 5434), 'pathlib.Path', 'Path', (['""".cookiecutter.json"""'], {}), "('.cookiecutter.json')\n", (5412, 5434), False, 'from pathlib import Path\n'), ((5451, 5475), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (5464, 5475), False, 'import pytest\n'), ((5485, 5544), 'retrocookie.core.load_context', 'core.load_context', 
(['cookiecutter_instance_repository', '"""HEAD"""'], {}), "(cookiecutter_instance_repository, 'HEAD')\n", (5502, 5544), False, 'from retrocookie import core\n'), ((4146, 4156), 'pathlib.Path', 'Path', (['name'], {}), '(name)\n', (4150, 4156), False, 'from pathlib import Path\n'), ((4279, 4289), 'pathlib.Path', 'Path', (['name'], {}), '(name)\n', (4283, 4289), False, 'from pathlib import Path\n'), ((4874, 4884), 'pathlib.Path', 'Path', (['name'], {}), '(name)\n', (4878, 4884), False, 'from pathlib import Path\n'), ((4748, 4758), 'pathlib.Path', 'Path', (['name'], {}), '(name)\n', (4752, 4758), False, 'from pathlib import Path\n')]
|
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.urls import reverse
from rest_framework.test import APIClient
from rest_framework import status
from core.models import Tag, Recipe
from recipe.serializers import TagSerializer
TAG_URL = reverse('recipe:tag-list')
class PublicTagsApiTests(TestCase):
"""Test the publicly available tags API """
def setUp(self):
self.client = APIClient()
def test_login_required(self):
"""Test that login is required for retrieving tags"""
res = self.client.get(TAG_URL)
self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
class PrivateTagsApiTests(TestCase):
"""Test the authorized user tags API"""
def setUp(self):
self.client = APIClient()
self.user = get_user_model().objects.create_user(
'<EMAIL>',
'test123'
)
self.client.force_authenticate(self.user)
def test_retrieve_tags(self):
"""Test retrieving tags"""
Tag.objects.create(user=self.user, name="TestTag1")
Tag.objects.create(user=self.user, name="TestTag2")
res = self.client.get(TAG_URL)
tags = Tag.objects.all().order_by('-name')
serializer = TagSerializer(tags, many=True)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(res.data, serializer.data)
def test_tags_limited_to_user(self):
"""Test that tags returned are for the authenticaded user"""
user2 = get_user_model().objects.create_user(
'<EMAIL>',
'test1234'
)
Tag.objects.create(user=user2, name='TestTagUser2')
tag = Tag.objects.create(user=self.user, name="TestTagUser1")
res = self.client.get(TAG_URL)
self.assertEqual(res.status_code, status.HTTP_200_OK)
self.assertEqual(len(res.data), 1)
self.assertEqual(res.data[0]['name'], tag.name)
    def test_create_tag_successful(self):
"""Test creating a new tag"""
payload = {'name': 'TestTag'}
self.client.post(TAG_URL, payload)
exists = Tag.objects.filter(
user=self.user,
name=payload['name']
).exists()
self.assertTrue(exists)
def test_create_tag_invalid(self):
"""Test creating a new task with invalid payload"""
payload = {'name': ''}
res = self.client.post(TAG_URL, payload)
self.assertEqual(res.status_code, status.HTTP_400_BAD_REQUEST)
def test_retrieve_tag_assign_to_recipe(self):
"""Test filtering tags by those assigned to recipes"""
tag1 = Tag.objects.create(user=self.user, name='breakfast')
tag2 = Tag.objects.create(user=self.user, name='lunch')
recipe = Recipe.objects.create(
title='coriander eggs on toast',
time_minutes=10,
price=5.00,
user=self.user)
recipe.tags.add(tag1)
res = self.client.get(TAG_URL, {'assigned_only': 1})
serializer1 = TagSerializer(tag1)
serializer2 = TagSerializer(tag2)
self.assertIn(serializer1.data, res.data)
self.assertNotIn(serializer2.data, res.data)
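# Illustrative sketch (not part of the original test module) of the client
# pattern the tests above rely on; it must run inside a configured Django
# test/shell context, and the credentials are placeholders.
#
#   client = APIClient()
#   user = get_user_model().objects.create_user('user@example.test', 'password123')
#   client.force_authenticate(user)
#   client.post(reverse('recipe:tag-list'), {'name': 'Dessert'})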
|
[
"core.models.Tag.objects.create",
"core.models.Tag.objects.filter",
"core.models.Recipe.objects.create",
"django.contrib.auth.get_user_model",
"django.urls.reverse",
"recipe.serializers.TagSerializer",
"core.models.Tag.objects.all",
"rest_framework.test.APIClient"
] |
[((281, 307), 'django.urls.reverse', 'reverse', (['"""recipe:tag-list"""'], {}), "('recipe:tag-list')\n", (288, 307), False, 'from django.urls import reverse\n'), ((438, 449), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (447, 449), False, 'from rest_framework.test import APIClient\n'), ((787, 798), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (796, 798), False, 'from rest_framework.test import APIClient\n'), ((1040, 1091), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""TestTag1"""'}), "(user=self.user, name='TestTag1')\n", (1058, 1091), False, 'from core.models import Tag, Recipe\n'), ((1100, 1151), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""TestTag2"""'}), "(user=self.user, name='TestTag2')\n", (1118, 1151), False, 'from core.models import Tag, Recipe\n'), ((1265, 1295), 'recipe.serializers.TagSerializer', 'TagSerializer', (['tags'], {'many': '(True)'}), '(tags, many=True)\n', (1278, 1295), False, 'from recipe.serializers import TagSerializer\n'), ((1641, 1692), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'user2', 'name': '"""TestTagUser2"""'}), "(user=user2, name='TestTagUser2')\n", (1659, 1692), False, 'from core.models import Tag, Recipe\n'), ((1707, 1762), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""TestTagUser1"""'}), "(user=self.user, name='TestTagUser1')\n", (1725, 1762), False, 'from core.models import Tag, Recipe\n'), ((2661, 2713), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""breakfast"""'}), "(user=self.user, name='breakfast')\n", (2679, 2713), False, 'from core.models import Tag, Recipe\n'), ((2729, 2777), 'core.models.Tag.objects.create', 'Tag.objects.create', ([], {'user': 'self.user', 'name': '"""lunch"""'}), "(user=self.user, name='lunch')\n", (2747, 2777), False, 'from core.models import Tag, Recipe\n'), ((2795, 2897), 'core.models.Recipe.objects.create', 'Recipe.objects.create', ([], {'title': '"""coriander eggs on toast"""', 'time_minutes': '(10)', 'price': '(5.0)', 'user': 'self.user'}), "(title='coriander eggs on toast', time_minutes=10,\n price=5.0, user=self.user)\n", (2816, 2897), False, 'from core.models import Tag, Recipe\n'), ((3060, 3079), 'recipe.serializers.TagSerializer', 'TagSerializer', (['tag1'], {}), '(tag1)\n', (3073, 3079), False, 'from recipe.serializers import TagSerializer\n'), ((3102, 3121), 'recipe.serializers.TagSerializer', 'TagSerializer', (['tag2'], {}), '(tag2)\n', (3115, 3121), False, 'from recipe.serializers import TagSerializer\n'), ((1208, 1225), 'core.models.Tag.objects.all', 'Tag.objects.all', ([], {}), '()\n', (1223, 1225), False, 'from core.models import Tag, Recipe\n'), ((2146, 2202), 'core.models.Tag.objects.filter', 'Tag.objects.filter', ([], {'user': 'self.user', 'name': "payload['name']"}), "(user=self.user, name=payload['name'])\n", (2164, 2202), False, 'from core.models import Tag, Recipe\n'), ((819, 835), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (833, 835), False, 'from django.contrib.auth import get_user_model\n'), ((1538, 1554), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (1552, 1554), False, 'from django.contrib.auth import get_user_model\n')]
|
import random
import torch
import numpy as np
from torch import nn
from torch.nn.utils.rnn import pad_sequence
from torch.utils.data import Dataset, DataLoader, random_split
from torchvision import transforms, utils
class ParallelCNN(nn.Module):
def __init__(self, para_ker, pool_kernel=6, drop=0.5):
"""
        Multiple CNN layers applied to the input in parallel; their outputs are concatenated
        :param para_ker: List of kernel sizes that will be used
:param pool_kernel: Pooling parameter after CNN
:param drop: Dropout parameter
"""
super(ParallelCNN, self).__init__()
self.lseq = nn.ModuleList()
for k in para_ker:
seq = nn.Sequential(
nn.Conv1d(4, 4, kernel_size=k, padding="same"),
nn.ReLU(),
nn.MaxPool1d(pool_kernel),
nn.Dropout(drop)
)
self.lseq.append(seq)
def forward(self, inputs):
"""
:param inputs: DNA onehot sequences [batch_size x 4 x length]
        :return: Stacked CNN output features from the different kernel sizes, concatenated along the channel axis [batch_size x (4 * len(para_ker)) x pooled_length]
"""
_x = list()
for seq in self.lseq:
x = seq(inputs)
_x.append(x)
        # concatenate the outputs of every conv layer along the channel dimension
_x = torch.cat(_x, 1)
return _x
class BidirectionalLSTM(nn.Module):
def __init__(self, input_size, hidden_size, output_size):
super(BidirectionalLSTM, self).__init__()
self.rnn = nn.LSTM(input_size, hidden_size, bidirectional=True, batch_first=True)
self.linear = nn.Linear(hidden_size * 2, output_size)
def forward(self, inputs):
"""
        :param inputs: input feature sequence [batch_size x T x input_size]
:return: contextual feature [batch_size x T x output_size]
"""
self.rnn.flatten_parameters()
recurrent, _ = self.rnn(inputs) # batch_size x T x input_size -> batch_size x T x (2*hidden_size)
output = self.linear(recurrent) # batch_size x T x output_size
return output
class DeePromoter(nn.Module):
def __init__(self, para_ker, input_shape=(64, 300, 4), pool_kernel=6, drop=0.5):
"""
        DeePromoter: parallel CNNs followed by a bidirectional LSTM and a two-class classifier head
        :param para_ker: List of kernel sizes that will be used
        :param input_shape: Example input shape (batch, length, channels) used only to infer the flattened feature size; the channel dimension must be 4
:param pool_kernel: Pooling parameter after CNN
:param drop: Dropout parameter
"""
super(DeePromoter, self).__init__()
binode = len(para_ker) * 4
self.pconv = ParallelCNN(para_ker, pool_kernel, drop)
self.bilstm = BidirectionalLSTM(binode, binode, binode)
self.flatten = nn.Flatten()
x = torch.zeros(input_shape)
shape = self.get_feature_shape(x)
self.fc = nn.Sequential(
nn.Linear(shape, shape),
nn.ReLU(),
nn.Linear(shape, 2),
)
def get_feature_shape(self, x):
"""Pass a dummy input through to find the shape
after flatten layer for Linear layer construction"""
x = x.permute(0, 2, 1)
x = self.pconv(x)
x = x.permute(0, 2, 1)
x = self.bilstm(x)
x = self.flatten(x)
return x.shape[1]
def forward(self, x):
x = x.permute(0, 2, 1)
x = self.pconv(x)
x = x.permute(0, 2, 1)
x = self.bilstm(x)
x = self.flatten(x)
x = self.fc(x)
return x
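# Illustrative smoke test (not part of the original module); the kernel-size
# list [27, 14, 7] is an arbitrary example, not a value taken from this file.
if __name__ == "__main__":
    model = DeePromoter(para_ker=[27, 14, 7], input_shape=(8, 300, 4))
    dummy = torch.rand(8, 300, 4)  # batch x length x channels, one-hot-like DNA input
    logits = model(dummy)
    print(logits.shape)  # expected: torch.Size([8, 2])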
|
[
"torch.nn.Dropout",
"torch.nn.ReLU",
"torch.nn.ModuleList",
"torch.nn.Conv1d",
"torch.nn.MaxPool1d",
"torch.cat",
"torch.nn.Linear",
"torch.zeros",
"torch.nn.LSTM",
"torch.nn.Flatten"
] |
[((622, 637), 'torch.nn.ModuleList', 'nn.ModuleList', ([], {}), '()\n', (635, 637), False, 'from torch import nn\n'), ((1309, 1325), 'torch.cat', 'torch.cat', (['_x', '(1)'], {}), '(_x, 1)\n', (1318, 1325), False, 'import torch\n'), ((1513, 1583), 'torch.nn.LSTM', 'nn.LSTM', (['input_size', 'hidden_size'], {'bidirectional': '(True)', 'batch_first': '(True)'}), '(input_size, hidden_size, bidirectional=True, batch_first=True)\n', (1520, 1583), False, 'from torch import nn\n'), ((1606, 1645), 'torch.nn.Linear', 'nn.Linear', (['(hidden_size * 2)', 'output_size'], {}), '(hidden_size * 2, output_size)\n', (1615, 1645), False, 'from torch import nn\n'), ((2696, 2708), 'torch.nn.Flatten', 'nn.Flatten', ([], {}), '()\n', (2706, 2708), False, 'from torch import nn\n'), ((2721, 2745), 'torch.zeros', 'torch.zeros', (['input_shape'], {}), '(input_shape)\n', (2732, 2745), False, 'import torch\n'), ((2834, 2857), 'torch.nn.Linear', 'nn.Linear', (['shape', 'shape'], {}), '(shape, shape)\n', (2843, 2857), False, 'from torch import nn\n'), ((2871, 2880), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (2878, 2880), False, 'from torch import nn\n'), ((2894, 2913), 'torch.nn.Linear', 'nn.Linear', (['shape', '(2)'], {}), '(shape, 2)\n', (2903, 2913), False, 'from torch import nn\n'), ((714, 760), 'torch.nn.Conv1d', 'nn.Conv1d', (['(4)', '(4)'], {'kernel_size': 'k', 'padding': '"""same"""'}), "(4, 4, kernel_size=k, padding='same')\n", (723, 760), False, 'from torch import nn\n'), ((778, 787), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (785, 787), False, 'from torch import nn\n'), ((805, 830), 'torch.nn.MaxPool1d', 'nn.MaxPool1d', (['pool_kernel'], {}), '(pool_kernel)\n', (817, 830), False, 'from torch import nn\n'), ((848, 864), 'torch.nn.Dropout', 'nn.Dropout', (['drop'], {}), '(drop)\n', (858, 864), False, 'from torch import nn\n')]
|
"""This module contains logic for different API request types."""
import datetime
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Optional, Tuple, Union
import requests
from requests import Response
from vater.errors import (
ERROR_CODE_MAPPING,
InvalidRequestData,
MaximumParameterNumberExceeded,
UnknownExternalApiError,
)
from vater.models import Subject, SubjectSchema
class RequestType(ABC):
"""Base class for all request types."""
def __init__(self, url_pattern: str, *args, validators=None, **kwargs) -> None:
"""Initialize instance parameters."""
self.params: Dict[str, Any] = {}
self.url_pattern = url_pattern
self.validators = {} if validators is None else validators
self.validated_params: dict = {}
def _get_url(self) -> None:
"""Interpolate endpoint url."""
url = self.url_pattern
for key, value in self.validated_params.items(): # type: ignore
if f"{{{key}}}" in self.url_pattern:
if isinstance(value, (str, datetime.date)):
url = url.replace(f"{{{key}}}", str(value))
else:
url = url.replace(f"{{{key}}}", ",".join(value))
self.url = self.client.base_url + url # type: ignore
def register_params(self, **kwargs: Any) -> None:
"""Register parameters to the instance."""
self.client = kwargs.pop("client")
self.params = kwargs
if self.params["date"] is None: # type: ignore
self.params["date"] = datetime.date.today() # type: ignore
def validate(self) -> None:
"""Validate given parameters."""
for param, value in self.params.items(): # type: ignore
try:
for validator in self.validators[param]:
self.validated_params[param] = validator(value)
except KeyError:
self.validated_params[param] = value
def send_request(self) -> Response:
"""Get response from the API."""
self._get_url()
response = requests.get(self.url)
if response.status_code == 400:
raise InvalidRequestData(ERROR_CODE_MAPPING[response.json()["code"]])
elif response.status_code != 200:
raise UnknownExternalApiError(response.status_code, response.text)
return response
@abstractmethod
def result(self):
"""Return request result."""
class CheckRequest(RequestType):
"""Class for check requests type."""
def result(self) -> Union[dict, Tuple[bool, str]]:
"""Return check result if account is assigned to the subject and request id."""
self.validate()
response = self.send_request()
if self.params.get("raw"): # type: ignore
return response.json()
result = response.json()["result"]
return result["accountAssigned"] == "TAK", result["requestId"]
class SearchRequest(RequestType):
"""Class for search requests type."""
PARAM_LIMIT = 30
def __init__(self, url_pattern: str, many: bool = False, *args, **kwargs) -> None:
"""Initialize additional `many` attribute."""
super().__init__(url_pattern, *args, **kwargs)
self.many = many
def validate(self) -> None:
"""Validate given parameters."""
super().validate()
if not self.many:
return
param = ({*self.params} - {"raw", "date"}).pop() # type: ignore
if len(self.params[param]) > self.PARAM_LIMIT: # type: ignore
raise MaximumParameterNumberExceeded(param, self.PARAM_LIMIT)
def result(
self
) -> Union[dict, Tuple[Union[List[Subject], Optional[Subject]], str]]:
"""Return subject/subjects mapped to the specific parameter and request id."""
self.validate()
response = self.send_request()
if self.params.get("raw"): # type: ignore
return response.json()
result = response.json()["result"]
if not self.many and result["subject"] is None:
return None, result["requestId"]
return (
SubjectSchema().load(
result["subjects" if self.many else "subject"], many=self.many
),
result["requestId"],
)
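# Illustrative sketch (not part of the original module) of the request lifecycle
# implemented above. The client object, base URL and URL pattern are hypothetical
# stand-ins for whatever the real vater client supplies; no HTTP request is made.
if __name__ == "__main__":
    class _FakeClient:
        base_url = "https://example.invalid/api/"  # hypothetical base URL

    request = CheckRequest("check/nip/{nip}/bank-account/{account}")
    request.register_params(client=_FakeClient(), nip="1234567890",
                            account="0" * 26, date=None)
    request.validate()
    request._get_url()
    print(request.url)  # base_url + pattern with {nip} and {account} interpolated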
|
[
"datetime.date.today",
"vater.errors.UnknownExternalApiError",
"vater.models.SubjectSchema",
"requests.get",
"vater.errors.MaximumParameterNumberExceeded"
] |
[((2104, 2126), 'requests.get', 'requests.get', (['self.url'], {}), '(self.url)\n', (2116, 2126), False, 'import requests\n'), ((1578, 1599), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1597, 1599), False, 'import datetime\n'), ((3595, 3650), 'vater.errors.MaximumParameterNumberExceeded', 'MaximumParameterNumberExceeded', (['param', 'self.PARAM_LIMIT'], {}), '(param, self.PARAM_LIMIT)\n', (3625, 3650), False, 'from vater.errors import ERROR_CODE_MAPPING, InvalidRequestData, MaximumParameterNumberExceeded, UnknownExternalApiError\n'), ((2310, 2370), 'vater.errors.UnknownExternalApiError', 'UnknownExternalApiError', (['response.status_code', 'response.text'], {}), '(response.status_code, response.text)\n', (2333, 2370), False, 'from vater.errors import ERROR_CODE_MAPPING, InvalidRequestData, MaximumParameterNumberExceeded, UnknownExternalApiError\n'), ((4169, 4184), 'vater.models.SubjectSchema', 'SubjectSchema', ([], {}), '()\n', (4182, 4184), False, 'from vater.models import Subject, SubjectSchema\n')]
|
import re
try:
import setuptools
except ImportError:
import distutils.core
setup = distutils.core.setup
else:
setup = setuptools.setup
setup(
name='txwac',
version=(re
.compile(r".*__version__ = '(.*?)'", re.S)
.match(open('txwac.py').read())
.group(1)),
url='https://github.com/trenton42/txwac/',
license=open('LICENSE').read(),
author='wac',
author_email='<EMAIL>',
description='Writing RESTful API clients.',
long_description=(
open('README.rst').read() + '\n\n' +
open('HISTORY.rst').read()
),
py_modules=['txwac'],
package_data={'': ['LICENSE']},
include_package_data=True,
tests_require=[
'mock>=0.8',
'simplejson >= 2.1',
'unittest2 >= 0.5.1',
'iso8601',
],
install_requires=[
'treq'
],
test_suite='trial',
classifiers=[
'Intended Audience :: Developers',
'Development Status :: 4 - Beta',
'Natural Language :: English',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
],
)
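# Illustrative note (not part of the original setup script): the version string
# is pulled out of txwac.py with a regex; the same idiom on a stand-in source
# string looks like this:
#
#   import re
#   source = "__version__ = '0.2.1'\n"
#   version = re.compile(r".*__version__ = '(.*?)'", re.S).match(source).group(1)
#   assert version == '0.2.1'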
|
[
"re.compile"
] |
[((192, 235), 're.compile', 're.compile', (['""".*__version__ = \'(.*?)\'"""', 're.S'], {}), '(".*__version__ = \'(.*?)\'", re.S)\n', (202, 235), False, 'import re\n')]
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
import sys
import os
import struct
"""
python str16_bin_convert.py file(can include path)
"""
#input file
#output file
def Str16_to_binFile(inputpath, outpath):
with open(inputpath, 'r') as f:
str_buffer = f.read()
#print(str_buffer)
with open(outpath,'wb') as f1:
j = 0
while(j < len(str_buffer) -1):
a = str_buffer[j:j+2]
j+=2
#print(a)
b = int("0x"+a, 16)
f1.write(struct.pack('B', b))
def binFile_to_Str16(inputpath, outpath):
buffer = []
Str16 = ''
with open(inputpath, 'rb') as f:
buffer = f.read()
print(type(buffer))
for i in buffer:
        # get each byte as a decimal value (as written, this assumes Python 2,
        # where iterating over a byte string yields 1-character strings)
        b = struct.unpack('B', i)[0]
        # convert to hexadecimal
        c = hex(b)
        # strip the leading 0x from the hex string
        d = str(c[2:]).upper()
        if(len(d)) == 1:  # pad to two digits with a leading zero
            d = '0'+ d
Str16 = Str16 + d
#print(Str16)
with open(outpath, 'w') as f:
f.write(Str16)
if __name__ == '__main__':
print('-' * 80)
    print('Usage: python str16_bin_convert.py input_file_path')
    print('python str16_bin_convert.py ./a.bin: convert the bytes of a.bin into the equivalent hex string!')
    print('python str16_bin_convert.py ./a.txt: convert the hex string in the txt file into the equivalent binary file!')
    print('For example: binary content "0x9D 0x2F 0x0D..." is converted to the string 9D2F0D...')
    print('The output file is written to the same directory')
print('-' * 80)
if(len(sys.argv) < 2):
        print('Please check the arguments!')
else:
input_file_path = sys.argv[1]
filepath, tempFileName = os.path.split(sys.argv[1])
filename, extension = os.path.splitext(tempFileName)
if(filepath == ''):
filepath = filepath + '.'
print(filepath)
if(extension == '.bin'):
output_file = filepath + '/' + filename + '.txt'
binFile_to_Str16(input_file_path, output_file)
            print('Output file: ' + output_file + ' finished!')
elif(extension == '.txt'):
output_file = filepath + '/' + filename + '.bin'
print(output_file)
Str16_to_binFile(input_file_path, output_file)
            print('Output file: ' + output_file + ' finished!')
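# Illustrative usage sketch (not part of the original script): the two converters
# can also be called from Python directly; the file names below are placeholders.
# Note that binFile_to_Str16, as written, assumes Python 2 byte-string iteration.
#
#   Str16_to_binFile('input_hex.txt', 'output.bin')   # "9D2F0D..." -> raw bytes
#   binFile_to_Str16('input.bin', 'output_hex.txt')   # raw bytes -> "9D2F0D..."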
|
[
"struct.unpack",
"os.path.split",
"os.path.splitext",
"struct.pack"
] |
[((1406, 1432), 'os.path.split', 'os.path.split', (['sys.argv[1]'], {}), '(sys.argv[1])\n', (1419, 1432), False, 'import os\n'), ((1458, 1488), 'os.path.splitext', 'os.path.splitext', (['tempFileName'], {}), '(tempFileName)\n', (1474, 1488), False, 'import os\n'), ((468, 487), 'struct.pack', 'struct.pack', (['"""B"""', 'b'], {}), "('B', b)\n", (479, 487), False, 'import struct\n'), ((689, 710), 'struct.unpack', 'struct.unpack', (['"""B"""', 'i'], {}), "('B', i)\n", (702, 710), False, 'import struct\n')]
|
import numpy as np
from matplotlib import pyplot
try:
import ConfigParser
except ModuleNotFoundError:
import configparser as ConfigParser
import argparse
import h5py
from scipy.signal import savgol_filter
import Pointing
from os import listdir, getcwd
from os.path import isfile, join
import Mapping
import mpi4py
import FitSource
import EphemNew
import healpy as hp
def cel2gal(ra,dec, inverse=False):
_r, _d = ra*np.pi/180., (np.pi/2. - dec*np.pi/180.)
if inverse:
r = hp.Rotator(coord=['G','C'])
else:
r = hp.Rotator(coord=['C','G'])
_d, _r = r(_d, _r)
return _r*180./np.pi, (np.pi/2. - _d)*180./np.pi
def SlewDistance(az):
daz = np.abs(az[:az.size-1] - az[1:az.size])
# loop over spikes
start = np.argmax(daz)
peaks = [start]
searchRange = 1000
indices = np.arange(daz.size).astype(int)
find = np.zeros(daz.size).astype(bool)
thres = 0.01
while True:
find = find | (indices > start-searchRange) & (indices < start + searchRange)
if (np.sum(find) == daz.size):
break
start = (indices[~find])[np.argmax(daz[~find])]
peaks += [start]
if np.max(daz[find]) < thres:
break
peaks = np.sort(np.array(peaks))
peakAz = az[peaks]
slewDist = np.abs(peakAz[:peakAz.size//2 *2:2] - peakAz[1:peakAz.size//2 *2:2])
return np.median(slewDist)
def main(filename, plotDir='Plots/'):
"""
"""
# Which pixels and sidebands?
pixelOffsets = Pointing.GetPixelOffsets('COMAP_FEEDS.dat')
# READ IN THE DATA
d = h5py.File(filename)
tod = d['spectrometer/tod']
mjd = d['spectrometer/MJD'][:]
if len(d['pointing/az'].shape) > 1:
az = d['pointing/az'][0,:]
el = d['pointing/el'][0,:]
else:
az = d['pointing/az'][:]
el = d['pointing/el'][:]
mjdpoint = d['pointing/MJD'][:]
slewDist = SlewDistance(az)
ra, dec, pa, az, el, mjd = Pointing.GetPointing(az, el, mjd,
mjdpoint, pixelOffsets,
lon=Pointing.comap_lon,
lat=Pointing.comap_lat)
# Calculate data sizes:
nHorns = tod.shape[0]
nSBs = tod.shape[1]
nFreqs = tod.shape[2]
nSamps = tod.shape[3]
# Calculate the position of Jupiter
clon, clat, diam = EphemNew.rdplan(mjd[0:1], 5,
Pointing.comap_lon*np.pi/180.,
Pointing.comap_lat*np.pi/180.)
EphemNew.precess(clon, clat, mjd[0:1])
# Loop over horns/SBs
P1out = None
prefix = filename.split('/')[-1].split('.')[0]
for iHorn in range(nHorns):
print('Processing Horn {:d}'.format(iHorn+1))
_tod = np.nanmean(np.nanmean(tod[iHorn,:,5:-5,:],axis=0),axis=0)
#Tim: Pass this function whatever chunk of time-ordered data you have in memory
P1, P1e, cross, mweight, weight, model = FitSource.FitTOD(_tod,
ra[0,:], # horn 0 because we want the relative offset from Focal Plane
dec[0,:],
clon*180./np.pi,
clat*180./np.pi,
pa[0,:],
prefix='{}_Horn{}'.format(prefix, iHorn+1),
plotDir=plotDir)
if isinstance(P1out, type(None)):
P1out = np.zeros((nHorns, len(P1)))
Peout = np.zeros((nHorns, len(P1e)))
mout = np.zeros(mweight.shape)
hout = np.zeros(weight.shape)
if not isinstance(P1, type(None)):
P1out[iHorn, :] = P1
Peout[iHorn, :] = P1e
mout += mweight*(model+1)**2
hout += weight*(model+1)**2
pyplot.imshow(mout/hout, extent=[-100/2. * 1.5, 100/2.*1.5,-100/2. * 1.5, 100/2.*1.5] )
pyplot.xlabel('Az offset (arcmin)')
pyplot.ylabel('EL offset (arcmin)')
pyplot.title('{}'.format(prefix))
pyplot.grid(True)
pyplot.savefig('{}/FeedPositions_{}.png'.format(plotDir, prefix), bbox_inches='tight')
pyplot.clf()
meanMJD = np.mean(mjd)
meanEl = np.median(el)
meanAz = np.median(az)
d.close()
print('SLEW DISTANCE', slewDist)
return P1out, Peout, mout/hout, meanMJD, meanEl, meanAz
from mpi4py import MPI
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--filename', type=str)
parser.add_argument('--filelist', default=None, type=str)
parser.add_argument('--fitoutputdir', default='.', type=str)
args = parser.parse_args()
P1 = None
if isinstance(args.filelist, type(None)):
main(args.filename)
else:
filelist = np.loadtxt(args.filelist, dtype=str)
for i, f in enumerate(filelist):
print('Opening',f)
_P1, _P1e, m, meanMJD, meanEl, meanAz = main(f)
prefix = f.split('/')[-1].split('.h')[0]
output = h5py.File('{}/{}_JupiterFits.h5'.format(args.fitoutputdir, prefix))
output['P1'] = _P1
output['P1e'] = _P1e
coords = np.zeros(3)
coords[:] = meanAz, meanEl, meanMJD,
output['coords'] = coords
output['map'] = m
output.close()
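# Illustrative check (not part of the original pipeline) of the cel2gal helper
# defined above: the galactic centre (RA ~266.405 deg, Dec ~-28.936 deg) should
# map to galactic (l, b) close to (0, 0), up to wrap-around in longitude.
#
#   l, b = cel2gal(266.405, -28.936)
#   print(l, b)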
|
[
"numpy.abs",
"argparse.ArgumentParser",
"numpy.sum",
"numpy.argmax",
"matplotlib.pyplot.clf",
"EphemNew.rdplan",
"numpy.mean",
"numpy.arange",
"Pointing.GetPixelOffsets",
"numpy.nanmean",
"matplotlib.pyplot.imshow",
"healpy.Rotator",
"numpy.max",
"numpy.loadtxt",
"Pointing.GetPointing",
"h5py.File",
"numpy.median",
"EphemNew.precess",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.grid",
"numpy.zeros",
"numpy.array",
"matplotlib.pyplot.xlabel"
] |
[((706, 746), 'numpy.abs', 'np.abs', (['(az[:az.size - 1] - az[1:az.size])'], {}), '(az[:az.size - 1] - az[1:az.size])\n', (712, 746), True, 'import numpy as np\n'), ((785, 799), 'numpy.argmax', 'np.argmax', (['daz'], {}), '(daz)\n', (794, 799), True, 'import numpy as np\n'), ((1321, 1395), 'numpy.abs', 'np.abs', (['(peakAz[:peakAz.size // 2 * 2:2] - peakAz[1:peakAz.size // 2 * 2:2])'], {}), '(peakAz[:peakAz.size // 2 * 2:2] - peakAz[1:peakAz.size // 2 * 2:2])\n', (1327, 1395), True, 'import numpy as np\n'), ((1402, 1421), 'numpy.median', 'np.median', (['slewDist'], {}), '(slewDist)\n', (1411, 1421), True, 'import numpy as np\n'), ((1549, 1592), 'Pointing.GetPixelOffsets', 'Pointing.GetPixelOffsets', (['"""COMAP_FEEDS.dat"""'], {}), "('COMAP_FEEDS.dat')\n", (1573, 1592), False, 'import Pointing\n'), ((1625, 1644), 'h5py.File', 'h5py.File', (['filename'], {}), '(filename)\n', (1634, 1644), False, 'import h5py\n'), ((2001, 2111), 'Pointing.GetPointing', 'Pointing.GetPointing', (['az', 'el', 'mjd', 'mjdpoint', 'pixelOffsets'], {'lon': 'Pointing.comap_lon', 'lat': 'Pointing.comap_lat'}), '(az, el, mjd, mjdpoint, pixelOffsets, lon=Pointing.\n comap_lon, lat=Pointing.comap_lat)\n', (2021, 2111), False, 'import Pointing\n'), ((2469, 2574), 'EphemNew.rdplan', 'EphemNew.rdplan', (['mjd[0:1]', '(5)', '(Pointing.comap_lon * np.pi / 180.0)', '(Pointing.comap_lat * np.pi / 180.0)'], {}), '(mjd[0:1], 5, Pointing.comap_lon * np.pi / 180.0, Pointing.\n comap_lat * np.pi / 180.0)\n', (2484, 2574), False, 'import EphemNew\n'), ((2645, 2683), 'EphemNew.precess', 'EphemNew.precess', (['clon', 'clat', 'mjd[0:1]'], {}), '(clon, clat, mjd[0:1])\n', (2661, 2683), False, 'import EphemNew\n'), ((4181, 4290), 'matplotlib.pyplot.imshow', 'pyplot.imshow', (['(mout / hout)'], {'extent': '[-100 / 2.0 * 1.5, 100 / 2.0 * 1.5, -100 / 2.0 * 1.5, 100 / 2.0 * 1.5]'}), '(mout / hout, extent=[-100 / 2.0 * 1.5, 100 / 2.0 * 1.5, -100 /\n 2.0 * 1.5, 100 / 2.0 * 1.5])\n', (4194, 4290), False, 'from matplotlib import pyplot\n'), ((4273, 4308), 'matplotlib.pyplot.xlabel', 'pyplot.xlabel', (['"""Az offset (arcmin)"""'], {}), "('Az offset (arcmin)')\n", (4286, 4308), False, 'from matplotlib import pyplot\n'), ((4313, 4348), 'matplotlib.pyplot.ylabel', 'pyplot.ylabel', (['"""EL offset (arcmin)"""'], {}), "('EL offset (arcmin)')\n", (4326, 4348), False, 'from matplotlib import pyplot\n'), ((4391, 4408), 'matplotlib.pyplot.grid', 'pyplot.grid', (['(True)'], {}), '(True)\n', (4402, 4408), False, 'from matplotlib import pyplot\n'), ((4504, 4516), 'matplotlib.pyplot.clf', 'pyplot.clf', ([], {}), '()\n', (4514, 4516), False, 'from matplotlib import pyplot\n'), ((4537, 4549), 'numpy.mean', 'np.mean', (['mjd'], {}), '(mjd)\n', (4544, 4549), True, 'import numpy as np\n'), ((4564, 4577), 'numpy.median', 'np.median', (['el'], {}), '(el)\n', (4573, 4577), True, 'import numpy as np\n'), ((4592, 4605), 'numpy.median', 'np.median', (['az'], {}), '(az)\n', (4601, 4605), True, 'import numpy as np\n'), ((4782, 4807), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (4805, 4807), False, 'import argparse\n'), ((514, 542), 'healpy.Rotator', 'hp.Rotator', ([], {'coord': "['G', 'C']"}), "(coord=['G', 'C'])\n", (524, 542), True, 'import healpy as hp\n'), ((564, 592), 'healpy.Rotator', 'hp.Rotator', ([], {'coord': "['C', 'G']"}), "(coord=['C', 'G'])\n", (574, 592), True, 'import healpy as hp\n'), ((1265, 1280), 'numpy.array', 'np.array', (['peaks'], {}), '(peaks)\n', (1273, 1280), True, 'import numpy as np\n'), ((5132, 5168), 
'numpy.loadtxt', 'np.loadtxt', (['args.filelist'], {'dtype': 'str'}), '(args.filelist, dtype=str)\n', (5142, 5168), True, 'import numpy as np\n'), ((857, 876), 'numpy.arange', 'np.arange', (['daz.size'], {}), '(daz.size)\n', (866, 876), True, 'import numpy as np\n'), ((900, 918), 'numpy.zeros', 'np.zeros', (['daz.size'], {}), '(daz.size)\n', (908, 918), True, 'import numpy as np\n'), ((1063, 1075), 'numpy.sum', 'np.sum', (['find'], {}), '(find)\n', (1069, 1075), True, 'import numpy as np\n'), ((1141, 1162), 'numpy.argmax', 'np.argmax', (['daz[~find]'], {}), '(daz[~find])\n', (1150, 1162), True, 'import numpy as np\n'), ((1200, 1217), 'numpy.max', 'np.max', (['daz[find]'], {}), '(daz[find])\n', (1206, 1217), True, 'import numpy as np\n'), ((2891, 2933), 'numpy.nanmean', 'np.nanmean', (['tod[iHorn, :, 5:-5, :]'], {'axis': '(0)'}), '(tod[iHorn, :, 5:-5, :], axis=0)\n', (2901, 2933), True, 'import numpy as np\n'), ((3918, 3941), 'numpy.zeros', 'np.zeros', (['mweight.shape'], {}), '(mweight.shape)\n', (3926, 3941), True, 'import numpy as np\n'), ((3961, 3983), 'numpy.zeros', 'np.zeros', (['weight.shape'], {}), '(weight.shape)\n', (3969, 3983), True, 'import numpy as np\n'), ((5529, 5540), 'numpy.zeros', 'np.zeros', (['(3)'], {}), '(3)\n', (5537, 5540), True, 'import numpy as np\n')]
|
"""
Collection of utility functions
"""
from copy import deepcopy
from functools import partial
from inspect import getmembers
from operator import itemgetter
from os import environ, path
from sys import version_info
def camel_case(st, upper=False):
"""
Convert string to camel-case (upper or lower)
:param st: input string
:type st: ```str```
:param upper: upper camelcase if True, else lower camelcase
:type upper: ```bool```
:return: camel case representation of input string
:rtype: ```str```
"""
output = "".join(x for x in st.title() if x.isalnum())
return getattr(output[0], "upper" if upper else "lower")() + output[1:]
def common_dataset_handler(
ds_builder,
scale,
K,
as_numpy,
acquire_and_concat_validation_to_train=True,
**download_and_prepare_kwargs
):
"""
Helper function that is to be used by the different dataset builders
:param ds_builder: dataset builder
:type ds_builder: ```Union[tfds.core.DatasetBuilder, Tuple[tf.data.Dataset, tf.data.Dataset],
Tuple[np.ndarray, np.ndarray]```
:param scale: rescale input (divide) by this amount, None for do nothing
:type scale: ```Optional[Union[int, float]]```
:param K: backend engine, e.g., `np` or `tf`
:type K: ```Literal['np', 'tf']```
:param as_numpy: Convert to numpy ndarrays
:type as_numpy: ```bool```
:param acquire_and_concat_validation_to_train: Whether to acquire the validation split
and then concatenate it to train
:param download_and_prepare_kwargs:
:type download_and_prepare_kwargs: ```**download_and_prepare_kwargs```
    :return: Train and test dataset splits
:rtype: ```Union[Tuple[tf.data.Dataset,tf.data.Dataset,tfds.core.DatasetInfo], Tuple[np.ndarray,np.ndarray,Any]]```
"""
as_dataset_kwargs, info = {"batch_size": -1}, None
if hasattr(ds_builder, "download_and_prepare") and hasattr(
ds_builder, "as_dataset"
):
info, test_ds, train_ds = _handle_tfds(
acquire_and_concat_validation_to_train,
as_dataset_kwargs,
download_and_prepare_kwargs,
ds_builder,
info,
)
elif hasattr(ds_builder, "train_stream") and hasattr(ds_builder, "eval_stream"):
return ds_builder # Handled elsewhere, this is from trax
else:
train_ds, test_ds = ds_builder
if as_numpy:
train_ds, test_ds = to_numpy(train_ds, K), to_numpy(test_ds, K)
if K is not None and scale is not None:
if isinstance(scale, tuple):
assert scale[0] == scale[1]
scale = scale[0]
train_ds["image"] = K.float32(train_ds["image"]) / scale
test_ds["image"] = K.float32(test_ds["image"]) / scale
return train_ds, test_ds, info or train_ds._info
def _handle_tfds(
acquire_and_concat_validation_to_train,
as_dataset_kwargs,
download_and_prepare_kwargs,
ds_builder,
info,
):
"""
Helper function that is to be used by the different dataset builders
:param acquire_and_concat_validation_to_train: Whether to acquire the validation split
and then concatenate it to train
:type acquire_and_concat_validation_to_train: ```bool```
:param as_dataset_kwargs:
:type as_dataset_kwargs: ```**as_dataset_kwargs```
:param download_and_prepare_kwargs:
:type download_and_prepare_kwargs: ```**download_and_prepare_kwargs```
:param ds_builder: dataset builder
:type ds_builder: ```tfds.core.DatasetBuilder```
:param info: Dataset info
:type info: ```tfds.core.DatasetInfo```
    :return: Train and test dataset splits
:rtype: ```Union[Tuple[tf.data.Dataset,tf.data.Dataset,tfds.core.DatasetInfo], Tuple[np.ndarray,np.ndarray,Any]]```
"""
train_ds, test_ds, dl_and_prep = None, None, True
if (
"download_config" in download_and_prepare_kwargs
and download_and_prepare_kwargs["download_config"].manual_dir
):
dl_and_prep = not path.isdir(ds_builder._data_dir)
if dl_and_prep:
name_slash = "{}{}{}".format(path.sep, ds_builder.name, path.sep)
other_data_dir = ds_builder._data_dir.replace(
name_slash, "{}downloads{}".format(path.sep, name_slash)
)
dl_and_prep = not path.isdir(other_data_dir)
if not dl_and_prep:
ds_builder._data_dir = other_data_dir
if not dl_and_prep:
import tensorflow_datasets.public_api as tfds
info = ds_builder.info
ds_builder = tfds.builder(
ds_builder.name,
data_dir=environ.get(
"TFDS_DATA_DIR",
path.dirname(path.dirname(ds_builder._data_dir)),
),
)
as_dataset_kwargs.update({"as_supervised": True, "batch_size": 1})
if dl_and_prep:
ds_builder.download_and_prepare(**download_and_prepare_kwargs)
if train_ds is None:
train_ds = ds_builder.as_dataset(split="train", **as_dataset_kwargs)
valid_ds_key = next(
filter(partial(str.startswith, "valid"), ds_builder.info.splits), None
)
if valid_ds_key and acquire_and_concat_validation_to_train:
print("train was", train_ds.cardinality())
valid_ds = ds_builder.as_dataset(split=valid_ds_key, **as_dataset_kwargs)
print("validation is", valid_ds.cardinality())
train_ds = train_ds.concatenate(valid_ds)
print("train now", train_ds.cardinality())
if test_ds is None:
test_ds = ds_builder.as_dataset(split="test", **as_dataset_kwargs)
return info, test_ds, train_ds
def to_numpy(obj, K=None, device=None):
"""
Convert input to numpy
:param obj: Any input that can be converted to numpy (raises error otherwise)
:type obj: ```Any```
:param K: backend engine, e.g., `np` or `tf`; defaults to `np`
:type K: ```Literal['np', 'tf']```
:param device: The (optional) Device to which x should be transferred.
If given, then the result is committed to the device.
If the device parameter is None, then this operation behaves like the identity function
if the operand is on any device already, otherwise it transfers the data to the default device, uncommitted.
:type device: ```Optional[Device]```
:return: numpy type, probably np.ndarray
:rtype: ```np.ndarray```
"""
module_name = "numpy" if K is None else K.__name__
if obj is None:
return None if K is None else K.nan
elif type(obj).__module__ == module_name:
return obj
elif hasattr(obj, "as_numpy"):
return obj.as_numpy()
elif hasattr(obj, "numpy"):
return obj.numpy()
elif isinstance(obj, dict) and "image" in obj and "label" in obj:
if module_name == "jax.numpy":
def __to_numpy(o, _K=None):
"""
Convert input to a DeviceArray
:param o: An object with a `numpy` method
:type o: ```Any```
:param _K: backend engine, e.g., `np` or `tf`; defaults to `np`
:type _K: ```Literal['np', 'tf']```
:return: The array on the device
:rtype: ```DeviceArray```
"""
import jax
return jax.device_put(o.numpy(), device=device)
else:
__to_numpy = _to_numpy
return {
"image": __to_numpy(obj["image"], K),
"label": __to_numpy(obj["label"], K),
}
elif type(obj).__name__ == "PrefetchDataset":
# ^`isinstance` said `arg 2 must be a type or tuple of types`
import tensorflow_datasets as tfds
return tfds.as_numpy(obj)
raise TypeError("Unable to convert {!r} to numpy".format(type(obj)))
# Alias is needed, unlike in JavaScript where you have proper hoisting
_to_numpy = to_numpy
def to_d(obj):
"""
Convert the input to a dictionary
:param obj: input value. Will have `dir` run against it if not a dict.
:type obj: ```Union[dict, Any]```
:return: Dictionary representation of input
:rtype: ```dict```
"""
return (
obj
if isinstance(obj, dict)
else dict(
filter(lambda key_inst: not key_inst[0].startswith("_"), getmembers(obj))
)
)
# The next 2 functions are from https://stackoverflow.com/a/1653248
def parse_to_argv_gen(s):
"""
Generate a sys.argv style parse of the input string
:param s: Input string
:type s: ```str```
:return: Generator of tokens; like in sys.argv
:rtype: ```Iterator[str]```
"""
_QUOTE_CHARS_DICT = {
"\\": "\\",
" ": " ",
'"': '"',
"r": "\r",
"n": "\n",
"t": "\t",
}
quoted, s_iter, join_string, c_list, c = False, iter(s), s[0:0], [], " "
err = "Bytes must be decoded to Unicode first"
while True:
# Skip whitespace
try:
while True:
assert isinstance(c, str) and version_info[0] >= 3, err
if not c.isspace():
break
c = next(s_iter)
except StopIteration:
break
# Read word
try:
while True:
assert isinstance(c, str) and version_info[0] >= 3, err
if not quoted and c.isspace():
break
if c == '"':
quoted, c = not quoted, None
elif c == "\\":
c = _QUOTE_CHARS_DICT.get(next(s_iter))
if c is not None:
c_list.append(c)
c = next(s_iter)
yield join_string.join(c_list)
c_list.clear()
except StopIteration:
yield join_string.join(c_list)
break
def parse_to_argv(s):
"""
Do a sys.argv style parse of the input string
:param s: Input string
:type s: ```str```
:return: List of tokens; like in sys.argv
:rtype: ```List[str]```
"""
return list(parse_to_argv_gen(s))
def pop_at_index(
input_list, key, default=None, process_key=lambda k: k, process_val=lambda v: v
):
"""
    If the key is in the list, remove it from the list and return it
:param input_list: Input list
:type input_list: ```list```
:param key: Lookup key
:type key: ```str```
    :param default: The default value if key is not in input_list
:type default: ```Optional[Any]```
:param process_key: Postprocess the key
:type process_key: ```Callable[[Any], Any]```
:param process_val: Postprocess the val
:type process_val: ```Callable[[Any], Any]```
:return: default if not in list, else the value from the list (and list is now minus that elem)
:rtype: ```Optional[Any]```
"""
# if process_key is not None and not isinstance(key, tuple):
# return default
try:
if process_key:
idx = next(
map(
itemgetter(0),
filter(
None,
filter(
lambda idx_e: process_key(idx_e[1]) == key,
enumerate(input_list),
),
),
)
)
else:
idx = input_list.index(key)
except (ValueError, StopIteration):
if isinstance(default, (list, tuple)) and len(default) == 1:
return default[0]
return default
else:
return deepcopy(process_val(input_list.pop(idx)))
def set_attr(object, attribute, value):
"""
Sets the named attribute on the given object to the specified value. Then returns it.
setattr(x, 'y', v) is equivalent to ``x.y = v''
:param object: The object
:type object: ```Any```
:param attribute: The attribute
:type attribute: ```str```
:param value: The value
:type value: ```Any```
"""
setattr(object, attribute, value)
return object
__all__ = [
"camel_case",
"common_dataset_handler",
"parse_to_argv",
"pop_at_index",
"set_attr",
"to_d",
"to_numpy",
]
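# Illustrative, self-contained examples (not part of the original module) of the
# pure helpers defined above.
if __name__ == "__main__":
    assert camel_case("hello world", upper=True) == "HelloWorld"
    assert parse_to_argv('train --epochs 3 --name "my run"') == [
        "train", "--epochs", "3", "--name", "my run"
    ]
    opts = ["--batch_size", "64", "--lr", "0.1"]
    assert pop_at_index(opts, "--lr") == "--lr"
    assert opts == ["--batch_size", "64", "0.1"]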
|
[
"functools.partial",
"os.path.isdir",
"tensorflow_datasets.as_numpy",
"os.path.dirname",
"operator.itemgetter",
"inspect.getmembers"
] |
[((4009, 4041), 'os.path.isdir', 'path.isdir', (['ds_builder._data_dir'], {}), '(ds_builder._data_dir)\n', (4019, 4041), False, 'from os import environ, path\n'), ((4320, 4346), 'os.path.isdir', 'path.isdir', (['other_data_dir'], {}), '(other_data_dir)\n', (4330, 4346), False, 'from os import environ, path\n'), ((5126, 5158), 'functools.partial', 'partial', (['str.startswith', '"""valid"""'], {}), "(str.startswith, 'valid')\n", (5133, 5158), False, 'from functools import partial\n'), ((8370, 8385), 'inspect.getmembers', 'getmembers', (['obj'], {}), '(obj)\n', (8380, 8385), False, 'from inspect import getmembers\n'), ((11070, 11083), 'operator.itemgetter', 'itemgetter', (['(0)'], {}), '(0)\n', (11080, 11083), False, 'from operator import itemgetter\n'), ((4736, 4770), 'os.path.dirname', 'path.dirname', (['ds_builder._data_dir'], {}), '(ds_builder._data_dir)\n', (4748, 4770), False, 'from os import environ, path\n'), ((7786, 7804), 'tensorflow_datasets.as_numpy', 'tfds.as_numpy', (['obj'], {}), '(obj)\n', (7799, 7804), True, 'import tensorflow_datasets as tfds\n')]
|
from linkace_cli.api.base import APIBase
from linkace_cli import models
from linkace_cli.api.tags import Tags
from linkace_cli.api.lists import Lists
class Search(APIBase):
def __init__(self, base_url, api_token):
super(Search, self).__init__(base_url, api_token)
self.tags = Tags(base_url, api_token)
self.lists = Lists(base_url, api_token)
def get_links_by_tag_exact(self, tag_id: int):
return self.tags.links(tag_id)
def get_links_by_tag_query(self, query: str):
tag_ids = self.api.get('search/tags', {'query': query})
print(tag_ids)
links = []
for tag_id in tag_ids.keys():
links.extend(self.tags.links(tag_id))
# Deduplicate results based on ID
return list({v['id']: v for v in links}.values())
def get_links_by_list_exact(self, list_id: int):
return self.lists.links(list_id)
def get_links_by_list_query(self, query: str):
list_ids = self.api.get('search/lists', {'query': query})
links = []
for list_id in list_ids:
links.extend(self.lists.links(list_id))
# Deduplicate results based on ID
return list({v['id']: v for v in links}.values())
def get_links_by_query(self, query: str):
params = {
'query': query,
'search_title': query,
}
resp = self.api.get('search/links', params=params)
resp = models.LinksPagination().load(resp)
links = resp['data']
while(resp['next_page_url']):
resp = models.LinksPagination().load(self.api.get(resp['next_page_url']))
links.extend(resp['data'])
return links
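# Illustrative usage sketch (not part of the original module): how this Search
# client might be instantiated. The base URL and token are placeholders, and
# every call below performs live API requests.
#
#   search = Search("https://linkace.example.test/api/v1", "api-token")
#   links = search.get_links_by_query("python")
#   tagged = search.get_links_by_tag_query("reference")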
|
[
"linkace_cli.api.tags.Tags",
"linkace_cli.api.lists.Lists",
"linkace_cli.models.LinksPagination"
] |
[((299, 324), 'linkace_cli.api.tags.Tags', 'Tags', (['base_url', 'api_token'], {}), '(base_url, api_token)\n', (303, 324), False, 'from linkace_cli.api.tags import Tags\n'), ((346, 372), 'linkace_cli.api.lists.Lists', 'Lists', (['base_url', 'api_token'], {}), '(base_url, api_token)\n', (351, 372), False, 'from linkace_cli.api.lists import Lists\n'), ((1444, 1468), 'linkace_cli.models.LinksPagination', 'models.LinksPagination', ([], {}), '()\n', (1466, 1468), False, 'from linkace_cli import models\n'), ((1567, 1591), 'linkace_cli.models.LinksPagination', 'models.LinksPagination', ([], {}), '()\n', (1589, 1591), False, 'from linkace_cli import models\n')]
|
from src.models import AlleleGeninteraction, Alleledbentity, Complexdbentity, CurationReference, Dnasequenceannotation, Functionalcomplementannotation, Literatureannotation, Locusdbentity, Pathwaydbentity, Proteinabundanceannotation, Referencedbentity
from . import fixtures as factory
from mock import Mock
class MockQueryFilter(object):
def __init__(self, query_params, query_result):
self._return = query_result
self._params = query_params
def one_or_none(self):
if self._return.__class__ == list:
return self._return[0]
else:
return self._return
def first(self):
return self._return
def order_by(self, *args, **kwargs):
return self
def group_by(self, *args, **kwargs):
return self
def asc(self, *args, **kwargs):
return self
def all(self):
if self._return is None:
return []
elif self._return.__class__ == list:
return self._return
else:
return [self._return]
def count(self):
return 7
def query_params(self):
return self._params
def distinct(self, *args, **kwargs):
return self
def outerjoin(self, *args, **kwargs):
return self
def scalar(self,*args,**kwargs):
return 7
    def join(self, *args, **kwargs):
        return self
def filter_by(self, *args, **kwargs):
return self
def filter(self, *args, **kwargs):
return self
class MockQuery(object):
def __init__(self, query_result):
self._query_result = query_result
def filter_by(self, **query_params):
self._query_filter = MockQueryFilter(query_params, self._query_result)
self._full_params = query_params
return self._query_filter
def filter(self, *query_params):
self._query_filter = MockQueryFilter(query_params[0], self._query_result)
self._full_params = query_params
return self._query_filter
def all(self):
return self._query_result
def distinct(self, *query_params):
if len(query_params) == 0 and self._query_result:
return self._query_result
else:
return self
def outerjoin(self,query_params):
return self
    def join(self, *args, **kwargs):
        return self
    def count(self):
        return 1
def order_by(self, query_params):
return self
def limit(self, query_params):
return self
class MockFileStorage(object):
pass
def go_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Go'>":
go = factory.GoFactory()
return MockQuery(go)
if len(args) == 2 and str(args[0]) == 'Goannotation.dbentity_id' and str(args[1]) == 'count(nex.goannotation.dbentity_id)':
go = factory.GoFactory()
goannot = factory.GoannotationFactory()
goannot.go = go
return MockQuery(goannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.GoRelation'>":
gochild = factory.GoFactory()
goparent = factory.GoFactory()
gorel = factory.GoRelationFactory()
ro = factory.RoFactory()
gorel.child = gochild
gorel.parent = goparent
gorel.ro = ro
return MockQuery(gorel)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.GoUrl'>":
gourl = factory.GoUrlFactory()
return MockQuery(gourl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.GoAlias'>":
goalias = factory.GoAliasFactory()
return MockQuery(goalias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Locusdbentity'>":
locus = factory.LocusdbentityFactory()
return MockQuery(locus)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Goannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
dbent = factory.DbentityFactory()
go = factory.GoFactory()
goannot = factory.GoannotationFactory()
goannot.go = go
goannot.dbentity = dbent
goannot.reference = refdbentity
goannot.source = source
return MockQuery(goannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoAlias'>":
ecoalias = factory.EcoAliasFactory()
return MockQuery(ecoalias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoUrl'>":
ecourl = factory.EcoUrlFactory()
return MockQuery(ecourl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Goextension'>":
ro = factory.RoFactory()
goext = factory.GoextensionFactory()
goext.ro = ro
return MockQuery(goext)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dbentity'>":
dbent = factory.DbentityFactory()
return MockQuery(dbent)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Chebi'>":
chebi = factory.ChebiFactory()
return MockQuery(chebi)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Gosupportingevidence'>":
goevd = factory.GosupportingevidenceFactory()
return MockQuery(goevd)
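# locus_expression_side_effect: fixtures for locus expression pages (datasets
# plus their references, keywords, samples and urls, and expression
# annotations).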
def locus_expression_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Locusdbentity'>":
locus = factory.LocusdbentityFactory()
return MockQuery(locus)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Expressionannotation'>":
expannot = factory.ExpressionannotationFactory()
return MockQuery(expannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dataset'>":
dataset = factory.DatasetFactory()
return MockQuery(dataset)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Referencedbentity'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
return MockQuery(refdbentity)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetKeyword'>":
dskw = factory.DatasetKeywordFactory()
return MockQuery(dskw)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetReference'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
dsref = factory.DatasetReferenceFactory()
dsref.reference = refdbentity
ds = factory.DatasetFactory()
dsref.dataset = ds
return MockQuery((dsref,))
elif len(args) == 1 and str(args[0]) == 'Referencedocument.html':
refdoc = factory.ReferencedocumentFactory()
return MockQuery(refdoc.html)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Datasetsample'>":
dss = factory.DatasetsampleFactory()
return MockQuery(dss)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetUrl'>":
dsurl = factory.DatasetUrlFactory()
return MockQuery(dsurl)
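# complex_side_effect: fixtures for macromolecular complex pages
# (Complexdbentity and its binding, alias, GO and reference associations).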
def complex_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Complexdbentity'>":
complex = factory.ComplexdbentityFactory()
return MockQuery(complex)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Complexbindingannotation'>":
bind = factory.ComplexbindingannotationFactory()
interactor = factory.InteractorFactory()
        locus = factory.LocusdbentityFactory()
interactor.locus = locus
bind.interactor = interactor
bindingInteractor = factory.InteractorFactory()
        locus2 = factory.LocusdbentityFactory()
bindingInteractor.locus = locus2
bind.binding_interactor = bindingInteractor
return MockQuery(bind)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ComplexAlias'>":
alias = factory.ComplexAliasFactory()
return MockQuery(alias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ComplexGo'>":
complexGo = factory.ComplexGoFactory()
go = factory.GoFactory()
complexGo.go = go
return MockQuery(complexGo)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ComplexReference'>":
complexRef = factory.ComplexReferenceFactory()
ref = factory.ReferencedbentityFactory()
complexRef.reference = ref
return MockQuery(complexRef)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ReferenceUrl'>":
refUrl = factory.ReferenceUrlFactory()
return MockQuery(refUrl)
elif len(args) == 2 and str(args[0]) == 'Goannotation.dbentity_id' and str(args[1]) == 'count(nex.goannotation.dbentity_id)':
goAnnot = factory.GoannotationFactory()
return MockQuery(goAnnot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.GoRelation'>":
goRel = factory.GoRelationFactory()
return MockQuery(goRel)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.GoUrl'>":
goUrl = factory.GoUrlFactory()
return MockQuery(goUrl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.GoAlias'>":
goAlias = factory.GoAliasFactory()
return MockQuery(goAlias)
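# locus_side_effect: the catch-all dispatcher for locus pages; it covers most
# annotation types reachable from a locus (phenotype, GO, disease,
# interaction, regulation, literature, sequence, protein domain, ...).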
def locus_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Locusdbentity'>":
locus = factory.LocusdbentityFactory()
return MockQuery(locus)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Proteinabundanceannotation'>":
protein_abundance_annotation = factory.ProteinabundanceAnnotationFactory()
eco = factory.EcoFactory()
protein_abundance_annotation.eco = eco
efo = factory.EfoFactory()
protein_abundance_annotation.efo = efo
db_entity = factory.DbentityFactory()
protein_abundance_annotation.dbentity = db_entity
ref = factory.ReferencedbentityFactory()
protein_abundance_annotation.reference = ref
orig_ref = factory.ReferencedbentityFactory()
protein_abundance_annotation.original_reference = orig_ref
chebi = factory.ChebiFactory()
protein_abundance_annotation.chebi = chebi
go = factory.GoFactory()
protein_abundance_annotation.go = go
src = factory.SourceFactory()
protein_abundance_annotation.src = src
tax = factory.TaxonomyFactory()
protein_abundance_annotation.tax = tax
return MockQuery(protein_abundance_annotation)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Bindingmotifannotation'>":
bind = factory.BindingmotifannotationFactory()
return MockQuery(bind)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Complexbindingannotation'>":
bind = factory.ComplexbindingannotationFactory()
return MockQuery(bind)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Go'>":
go = factory.GoFactory()
return MockQuery(go)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Phenotypeannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
mut = factory.ApoFactory()
exp = factory.ApoFactory()
pheno = factory.PhenotypeFactory()
db = factory.DbentityFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.mutant = mut
phenoannot.experiment = exp
phenoannot.phenotype = pheno
phenoannot.dbentity = db
phenoannot.reference = refdbentity
return MockQuery(phenoannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Apo'>":
apo = factory.ApoFactory()
return MockQuery(apo)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Interactor'>":
interactor = factory.InteractorFactory()
return MockQuery(interactor)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.PhenotypeannotationCond'>":
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery(phenocond)
elif len(args) == 2 and str(args[0]) == 'Chebi.display_name' and str(args[1]) == 'Chebi.obj_url':
chebi = factory.ChebiFactory()
return MockQuery((chebi.display_name, chebi.obj_url))
elif len(args) == 2 and str(args[0]) == 'Dbentity.display_name' and str(args[1]) == 'Dbentity.format_name':
db = factory.DbentityFactory()
return MockQuery(db.format_name)
elif len(args) == 1 and str(args[0]) == 'Proteinsequenceannotation.annotation_id':
prtseq = factory.ProteinsequenceannotationFactory()
return MockQuery((prtseq.annotation_id,))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Proteinsequenceannotation'>":
prtseq = factory.ProteinsequenceannotationFactory()
return MockQuery(prtseq)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ProteinsequenceDetail'>":
prtseq = factory.ProteinsequenceannotationFactory()
prtseqdetail = factory.ProteinsequenceDetailFactory()
prtseqdetail.annotation = prtseq
return MockQuery(prtseqdetail)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Goslimannotation'>":
goslimannot = factory.GoslimannotationFactory()
goslim = factory.GoslimFactory()
goslimannot.goslim = goslim
return MockQuery(goslimannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Goannotation'>":
go = factory.GoFactory()
goannot = factory.GoannotationFactory()
goannot.go = go
return MockQuery(goannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Disease'>":
do = factory.DiseaseFactory()
return MockQuery(do)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Diseaseannotation'>":
do = factory.DiseaseFactory()
doannot = factory.DiseaseannotationFactory()
doannot.do = do
dbentity = factory.DbentityFactory()
doannot.dbentity = dbentity
eco = factory.EcoFactory()
doannot.eco = eco
ref = factory.ReferencedbentityFactory()
doannot.reference = ref
src = factory.SourceFactory()
doannot.source = src
taxonomy = factory.TaxonomyFactory()
doannot.taxonomy = taxonomy
return MockQuery(doannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoAlias'>":
ecoalias = factory.EcoAliasFactory()
return MockQuery(ecoalias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoUrl'>":
ecourl = factory.EcoUrlFactory()
return MockQuery(ecourl)
elif len(args) == 1 and str(args[0]) == 'Locussummary.html':
ls = factory.LocussummaryFactory()
return MockQuery(ls.html)
elif len(args) == 2 and str(args[0]) == 'Phenotypeannotation.taxonomy_id' and str(
args[1]) == 'count(nex.phenotypeannotation.taxonomy_id)':
pheno = factory.PhenotypeFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.phenotype = pheno
return MockQuery((phenoannot.taxonomy_id, 20))
elif len(args) == 2 and str(args[0]) == 'Phenotypeannotation.taxonomy_id' and str(
args[1]) == 'Phenotypeannotation.annotation_id':
pheno = factory.PhenotypeFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.phenotype = pheno
return MockQuery(phenoannot)
elif len(args) == 2 and str(args[0]) == 'PhenotypeannotationCond.annotation_id' and str(args[1]) == 'count(DISTINCT nex.phenotypeannotation_cond.group_id)':
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery((phenocond.annotation_id, 20))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Phenotypeannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
mut = factory.ApoFactory()
exp = factory.ApoFactory()
pheno = factory.PhenotypeFactory()
db = factory.DbentityFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.mutant = mut
phenoannot.experiment = exp
phenoannot.phenotype = pheno
phenoannot.dbentity = db
phenoannot.reference = refdbentity
return MockQuery(phenoannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.PhenotypeannotationCond'>":
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery(phenocond)
elif len(args) == 2 and str(args[0]) == 'Chebi.display_name' and str(args[1]) == 'Chebi.obj_url':
chebi = factory.ChebiFactory()
return MockQuery((chebi.display_name, chebi.obj_url))
elif len(args) == 2 and str(args[0]) == 'Goannotation.dbentity_id' and str(args[1]) == 'count(nex.goannotation.dbentity_id)':
goannot = factory.GoannotationFactory()
return MockQuery(goannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Apo'>":
apo = factory.ApoFactory()
return MockQuery(apo)
elif len(args) == 2 and str(args[0]) == 'Physinteractionannotation.biogrid_experimental_system' and str(args[1]) == 'count(nex.physinteractionannotation.annotation_id)':
physannot = factory.PhysinteractionannotationFactory()
return MockQuery((physannot.biogrid_experimental_system, 20))
elif len(args) == 2 and str(args[0]) == 'Geninteractionannotation.biogrid_experimental_system' and str(args[1]) == 'count(nex.geninteractionannotation.annotation_id)':
genannot = factory.GeninteractionannotationFactory()
return MockQuery((genannot.biogrid_experimental_system, 20))
elif len(args) == 1 and str(args[0]) == 'Physinteractionannotation.dbentity2_id':
physannot = factory.PhysinteractionannotationFactory()
return MockQuery(physannot.dbentity2_id)
elif len(args) == 1 and str(args[0]) == 'Physinteractionannotation.dbentity1_id':
physannot = factory.PhysinteractionannotationFactory()
return MockQuery(physannot.dbentity1_id)
elif len(args) == 1 and str(args[0]) == 'Geninteractionannotation.dbentity2_id':
genannot = factory.GeninteractionannotationFactory()
return MockQuery(genannot.dbentity2_id)
elif len(args) == 1 and str(args[0]) == 'Geninteractionannotation.dbentity1_id':
genannot = factory.GeninteractionannotationFactory()
return MockQuery(genannot.dbentity1_id)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Regulationannotation'>":
regannot = factory.RegulationannotationFactory()
eco = factory.EcoFactory()
go = factory.GoFactory()
reference = factory.ReferencedbentityFactory()
regulator = factory.DbentityFactory()
source = factory.SourceFactory()
target = factory.DbentityFactory()
taxonomy = factory.TaxonomyFactory()
regannot.eco = eco
regannot.go = go
regannot.reference = reference
regannot.regulator = regulator
regannot.source = source
regannot.target = target
regannot.taxonomy = taxonomy
return MockQuery(regannot)
elif len(args) == 2 and str(args[0]) == 'Regulationannotation.target_id' and str(args[1]) == 'Regulationannotation.regulator_id':
regannot = factory.RegulationannotationFactory()
return MockQuery((regannot.target_id, regannot.regulator_id))
elif len(args) == 2 and str(args[0]) == 'Literatureannotation.topic' and str(args[1]) == 'count(nex.literatureannotation.annotation_id)':
litannot = factory.LiteratureannotationFactory()
return MockQuery((litannot.topic, 20))
elif len(args) == 1 and str(args[0]) == 'Literatureannotation.reference_id':
litannot = factory.LiteratureannotationFactory()
return MockQuery(litannot.reference_id)
elif len(args) == 1 and str(args[0]) == 'Geninteractionannotation.reference_id':
genannot = factory.GeninteractionannotationFactory()
return MockQuery(genannot.reference_id)
elif len(args) == 1 and str(args[0]) == 'Physinteractionannotation.reference_id':
physannot = factory.PhysinteractionannotationFactory()
return MockQuery(physannot.reference_id)
elif len(args) == 1 and str(args[0]) == 'Regulationannotation.reference_id':
regannot = factory.RegulationannotationFactory()
return MockQuery(regannot.reference_id)
elif len(args) == 1 and str(args[0]) == 'Regulationannotation.target_id':
regannot = factory.RegulationannotationFactory()
return MockQuery(regannot.target_id)
elif len(args) == 1 and str(args[0]) == 'Literatureannotation.reference_id':
litannot = factory.LiteratureannotationFactory()
return MockQuery(litannot.reference_id)
elif len(args) == 1 and str(args[0]) == 'Phenotypeannotation.reference_id':
phenannot = factory.PhenotypeannotationFactory()
return MockQuery(phenannot.reference_id)
elif len(args) == 1 and str(args[0]) == 'Goannotation.reference_id':
goannot = factory.GoannotationFactory()
return MockQuery(goannot.reference_id)
elif len(args) == 1 and str(args[0]) == 'ReferenceAlias.reference_id':
refalias = factory.ReferenceAliasFactory()
return MockQuery(refalias.reference_id)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.LocusAlias'>":
localias = factory.LocusAliasFactory()
source = factory.SourceFactory()
localias.source = source
return MockQuery(localias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.LocusAliasReferences'>":
localiasref = factory.LocusAliasReferencesFactory()
source = factory.SourceFactory()
ref = factory.ReferencedbentityFactory()
localiasref.reference = ref
localiasref.source = source
return MockQuery(localiasref)
elif len(args) == 1 and str(args[0]) == 'Apo.apo_id':
apo = factory.ApoFactory()
return MockQuery(apo.apo_id)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ReferenceUrl'>":
refurl = factory.ReferenceUrlFactory()
return MockQuery(refurl)
elif len(args) == 1 and str(args[0]) == 'Dnasequenceannotation.so_id':
dnaseq = factory.DnasequenceannotationFactory()
return MockQuery((dnaseq.so_id,))
elif len(args) == 1 and str(args[0]) == 'So.display_name':
so = factory.SoFactory()
return MockQuery(so.display_name)
elif len(args) == 3 and str(args[0]) == 'Locussummary.summary_id' and str(args[1]) == 'Locussummary.html' and str(args[2]) == 'Locussummary.date_created':
ls = factory.LocussummaryFactory()
return MockQuery((ls.summary_id, ls.html, ls.date_created))
elif len(args) == 5 and str(args[0]) == 'Locussummary.summary_id' \
and str(args[1]) == 'Locussummary.html' and str(args[2]) == 'Locussummary.date_created' \
and str(args[3]) == 'Locussummary.summary_order' and str(args[4]) == 'Locussummary.summary_type':
ls = factory.LocussummaryFactory()
return MockQuery((ls.summary_id, ls.html, ls.date_created, ls.summary_order, ls.summary_type))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.LocusReferences'>":
lref = factory.LocusReferencesFactory()
ref = factory.ReferencedbentityFactory()
lref.reference = ref
return MockQuery(lref)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.LocusRelation'>":
lrel = factory.LocusRelationFactory()
parent = factory.LocusdbentityFactory()
child = factory.LocusdbentityFactory()
source = factory.SourceFactory()
ro = factory.RoFactory()
lrel.parent = parent
lrel.child = child
lrel.source = source
lrel.ro = ro
return MockQuery(lrel)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.LocusRelationReference'>":
lrel_ref = factory.LocusRelationReferenceFactory()
ref = factory.ReferencedbentityFactory()
lrel_ref.reference = ref
return MockQuery(lrel_ref)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.LocussummaryReference'>":
lsref = factory.LocussummaryReferenceFactory()
ref = factory.ReferencedbentityFactory()
source = factory.SourceFactory()
summary = factory.LocussummaryFactory()
lsref.source = source
lsref.reference = ref
lsref.summary = summary
return MockQuery(lsref)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Locusnote'>":
lnote = factory.LocusnoteFactory()
source = factory.SourceFactory()
lnote.source = source
return MockQuery(lnote)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.LocusnoteReference'>":
lnote_ref = factory.LocusnoteFactory()
note = factory.LocusnoteFactory()
ref = factory.ReferencedbentityFactory()
source = factory.SourceFactory()
lnote_ref.note = note
lnote_ref.reference = ref
lnote_ref.source = source
return MockQuery(lnote_ref)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.LocusUrl'>":
lurl = factory.LocusUrlFactory()
return MockQuery(lurl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Locusnoteannotation'>":
laf = factory.LocusnoteannotationFactory()
return MockQuery(laf)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Pathwayannotation'>":
paf = factory.PathwayannotationFactory()
dbentity = factory.DbentityFactory()
ec = factory.EcFactory()
pathway = factory.PathwaydbentityFactory()
ref = factory.ReferencedbentityFactory()
src = factory.SourceFactory()
tax = factory.TaxonomyFactory()
paf.dbentity = dbentity
paf.ec = ec
paf.pathway = pathway
paf.reference = ref
paf.source = src
paf.taxonomy = tax
return MockQuery(paf)
elif len(args) == 1 and str(args[0]) == 'PathwayUrl.obj_url':
path_url = factory.PathwayUrlFactory()
return MockQuery(path_url.obj_url)
elif len(args) == 1 and str(args[0]) == 'Dbentity.display_name':
dbentity = factory.DbentityFactory()
return MockQuery(dbentity.display_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Reservedname'>":
rname = factory.ReservednameFactory()
return MockQuery(rname)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Posttranslationannotation'>":
pta = factory.PosttranslationannotationFactory()
source = factory.SourceFactory()
psi = factory.PsimodFactory()
pta.source = source
pta.psimod = psi
return MockQuery(pta)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Referencedbentity'>":
refdb = factory.ReferencedbentityFactory()
return MockQuery(refdb)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Proteinexptannotation'>":
prt = factory.ProteinexptannotationFactory()
return MockQuery(prt)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Proteindomainannotation'>":
pda = factory.ProteindomainannotationFactory()
pd = factory.ProteindomainFactory()
source = factory.SourceFactory()
db = factory.DbentityFactory()
pd.source = source
pda.proteindomain = pd
pda.dbentity = db
return MockQuery(pda)
elif len(args) == 3 and str(args[0]) == 'Dbentity.display_name' and str(args[1]) == 'Dbentity.format_name' and str(args[2]) == 'Dbentity.obj_url':
db = factory.DbentityFactory()
return MockQuery((db.display_name, db.format_name, db.obj_url))
elif len(args) == 4 and str(args[0]) == 'Dbentity.dbentity_id' and str(args[1]) == 'Dbentity.display_name' and str(args[2]) == 'Dbentity.format_name' and str(args[3]) == 'Dbentity.obj_url':
db = factory.DbentityFactory()
return MockQuery((db.dbentity_id, db.display_name, db.format_name, db.obj_url))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Proteindomain'>":
pd = factory.ProteindomainFactory()
source = factory.SourceFactory()
pd.source = source
return MockQuery(pd)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ProteindomainUrl'>":
pdurl = factory.ProteindomainUrlFactory()
pd = factory.ProteindomainFactory()
source = factory.SourceFactory()
pd.source = source
return MockQuery(pdurl)
elif len(args) == 1 and str(args[0]) == 'Proteindomainannotation.dbentity_id':
pda = factory.ProteindomainannotationFactory()
return MockQuery((pda.dbentity_id))
elif len(args) == 1 and str(args[0]) == 'Dbentity.format_name':
db = factory.DbentityFactory()
return MockQuery((db.format_name,))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Locussummary'>":
locus_summary = factory.LocussummaryFactory()
return MockQuery(locus_summary)
elif len(args) == 1 and str(args[0]) == "LocussummaryReference.reference_id":
locus_summary_reference = factory.LocussummaryReferenceFactory()
return MockQuery(locus_summary_reference.reference_id)
elif len(args) == 1 and str(args[0]) == "Referencedbentity.pmid":
reference = factory.ReferencedbentityFactory()
reference.pmid = []
return MockQuery(reference.pmid)
elif len(args) == 2 and str(args[0]) == "<class 'src.models.LocusAliasReferences'>" and str(args[1]) == "Referencedbentity.pmid":
locus_alias_reference = factory.LocusAliasReferencesFactory()
reference = factory.ReferencedbentityFactory()
        return MockQuery((locus_alias_reference, reference.pmid))
elif len(args) == 2 and str(args[0]) == "<class 'src.models.LocusReferences'>" and str(args[1]) == "Referencedbentity.pmid":
locus_reference = factory.LocusReferencesFactory()
reference = factory.ReferencedbentityFactory()
return MockQuery((locus_reference, reference.pmid))
elif len(args) == 1 and str(args[0]) == "LocusAlias.display_name":
locus_alias = factory.LocusAliasFactory()
return MockQuery(locus_alias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Expressionannotation'>":
exp = factory.ExpressionannotationFactory()
return MockQuery(exp)
elif len(args) == 3 and str(args[0]) == 'Expressionannotation.dbentity_id' and str(args[1]) == 'Expressionannotation.datasetsample_id' and str(args[2]) == 'Expressionannotation.normalized_expression_value':
exp = factory.ExpressionannotationFactory()
return MockQuery(exp)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Literatureannotation'>":
lit_annot = factory.LiteratureannotationFactory()
return MockQuery(lit_annot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Diseasesupportingevidence'>":
dis_evidence = factory.DiseasesupportingevidenceFactory()
do_annot = factory.DiseaseannotationFactory()
dis_evidence.annotation = do_annot
return MockQuery(dis_evidence)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dbentity'>":
dbentity = factory.DbentityFactory()
src = factory.SourceFactory()
dbentity.source = src
return MockQuery(dbentity)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Physinteractionannotation'>":
phys_annot = factory.PhysinteractionannotationFactory()
dbentity1 = factory.DbentityFactory()
phys_annot.dbentity1 = dbentity1
dbentity2 = factory.DbentityFactory()
phys_annot.dbentity2 = dbentity2
psimod = factory.PsimodFactory()
phys_annot.psimod = psimod
ref = factory.ReferencedbentityFactory()
phys_annot.reference = ref
src = factory.SourceFactory()
phys_annot.source = src
taxonomy = factory.TaxonomyFactory()
phys_annot.taxonomy = taxonomy
return MockQuery(phys_annot)
elif len(args) == 1 and args[0] == Functionalcomplementannotation:
complement = factory.FunctionalcomplementannotationFactory()
complement.dbentity = factory.DbentityFactory()
complement.reference = factory.ReferencedbentityFactory()
complement.source = factory.SourceFactory()
complement.eco = factory.EcoFactory()
complement.ro = factory.RoFactory()
complement.taxonomy = factory.TaxonomyFactory()
return MockQuery(complement)
elif len(args) == 1 and args[0] == Dnasequenceannotation:
sequence = factory.DnasequenceannotationFactory()
        sequence.contig = factory.ContigFactory()
sequence.dbentity = factory.DbentityFactory()
sequence.file = factory.FiledbentityFactory()
sequence.genomerelease = factory.GenomereleaseFactory()
sequence.reference = factory.ReferencedbentityFactory()
sequence.so = factory.SoFactory()
sequence.source = factory.SourceFactory()
sequence.taxonomy = factory.TaxonomyFactory()
return MockQuery(sequence)
else:
print("Locus side effect condition not handled!!!!")
print(args[0])
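# phenotype_side_effect: fixtures for phenotype pages (Phenotype,
# Phenotypeannotation and their condition, strain and chemical groupings).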
def phenotype_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Phenotype'>":
obs = factory.ApoFactory()
qual = factory.ApoFactory()
pheno = factory.PhenotypeFactory()
pheno.observable = obs
pheno.qualifier = qual
return MockQuery(pheno)
elif len(args) == 2 and str(args[0]) == 'Phenotypeannotation.taxonomy_id' and str(args[1]) == 'count(nex.phenotypeannotation.taxonomy_id)':
pheno = factory.PhenotypeFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.phenotype = pheno
return MockQuery((phenoannot.taxonomy_id, 20))
elif len(args) == 2 and str(args[0]) == 'Phenotypeannotation.taxonomy_id' and str(args[1]) == 'Phenotypeannotation.annotation_id':
pheno = factory.PhenotypeFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.phenotype = pheno
return MockQuery(phenoannot)
elif len(args) == 2 and str(args[0]) == 'PhenotypeannotationCond.annotation_id' and str(args[1]) == 'count(DISTINCT nex.phenotypeannotation_cond.group_id)':
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery((phenocond.annotation_id, 20))
elif len(args) == 2 and str(args[0]) == 'PhenotypeannotationCond.annotation_id' and str(args[1]) == ' func.count(distinct(PhenotypeannotationCond.group_id))':
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery((phenocond.annotation_id, 20))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Phenotypeannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
mut = factory.ApoFactory()
exp = factory.ApoFactory()
pheno = factory.PhenotypeFactory()
db = factory.DbentityFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.mutant = mut
phenoannot.experiment = exp
phenoannot.phenotype = pheno
phenoannot.dbentity = db
phenoannot.reference = refdbentity
return MockQuery(phenoannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.PhenotypeannotationCond'>":
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery(phenocond)
elif len(args) == 2 and str(args[0]) == 'Chebi.display_name' and str(args[1]) == 'Chebi.obj_url':
chebi = factory.ChebiFactory()
return MockQuery((chebi.display_name, chebi.obj_url))
elif len(args) == 2 and str(args[0]) == 'Goannotation.dbentity_id' and str(args[1]) == 'count(nex.goannotation.dbentity_id)':
goannot = factory.GoannotationFactory()
return MockQuery(goannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Apo'>":
apo = factory.ApoFactory()
return MockQuery(apo)
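# observable_side_effect: fixtures for APO observable pages (Apo, ApoRelation
# and the phenotype annotations grouped under an observable).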
def observable_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Apo'>":
apo = factory.ApoFactory()
return MockQuery(apo)
elif len(args) == 3 and str(args[0]) == 'Phenotype.obj_url' and str(args[1]) == 'Phenotype.qualifier_id' and str(args[2]) == 'Phenotype.phenotype_id':
pheno = factory.PhenotypeFactory()
return MockQuery((pheno.obj_url, pheno.qualifier_id, pheno.phenotype_id,))
elif len(args) == 2 and str(args[0]) == 'Phenotypeannotation.dbentity_id' and str(args[1]) == 'count(nex.phenotypeannotation.dbentity_id)':
pheno = factory.PhenotypeFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.phenotype = pheno
return MockQuery((phenoannot.dbentity_id, 20))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ApoRelation'>":
parent = factory.ApoFactory()
child = factory.ApoFactory()
ro = factory.RoFactory()
aporel = factory.ApoRelationFactory()
aporel.parent = parent
aporel.child = child
aporel.ro = ro
return MockQuery(aporel)
elif len(args) == 1 and str(args[0]) == 'Phenotype.phenotype_id':
pheno = factory.PhenotypeFactory()
return MockQuery((pheno.phenotype_id,))
elif len(args) == 1 and str(args[0]) == 'Apo.display_name':
apo = factory.ApoFactory()
return MockQuery(apo.display_name)
elif len(args) == 2 and str(args[0]) == 'Phenotypeannotation.taxonomy_id' and str(args[1]) == 'count(nex.phenotypeannotation.taxonomy_id)':
pheno = factory.PhenotypeFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.phenotype = pheno
return MockQuery((phenoannot.taxonomy_id, 20))
elif len(args) == 2 and str(args[0]) == 'Phenotypeannotation.taxonomy_id' and str(args[1]) == 'Phenotypeannotation.annotation_id':
pheno = factory.PhenotypeFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.phenotype = pheno
        return MockQuery(phenoannot)
elif len(args) == 2 and str(args[0]) == 'PhenotypeannotationCond.annotation_id' and str(args[1]) == 'count(DISTINCT nex.phenotypeannotation_cond.group_id)':
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery((phenocond.annotation_id, 20))
elif len(args) == 1 and str(args[0]) == 'Chebi.obj_url':
chebi = factory.ChebiFactory()
return MockQuery(chebi.obj_url)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Phenotypeannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
mut = factory.ApoFactory()
exp = factory.ApoFactory()
pheno = factory.PhenotypeFactory()
db = factory.DbentityFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.mutant = mut
phenoannot.experiment = exp
phenoannot.phenotype = pheno
phenoannot.dbentity = db
phenoannot.reference = refdbentity
return MockQuery(phenoannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Phenotype'>":
pheno = factory.PhenotypeFactory()
return MockQuery(pheno)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.PhenotypeannotationCond'>":
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery(phenocond)
elif len(args) == 2 and str(args[0]) == 'Chebi.display_name' and str(args[1]) == 'Chebi.obj_url':
chebi = factory.ChebiFactory()
return MockQuery((chebi.display_name, chebi.obj_url))
elif len(args) == 2 and str(args[0]) == 'Goannotation.dbentity_id' and str(args[1]) == 'count(nex.goannotation.dbentity_id)':
goannot = factory.GoannotationFactory()
return MockQuery(goannot)
    else:
        print("the problem is the condition!!!!")
        print(args)
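# disease_side_effect: fixtures for disease ontology pages (Disease, its
# relations, urls and aliases, and disease annotations with supporting
# evidence).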
def disease_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Disease'>":
dis = factory.DiseaseFactory()
return MockQuery(dis)
if len(args) == 2 and str(args[0]) == 'Diseaseannotation.dbentity_id' and str(args[1]) == 'count(nex.diseaseannotation.dbentity_id)':
dis = factory.DiseaseFactory()
disannot = factory.DiseaseannotationFactory()
disannot.dis = dis
return MockQuery(disannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DiseaseRelation'>":
dischild = factory.DiseaseFactory()
disparent = factory.DiseaseFactory()
disrel = factory.DiseaseRelationFactory()
ro = factory.RoFactory()
disrel.child = dischild
disrel.parent = disparent
disrel.ro = ro
return MockQuery(disrel)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DiseaseUrl'>":
disurl = factory.DiseaseUrlFactory()
return MockQuery(disurl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DiseaseAlias'>":
disalias = factory.DiseaseAliasFactory()
return MockQuery(disalias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Locusdbentity'>":
locus = factory.LocusdbentityFactory()
return MockQuery(locus)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Diseaseannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
dbent = factory.DbentityFactory()
dis = factory.DiseaseFactory()
disannot = factory.DiseaseannotationFactory()
disannot.disease = dis
disannot.dbentity = dbent
disannot.reference = refdbentity
disannot.source = source
return MockQuery(disannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoAlias'>":
ecoalias = factory.EcoAliasFactory()
return MockQuery(ecoalias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoUrl'>":
ecourl = factory.EcoUrlFactory()
return MockQuery(ecourl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dbentity'>":
dbent = factory.DbentityFactory()
return MockQuery(dbent)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Diseasesupportingevidence'>":
disevd = factory.DiseasesupportingevidenceFactory()
return MockQuery(disevd)
elif len(args) == 3 and str(args[0]) == "<class 'src.models.Diseaseannotation'>" and str(args[1]) == 'Diseasesupportingevidence.dbxref_id' and str(args[2]) == 'Diseasesupportingevidence.obj_url':
dis = factory.DiseaseFactory()
disannot = factory.DiseaseannotationFactory()
disannot.dis = dis
return MockQuery(disannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Referencedbentity'>":
refdb = factory.ReferencedbentityFactory()
return MockQuery(refdb)
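# chemical_side_effect: fixtures for chemical (Chebi) pages, plus the
# phenotype, GO and protein-abundance annotations that reference a chemical.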
def chemical_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Chebi'>":
chem = factory.ChebiFactory()
return MockQuery(chem)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ChebiAlia'>":
chebi_alias = factory.ChebiAliaFactory()
return MockQuery(chebi_alias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ChebiUrl'>":
url = factory.ChebiUrlFactory()
return MockQuery(url)
elif len(args) == 1 and str(args[0]) == 'PhenotypeannotationCond.annotation_id':
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery([(phenocond.annotation_id,)])
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Phenotypeannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
db_entity = factory.DbentityFactory()
pheno = factory.PhenotypeFactory()
        phenoannot = factory.PhenotypeannotationFactory()
phenoannot.phenotype = pheno
phenoannot.dbentity = db_entity
phenoannot.reference = refdbentity
return MockQuery(phenoannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.PhenotypeannotationCond'>":
phenocond = factory.PhenotypeannotationCondFactory()
return MockQuery(phenocond)
elif len(args) == 1 and str(args[0]) == 'Chebi.obj_url':
chebi = factory.ChebiFactory()
return MockQuery(chebi.obj_url)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Apo'>":
apo = factory.ApoFactory()
return MockQuery(apo)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Interactor'>":
interactor = factory.InteractorFactory()
return MockQuery(interactor)
elif len(args) == 1 and str(args[0]) == "Interactor.interactor_id":
interactor = factory.InteractorFactory()
return MockQuery(interactor)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Complexbindingannotation'>":
bind = factory.ComplexbindingannotationFactory()
return MockQuery(bind)
elif len(args) == 1 and str(args[0]) == "Goextension.annotation_id":
ro = factory.RoFactory()
goext = factory.GoextensionFactory()
goext.ro = ro
return MockQuery(goext)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Goannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
dbent = factory.DbentityFactory()
go = factory.GoFactory()
goannot = factory.GoannotationFactory()
goannot.go = go
goannot.dbentity = dbent
goannot.reference = refdbentity
goannot.source = source
return MockQuery(goannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoAlias'>":
        ecoalias = factory.EcoAliasFactory()
return MockQuery(ecoalias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoUrl'>":
ecourl = factory.EcoUrlFactory()
return MockQuery(ecourl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Goextension'>":
ro = factory.RoFactory()
goext = factory.GoextensionFactory()
goext.ro = ro
return MockQuery(goext)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dbentity'>":
db = factory.DbentityFactory()
return MockQuery(db)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Gosupportingevidence'>":
goev = factory.GosupportingevidenceFactory()
return MockQuery(goev)
elif len(args) == 1 and args[0] == Proteinabundanceannotation:
prot = factory.ProteinabundanceAnnotationFactory()
prot.eco = factory.EcoFactory()
prot.efo = factory.EfoFactory()
prot.dbentity = factory.DbentityFactory()
prot.reference = factory.ReferencedbentityFactory()
prot.original_reference = factory.ReferencedbentityFactory()
prot.chebi = factory.ChebiFactory()
prot.go = factory.GoFactory()
prot.source = factory.SourceFactory()
prot.taxonomy = factory.TaxonomyFactory()
return MockQuery(prot)
elif len(args) == 1 and args[0] == Referencedbentity:
ref = factory.ReferencedbentityFactory()
ref.book = factory.BookFactory()
ref.journal = factory.JournalFactory()
return MockQuery(ref)
elif len(args) == 1 and args[0] == Pathwaydbentity:
pathway = factory.PathwaydbentityFactory()
return MockQuery(pathway)
elif len(args) == 1:
cheb = factory.ChebiAliaFactory()
return MockQuery(cheb)
else:
print("COULDN'T FIND ANYTHING CHEMICAL SIDE EFFECT")
print("args = {}, type is {}".format(args[0], type(args[0])))
return None
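# author_side_effect: fixtures for author pages (Referenceauthor and the
# documents, urls and types of the references they authored).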
def author_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Referenceauthor'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdb = factory.ReferencedbentityFactory()
refauth = factory.ReferenceauthorFactory()
refauth.reference = refdb
return MockQuery(refauth)
elif len(args) == 1 and str(args[0]) == 'Referencedocument.html':
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdb = factory.ReferencedbentityFactory()
refdb.journal = journal
refdoc = factory.ReferencedocumentFactory()
return MockQuery(refdoc.html)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ReferenceUrl'>":
refurl = factory.ReferenceUrlFactory()
return MockQuery(refurl)
elif len(args) == 1 and str(args[0]) == 'Referencetype.display_name':
reftype = factory.ReferencetypeFactory()
return MockQuery((reftype.display_name))
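# keywords_side_effect: fixtures for keyword listings (DatasetKeyword,
# Dataset and Keyword).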
def keywords_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == 'DISTINCT nex.dataset_keyword.keyword_id':
dskw = factory.DatasetKeywordFactory()
kw = factory.KeywordFactory()
dskw.keyword = kw
return MockQuery((dskw.keyword_id))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetKeyword'>":
dskw = factory.DatasetKeywordFactory()
return MockQuery([dskw])
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dataset'>":
ds = factory.DatasetFactory()
return MockQuery([ds])
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Keyword'>":
kw = factory.KeywordFactory()
return MockQuery([kw])
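# dataset_side_effect: fixtures for dataset pages (Dataset plus its keywords,
# references, samples, urls and files).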
def dataset_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Dataset'>":
ds_name = factory.DatasetFactory()
return MockQuery(ds_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetKeyword'>":
dskw = factory.DatasetKeywordFactory()
kw = factory.KeywordFactory()
dskw.keyword = kw
return MockQuery(dskw)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Keyword'>":
kw = factory.KeywordFactory()
return MockQuery(kw)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetReference'>":
dsref = factory.DatasetReferenceFactory()
        return MockQuery(dsref)
elif len(args) == 1 and str(args[0]) == 'Referencedocument.html':
refdoc = factory.ReferencedocumentFactory()
return MockQuery(refdoc.html)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Datasetsample'>":
dss = factory.DatasetsampleFactory()
return MockQuery(dss)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetUrl'>":
dsurl = factory.DatasetUrlFactory()
return MockQuery(dsurl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetFile'>":
dsf = factory.DatasetFileFactory()
f = factory.FiledbentityFactory()
dsf.file = f
return MockQuery(dsf)
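# side_effect: generic fixtures for strain/contig pages and shared lookups
# (strain urls and summaries, EC numbers, PTMs, colleagues, curator activity).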
def side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
if len(args) == 3 and str(args[0]) == 'StrainUrl.display_name' and str(args[1]) == 'StrainUrl.url_type' and str(
args[2]) == 'StrainUrl.obj_url':
strain_url = factory.StrainUrlFactory()
return MockQuery((strain_url.display_name, strain_url.url_type, strain_url.obj_url))
elif len(args) == 2 and str(args[0]) == 'Strainsummary.summary_id' and str(args[1]) == 'Strainsummary.html':
strain_summary = factory.StrainsummaryFactory()
return MockQuery((strain_summary.summary_id, strain_summary.html))
elif len(args) == 1 and str(args[0]) == 'StrainsummaryReference.reference_id':
strain_ref = factory.StrainsummaryReferenceFactory()
return MockQuery([(strain_ref.reference_id,)])
elif len(args) == 1 and str(args[0]) == 'ReferenceUrl.reference_id':
refurl = factory.ReferenceUrlFactory()
return MockQuery(refurl.obj_url)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Referencedbentity'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
return MockQuery(refdbentity)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ReferenceUrl'>":
refurl = factory.ReferenceUrlFactory()
return MockQuery(refurl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Contig'>":
c_name = factory.ContigFactory()
return MockQuery(c_name)
elif len(args) == 2 and str(args[0]) == 'Contig.format_name' and str(args[1]) == 'Contig.obj_url':
c_name = factory.ContigFactory()
return MockQuery((c_name.format_name, c_name.obj_url))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Ec'>":
ec = factory.EcFactory()
return MockQuery(ec)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcUrl'>":
ecurl = factory.EcUrlFactory()
return MockQuery(ecurl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Psimod'>":
psimod = factory.PsimodFactory()
return MockQuery([psimod])
elif len(args) == 1 and str(args[0]) == "Posttranslationannotation.psimod_id":
ptm = factory.PsimodFactory()
return MockQuery([ptm])
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dbentity'>":
dbentity = factory.DbentityFactory()
return MockQuery(dbentity)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Posttranslationannotation'>":
ptm = factory.PosttranslationannotationFactory()
dbentity = factory.DbentityFactory()
reference = factory.ReferencedbentityFactory()
source = factory.SourceFactory()
psimod = factory.PsimodFactory()
ptm.dbentity = dbentity
ptm.reference = reference
ptm.source = source
ptm.psimod = psimod
return MockQuery(ptm)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Colleague'>":
colleague = factory.ColleagueFactory()
        return MockQuery([colleague, colleague])
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Colleaguetriage'>":
colleague_triage = factory.ColleaguetriageFactory()
return MockQuery([colleague_triage])
elif len(args) == 1 and str(args[0]) == "<class 'src.models.CuratorActivity'>":
curator_activity = factory.CuratorActivityFactory()
return MockQuery([curator_activity])
# def mock_extract_id_request(request, classname):
# return 'S000203483'
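# locus_reference_side_effect: reference-id lookups for the annotation tables
# hanging off a locus (literature, interaction, regulation, phenotype, GO,
# disease).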
def locus_reference_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Locusdbentity'>":
locus = factory.LocusdbentityFactory()
return MockQuery(locus)
elif len(args) == 1 and str(args[0]) == "Literatureannotation.reference_id":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
litannot = factory.LiteratureannotationFactory()
db = factory.DbentityFactory()
litannot.reference = refdbentity
litannot.dbentity = db
return MockQuery((litannot.reference_id,))
elif len(args) == 1 and str(args[0]) == "Geninteractionannotation.reference_id":
gen = factory.GeninteractionannotationFactory()
return MockQuery((gen.reference_id,))
elif len(args) == 1 and str(args[0]) == "Physinteractionannotation.reference_id":
gen = factory.PhysinteractionannotationFactory()
return MockQuery((gen.reference_id,))
elif len(args) == 1 and str(args[0]) == "Regulationannotation.reference_id":
reg = factory.RegulationannotationFactory()
return MockQuery((reg.reference_id,))
elif len(args) == 1 and str(args[0]) == "Phenotypeannotation.reference_id":
pheno = factory.PhenotypeannotationFactory()
return MockQuery((pheno.reference_id,))
elif len(args) == 1 and str(args[0]) == "Goannotation.reference_id":
go = factory.GoannotationFactory()
return MockQuery((go.reference_id,))
elif len(args) == 1 and str(args[0]) == "Diseaseannotation.reference_id":
do = factory.DiseaseannotationFactory()
return MockQuery((do.reference_id,))
elif len(args) == 1 and str(args[0]) == "ReferenceAlias.reference_id":
refalias = factory.ReferenceAliasFactory()
return MockQuery(refalias.reference_id)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Referencedbentity'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
return MockQuery(refdbentity)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ReferenceUrl'>":
refurl = factory.ReferenceUrlFactory()
return MockQuery(refurl)
elif len(args) == 1 and str(args[0]) == "Apo.apo_id":
apo = factory.ApoFactory()
return MockQuery(apo.apo_id)
elif len(args) == 2 and str(args[0]) == "Phenotypeannotation.reference_id" and str(args[1]) == "Phenotypeannotation.experiment_id":
phen = factory.PhenotypeannotationFactory()
return MockQuery((phen.reference_id, phen.experiment_id))
elif len(args) == 2 and str(args[0]) == "Literatureannotation.reference_id" and str(args[1]) == "Literatureannotation.topic":
lit = factory.LiteratureannotationFactory()
return MockQuery((lit.reference_id, lit.topic))
    else:
        print("the problem is the condition!!!!")
        print(args)
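# protein_side_effect: fixtures for posttranslational modification queries.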
def protein_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Posttranslationannotation'>":
pta = factory.PosttranslationannotationFactory()
return MockQuery(pta)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Referencedbentity'>":
refdb = factory.ReferencedbentityFactory()
return MockQuery(refdb)
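# sequence_side_effect: fixtures for sequence pages (DNA and protein sequence
# annotations, contigs, subfeatures and SO terms).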
def sequence_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Locusdbentity'>":
locus = factory.LocusdbentityFactory()
return MockQuery(locus)
elif len(args) == 1 and str(args[0]) == 'Locusdbentity.dbentity_id':
locus = factory.LocusdbentityFactory()
return MockQuery((locus.dbentity_id,))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dnasequenceannotation'>":
dnaseq = factory.DnasequenceannotationFactory()
contig = factory.ContigFactory()
locus = factory.LocusdbentityFactory()
dnaseq.contig = contig
dnaseq.dbentity = locus
return MockQuery(dnaseq)
elif len(args) == 1 and str(args[0]) == 'Dnasequenceannotation.so_id':
dnaseq = factory.DnasequenceannotationFactory()
return MockQuery([(dnaseq.so_id,)])
elif len(args) == 1 and str(args[0]) == 'So.display_name':
so = factory.SoFactory()
return MockQuery(so.display_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Proteinsequenceannotation'>":
prtseq = factory.ProteinsequenceannotationFactory()
contig = factory.ContigFactory()
prtseq.contig = contig
return MockQuery(prtseq)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dnasubsequence'>":
dnasubseq = factory.DnasubsequenceFactory()
return MockQuery(dnasubseq)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Contig'>":
c_name = factory.ContigFactory()
return MockQuery(c_name)
elif len(args) == 2 and str(args[0]) == 'Dnasequenceannotation.so_id' and str(args[1]) == 'count(nex.dnasequenceannotation.annotation_id)':
dnaseq = factory.DnasequenceannotationFactory()
return MockQuery((dnaseq.so_id, 20))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.So'>":
so = factory.SoFactory()
return MockQuery(so)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ContigUrl'>":
ctgurl = factory.ContigUrlFactory()
return MockQuery(ctgurl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ProteinsequenceDetail'>":
prtseq = factory.ProteinsequenceDetailFactory()
return MockQuery(prtseq)
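# reference_side_effect: fixtures for reference (paper) pages and the
# annotation types listed on them.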
def reference_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Referencedbentity'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
return MockQuery(refdbentity)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Locusdbentity'>":
locus = factory.LocusdbentityFactory()
return MockQuery(locus)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetReference'>":
datasetref = factory.DatasetReferenceFactory()
datasetf = factory.DatasetFactory()
datasetref.dataset = datasetf
return MockQuery(datasetref)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dataset'>":
dataset = factory.DatasetFactory()
return MockQuery(dataset)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.DatasetKeyword'>":
datasetkw = factory.DatasetKeywordFactory()
datasetkw.keyword = factory.KeywordFactory()
return MockQuery(datasetkw)
elif len(args) == 1 and str(args[0]) == 'Referencedocument.html':
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdb = factory.ReferencedbentityFactory()
refdb.journal = journal
refdoc = factory.ReferencedocumentFactory()
return MockQuery(refdoc.html)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ReferenceUrl'>":
refurl = factory.ReferenceUrlFactory()
return MockQuery(refurl)
elif len(args) == 1 and str(args[0]) == 'Referencetype.display_name':
reftype = factory.ReferencetypeFactory()
return MockQuery((reftype.display_name))
elif len(args) == 2 and str(args[0]) == 'Referenceauthor.display_name' and str(args[1]) == 'Referenceauthor.obj_url':
refauthor = factory.ReferenceauthorFactory()
return MockQuery((refauthor.display_name, refauthor.obj_url))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ReferenceRelation'>":
refrel = factory.ReferenceRelationFactory()
refrel.child = factory.ReferencedbentityFactory()
refrel.parent = factory.ReferencedbentityFactory()
return MockQuery((refrel))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ReferenceUrl'>":
refurl = factory.ReferenceUrlFactory()
return MockQuery(refurl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Physinteractionannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
intannot = factory.PhysinteractionannotationFactory()
intannot.reference = refdbentity
intannot.source = source
db1 = factory.DbentityFactory(dbentity_id=1)
db2 = factory.DbentityFactory(dbentity_id=2)
intannot.dbentity1 = db1
        intannot.dbentity2 = db2
return MockQuery((intannot))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Geninteractionannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
db1 = factory.DbentityFactory(dbentity_id=1)
db2 = factory.DbentityFactory(dbentity_id=2)
genannot = factory.GeninteractionannotationFactory()
genannot.dbentity1 = db1
        genannot.dbentity2 = db2
genannot.reference = refdbentity
genannot.source = source
return MockQuery((genannot))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Goannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
ecof = factory.EcoFactory()
go = factory.GoFactory()
db = factory.DbentityFactory()
goannot = factory.GoannotationFactory()
goannot.reference = refdbentity
goannot.dbentity = db
goannot.eco = ecof
goannot.go = go
goannot.source = source
return MockQuery(goannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoAlias'>":
# ecof = factory.EcoFactory()
ecoalias = factory.EcoAliasFactory()
# ecoalias.eco = ecof
return MockQuery(ecoalias)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.EcoUrl'>":
ecourl = factory.EcoUrlFactory()
return MockQuery(ecourl)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Goextension'>":
ro = factory.RoFactory()
goext = factory.GoextensionFactory()
goext.ro = ro
return MockQuery(goext)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Dbentity'>":
db = factory.DbentityFactory()
return MockQuery(db)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Gosupportingevidence'>":
goev = factory.GosupportingevidenceFactory()
return MockQuery(goev)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Phenotypeannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
pheno = factory.PhenotypeFactory()
db = factory.DbentityFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.reference = refdbentity
phenoannot.phenotype = pheno
phenoannot.dbentity = db
return MockQuery(phenoannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Diseaseannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
disease = factory.DiseaseFactory()
db = factory.DbentityFactory()
diseaseannot = factory.PhenotypeannotationFactory()
diseaseannot.reference = refdbentity
diseaseannot.disease = disease
diseaseannot.dbentity = db
return MockQuery(diseaseannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.PhenotypeannotationCond'>":
cond = factory.PhenotypeannotationCondFactory()
return MockQuery(cond)
elif len(args) == 1 and str(args[0]) == 'Chebi.obj_url':
chebi = factory.ChebiFactory()
return MockQuery(chebi.obj_url)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Apo'>":
apo = factory.ApoFactory()
return MockQuery(apo)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Regulationannotation'>":
target = factory.DbentityFactory()
regulator = factory.DbentityFactory()
regannot = factory.RegulationannotationFactory()
regannot.target = target
regannot.regulator = regulator
return MockQuery((regannot))
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Literatureannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
dbentity = factory.DbentityFactory()
litannot = factory.LiteratureannotationFactory()
litannot.dbentity = dbentity
litannot.reference = refdbentity
return MockQuery(litannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.ReferenceFile'>":
file = factory.FiledbentityFactory()
referencefile = factory.ReferenceFileFactory()
referencefile.file = file
return MockQuery(referencefile)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Referencetriage'>":
reference_triage = factory.ReferencetriageFactory()
return MockQuery([reference_triage])
elif len(args) == 2 and str(args[0]) == "<class 'src.models.CurationReference'>" and str(args[1]) == "<class 'src.models.Locusdbentity'>":
curator_reference = factory.CurationReferenceFactory()
locus_dbentity = factory.LocusdbentityFactory()
mock = Mock()
mock.Locusdbentity = locus_dbentity
mock.CurationReference = curator_reference
return MockQuery([mock])
elif len(args) == 2 and str(args[0]) == "<class 'src.models.Literatureannotation'>" and str(args[1]) == "<class 'src.models.Locusdbentity'>":
literature_annotation = factory.LiteratureannotationFactory()
locus_dbentity = factory.LocusdbentityFactory()
mock = Mock()
mock.Locusdbentity = locus_dbentity
mock.Literatureannotation = literature_annotation
return MockQuery([mock])
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Posttranslationannotation'>":
ptm = factory.PosttranslationannotationFactory()
return MockQuery(ptm)
elif len(args) == 1 and args[0] == AlleleGeninteraction:
allelegen = factory.AlleleGeninteractionFactory()
allelegen.allele1 = factory.AlleledbentityFactory()
allelegen.allele2 = factory.AlleledbentityFactory()
        allelegen.source = factory.SourceFactory()
allelegen.interaction = factory.GeninteractionannotationFactory()
return MockQuery(allelegen)
elif len(args) == 1 and args[0] == Functionalcomplementannotation:
func = factory.FunctionalcomplementannotationFactory()
return MockQuery(func)
elif len(args) == 2 and args[0] == CurationReference and args[1] == Complexdbentity:
mock = Mock()
mock.CurationReference = factory.CurationReferenceFactory()
mock.ComplexdbentityFactory = factory.ComplexdbentityFactory()
return MockQuery([mock])
elif len(args) == 2 and args[0] == CurationReference and args[1] == Pathwaydbentity:
mock = Mock()
mock.CurationReference = factory.CurationReferenceFactory()
mock.Pathwaydbentity = factory.PathwaydbentityFactory()
return MockQuery([mock])
elif len(args) == 2 and args[0] == CurationReference and args[1] == Alleledbentity:
mock = Mock()
mock.CurationReference = factory.CurationReferenceFactory()
mock.Alleledbentity = factory.AlleledbentityFactory()
return MockQuery([mock])
elif len(args) == 2 and args[0] == Literatureannotation and args[1] == Complexdbentity:
mock = Mock()
mock.Literatureannotation = factory.LiteratureannotationFactory()
mock.Complexdbentity = factory.ComplexdbentityFactory()
return MockQuery([mock])
elif len(args) == 2 and args[0] == Literatureannotation and args[1] == Pathwaydbentity:
lit = factory.LiteratureannotationFactory()
pathway = factory.ComplexdbentityFactory()
mock = Mock()
mock.Literatureannotation = lit
mock.Complexdbentity = pathway
return MockQuery([mock])
elif len(args) == 2 and args[0] == Literatureannotation and args[1] == Alleledbentity:
mock = Mock()
mock.Literatureannotation = factory.LiteratureannotationFactory()
mock.Complexdbentity = factory.AlleledbentityFactory()
return MockQuery([mock])
else:
print("the problem is the condition!!!!")
print(args)
def reference_phenotype_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Referencedbentity'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
return MockQuery(refdbentity)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Phenotypeannotation'>":
source = factory.SourceFactory()
journal = factory.JournalFactory()
book = factory.BookFactory()
refdbentity = factory.ReferencedbentityFactory()
refdbentity.journal = journal
#pheno = factory.PhenotypeFactory()
db = factory.DbentityFactory()
phenoannot = factory.PhenotypeannotationFactory()
phenoannot.reference = refdbentity
#phenoannot.phenotype = pheno
phenoannot.dbentity = db
return MockQuery(phenoannot)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.PhenotypeannotationCond'>":
cond = factory.PhenotypeannotationCondFactory()
return MockQuery(cond)
elif len(args) == 1 and str(args[0]) == 'Chebi.obj_url':
chebi = factory.ChebiFactory()
return MockQuery(chebi.obj_url)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery(s_name)
elif len(args) == 1 and str(args[0]) == "<class 'src.models.Apo'>":
apo = factory.ApoFactory()
return MockQuery(apo)
def strain_side_effect(*args, **kwargs):
if len(args) == 1 and str(args[0]) == "<class 'src.models.Straindbentity'>":
s_name = factory.StraindbentityFactory()
return MockQuery([s_name])
|
[
"mock.Mock"
] |
[((74123, 74129), 'mock.Mock', 'Mock', ([], {}), '()\n', (74127, 74129), False, 'from mock import Mock\n'), ((74601, 74607), 'mock.Mock', 'Mock', ([], {}), '()\n', (74605, 74607), False, 'from mock import Mock\n'), ((75738, 75744), 'mock.Mock', 'Mock', ([], {}), '()\n', (75742, 75744), False, 'from mock import Mock\n'), ((76061, 76067), 'mock.Mock', 'Mock', ([], {}), '()\n', (76065, 76067), False, 'from mock import Mock\n'), ((76376, 76382), 'mock.Mock', 'Mock', ([], {}), '()\n', (76380, 76382), False, 'from mock import Mock\n'), ((76693, 76699), 'mock.Mock', 'Mock', ([], {}), '()\n', (76697, 76699), False, 'from mock import Mock\n'), ((77137, 77143), 'mock.Mock', 'Mock', ([], {}), '()\n', (77141, 77143), False, 'from mock import Mock\n'), ((77402, 77408), 'mock.Mock', 'Mock', ([], {}), '()\n', (77406, 77408), False, 'from mock import Mock\n')]
|
from __future__ import with_statement
import os
import pickle
import sys
import unittest
import logging
APP_ROOT = os.getenv('APP_ROOT')
import currypy
#import pypatterns.filter as FilterModule
#import pypatterns.relational as RelationalModule
sys.path.insert(0,"../data")
class TestCase(unittest.TestCase):
"""
COLUMNS = ['column1', 'column2', 'column3']
PICKLE_PATH = os.path.sep + os.path.join('tmp', 'TestRelationalPickle.pickle')
def setUp(self):
return
def tearDown(self):
if os.path.exists(TestCase.PICKLE_PATH):
os.unlink(TestCase.PICKLE_PATH)
return
def testTable(self):
columns = TestCase.COLUMNS
table = RelationalModule.createTable('test',columns)
rowValuesList = [
[1,2,3],
[1,'2','3'],
[None,None,[]]
]
for rowValues in rowValuesList:
row = table.addRow()
map(row.setColumn, columns, rowValues)
self.assertPickleable(table)
unpickledTable = self.assertJsonPickleable(table)
self.assertEquals(table.rowCount(), unpickledTable.rowCount())
for actualValues, expectedValues in zip(unpickledTable.retrieve(columns=['column1', 'column2', 'column3']), rowValuesList):
self.assertEquals(actualValues, expectedValues)
return
def testRow(self):
columns = TestCase.COLUMNS
table = RelationalModule.createTable('test',columns)
expectedValues = [1,2,3]
row1 = table.addRow()
map(row1.setColumn, columns, expectedValues)
for column, expectedValue in zip(columns, expectedValues):
assert row1.getColumn(column) == expectedValue
pass
self.assertPickleable(row1)
unpickledRow = self.assertJsonPickleable(row1)
self.assertEquals(row1.values(), unpickledRow.values())
return
def assertPickleable(self, objectToPickle):
with open(TestCase.PICKLE_PATH, 'w') as f:
pickle.dump(objectToPickle, f)
with open(TestCase.PICKLE_PATH, 'r') as f:
newObject = pickle.load(f)
return
def assertJsonPickleable(self, objectToPickle):
import jsonpickle
pickle = jsonpickle.encode(objectToPickle)
unpickledObject = jsonpickle.decode(pickle)
return unpickledObject
"""
# END class TestCase
pass
def main():
suite = unittest.makeSuite(TestCase,'test')
runner = unittest.TextTestRunner()
runner.run(suite)
return
if __name__=="__main__":
main()
|
[
"unittest.TextTestRunner",
"sys.path.insert",
"unittest.makeSuite",
"os.getenv"
] |
[((118, 139), 'os.getenv', 'os.getenv', (['"""APP_ROOT"""'], {}), "('APP_ROOT')\n", (127, 139), False, 'import os\n'), ((250, 279), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""../data"""'], {}), "(0, '../data')\n", (265, 279), False, 'import sys\n'), ((2480, 2516), 'unittest.makeSuite', 'unittest.makeSuite', (['TestCase', '"""test"""'], {}), "(TestCase, 'test')\n", (2498, 2516), False, 'import unittest\n'), ((2529, 2554), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (2552, 2554), False, 'import unittest\n')]
|
from flask import Flask, escape, request, redirect, url_for, render_template
from database import *
from news import *
app = Flask(__name__)
@app.route('/register/', methods=['GET','POST'])
def regist():
if request.method =='POST':
username = request.form['username']
password = request.form['password']
repassword = request.form['repassword']
users = GETUSER()
if password == repassword:
if username in users:
return 'user already exist'
else:
insertuser(username, password)
createtable(username)
                # after registering, user information is saved in the user list as a dictionary
return redirect('/')
            # user will be redirected to the login page after registering.
else:
return('password should be identical to repassword')
return render_template('regist.html')
@app.route('/', methods=['GET','POST'])
def index():
return render_template('index.html')
# this is the login page, we post our information and it can check whether our information is in the user list
@app.route('/login', methods=['GET','POST'])
def login():
if request.method =='POST':
username = request.form['username']
password = request.form['password']
users = GETUSER()
if username in users:
if password == users[username]:
return redirect(url_for('main', name = username))
else:
return render_template('login_return.html', text = 'Wrong Password!')
else:
return render_template('login_return.html', text = 'Username Not Found, please register first!')
return render_template('login.html')
# check if we have the user's information in our list; if we do, the user has successfully logged in.
# else, he or she either has not registered or entered the wrong information
@app.route('/mainpage/<name>', methods = ['GET', 'POST'])
def main(name):
return render_template('mainpage.html', name = name)
@app.route('/profile/<name>', methods = ['GET', 'POST'])
def profile(name):
filenames = GETALL(password, name)
return render_template('profile.html', name = name, filenames = filenames)
@app.route('/file/display/<name>/<filename>', methods = ['GET', 'POST'])
def display(name, filename):
file, content = GET(password, name, filename)
return render_template('display_file.html', name=name, file=file, content=content)
@app.route('/uploader/<name>', methods = ['GET', 'POST'])
def uploader(name):
if request.method == 'POST':
f = request.files['file']
f.save('./File_buffer/' + f.filename)
POST(password, name, './File_buffer/' + f.filename, f.filename)
return render_template("return.html", name=name)
@app.route('/file/file_update/<name>', methods = ['GET', 'POST'])
def update(name):
return render_template('update.html', name=name)
@app.route('/file/file_update_result/<name>', methods = ['GET', 'POST'])
def updating(name):
if request.method == 'POST':
creator = request.form['author']
new_content = request.form['new_content']
PUT(password, name, creator, new_content)
return redirect(url_for('main', name = name))
@app.route('/file/file_delete/<name>', methods = ['GET', 'POST'])
def delete(name):
return render_template('delete.html', name=name)
@app.route('/file/file_deleting/<name>', methods = ['GET', 'POST'])
def deleting(name):
if request.method == 'POST':
creator = request.form['author']
DELETE(password, name, creator)
return redirect(url_for('main', name = name))
@app.route('/file/file_query/<name>', methods = ['GET', 'POST'])
def query(name):
if request.method == 'POST':
keyword = request.form['keyword']
pass_key = keyword + ' '
results = search(password, name, pass_key)
return render_template('display.html', name=name, results = results, keyword = pass_key)
@app.route('/news/<name>', methods = ['GET', 'POST'])
def news(name):
return render_template('news_search.html', name=name)
@app.route('/news/query/<name>', methods = ['GET', 'POST'])
def news_query(name):
if request.method == 'POST':
keyword = request.form['news_keyword']
pagenum = request.form['page']
title, date, link = search_news(keyword, int(pagenum))
if title != '':
return render_template('news_display.html', name=name,title=title, date=date, link=link)
else:
return render_template('news_display.html', name=name, title='No file matched', date=date, link=link)
if __name__ == '__main__':
app.run()
|
[
"flask.redirect",
"flask.url_for",
"flask.Flask",
"flask.render_template"
] |
[((130, 145), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (135, 145), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((789, 819), 'flask.render_template', 'render_template', (['"""regist.html"""'], {}), "('regist.html')\n", (804, 819), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((886, 915), 'flask.render_template', 'render_template', (['"""index.html"""'], {}), "('index.html')\n", (901, 915), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((1534, 1563), 'flask.render_template', 'render_template', (['"""login.html"""'], {}), "('login.html')\n", (1549, 1563), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((1818, 1861), 'flask.render_template', 'render_template', (['"""mainpage.html"""'], {'name': 'name'}), "('mainpage.html', name=name)\n", (1833, 1861), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((1990, 2053), 'flask.render_template', 'render_template', (['"""profile.html"""'], {'name': 'name', 'filenames': 'filenames'}), "('profile.html', name=name, filenames=filenames)\n", (2005, 2053), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((2221, 2296), 'flask.render_template', 'render_template', (['"""display_file.html"""'], {'name': 'name', 'file': 'file', 'content': 'content'}), "('display_file.html', name=name, file=file, content=content)\n", (2236, 2296), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((2696, 2737), 'flask.render_template', 'render_template', (['"""update.html"""'], {'name': 'name'}), "('update.html', name=name)\n", (2711, 2737), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((3137, 3178), 'flask.render_template', 'render_template', (['"""delete.html"""'], {'name': 'name'}), "('delete.html', name=name)\n", (3152, 3178), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((3658, 3735), 'flask.render_template', 'render_template', (['"""display.html"""'], {'name': 'name', 'results': 'results', 'keyword': 'pass_key'}), "('display.html', name=name, results=results, keyword=pass_key)\n", (3673, 3735), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((3825, 3871), 'flask.render_template', 'render_template', (['"""news_search.html"""'], {'name': 'name'}), "('news_search.html', name=name)\n", (3840, 3871), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((2557, 2598), 'flask.render_template', 'render_template', (['"""return.html"""'], {'name': 'name'}), "('return.html', name=name)\n", (2572, 2598), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((3010, 3036), 'flask.url_for', 'url_for', (['"""main"""'], {'name': 'name'}), "('main', name=name)\n", (3017, 3036), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((3391, 3417), 'flask.url_for', 'url_for', (['"""main"""'], {'name': 'name'}), "('main', name=name)\n", (3398, 3417), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((4151, 4237), 'flask.render_template', 'render_template', (['"""news_display.html"""'], {'name': 'name', 'title': 'title', 'date': 'date', 'link': 'link'}), "('news_display.html', name=name, title=title, \n    date=date,\n    link=link)\n", (4166, 4237), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((4251, 4349), 'flask.render_template', 'render_template', (['"""news_display.html"""'], {'name': 'name', 'title': '"""No file matched"""', 'date': 'date', 'link': 'link'}), "('news_display.html', name=name, title='No file matched',\n    date=date, link=link)\n", (4266, 4349), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((1435, 1527), 'flask.render_template', 'render_template', (['"""login_return.html"""'], {'text': '"""Username Not Found, please register first!"""'}), "('login_return.html', text=\n    'Username Not Found, please register first!')\n", (1450, 1527), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((641, 654), 'flask.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (649, 654), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((1352, 1412), 'flask.render_template', 'render_template', (['"""login_return.html"""'], {'text': '"""Wrong Password!"""'}), "('login_return.html', text='Wrong Password!')\n", (1367, 1412), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n'), ((1296, 1326), 'flask.url_for', 'url_for', (['"""main"""'], {'name': 'username'}), "('main', name=username)\n", (1303, 1326), False, 'from flask import Flask, escape, request, redirect, url_for, render_template\n')]
|
import pandas as pd
from aggregate import team_goals
from transform import transform
pbp = pd.read_csv('data/nhl_pbp20172018.csv')
# note that you can use the "uncleaned" pbp files in this code,
# you just will not be able to index on the standard three-letter abbreviations for all teams
print(team_goals(pbp))
pbp = transform(pbp)
print(pbp.head())
|
[
"pandas.read_csv",
"aggregate.team_goals",
"transform.transform"
] |
[((92, 131), 'pandas.read_csv', 'pd.read_csv', (['"""data/nhl_pbp20172018.csv"""'], {}), "('data/nhl_pbp20172018.csv')\n", (103, 131), True, 'import pandas as pd\n'), ((325, 339), 'transform.transform', 'transform', (['pbp'], {}), '(pbp)\n', (334, 339), False, 'from transform import transform\n'), ((300, 315), 'aggregate.team_goals', 'team_goals', (['pbp'], {}), '(pbp)\n', (310, 315), False, 'from aggregate import team_goals\n')]
|
"""The tests for dispatcher camera component."""
import asyncio
from homeassistant.setup import async_setup_component
from homeassistant.helpers.dispatcher import async_dispatcher_send
@asyncio.coroutine
def test_run_camera_setup(hass, test_client):
"""Test that it fetches the given dispatcher data."""
yield from async_setup_component(hass, 'camera', {
'camera': {
'platform': 'dispatcher',
'name': 'dispatcher',
'signal': 'test_camera',
}})
client = yield from test_client(hass.http.app)
async_dispatcher_send(hass, 'test_camera', b'test')
yield from hass.async_block_till_done()
resp = yield from client.get('/api/camera_proxy/camera.dispatcher')
assert resp.status == 200
body = yield from resp.text()
assert body == 'test'
async_dispatcher_send(hass, 'test_camera', b'test2')
yield from hass.async_block_till_done()
resp = yield from client.get('/api/camera_proxy/camera.dispatcher')
assert resp.status == 200
body = yield from resp.text()
assert body == 'test2'
|
[
"homeassistant.setup.async_setup_component",
"homeassistant.helpers.dispatcher.async_dispatcher_send"
] |
[((564, 615), 'homeassistant.helpers.dispatcher.async_dispatcher_send', 'async_dispatcher_send', (['hass', '"""test_camera"""', "b'test'"], {}), "(hass, 'test_camera', b'test')\n", (585, 615), False, 'from homeassistant.helpers.dispatcher import async_dispatcher_send\n'), ((829, 881), 'homeassistant.helpers.dispatcher.async_dispatcher_send', 'async_dispatcher_send', (['hass', '"""test_camera"""', "b'test2'"], {}), "(hass, 'test_camera', b'test2')\n", (850, 881), False, 'from homeassistant.helpers.dispatcher import async_dispatcher_send\n'), ((326, 454), 'homeassistant.setup.async_setup_component', 'async_setup_component', (['hass', '"""camera"""', "{'camera': {'platform': 'dispatcher', 'name': 'dispatcher', 'signal':\n 'test_camera'}}"], {}), "(hass, 'camera', {'camera': {'platform': 'dispatcher',\n 'name': 'dispatcher', 'signal': 'test_camera'}})\n", (347, 454), False, 'from homeassistant.setup import async_setup_component\n')]
|
import unittest
from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest
from typing import List
from . import *
class TestCutestParser(unittest.TestCase):
def setUp(self) -> None:
return super().setUp()
def test_parse_string_with_colon(self) -> None:
# Arrange
self._parser = CuTestParser()
parsed_line = ["5) Test_CuAssertPtrEquals: /input/tests/AllTests.c:55: expected <Test Hest: Blæst> but was <Pøls: 1 2 3>"]
expected_expected = "Test Hest: Blæst"
expected_actual = "Pøls: 1 2 3"
# Act
failed_cutests = self._parser.parse(parsed_line)
actual_expected = failed_cutests[0].expected
actual_actual = failed_cutests[0].actual
# Assert
self.assertEqual(expected_expected, actual_expected)
self.assertEqual(actual_actual, expected_actual)
def test_parse_int_result(self) -> None:
# Arrange
self._parser = CuTestParser()
parsed_line = ["5) Test_CuAssertPtrEquals: /input/tests/AllTests.c:55: expected <100> but was <69>"]
expected_expected = "100"
expected_actual = "69"
# Act
failed_cutests = self._parser.parse(parsed_line)
actual_expected = failed_cutests[0].expected
actual_actual = failed_cutests[0].actual
# Assert
self.assertEqual(expected_expected, actual_expected)
self.assertEqual(actual_actual, expected_actual)
def test_parse_double_result(self) -> None:
# Arrange
self._parser = CuTestParser()
parsed_line = ["5) Test_CuAssertPtrEquals: /input/tests/AllTests.c:55: expected <200.00> but was <69.00>"]
expected_expected = "200.00"
expected_actual = "69.00"
# Act
failed_cutests = self._parser.parse(parsed_line)
actual_expected = failed_cutests[0].expected
actual_actual = failed_cutests[0].actual
# Assert
self.assertEqual(expected_expected, actual_expected)
self.assertEqual(actual_actual, expected_actual)
def test_parse_pointer_result(self) -> None:
# Arrange
self._parser = CuTestParser()
parsed_line = ["5) Test_CuAssertPtrEquals: /input/tests/AllTests.c:55: expected pointer <0x0x16c17e0> but was <0x0x16c1800>"]
expected_expected = "0x0x16c17e0"
expected_actual = "0x0x16c1800"
# Act
failed_cutests = self._parser.parse(parsed_line)
actual_expected = failed_cutests[0].expected
actual_actual = failed_cutests[0].actual
# Assert
self.assertEqual(expected_expected, actual_expected)
self.assertEqual(actual_actual, expected_actual)
def test_parse_assert_result(self) -> None:
# Arrange
self._parser = CuTestParser()
parsed_line = ["5) Test_CuAssertGuguGaga: /input/tests/AllTests.c:55: assert failed"]
expected_expected = "true"
expected_actual = "false"
# Act
failed_cutests = self._parser.parse(parsed_line)
actual_expected = failed_cutests[0].expected
actual_actual = failed_cutests[0].actual
# Assert
self.assertEqual(expected_expected, actual_expected)
self.assertEqual(actual_actual, expected_actual)
def test_parse_no_testname(self) -> None:
# Arrange
self._parser = CuTestParser()
parsed_line = ["/input/tests/AllTests.c:55: assert failed"]
expected_fail_list: List[FailedCuTest] = []
# Act
failed_cutests = self._parser.parse(parsed_line)
# Assert
self.assertEqual(failed_cutests, expected_fail_list)
def test_parse_no_testmessage(self) -> None:
# Arrange
self._parser = CuTestParser()
parsed_line = ["500) Test_CuAssertHest: /input/tests/AllTests.c:55: Noget helt andet"]
expected_fail_list: List[FailedCuTest] = [None]
# Act
failed_cutests = self._parser.parse(parsed_line)
# Assert
self.assertEqual(failed_cutests, expected_fail_list)
def test_single_parse_pointer(self) -> None:
# Arrange
self._parser = CuTestParser()
parsed_line = "5) Test_CuAssertPtrEquals: /input/tests/AllTests.c:55: expected pointer <0x0x16c17e0> but was <0x0x16c1800>"
expected_expected = "0x0x16c17e0"
expected_actual = "0x0x16c1800"
# Act
failed_cutest = self._parser.parse_single_line(parsed_line)
actual_expected = failed_cutest.expected
actual_actual = failed_cutest.actual
# Assert
self.assertEqual(expected_expected, actual_expected)
self.assertEqual(actual_actual, expected_actual)
def test_single_single_parse_int(self) -> None:
# Arrange
self._parser = CuTestParser()
parsed_line = "1) addTest_1_1: /input/tests/AllTests.c:25: expected <12> but was <1>"
expected_expected = "12"
expected_actual = "1"
# Act
failed_cutest = self._parser.parse_single_line(parsed_line)
actual_expected = failed_cutest.expected
actual_actual = failed_cutest.actual
# Assert
self.assertEqual(expected_expected, actual_expected)
self.assertEqual(actual_actual, expected_actual)
|
[
"test_results_parsing.parser_cutest.CuTestParser"
] |
[((329, 343), 'test_results_parsing.parser_cutest.CuTestParser', 'CuTestParser', ([], {}), '()\n', (341, 343), False, 'from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest\n'), ((962, 976), 'test_results_parsing.parser_cutest.CuTestParser', 'CuTestParser', ([], {}), '()\n', (974, 976), False, 'from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest\n'), ((1561, 1575), 'test_results_parsing.parser_cutest.CuTestParser', 'CuTestParser', ([], {}), '()\n', (1573, 1575), False, 'from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest\n'), ((2177, 2191), 'test_results_parsing.parser_cutest.CuTestParser', 'CuTestParser', ([], {}), '()\n', (2189, 2191), False, 'from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest\n'), ((2814, 2828), 'test_results_parsing.parser_cutest.CuTestParser', 'CuTestParser', ([], {}), '()\n', (2826, 2828), False, 'from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest\n'), ((3392, 3406), 'test_results_parsing.parser_cutest.CuTestParser', 'CuTestParser', ([], {}), '()\n', (3404, 3406), False, 'from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest\n'), ((3778, 3792), 'test_results_parsing.parser_cutest.CuTestParser', 'CuTestParser', ([], {}), '()\n', (3790, 3792), False, 'from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest\n'), ((4200, 4214), 'test_results_parsing.parser_cutest.CuTestParser', 'CuTestParser', ([], {}), '()\n', (4212, 4214), False, 'from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest\n'), ((4846, 4860), 'test_results_parsing.parser_cutest.CuTestParser', 'CuTestParser', ([], {}), '()\n', (4858, 4860), False, 'from test_results_parsing.parser_cutest import CuTestParser, FailedCuTest\n')]
|
import os
import random
cwd = os.path.abspath(os.getcwd())
location_chrome = "../browsers/chromedriver"
location_firefox = "../browsers/geckodriver"
DOMAIN = "http://local.school.portnov.com:4520/#"
browsers = [
"chrome",
"firefox"
]
BROWSER_TYPE = random.choice(browsers)
CHROME_EXECUTABLE_PATH = os.path.join(cwd, location_chrome)
FIREFOX_EXECUTABLE_PATH = os.path.join(cwd, location_firefox)
EXPLICIT_TIMEOUT = 10
# Just example of some othe timeouts
# SLOW_TIMEOUT = 30
|
[
"os.getcwd",
"os.path.join",
"random.choice"
] |
[((259, 282), 'random.choice', 'random.choice', (['browsers'], {}), '(browsers)\n', (272, 282), False, 'import random\n'), ((308, 342), 'os.path.join', 'os.path.join', (['cwd', 'location_chrome'], {}), '(cwd, location_chrome)\n', (320, 342), False, 'import os\n'), ((369, 404), 'os.path.join', 'os.path.join', (['cwd', 'location_firefox'], {}), '(cwd, location_firefox)\n', (381, 404), False, 'import os\n'), ((47, 58), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (56, 58), False, 'import os\n')]
|
from glob import glob
import os.path
import cv2
def purge_augmentation(data_folder):
for f in glob(os.path.join(data_folder, 'image_2', 'equ_*.png')):
os.remove(f)
for f in glob(os.path.join(data_folder, 'image_2', 'flipped_*.png')):
os.remove(f)
for f in glob(os.path.join(data_folder, 'gt_image_2', 'flipped_*.png')):
os.remove(f)
def histogram_equalization(img):
img_yuv = cv2.cvtColor(img, cv2.COLOR_BGR2YUV)
# equalize the histogram of the Y channel
img_yuv[:,:,0] = cv2.equalizeHist(img_yuv[:,:,0])
# convert the YUV image back to RGB format
return cv2.cvtColor(img_yuv, cv2.COLOR_YUV2BGR)
def add_images_of_histogram_equalization(data_folder, image_paths):
for image_path in image_paths:
img = cv2.imread(image_path)
equ_img = histogram_equalization(img)
new_img_name = 'equ_' + os.path.basename(image_path)
cv2.imwrite(os.path.join(data_folder, 'image_2', new_img_name), equ_img)
def add_images_of_flip(data_folder, image_paths):
for image_path in image_paths:
img = cv2.imread(image_path)
flipped_img = cv2.flip(img, 1)
new_img_name = 'flipped_' + os.path.basename(image_path)
cv2.imwrite(os.path.join(data_folder, new_img_name), flipped_img)
def augment_images():
data_folder = 'data/data_road/training'
purge_augmentation(data_folder)
image_paths = glob(os.path.join(data_folder, 'image_2', '*.png'))
gt_image_paths = glob(os.path.join(data_folder, 'gt_image_2', '*.png'))
add_images_of_histogram_equalization(data_folder, image_paths)
add_images_of_flip(os.path.join(data_folder, 'image_2'), image_paths)
add_images_of_flip(os.path.join(data_folder, 'gt_image_2'), gt_image_paths)
augment_images()
|
[
"cv2.cvtColor",
"cv2.equalizeHist",
"cv2.flip",
"cv2.imread"
] |
[((419, 455), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2YUV'], {}), '(img, cv2.COLOR_BGR2YUV)\n', (431, 455), False, 'import cv2\n'), ((523, 557), 'cv2.equalizeHist', 'cv2.equalizeHist', (['img_yuv[:, :, 0]'], {}), '(img_yuv[:, :, 0])\n', (539, 557), False, 'import cv2\n'), ((614, 654), 'cv2.cvtColor', 'cv2.cvtColor', (['img_yuv', 'cv2.COLOR_YUV2BGR'], {}), '(img_yuv, cv2.COLOR_YUV2BGR)\n', (626, 654), False, 'import cv2\n'), ((773, 795), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (783, 795), False, 'import cv2\n'), ((1084, 1106), 'cv2.imread', 'cv2.imread', (['image_path'], {}), '(image_path)\n', (1094, 1106), False, 'import cv2\n'), ((1129, 1145), 'cv2.flip', 'cv2.flip', (['img', '(1)'], {}), '(img, 1)\n', (1137, 1145), False, 'import cv2\n')]
|
from time import time_ns
from ctypes import POINTER, c_int16, c_uint32
import matplotlib.pyplot as plt
import numpy as np
from picosdk.ps2000 import ps2000
from picosdk.functions import assert_pico2000_ok
from picosdk.ctypes_wrapper import C_CALLBACK_FUNCTION_FACTORY
from enum import IntEnum
class Channel(IntEnum):
PS2000_CHANNEL_A = 0
PS2000_CHANNEL_B = 1
class PotentialRange(IntEnum):
PS2000_10MV = 0
PS2000_20MV = 1
PS2000_50MV = 2
PS2000_100MV = 3
PS2000_200MV = 4
PS2000_500MV = 5
PS2000_1V = 6
PS2000_2V = 7
PS2000_5V = 8
PS2000_10V = 9
PS2000_20V = 10
class TimeUnit(IntEnum):
FEMTOSECOND = 0
PICOSECOND = 1
NANOSECOND = 2
MICROSECOND = 3
MILLISECOND = 4
SECOND = 5
CALLBACK = C_CALLBACK_FUNCTION_FACTORY(None, POINTER(POINTER(c_int16)), c_int16, c_uint32, c_int16, c_int16, c_uint32)
# reimplement this because the other one only takes ctypes
def adc_to_mv(values, range_, bitness=16):
v_ranges = [10, 20, 50, 100, 200, 500, 1_000, 2_000, 5_000, 10_000, 20_000]
return [(x * v_ranges[range_]) / (2**(bitness - 1) - 1) for x in values]
def determine_time_unit(interval_ns):
unit = 0
units = ['ns', 'us', 'ms', 's']
while interval_ns > 5_000:
interval_ns /= 1000
unit += 1
return interval_ns, units[unit]
class StreamingDevice:
def __init__(self, gather_values, potential_range=PotentialRange.PS2000_50MV):
self.device = ps2000.open_unit()
self.potential_range = potential_range
self.gather_values = gather_values
res = ps2000.ps2000_set_channel(self.device.handle, Channel.PS2000_CHANNEL_A, True, True, potential_range)
assert_pico2000_ok(res)
# start 'fast-streaming' mode
res = ps2000.ps2000_run_streaming_ns(
self.device.handle,
500,
TimeUnit.NANOSECOND,
100_000,
False,
1,
50_000
)
assert_pico2000_ok(res)
self.start_time = time_ns()
self.end_time = time_ns()
def close(self):
ps2000.ps2000_stop(self.device.handle)
self.device.close()
def gather(self):
adc_values = []
def get_overview_buffers(buffers, _overflow, _triggered_at, _triggered, _auto_stop, n_values):
adc_values.extend(buffers[0][0:n_values])
callback = CALLBACK(get_overview_buffers)
while len(adc_values) < self.gather_values:
ps2000.ps2000_get_streaming_last_values(
self.device.handle,
callback
)
self.end_time = time_ns()
return adc_to_mv(adc_values, self.potential_range)
stream = StreamingDevice(6_000_000)
values = stream.gather()
stream.close()
print('Values gathered: {}'.format(len(values)))
fig, ax = plt.subplots()
interval, units = determine_time_unit(stream.end_time - stream.start_time)
ax.set_xlabel('time/{}'.format(units))
ax.set_ylabel('voltage/mV')
ax.plot(np.linspace(0, interval, len(values)), values)
plt.show()
|
[
"matplotlib.pyplot.show",
"picosdk.ps2000.ps2000.ps2000_run_streaming_ns",
"picosdk.ps2000.ps2000.ps2000_get_streaming_last_values",
"picosdk.ps2000.ps2000.open_unit",
"picosdk.ps2000.ps2000.ps2000_stop",
"picosdk.functions.assert_pico2000_ok",
"time.time_ns",
"picosdk.ps2000.ps2000.ps2000_set_channel",
"matplotlib.pyplot.subplots",
"ctypes.POINTER"
] |
[((2858, 2872), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2870, 2872), True, 'import matplotlib.pyplot as plt\n'), ((3072, 3082), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3080, 3082), True, 'import matplotlib.pyplot as plt\n'), ((816, 832), 'ctypes.POINTER', 'POINTER', (['c_int16'], {}), '(c_int16)\n', (823, 832), False, 'from ctypes import POINTER, c_int16, c_uint32\n'), ((1478, 1496), 'picosdk.ps2000.ps2000.open_unit', 'ps2000.open_unit', ([], {}), '()\n', (1494, 1496), False, 'from picosdk.ps2000 import ps2000\n'), ((1603, 1708), 'picosdk.ps2000.ps2000.ps2000_set_channel', 'ps2000.ps2000_set_channel', (['self.device.handle', 'Channel.PS2000_CHANNEL_A', '(True)', '(True)', 'potential_range'], {}), '(self.device.handle, Channel.PS2000_CHANNEL_A, \n True, True, potential_range)\n', (1628, 1708), False, 'from picosdk.ps2000 import ps2000\n'), ((1712, 1735), 'picosdk.functions.assert_pico2000_ok', 'assert_pico2000_ok', (['res'], {}), '(res)\n', (1730, 1735), False, 'from picosdk.functions import assert_pico2000_ok\n'), ((1789, 1894), 'picosdk.ps2000.ps2000.ps2000_run_streaming_ns', 'ps2000.ps2000_run_streaming_ns', (['self.device.handle', '(500)', 'TimeUnit.NANOSECOND', '(100000)', '(False)', '(1)', '(50000)'], {}), '(self.device.handle, 500, TimeUnit.NANOSECOND,\n 100000, False, 1, 50000)\n', (1819, 1894), False, 'from picosdk.ps2000 import ps2000\n'), ((1995, 2018), 'picosdk.functions.assert_pico2000_ok', 'assert_pico2000_ok', (['res'], {}), '(res)\n', (2013, 2018), False, 'from picosdk.functions import assert_pico2000_ok\n'), ((2046, 2055), 'time.time_ns', 'time_ns', ([], {}), '()\n', (2053, 2055), False, 'from time import time_ns\n'), ((2080, 2089), 'time.time_ns', 'time_ns', ([], {}), '()\n', (2087, 2089), False, 'from time import time_ns\n'), ((2120, 2158), 'picosdk.ps2000.ps2000.ps2000_stop', 'ps2000.ps2000_stop', (['self.device.handle'], {}), '(self.device.handle)\n', (2138, 2158), False, 'from picosdk.ps2000 import ps2000\n'), ((2649, 2658), 'time.time_ns', 'time_ns', ([], {}), '()\n', (2656, 2658), False, 'from time import time_ns\n'), ((2508, 2577), 'picosdk.ps2000.ps2000.ps2000_get_streaming_last_values', 'ps2000.ps2000_get_streaming_last_values', (['self.device.handle', 'callback'], {}), '(self.device.handle, callback)\n', (2547, 2577), False, 'from picosdk.ps2000 import ps2000\n')]
|
from collections import defaultdict
from typing import List
class Solution:
def subdomainVisits(self, cpdomains: List[str]) -> List[str]:
total_count = defaultdict(int)
for cpdomain in cpdomains:
count, domain = cpdomain.split(' ')
while domain:
total_count[domain] += int(count)
domain = domain.partition('.')[2]
return [f'{count} {domain}' for domain, count in total_count.items()]
|
[
"collections.defaultdict"
] |
[((166, 182), 'collections.defaultdict', 'defaultdict', (['int'], {}), '(int)\n', (177, 182), False, 'from collections import defaultdict\n')]
|
import math
from io import BytesIO
import os
from time import sleep
from PIL import Image
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from geo_utils import get_meters_per_px, get_distance, get_latlng_inc_for_px_inc
class Screenshotter:
def __init__(self, start, end, zoom, out, add_transit, tile_size_px):
self.tile_size_px = tile_size_px
self.start_lat, self.start_lng = start
self.end_lat, self.end_lng = end
self.zoom = zoom
self.out = out
self.add_transit = add_transit
# Creates the driver and sets viewport size.
chrome_options = Options()
chrome_options.add_argument("--headless")
self.driver = webdriver.Chrome(chrome_options=chrome_options)
window_size = self.driver.execute_script("""
return [window.outerWidth - window.innerWidth + arguments[0],
window.outerHeight - window.innerHeight + arguments[1]];
""", self.tile_size_px, self.tile_size_px+200)
self.driver.set_window_size(*window_size)
# Builds the maps url for given params.
def build_url(self, lat, lng, zoom, add_transit):
url = 'https://www.google.com/maps/@{},{},{}z'.format(lat, lng, zoom)
if add_transit:
url += '/data=!5m1!1e2'
url += '?hl=en'
return url
# Builds a tile filename for given params.
def build_filename(self, row, col):
filename = 'tile_({:03d},{:03d}).png'.format(row, col)
filename = os.path.join(self.out, filename)
return filename
# Generates (lat, lng) pairs that correspond to tile centres. We are doing this as a
# separate step to know how many tiles there are before actually saving them.
def generate_pairs(self):
# (Y = row = lat decreasing, X = col = lng increasing)
# TODO: Translate start and end so that they are exactly in the corners of the image.
pairs = []
curr_lng = self.start_lng
lng_inc = get_latlng_inc_for_px_inc(self.start_lat, self.zoom, self.tile_size_px)[1]
while True:
# Initialize a new column.
curr_col = []
pairs.append(curr_col)
curr_lat = self.start_lat
while True:
# Save the current (lat, lng) pair.
curr_col.append((curr_lat, curr_lng))
# Check if the next row is out of bounds.
if curr_lat <= self.end_lat:
break
# Go to the next row.
lat_inc = get_latlng_inc_for_px_inc(curr_lat, self.zoom, self.tile_size_px)[0]
curr_lat -= lat_inc
# Check if the next column is out of bounds.
if curr_lng >= self.end_lng:
break
# Go to the next column.
curr_lng += lng_inc
return pairs
# Main screenshotter method that saves all tiles specified by input parameters.
def fetch_tiles(self):
print('[screenshotter] Starting the screenshotting process.')
# Create the output directory if it doesn't exist.
if not os.path.exists(self.out):
os.makedirs(self.out)
# Generate all (lat, lng) pairs.
pairs = self.generate_pairs()
nb_cols, nb_rows = len(pairs), len(pairs[0])
nb_tiles = nb_cols * nb_rows
print('[screenshotter] Done generating pairs. There will be {} tiles in total ({} x {}).'
.format(nb_tiles, nb_rows, nb_cols))
tiles_fetched = 0
for col in range(nb_cols):
for row in range(nb_rows):
# Skip fetching if the tile is already present in the directory.
filename = self.build_filename(row, col)
if os.path.exists(filename):
print("[screenshotter] Tile {}/{}: ({},{}) already exists in the output dir, skipping."
.format(tiles_fetched+1, nb_tiles, row, col), end='\r')
else:
# Fetch the tile, crop UI, and save.
latlng = pairs[col][row]
url = self.build_url(latlng[0], latlng[1], self.zoom, self.add_transit)
print("[screenshotter] Fetching tile {}/{}: ({},{}) from url {}"
.format(tiles_fetched+1, nb_tiles, row, col, url), end='\r')
self.driver.get(url)
png = self.driver.get_screenshot_as_png()
img = Image.open(BytesIO(png))
img = img.crop((0, 100, self.tile_size_px, self.tile_size_px + 100))
img.save(filename)
sleep(0.1)
tiles_fetched += 1
print("\n[screenshotter] Done fetching tiles.")
|
[
"selenium.webdriver.chrome.options.Options",
"io.BytesIO",
"os.makedirs",
"os.path.exists",
"time.sleep",
"selenium.webdriver.Chrome",
"geo_utils.get_latlng_inc_for_px_inc",
"os.path.join"
] |
[((651, 660), 'selenium.webdriver.chrome.options.Options', 'Options', ([], {}), '()\n', (658, 660), False, 'from selenium.webdriver.chrome.options import Options\n'), ((733, 780), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'chrome_options': 'chrome_options'}), '(chrome_options=chrome_options)\n', (749, 780), False, 'from selenium import webdriver\n'), ((1540, 1572), 'os.path.join', 'os.path.join', (['self.out', 'filename'], {}), '(self.out, filename)\n', (1552, 1572), False, 'import os\n'), ((2032, 2103), 'geo_utils.get_latlng_inc_for_px_inc', 'get_latlng_inc_for_px_inc', (['self.start_lat', 'self.zoom', 'self.tile_size_px'], {}), '(self.start_lat, self.zoom, self.tile_size_px)\n', (2057, 2103), False, 'from geo_utils import get_meters_per_px, get_distance, get_latlng_inc_for_px_inc\n'), ((3164, 3188), 'os.path.exists', 'os.path.exists', (['self.out'], {}), '(self.out)\n', (3178, 3188), False, 'import os\n'), ((3202, 3223), 'os.makedirs', 'os.makedirs', (['self.out'], {}), '(self.out)\n', (3213, 3223), False, 'import os\n'), ((3799, 3823), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (3813, 3823), False, 'import os\n'), ((2588, 2653), 'geo_utils.get_latlng_inc_for_px_inc', 'get_latlng_inc_for_px_inc', (['curr_lat', 'self.zoom', 'self.tile_size_px'], {}), '(curr_lat, self.zoom, self.tile_size_px)\n', (2613, 2653), False, 'from geo_utils import get_meters_per_px, get_distance, get_latlng_inc_for_px_inc\n'), ((4705, 4715), 'time.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (4710, 4715), False, 'from time import sleep\n'), ((4543, 4555), 'io.BytesIO', 'BytesIO', (['png'], {}), '(png)\n', (4550, 4555), False, 'from io import BytesIO\n')]
|
import re
import itertools
"""
--- Day 13: Knights of the Dinner Table ---
In years past, the holiday feast with your family hasn't gone so well. Not everyone gets along! This year, you resolve,
will be different. You're going to find the optimal seating arrangement and avoid all those awkward conversations.
You start by writing up a list of everyone invited and the amount their happiness would increase or decrease if they
were to find themselves sitting next to each other person. You have a circular table that will be just big enough to fit
everyone comfortably, and so each person will have exactly two neighbors.
For example, suppose you have only four attendees planned, and you calculate their potential happiness as follows:
Alice would gain 54 happiness units by sitting next to Bob.
Alice would lose 79 happiness units by sitting next to Carol.
Alice would lose 2 happiness units by sitting next to David.
Bob would gain 83 happiness units by sitting next to Alice.
Bob would lose 7 happiness units by sitting next to Carol.
Bob would lose 63 happiness units by sitting next to David.
Carol would lose 62 happiness units by sitting next to Alice.
Carol would gain 60 happiness units by sitting next to Bob.
Carol would gain 55 happiness units by sitting next to David.
David would gain 46 happiness units by sitting next to Alice.
David would lose 7 happiness units by sitting next to Bob.
David would gain 41 happiness units by sitting next to Carol.
Then, if you seat Alice next to David, Alice would lose 2 happiness units (because David talks so much), but David would
gain 46 happiness units (because Alice is such a good listener), for a total change of 44.
If you continue around the table, you could then seat Bob next to Alice (Bob gains 83, Alice gains 54). Finally, seat
Carol, who sits next to Bob (Carol gains 60, Bob loses 7) and David (Carol gains 55, David gains 41). The arrangement
looks like this:
+41 +46
+55 David -2
Carol Alice
+60 Bob +54
-7 +83
After trying every other seating arrangement in this hypothetical scenario, you find that this one is the most optimal,
with a total change in happiness of 330.
What is the total change in happiness for the optimal seating arrangement of the actual guest list?
Your puzzle answer was 618.
--- Part Two ---
In all the commotion, you realize that you forgot to seat yourself. At this point, you're pretty apathetic toward the
whole thing, and your happiness wouldn't really go up or down regardless of who you sit next to. You assume everyone
else would be just as ambivalent about sitting next to you, too.
So, add yourself to the list, and give all happiness relationships that involve you a score of 0.
What is the total change in happiness for the optimal seating arrangement that actually includes yourself?
Your puzzle answer was 601.
"""
def parse_seatings(attendees):
def find_happines_modifier(attendee):
happiness = int(re.match(r".*?(\d+).*", attendee).group(1))
return happiness if "gain" in attendee else -happiness
parsed_attendees = [(
attendee.split(" ")[0],
attendee.replace(".", "").split(" ")[-1].strip(),
find_happines_modifier(attendee),
) for attendee in attendees]
result = dict()
for attendee in parsed_attendees:
who = attendee[0]
neighbour = attendee[1]
happiness = attendee[2]
if who not in result:
result[who] = dict()
result[who][neighbour] = happiness
return result
def include_me(attendees, happines):
attendees = attendees.copy()
for name in attendees.keys():
attendees[name]["me"] = happines
attendees["me"] = dict((name, 0) for name in attendees.keys())
return attendees
def count_happiness(attendees, order):
total = 0
for index in range(-1, len(order) - 1):
who = order[index]
neighbour = order[index + 1]
total += attendees[who][neighbour] + attendees[neighbour][who]
return total
def find_seatings_with_happiness(attendees):
return map(
lambda order: (order, count_happiness(attendees, order)),
itertools.permutations(attendees.keys()))
def happines_change(attendees):
return max(
find_seatings_with_happiness(attendees),
key=lambda order_with_happiness: order_with_happiness[1])
if __name__ == "__main__":
with open("13_seatings.txt") as file:
attendees_list = parse_seatings(file.readlines())
includeing_me = include_me(attendees_list, 0)
print("Best order will be: ", happines_change(attendees_list))
print("Best order with me included will be: ", happines_change(includeing_me))
|
[
"re.match"
] |
[((2974, 3007), 're.match', 're.match', (['""".*?(\\\\d+).*"""', 'attendee'], {}), "('.*?(\\\\d+).*', attendee)\n", (2982, 3007), False, 'import re\n')]
|
from avatar_sgg.config.util import get_config
import collections
import pandas as pd
import string
import json
import random
import torch
import torch.utils.data as data
import os
import sng_parser
import numpy as np
def get_ade20k_caption_annotations(path_prefix=None):
"""
Precondition: checkout the https://github.com/clp-research/image-description-sequences under the location
of the ade20k_dir directory
:return: a dictionary containing the paths to the images as keys. Each image has a dictionary with a "caption" key
and a "category" key.
"""
conf = get_config()["ade20k"]
ade20k_dir = conf["root_dir"]
ade20k_caption_dir = conf["caption_dir"]
captions_file = os.path.join(ade20k_caption_dir, "captions.csv")
sequences_file = os.path.join(ade20k_caption_dir, "sequences.csv")
captions_df = pd.read_csv(captions_file, sep="\t", header=0)
sequences_df = pd.read_csv(sequences_file, sep="\t", header=0)
sequences_df["d1"] = sequences_df["d1"].map(lambda a: a if a[-1] in string.punctuation else a + ". ")
sequences_df["d2"] = sequences_df["d2"].map(lambda a: a if a[-1] in string.punctuation else a + ". ")
sequences_df["d3"] = sequences_df["d3"].map(lambda a: a if a[-1] in string.punctuation else a + ". ")
sequences_df["d4"] = sequences_df["d4"].map(lambda a: a if a[-1] in string.punctuation else a + ". ")
sequences_df["d5"] = sequences_df["d5"].map(lambda a: a if a[-1] in string.punctuation else a + ". ")
sequences_df["merged_sequences"] = sequences_df[["d1", "d2", "d3", "d4", "d5"]].agg(lambda x: ''.join(x.values),
axis=1).T
sequences_fram = sequences_df[["image_id", "image_path", "image_cat", "merged_sequences"]]
captions_df = pd.merge(captions_df, sequences_fram, how='inner', left_on=['image_id'], right_on=['image_id'])
if path_prefix is None:
print("Using Real Image Paths as Key.")
captions_df["image_path"] = captions_df["image_path"].map(
lambda a: os.path.join("file://", ade20k_dir, "images", a))
else:
captions_df["image_path"] = captions_df["image_path"].map(
lambda a: os.path.join(path_prefix, a))
captions_df.drop(["Unnamed: 0"], axis=1)
captions_list = [{"image_id": row["image_id"], "id": row["caption_id"], "caption": row["caption"],
"image_path": row["image_path"], "image_cat": row["image_cat"],
"merged_sequences": row["merged_sequences"]} for i, row in captions_df.iterrows()]
# { id: list(captions_df[captions_df["image_id"] == id ]["caption"]) for id in ids }
# Group all captions together having the same image ID.
image_path_to_caption = collections.defaultdict(dict)
for val in captions_list:
caption = val['caption']
category = val['image_cat']
image_path = val["image_path"]
merged_sequences = val["merged_sequences"]
image_path_to_caption[image_path]["category"] = category
image_path_to_caption[image_path]["merged_sequences"] = merged_sequences
if "caption" not in image_path_to_caption[image_path].keys():
image_path_to_caption[image_path]["caption"] = [caption]
else:
image_path_to_caption[image_path]["caption"].append(caption)
return image_path_to_caption
def get_ade20k_split(test_proportion: int = 15, test_size: int = 10, path_prefix=None):
"""
Returns train, dev and test split.
Dev has only one image.
TODO: probably better to use cross validation for the splits
:param test_proportion:
:return:
"""
assert test_proportion > 0 and test_proportion < 100
captions = get_ade20k_caption_annotations(path_prefix)
# Make the split consistent
random.seed(1)
keys = list(captions.keys())
random.shuffle(keys)
start_idx = test_size
dev = {k: captions[k] for k in keys[:test_size]}
size = len(keys[start_idx:])
test_idx = int(test_proportion * size / 100)
test = {k: captions[k] for k in keys[start_idx:test_idx]}
train = {k: captions[k] for k in keys[test_idx:]}
return train, dev, test
def get_categories(split):
cat = {}
one_key = list(split.keys())[0]
if "category" in split[one_key].keys():
cat = {i: split[k]["category"] for i, k in enumerate(split)}
return cat
def group_entry_per_category(category):
category_to_entry_lookup = collections.defaultdict(list)
for k, v in category.items():
category_to_entry_lookup[v].append(k)
def generate_text_graph(self, captions):
raw_graphs = None
if type(captions) is list:
raw_graphs = [sng_parser.parse(cap) for cap in captions]
elif type(captions) is str:
raw_graphs = [sng_parser.parse(captions)]
else:
assert raw_graphs is not None
def output_split_list_with_new_prefix(split, old, new, file_path):
"""
:param split:
:param old: old prefix
:param new: new prefix
:param file_path: where to write the file
:return:
"""
prefix_index_end = len(old)
new_paths = []
for k in split.keys():
idx_start = k.find(old)
new_paths.append(new + k[idx_start + prefix_index_end:])
with open(file_path, 'w') as outfile:
json.dump(new_paths, outfile)
print("Saved", file_path)
def generate_text_graph(split, output_path, caption_number=None):
if not os.path.isfile(output_path):
text_graphs = {}
conf = get_config()["scene_graph"]
cap_graph_file = conf["capgraphs_file"]
cap_graph = json.load(open(cap_graph_file))
txt_rel_vocab = list(set(cap_graph['cap_predicate'].keys()))
txt_rel2id = {key: i + 1 for i, key in enumerate(txt_rel_vocab)}
txt_obj_vocab = list(set(cap_graph['cap_category'].keys()))
txt_obj2id = {key: i + 1 for i, key in enumerate(txt_obj_vocab)}
# generate union object vocabulary
txt_obj_vocab = list(set(cap_graph['cap_category'].keys()))
for k in split.keys():
if caption_number is not None:
captions = split[k]["caption"][caption_number]
else:
captions = split[k]["caption"]
if type(captions) is list:
raw_graphs = [sng_parser.parse(cap) for cap in captions]
elif type(captions) is str:
raw_graphs = [sng_parser.parse(captions)]
else:
assert raw_graphs is not None
cleaned_graphs = []
for i, g in enumerate(raw_graphs):
entities = g["entities"]
relations = g["relations"]
filtered_entities = [e["lemma_head"] if e["lemma_head"] in txt_obj_vocab else 'none' for e in
entities]
filtered_relations = [[r["subject"], r["object"], r["lemma_relation"]] for r in relations if
r["lemma_relation"] in txt_rel_vocab]
extracted_graph = {'entities': filtered_entities, 'relations': filtered_relations}
cleaned_graphs.append(extracted_graph)
encode_txt = {'entities': [], 'relations': []}
for item in cleaned_graphs:
entities = [txt_obj2id[e] for e in item['entities']]
relations = [[entities[r[0]], entities[r[1]], txt_rel2id[r[2]]] for r in item['relations']]
encode_txt['entities'] = encode_txt['entities'] + entities
encode_txt['relations'] = encode_txt['relations'] + relations
# === for text_graph =============================================here
entities = encode_txt['entities']
relations = encode_txt['relations']
if len(relations) == 0:
txt_graph = np.zeros((len(entities), 1))
else:
txt_graph = np.zeros((len(entities), len(relations)))
text_graph = []
for i, es in enumerate(entities):
for j, rs in enumerate(relations):
if es in rs:
txt_graph[i, j] = 1
else:
txt_graph[i, j] = 0
text_graph.append(txt_graph.tolist())
text_graphs[k] = {
'txt': encode_txt,
'text_graph': text_graph,
'category': split[k]["category"]}#needed later to perform the category based recall
with open(output_path, 'w') as outfile:
print("Saving Text Graphs under:", output_path)
json.dump(text_graphs, outfile)
else:
print("Loading:", output_path)
text_graphs = json.load(open(output_path))
return text_graphs
def get_preprocessed_text_text_graphs_for_test():
"""
This function returns the captions of the ADE20K test sets, as graph. They are not merged
and are available as tuple in the "entry" key.
:return:
"""
conf = get_config()
_, _, test = get_ade20k_split(path_prefix="images")
txt_graphs_1 = generate_text_graph(test, conf["scene_graph"]["ade20k_text_graph_1"], 0)
txt_graphs_2 = generate_text_graph(test, conf["scene_graph"]["ade20k_text_graph_2"], 1)
txt_keys = list(txt_graphs_1.keys())
txt_graphs = {}
for k in txt_keys:
item = txt_graphs_1[k]
item2 = txt_graphs_2[k]
if len(item["txt"]['entities']) < 2 \
or len(item2["txt"]["entities"]) < 2 \
or len(item["txt"]['relations']) < 1 \
or len(item2["txt"]['relations']) < 1:
print("no relationship detected, skipping:", k)
continue
else:
txt_graphs[k] = {"entry": (item, item2), "category": item["category"]}
return txt_graphs
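    # Illustrative result shape (the key, graph names and category below are example values):
    #   txt_graphs[image_path] == {"entry": (graph_from_caption_0, graph_from_caption_1),
    #                              "category": "bedroom"}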
def get_preprocessed_image_text_graphs_for_test():
"""
    Returns a dictionary (each key identifies an image) of dictionaries of this form:
      { 'img': encoded image graph,
        'image_graph': image adjacency matrix,
        'txt': encoded text graph,
        'text_graph': text adjacency matrix }
:return:
"""
conf = get_config()
_, _, test = get_ade20k_split(path_prefix="images")
img_graphs = json.load(open(conf["scene_graph"]["ade20k_image_sg_test"]))
txt_graphs = generate_text_graph(test, conf["scene_graph"]["ade20k_text_sg_test"])
txt_keys = list(txt_graphs.keys())
for k in list(img_graphs.keys()):
assert k in txt_keys
for k in txt_keys:
item = img_graphs[k]
if len(item["img"]['entities']) < 2 \
or len(txt_graphs[k]["txt"]['entities']) < 2 \
or len(item["img"]['relations']) < 1 \
or len(txt_graphs[k]["txt"]['relations']) < 1:
print("no relationship detected, skipping:", k)
del(img_graphs[k])
del (txt_graphs[k])
continue
else:
item.update(txt_graphs[k])
return img_graphs
def get_preprocessed_image_graphs_for_map_world():
conf = get_config()
img_graphs = json.load(open(conf["scene_graph"]["ade20k_map_world_preprocessed_img_graph"]))
return img_graphs
if __name__ == "__main__":
print("Start")
conf = get_config()
train, dev, test = get_ade20k_split(path_prefix="images")
print(f"Train Split: {len(train)}")
print(f"Dev Split: {len(dev)}")
print(f"Test Split: {len(test)}")
# output_split_list_with_new_prefix(test, "/media/rafi/Samsung_T5/_DATASETS/ADE20K/",
# "/data/ImageCorpora/ADE20K_2016_07_26/",
# get_config()["output_dir"] + "/ade20k_caption_test.json")
graph = get_preprocessed_image_text_graphs_for_test()
print("Done")
|
[
"json.dump",
"pandas.read_csv",
"random.shuffle",
"pandas.merge",
"avatar_sgg.config.util.get_config",
"collections.defaultdict",
"os.path.isfile",
"random.seed",
"sng_parser.parse",
"os.path.join"
] |
[((711, 759), 'os.path.join', 'os.path.join', (['ade20k_caption_dir', '"""captions.csv"""'], {}), "(ade20k_caption_dir, 'captions.csv')\n", (723, 759), False, 'import os\n'), ((781, 830), 'os.path.join', 'os.path.join', (['ade20k_caption_dir', '"""sequences.csv"""'], {}), "(ade20k_caption_dir, 'sequences.csv')\n", (793, 830), False, 'import os\n'), ((849, 895), 'pandas.read_csv', 'pd.read_csv', (['captions_file'], {'sep': '"""\t"""', 'header': '(0)'}), "(captions_file, sep='\\t', header=0)\n", (860, 895), True, 'import pandas as pd\n'), ((915, 962), 'pandas.read_csv', 'pd.read_csv', (['sequences_file'], {'sep': '"""\t"""', 'header': '(0)'}), "(sequences_file, sep='\\t', header=0)\n", (926, 962), True, 'import pandas as pd\n'), ((1821, 1920), 'pandas.merge', 'pd.merge', (['captions_df', 'sequences_fram'], {'how': '"""inner"""', 'left_on': "['image_id']", 'right_on': "['image_id']"}), "(captions_df, sequences_fram, how='inner', left_on=['image_id'],\n right_on=['image_id'])\n", (1829, 1920), True, 'import pandas as pd\n'), ((2780, 2809), 'collections.defaultdict', 'collections.defaultdict', (['dict'], {}), '(dict)\n', (2803, 2809), False, 'import collections\n'), ((3836, 3850), 'random.seed', 'random.seed', (['(1)'], {}), '(1)\n', (3847, 3850), False, 'import random\n'), ((3888, 3908), 'random.shuffle', 'random.shuffle', (['keys'], {}), '(keys)\n', (3902, 3908), False, 'import random\n'), ((4493, 4522), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (4516, 4522), False, 'import collections\n'), ((9078, 9090), 'avatar_sgg.config.util.get_config', 'get_config', ([], {}), '()\n', (9088, 9090), False, 'from avatar_sgg.config.util import get_config\n'), ((10193, 10205), 'avatar_sgg.config.util.get_config', 'get_config', ([], {}), '()\n', (10203, 10205), False, 'from avatar_sgg.config.util import get_config\n'), ((11098, 11110), 'avatar_sgg.config.util.get_config', 'get_config', ([], {}), '()\n', (11108, 11110), False, 'from avatar_sgg.config.util import get_config\n'), ((11288, 11300), 'avatar_sgg.config.util.get_config', 'get_config', ([], {}), '()\n', (11298, 11300), False, 'from avatar_sgg.config.util import get_config\n'), ((588, 600), 'avatar_sgg.config.util.get_config', 'get_config', ([], {}), '()\n', (598, 600), False, 'from avatar_sgg.config.util import get_config\n'), ((5370, 5399), 'json.dump', 'json.dump', (['new_paths', 'outfile'], {}), '(new_paths, outfile)\n', (5379, 5399), False, 'import json\n'), ((5510, 5537), 'os.path.isfile', 'os.path.isfile', (['output_path'], {}), '(output_path)\n', (5524, 5537), False, 'import os\n'), ((5579, 5591), 'avatar_sgg.config.util.get_config', 'get_config', ([], {}), '()\n', (5589, 5591), False, 'from avatar_sgg.config.util import get_config\n'), ((8685, 8716), 'json.dump', 'json.dump', (['text_graphs', 'outfile'], {}), '(text_graphs, outfile)\n', (8694, 8716), False, 'import json\n'), ((2082, 2130), 'os.path.join', 'os.path.join', (['"""file://"""', 'ade20k_dir', '"""images"""', 'a'], {}), "('file://', ade20k_dir, 'images', a)\n", (2094, 2130), False, 'import os\n'), ((2231, 2259), 'os.path.join', 'os.path.join', (['path_prefix', 'a'], {}), '(path_prefix, a)\n', (2243, 2259), False, 'import os\n'), ((4737, 4758), 'sng_parser.parse', 'sng_parser.parse', (['cap'], {}), '(cap)\n', (4753, 4758), False, 'import sng_parser\n'), ((4842, 4868), 'sng_parser.parse', 'sng_parser.parse', (['captions'], {}), '(captions)\n', (4858, 4868), False, 'import sng_parser\n'), ((6376, 6397), 'sng_parser.parse', 'sng_parser.parse', 
(['cap'], {}), '(cap)\n', (6392, 6397), False, 'import sng_parser\n'), ((6489, 6515), 'sng_parser.parse', 'sng_parser.parse', (['captions'], {}), '(captions)\n', (6505, 6515), False, 'import sng_parser\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# || ____ _ __
# +------+ / __ )(_) /_______________ _____ ___
# | 0xBC | / __ / / __/ ___/ ___/ __ `/_ / / _ \
# +------+ / /_/ / / /_/ /__/ / / /_/ / / /_/ __/
# || || /_____/_/\__/\___/_/ \__,_/ /___/\___/
#
# Copyright (C) 2011-2013 Bitcraze AB
#
# Crazyflie Nano Quadcopter Client
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
This dialogue is used to configure different log configurations that are used to
enable logging of data from the Crazyflie. These can then be used in different
views in the UI.
"""
import logging
import cfclient
from PyQt5 import Qt, QtWidgets, uic
from PyQt5.QtCore import * # noqa
from PyQt5.QtWidgets import * # noqa
from PyQt5.Qt import * # noqa
from cflib.crazyflie.log import LogConfig
__author__ = 'Bitcraze AB'
__all__ = ['LogConfigDialogue']
logger = logging.getLogger(__name__)
(logconfig_widget_class, connect_widget_base_class) = (
uic.loadUiType(cfclient.module_path + '/ui/dialogs/logconfigdialogue.ui'))
NAME_FIELD = 0
ID_FIELD = 1
PTYPE_FIELD = 2
CTYPE_FIELD = 3
class LogConfigDialogue(QtWidgets.QWidget, logconfig_widget_class):
def __init__(self, helper, *args):
super(LogConfigDialogue, self).__init__(*args)
self.setupUi(self)
self.helper = helper
self.logTree.setHeaderLabels(['Name', 'ID', 'Unpack', 'Storage'])
self.varTree.setHeaderLabels(['Name', 'ID', 'Unpack', 'Storage'])
self.addButton.clicked.connect(lambda: self.moveNode(self.logTree,
self.varTree))
self.removeButton.clicked.connect(lambda: self.moveNode(self.varTree,
self.logTree))
self.cancelButton.clicked.connect(self.close)
self.loadButton.clicked.connect(self.loadConfig)
self.saveButton.clicked.connect(self.saveConfig)
self.loggingPeriod.textChanged.connect(self.periodChanged)
self.packetSize.setMaximum(26)
self.currentSize = 0
self.packetSize.setValue(0)
self.period = 0
def decodeSize(self, s):
size = 0
if ("16" in s):
size = 2
if ("float" in s):
size = 4
if ("8" in s):
size = 1
if ("FP16" in s):
size = 2
if ("32" in s):
size = 4
return size
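        # Worked example implied by the checks above: 'float' -> 4 bytes,
        # 'uint16' / 'FP16' -> 2 bytes, 'uint8' -> 1 byte; logging three floats and
        # two uint16 variables therefore uses 3*4 + 2*2 = 16 of the 26-byte payload.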
def sortTrees(self):
self.varTree.invisibleRootItem().sortChildren(NAME_FIELD,
Qt.AscendingOrder)
for node in self.getNodeChildren(self.varTree.invisibleRootItem()):
node.sortChildren(NAME_FIELD, Qt.AscendingOrder)
self.logTree.invisibleRootItem().sortChildren(NAME_FIELD,
Qt.AscendingOrder)
for node in self.getNodeChildren(self.logTree.invisibleRootItem()):
node.sortChildren(NAME_FIELD, Qt.AscendingOrder)
def getNodeChildren(self, treeNode):
children = []
for i in range(treeNode.childCount()):
children.append(treeNode.child(i))
return children
def updatePacketSizeBar(self):
self.currentSize = 0
for node in self.getNodeChildren(self.varTree.invisibleRootItem()):
for leaf in self.getNodeChildren(node):
self.currentSize = (self.currentSize +
self.decodeSize(leaf.text(CTYPE_FIELD)))
if self.currentSize > 26:
self.packetSize.setMaximum(self.currentSize / 26.0 * 100.0)
self.packetSize.setFormat("%v%")
self.packetSize.setValue(self.currentSize / 26.0 * 100.0)
else:
self.packetSize.setMaximum(26)
self.packetSize.setFormat("%p%")
self.packetSize.setValue(self.currentSize)
def addNewVar(self, logTreeItem, target):
parentName = logTreeItem.parent().text(NAME_FIELD)
varParent = target.findItems(parentName, Qt.MatchExactly, NAME_FIELD)
item = logTreeItem.clone()
if (len(varParent) == 0):
newParent = QtWidgets.QTreeWidgetItem()
newParent.setData(0, Qt.DisplayRole, parentName)
newParent.addChild(item)
target.addTopLevelItem(newParent)
target.expandItem(newParent)
else:
parent = varParent[0]
parent.addChild(item)
def moveNodeItem(self, source, target, item):
if (item.parent() is None):
children = self.getNodeChildren(item)
for c in children:
self.addNewVar(c, target)
source.takeTopLevelItem(source.indexOfTopLevelItem(item))
elif (item.parent().childCount() > 1):
self.addNewVar(item, target)
item.parent().removeChild(item)
else:
self.addNewVar(item, target)
# item.parent().removeChild(item)
source.takeTopLevelItem(source.indexOfTopLevelItem(item.parent()))
self.updatePacketSizeBar()
self.sortTrees()
self.checkAndEnableSaveButton()
def checkAndEnableSaveButton(self):
if self.currentSize > 0 and self.period > 0 and self.currentSize <= 26:
self.saveButton.setEnabled(True)
else:
self.saveButton.setEnabled(False)
def moveNode(self, source, target):
self.moveNodeItem(source, target, source.currentItem())
def moveNodeByName(self, source, target, parentName, itemName):
parents = source.findItems(parentName, Qt.MatchExactly, NAME_FIELD)
node = None
if (len(parents) > 0):
parent = parents[0]
for n in range(parent.childCount()):
if (parent.child(n).text(NAME_FIELD) == itemName):
node = parent.child(n)
break
if (node is not None):
self.moveNodeItem(source, target, node)
return True
return False
def showEvent(self, event):
self.updateToc()
self.populateDropDown()
toc = self.helper.cf.log.toc
if (len(list(toc.toc.keys())) > 0):
self.configNameCombo.setEnabled(True)
else:
self.configNameCombo.setEnabled(False)
def resetTrees(self):
self.varTree.clear()
self.updateToc()
def periodChanged(self, value):
try:
self.period = int(value)
self.checkAndEnableSaveButton()
except Exception:
self.period = 0
def showErrorPopup(self, caption, message):
self.box = QMessageBox() # noqa
self.box.setWindowTitle(caption)
self.box.setText(message)
# self.box.setButtonText(1, "Ok")
self.box.setWindowFlags(Qt.Dialog | Qt.MSWindowsFixedSizeDialogHint)
self.box.show()
def updateToc(self):
self.logTree.clear()
toc = self.helper.cf.log.toc
for group in list(toc.toc.keys()):
groupItem = QtWidgets.QTreeWidgetItem()
groupItem.setData(NAME_FIELD, Qt.DisplayRole, group)
for param in list(toc.toc[group].keys()):
item = QtWidgets.QTreeWidgetItem()
item.setData(NAME_FIELD, Qt.DisplayRole, param)
item.setData(ID_FIELD, Qt.DisplayRole,
toc.toc[group][param].ident)
item.setData(PTYPE_FIELD, Qt.DisplayRole,
toc.toc[group][param].pytype)
item.setData(CTYPE_FIELD, Qt.DisplayRole,
toc.toc[group][param].ctype)
groupItem.addChild(item)
self.logTree.addTopLevelItem(groupItem)
self.logTree.expandItem(groupItem)
self.sortTrees()
def populateDropDown(self):
self.configNameCombo.clear()
toc = self.helper.logConfigReader.getLogConfigs()
for d in toc:
self.configNameCombo.addItem(d.name)
if (len(toc) > 0):
self.loadButton.setEnabled(True)
def loadConfig(self):
cText = self.configNameCombo.currentText()
config = None
for d in self.helper.logConfigReader.getLogConfigs():
if (d.name == cText):
config = d
if (config is None):
logger.warning("Could not load config")
else:
self.resetTrees()
self.loggingPeriod.setText("%d" % config.period_in_ms)
self.period = config.period_in_ms
for v in config.variables:
if (v.is_toc_variable()):
parts = v.name.split(".")
varParent = parts[0]
varName = parts[1]
if self.moveNodeByName(
self.logTree, self.varTree, varParent,
varName) is False:
logger.warning("Could not find node %s.%s!!",
varParent, varName)
else:
logger.warning("Error: Mem vars not supported!")
def saveConfig(self):
updatedConfig = self.createConfigFromSelection()
try:
self.helper.logConfigReader.saveLogConfigFile(updatedConfig)
self.close()
except Exception as e:
self.showErrorPopup("Error when saving file", "Error: %s" % e)
self.helper.cf.log.add_config(updatedConfig)
def createConfigFromSelection(self):
logconfig = LogConfig(str(self.configNameCombo.currentText()),
self.period)
for node in self.getNodeChildren(self.varTree.invisibleRootItem()):
parentName = node.text(NAME_FIELD)
for leaf in self.getNodeChildren(node):
varName = leaf.text(NAME_FIELD)
varType = str(leaf.text(CTYPE_FIELD))
completeName = "%s.%s" % (parentName, varName)
logconfig.add_variable(completeName, varType)
return logconfig
|
[
"PyQt5.uic.loadUiType",
"PyQt5.QtWidgets.QTreeWidgetItem",
"logging.getLogger"
] |
[((1568, 1595), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1585, 1595), False, 'import logging\n'), ((1657, 1730), 'PyQt5.uic.loadUiType', 'uic.loadUiType', (["(cfclient.module_path + '/ui/dialogs/logconfigdialogue.ui')"], {}), "(cfclient.module_path + '/ui/dialogs/logconfigdialogue.ui')\n", (1671, 1730), False, 'from PyQt5 import Qt, QtWidgets, uic\n'), ((4875, 4902), 'PyQt5.QtWidgets.QTreeWidgetItem', 'QtWidgets.QTreeWidgetItem', ([], {}), '()\n', (4900, 4902), False, 'from PyQt5 import Qt, QtWidgets, uic\n'), ((7755, 7782), 'PyQt5.QtWidgets.QTreeWidgetItem', 'QtWidgets.QTreeWidgetItem', ([], {}), '()\n', (7780, 7782), False, 'from PyQt5 import Qt, QtWidgets, uic\n'), ((7925, 7952), 'PyQt5.QtWidgets.QTreeWidgetItem', 'QtWidgets.QTreeWidgetItem', ([], {}), '()\n', (7950, 7952), False, 'from PyQt5 import Qt, QtWidgets, uic\n')]
|
from itertools import product
from sys import stdout
import json
import ctypes
from litex.tools.litex_client import RemoteClient
from litescope.software.driver.analyzer import LiteScopeAnalyzerDriver
wb = RemoteClient(csr_csv="test/csr.csv")
wb.open()
analyzer = LiteScopeAnalyzerDriver(wb.regs, "analyzer", debug=True, config_csv="test/analyzer.csv")
analyzer.configure_subsampler(1) ## increase this to "skip" cycles, e.g. subsample
analyzer.configure_group(0)
# trigger conditions will depend upon each other in sequence
analyzer.add_rising_edge_trigger("puf_reset")
analyzer.run(offset=8, length=512) ### CHANGE THIS TO MATCH DEPTH offset=32 by default
for i, j in product(range(2), repeat=2):
wb.regs.teropuf_reset.write(1) # enable reset
wb.regs.teropuf_cell0_select.write(i)
wb.regs.teropuf_cell1_select.write(j)
wb.regs.teropuf_reset.write(0) # disable reset
print(f'Comparator from set {i} and {j}:')
for _ in range(10):
print(wb.regs.teropuf_bit_value.read())
print(ctypes.c_int32(wb.regs.teropuf_bit_value.read()).value)
analyzer.wait_done()
analyzer.upload()
analyzer.save("test/dump.vcd")
wb.close()
|
[
"litescope.software.driver.analyzer.LiteScopeAnalyzerDriver",
"litex.tools.litex_client.RemoteClient"
] |
[((207, 243), 'litex.tools.litex_client.RemoteClient', 'RemoteClient', ([], {'csr_csv': '"""test/csr.csv"""'}), "(csr_csv='test/csr.csv')\n", (219, 243), False, 'from litex.tools.litex_client import RemoteClient\n'), ((266, 359), 'litescope.software.driver.analyzer.LiteScopeAnalyzerDriver', 'LiteScopeAnalyzerDriver', (['wb.regs', '"""analyzer"""'], {'debug': '(True)', 'config_csv': '"""test/analyzer.csv"""'}), "(wb.regs, 'analyzer', debug=True, config_csv=\n 'test/analyzer.csv')\n", (289, 359), False, 'from litescope.software.driver.analyzer import LiteScopeAnalyzerDriver\n')]
|
from functools import wraps
import json
import logging
from urllib.parse import urlencode
import requests
from requests.exceptions import HTTPError, RequestException
from w3lib.url import canonicalize_url
from django.conf import settings
from directory_client_core.cache_control import ETagCacheControl
logger = logging.getLogger(__name__)
MESSAGE_CACHE_HIT = 'Fallback cache hit. Using cached content.'
MESSAGE_CACHE_MISS = 'Fallback cache miss. Cannot use any content.'
MESSAGE_NOT_FOUND = 'Resource not found.'
class ThrottlingFilter(logging.Filter):
"""
    Filters out records that have been seen within the past <period of time>,
    thereby reducing noise.
How this works:
- with `cache.add` the entry is stored only if the key is not yet
present in the cache
- cache.add returns True if the entry is stored, otherwise False
- these cache entries expire after <period of time>.
Therefore `filter` returns True if the key hasn't been seen in the past
<period of time>, and False if it has. The logger takes this to mean
"don't log this"
"""
def __init__(self, cache):
self.cache = cache
self.timeout_in_seconds = getattr(
settings,
'DIRECTORY_CLIENT_CORE_CACHE_LOG_THROTTLING_SECONDS',
None
) or 60*60*24 # default 24 hours
    def create_cache_key(self, record):
return f'noise-{record.getMessage()}-{record.url}'
def filter(self, record):
key = self.create_cache_key(record)
return self.cache.add(key, '', timeout=self.timeout_in_seconds)
class PopulateResponseMixin:
@classmethod
def from_response(cls, raw_response):
response = cls()
response.__setstate__(raw_response.__getstate__())
return response
class LiveResponse(PopulateResponseMixin, requests.Response):
pass
class FailureResponse(PopulateResponseMixin, requests.Response):
pass
class CacheResponse(requests.Response):
@classmethod
def from_cached_content(cls, cached_content):
response = cls()
response.status_code = 200
response._content = cached_content
return response
def fallback(cache):
"""
Caches content retrieved by the client, thus allowing the cached
content to be used later if the live content cannot be retrieved.
"""
log_filter = ThrottlingFilter(cache=cache)
logger.filters = []
logger.addFilter(log_filter)
def get_cache_control(cached_content):
if cached_content:
parsed = json.loads(cached_content.decode())
if 'etag' in parsed:
return ETagCacheControl(f'"{parsed["etag"]}"')
def closure(func):
@wraps(func)
def wrapper(client, url, params={}, *args, **kwargs):
cache_key = canonicalize_url(url + '?' + urlencode(params))
cached_content = cache.get(cache_key, {})
try:
response = func(
client,
url=url,
params=params,
cache_control=get_cache_control(cached_content),
*args,
**kwargs,
)
except RequestException:
# Failed to create the request e.g., the remote server is down,
# perhaps a timeout occurred, or even connection closed by
# remote, etc.
if cached_content:
logger.error(MESSAGE_CACHE_HIT, extra={'url': url})
return CacheResponse.from_cached_content(cached_content)
else:
raise
else:
log_context = {'status_code': response.status_code, 'url': url}
if response.status_code == 404:
logger.error(MESSAGE_NOT_FOUND, extra=log_context)
return LiveResponse.from_response(response)
elif response.status_code == 304:
return CacheResponse.from_cached_content(cached_content)
elif not response.ok:
# Successfully requested the content, but the response is
# not OK (e.g., 500, 403, etc)
if cached_content:
logger.error(MESSAGE_CACHE_HIT, extra=log_context)
return CacheResponse.from_cached_content(cached_content)
else:
logger.exception(MESSAGE_CACHE_MISS, extra=log_context)
return FailureResponse.from_response(response)
else:
cache.set(
cache_key,
response.content,
settings.DIRECTORY_CLIENT_CORE_CACHE_EXPIRE_SECONDS
)
return LiveResponse.from_response(response)
raise NotImplementedError('unreachable')
return wrapper
return closure
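# Illustrative usage sketch (hypothetical names): decorate the client method that performs
# the HTTP request; it must accept the `cache_control` keyword that the wrapper passes through.
#
#   from django.core.cache import cache as django_cache
#
#   class DemoClient:
#       @fallback(django_cache)
#       def get(self, url, params={}, cache_control=None, **kwargs):
#           # apply cache_control to the outgoing request headers as appropriate
#           return requests.get(url, params=params)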
|
[
"directory_client_core.cache_control.ETagCacheControl",
"functools.wraps",
"logging.getLogger",
"urllib.parse.urlencode"
] |
[((317, 344), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (334, 344), False, 'import logging\n'), ((2737, 2748), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (2742, 2748), False, 'from functools import wraps\n'), ((2664, 2705), 'directory_client_core.cache_control.ETagCacheControl', 'ETagCacheControl', (['f""""{parsed[\'etag\']}\\""""'], {}), '(f\'"{parsed[\\\'etag\\\']}"\')\n', (2680, 2705), False, 'from directory_client_core.cache_control import ETagCacheControl\n'), ((2864, 2881), 'urllib.parse.urlencode', 'urlencode', (['params'], {}), '(params)\n', (2873, 2881), False, 'from urllib.parse import urlencode\n')]
|
#///////////////////////////////////////////////////////////////////////////////
#// BSD 3-Clause License
#//
#// Copyright (C) 2018-2019, New York University , Max Planck Gesellschaft
#// Copyright note valid unless otherwise stated in individual files.
#// All rights reserved.
#///////////////////////////////////////////////////////////////////////////////
# brief Example for using the PinBulletWrapper for a quadruped robot.
from __future__ import print_function
import os
import rospkg
import numpy as np
import time
import robot_properties_solo
from robot_properties_solo.config import SoloConfig
import pybullet as p
import pinocchio as se3
from pinocchio.utils import zero
from py_pinocchio_bullet.wrapper import PinBulletWrapper
class QuadrupedRobot(PinBulletWrapper):
def __init__(self, physicsClient=None):
if physicsClient is None:
self.physicsClient = p.connect(p.DIRECT)
p.setGravity(0,0, -9.81)
p.setPhysicsEngineParameter(fixedTimeStep=1.0/1000.0, numSubSteps=1)
# Load the plain.
plain_urdf = (rospkg.RosPack().get_path("robot_properties_solo") +
"/urdf/plane_with_restitution.urdf")
self.planeId = p.loadURDF(plain_urdf)
# Load the robot
robotStartPos = [0.,0,0.40]
robotStartOrientation = p.getQuaternionFromEuler([0,0,0])
self.urdf_path = SoloConfig.urdf_path
self.robotId = p.loadURDF(self.urdf_path, robotStartPos,
robotStartOrientation, flags=p.URDF_USE_INERTIA_FROM_FILE,
useFixedBase=False)
p.getBasePositionAndOrientation(self.robotId)
# Create the robot wrapper in pinocchio.
package_dirs = [os.path.dirname(os.path.dirname(self.urdf_path)) + '/urdf']
self.pin_robot = SoloConfig.buildRobotWrapper()
# Query all the joints.
num_joints = p.getNumJoints(self.robotId)
for ji in range(num_joints):
p.changeDynamics(self.robotId, ji, linearDamping=.04,
angularDamping=0.04, restitution=0.0, lateralFriction=0.5)
self.base_link_name = "base_link"
self.joint_names = ['FL_HFE', 'FL_KFE', 'FR_HFE', 'FR_KFE', 'HL_HFE',
'HL_KFE', 'HR_HFE', 'HR_KFE']
controlled_joints = ['FL_HFE', 'FL_KFE', 'FR_HFE', 'FR_KFE', 'HL_HFE',
'HL_KFE', 'HR_HFE', 'HR_KFE']
# Creates the wrapper by calling the super.__init__.
super(QuadrupedRobot,self).__init__(self.robotId, self.pin_robot,
controlled_joints,
['FL_ANKLE', 'FR_ANKLE', 'HL_ANKLE', 'HR_ANKLE']
)
if __name__ == "__main__":
np.set_printoptions(precision=2, suppress=True)
# Setup pybullet for the quadruped and a wrapper to pinocchio.
quad = QuadrupedRobot()
# Get the current state and modify the joints to have the legs
# bend inwards.
q, dq = quad.get_state()
q[7] = q[9] = 0.8
q[11] = q[13] = -0.8
q[8] = q[10] = -1.6
q[12] = q[14] = 1.6
# Take the initial joint states as desired state.
q_des = q[7:].copy()
# Update the simulation state to the new initial configuration.
quad.reset_state(q, dq)
# Run the simulator for 2000 steps = 2 seconds.
for i in range(2000):
# Get the current state (position and velocity)
q, dq = quad.get_state()
active_contact_frames, contact_forces = quad.get_force()
# Alternative, if you want to use properties from the pinocchio robot
# like the jacobian or similar, you can also get the state and update
# the pinocchio internals with one call:
#
# q, dq = quad.get_state_update_pinocchio()
if i % 100 == 0:
print('Forces:', active_contact_frames, contact_forces)
# Compute the command torques at the joints. The torque
# vector only takes the actuated joints (excluding the base)
tau = 5. * (q_des - q[7:]) - 0.1 * dq[6:]
# Send the commands to the robot.
quad.send_joint_command(tau)
# Step the simulator and sleep.
p.stepSimulation()
time.sleep(0.001)
# Print the final active force frames and the forces
force_frames, forces = quad.get_force()
print("Active force_frames:", force_frames)
print("Corresponding forces:", forces)
|
[
"pybullet.getQuaternionFromEuler",
"pybullet.connect",
"numpy.set_printoptions",
"pybullet.stepSimulation",
"pybullet.setGravity",
"pybullet.changeDynamics",
"pybullet.getBasePositionAndOrientation",
"rospkg.RosPack",
"os.path.dirname",
"time.sleep",
"pybullet.setPhysicsEngineParameter",
"robot_properties_solo.config.SoloConfig.buildRobotWrapper",
"pybullet.getNumJoints",
"pybullet.loadURDF"
] |
[((2638, 2685), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'precision': '(2)', 'suppress': '(True)'}), '(precision=2, suppress=True)\n', (2657, 2685), True, 'import numpy as np\n'), ((1220, 1242), 'pybullet.loadURDF', 'p.loadURDF', (['plain_urdf'], {}), '(plain_urdf)\n', (1230, 1242), True, 'import pybullet as p\n'), ((1337, 1372), 'pybullet.getQuaternionFromEuler', 'p.getQuaternionFromEuler', (['[0, 0, 0]'], {}), '([0, 0, 0])\n', (1361, 1372), True, 'import pybullet as p\n'), ((1441, 1566), 'pybullet.loadURDF', 'p.loadURDF', (['self.urdf_path', 'robotStartPos', 'robotStartOrientation'], {'flags': 'p.URDF_USE_INERTIA_FROM_FILE', 'useFixedBase': '(False)'}), '(self.urdf_path, robotStartPos, robotStartOrientation, flags=p.\n URDF_USE_INERTIA_FROM_FILE, useFixedBase=False)\n', (1451, 1566), True, 'import pybullet as p\n'), ((1594, 1639), 'pybullet.getBasePositionAndOrientation', 'p.getBasePositionAndOrientation', (['self.robotId'], {}), '(self.robotId)\n', (1625, 1639), True, 'import pybullet as p\n'), ((1799, 1829), 'robot_properties_solo.config.SoloConfig.buildRobotWrapper', 'SoloConfig.buildRobotWrapper', ([], {}), '()\n', (1827, 1829), False, 'from robot_properties_solo.config import SoloConfig\n'), ((1884, 1912), 'pybullet.getNumJoints', 'p.getNumJoints', (['self.robotId'], {}), '(self.robotId)\n', (1898, 1912), True, 'import pybullet as p\n'), ((4080, 4098), 'pybullet.stepSimulation', 'p.stepSimulation', ([], {}), '()\n', (4096, 4098), True, 'import pybullet as p\n'), ((4107, 4124), 'time.sleep', 'time.sleep', (['(0.001)'], {}), '(0.001)\n', (4117, 4124), False, 'import time\n'), ((898, 917), 'pybullet.connect', 'p.connect', (['p.DIRECT'], {}), '(p.DIRECT)\n', (907, 917), True, 'import pybullet as p\n'), ((930, 955), 'pybullet.setGravity', 'p.setGravity', (['(0)', '(0)', '(-9.81)'], {}), '(0, 0, -9.81)\n', (942, 955), True, 'import pybullet as p\n'), ((967, 1037), 'pybullet.setPhysicsEngineParameter', 'p.setPhysicsEngineParameter', ([], {'fixedTimeStep': '(1.0 / 1000.0)', 'numSubSteps': '(1)'}), '(fixedTimeStep=1.0 / 1000.0, numSubSteps=1)\n', (994, 1037), True, 'import pybullet as p\n'), ((1963, 2080), 'pybullet.changeDynamics', 'p.changeDynamics', (['self.robotId', 'ji'], {'linearDamping': '(0.04)', 'angularDamping': '(0.04)', 'restitution': '(0.0)', 'lateralFriction': '(0.5)'}), '(self.robotId, ji, linearDamping=0.04, angularDamping=0.04,\n restitution=0.0, lateralFriction=0.5)\n', (1979, 2080), True, 'import pybullet as p\n'), ((1085, 1101), 'rospkg.RosPack', 'rospkg.RosPack', ([], {}), '()\n', (1099, 1101), False, 'import rospkg\n'), ((1730, 1761), 'os.path.dirname', 'os.path.dirname', (['self.urdf_path'], {}), '(self.urdf_path)\n', (1745, 1761), False, 'import os\n')]
|
# -*- encoding: utf-8 -*-
from glob import glob
from os.path import basename, splitext
from setuptools import find_packages, setup
with open('README.md', 'r') as f:
long_description = f.read()
setup(
name='flatspace',
license='GPLv2',
version='0.0.1',
description='Space is flat here',
long_description=long_description,
long_description_content_type='text/markdown',
entry_points={
'console_scripts': [
'flatspace=flatspace.cli:cli',
],
},
install_requires=[],
url='',
classifiers=[
'License :: BHP',
'Development Status :: 4 - Beta',
'Operating System :: Unix',
'Operating System :: POSIX',
'Programming Language :: Python :: 3',
'Topic :: Utilities',
],
keywords=[],
extras_require={},
setup_requires=[],
packages=find_packages(where='src'),
package_dir={'': 'src'},
py_modules=[splitext(basename(path))[0] for path in glob('src/*.py')],
include_package_data=True,
zip_safe=False,
package_data={
'': ['config/*.yml'],
},
)
|
[
"os.path.basename",
"setuptools.find_packages",
"glob.glob"
] |
[((864, 890), 'setuptools.find_packages', 'find_packages', ([], {'where': '"""src"""'}), "(where='src')\n", (877, 890), False, 'from setuptools import find_packages, setup\n'), ((977, 993), 'glob.glob', 'glob', (['"""src/*.py"""'], {}), "('src/*.py')\n", (981, 993), False, 'from glob import glob\n'), ((946, 960), 'os.path.basename', 'basename', (['path'], {}), '(path)\n', (954, 960), False, 'from os.path import basename, splitext\n')]
|
from flask import request, jsonify, Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
db_string = "postgres://postgres:example@db:5432/postgres"
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = db_string
db = SQLAlchemy(app)
migrate = Migrate(app, db)
class ListModel(db.Model):
__tablename__ = 'list'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String())
def __init__(self, name):
self.name = name
def __repr__(self):
return f"<List {self.name}>"
@app.route('/items/list', methods=['GET'])
def list_items():
items = ListModel.query.order_by(ListModel.id).all()
results = [
{
"id": item.id,
"name": item.name,
} for item in items]
print(results, flush=True)
return jsonify(results)
@app.route('/items/create', methods=['POST'])
def create_item():
new_item = ListModel(name="")
db.session.add(new_item)
db.session.commit()
    return {"message": "Item has been created successfully."}
@app.route('/items/update', methods=["POST"])
def update_item():
if request.is_json:
data = request.get_json()
print(data, flush=True)
item = ListModel.query.get_or_404(data["id"])
item.name = data["name"]
db.session.add(item)
db.session.commit()
return {"message": f"Item {item.name} has been updated successfully."}
else:
return {"error": "The request payload is not in JSON format"}
@app.route("/items/delete", methods=["DELETE"])
def delete_item():
if request.is_json:
data = request.get_json()
item = ListModel.query.get_or_404(data["id"])
db.session.delete(item)
db.session.commit()
return {"message": f"Item {item.name} has been deleted successfully."}
else:
return {"error": "The request payload is not in JSON format"}
if __name__ == "__main__":
app.run()
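# Illustrative requests against a local development run (default Flask port assumed):
#   curl http://127.0.0.1:5000/items/list
#   curl -X POST http://127.0.0.1:5000/items/create
#   curl -X POST -H "Content-Type: application/json" \
#        -d '{"id": 1, "name": "milk"}' http://127.0.0.1:5000/items/update
#   curl -X DELETE -H "Content-Type: application/json" \
#        -d '{"id": 1}' http://127.0.0.1:5000/items/delete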
|
[
"flask.Flask",
"flask.jsonify",
"flask_sqlalchemy.SQLAlchemy",
"flask_migrate.Migrate",
"flask.request.get_json"
] |
[((183, 198), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (188, 198), False, 'from flask import request, jsonify, Flask\n'), ((254, 269), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (264, 269), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((280, 296), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (287, 296), False, 'from flask_migrate import Migrate\n'), ((829, 845), 'flask.jsonify', 'jsonify', (['results'], {}), '(results)\n', (836, 845), False, 'from flask import request, jsonify, Flask\n'), ((1166, 1184), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1182, 1184), False, 'from flask import request, jsonify, Flask\n'), ((1627, 1645), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1643, 1645), False, 'from flask import request, jsonify, Flask\n')]
|
import os
import sys
import mimetypes
import hashlib
import logging
import time
from multiprocessing import Queue, Process
from base64 import b64encode
from collections import deque, OrderedDict
import requests
from filestack.config import HEADERS
from filestack.utils.utils import store_params
log = logging.getLogger(__name__)
log.setLevel(logging.ERROR)
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter("%(asctime)s - %(processName)s[%(process)d] - %(levelname)s - %(message)s"))
log.addHandler(handler)
UPLOAD_HOST = 'https://upload.filestackapi.com'
MB = 1024 ** 2
DEFAULT_PART_SIZE = 8 * MB
DEFAULT_CHUNK_SIZE = 8 * MB
NUM_OF_UPLOADERS = 4
NUM_OF_COMMITTERS = 2
MAX_DELAY = 4
class ResponseNotOk(Exception):
pass
class S3UploadException(Exception):
pass
class UploadManager(object):
def __init__(self, apikey, filepath, storage, params, security, upload_q, commit_q, response_q):
self.chunk_size = DEFAULT_CHUNK_SIZE
self.apikey = apikey
self.filepath = filepath
self.storage = storage
self.params = params
self.security = security
self.upload_q = upload_q
self.commit_q = commit_q
self.response_q = response_q
self.filename = os.path.split(filepath)[1]
self.filesize = os.path.getsize(filepath)
self.mimetype = mimetypes.guess_type(filepath)[0]
self.start_response = None
self.parts = OrderedDict()
self._currently_processed = 0
def run(self):
self._multipart_start()
self._create_parts()
self._manage_upload_process()
def _multipart_start(self):
data = {
'apikey': self.apikey,
'filename': self.filename,
'mimetype': self.mimetype,
'size': self.filesize,
'store_location': self.storage,
'multipart': True
}
if self.params:
data.update(store_params(self.params))
if self.security:
data.update({
'policy': self.security['policy'],
'signature': self.security['signature']
})
response = requests.post(
UPLOAD_HOST + '/multipart/start',
data=data,
files={'file': (self.filename, '', None)},
params=self.params,
headers=HEADERS
)
self.start_response = response.json()
def _multipart_complete(self):
response_code = 0
data = {
'apikey': self.apikey,
'uri': self.start_response['uri'],
'region': self.start_response['region'],
'upload_id': self.start_response['upload_id'],
'filename': self.filename,
'size': self.filesize,
'mimetype': self.mimetype,
'multipart': True,
'store_location': self.storage
}
if self.params:
data.update(store_params(self.params))
while response_code != 200:
log.info('Waiting for complete')
response = requests.post(
UPLOAD_HOST + '/multipart/complete',
data=data,
files={'file': (self.filename, '', None)},
params=self.params,
headers=HEADERS
)
if not response.ok:
log.error('Unexpected backend response: %s', response.content)
raise Exception(response.content)
response_code = response.status_code
log.info('Got response %s, %s', response, response.content)
self.response_q.put(response)
def _create_parts(self):
for index, seek_point in enumerate(
self._get_byte_ranges(self.filesize, DEFAULT_PART_SIZE)):
chunks = deque()
for ch in self._get_byte_ranges(seek_point['size'], self.chunk_size):
chunks.appendleft({'offset': ch['seek'], 'size': ch['size']})
self.parts[index + 1] = {
'seek': seek_point['seek'],
'size': seek_point['size'],
'currently_processed': 0,
'chunks': chunks
}
def _split_chunk(self, chunk):
return [
{'offset': ch['seek'], 'size': ch['size']}
for ch in self._get_byte_ranges(chunk['size'], self.chunk_size, start=chunk['offset'])
]
def _get_next_chunk(self):
for part_num in self.parts:
if self.parts[part_num]['chunks']:
return part_num, self.parts[part_num]['chunks'].pop()
return None, None
def _feed_uploaders(self):
while self._currently_processed < NUM_OF_UPLOADERS:
part_num, chunk = self._get_next_chunk()
if not chunk:
break
if chunk['size'] > self.chunk_size:
smaller_chunks = self._split_chunk(chunk)
chunk, rest = smaller_chunks[0], smaller_chunks[1:]
for c in reversed(rest):
self.parts[part_num]['chunks'].append(c)
self._submit_upload_job(part_num, chunk)
def _manage_upload_process(self):
self._feed_uploaders()
while self.parts:
response = self.response_q.get(block=True)
log.info('Got response %s', response)
if response['worker'] == 'uploader':
self.parts[response['part']]['currently_processed'] -= 1
self._currently_processed -= 1
old_chunk = response['chunk']
if not response['success']:
log.warning('Failed response received %s', response)
if response['delay']:
# this means uploader got a response, but it wasn't ok (status code >= 400)
# resubmit with requested delay if max delay not exceeded
if response['delay'] > MAX_DELAY:
log.error('Max delay exceeded for chunk %s', old_chunk)
return
self._submit_upload_job(response['part'], old_chunk, delay=response['delay'])
continue
if old_chunk['size'] <= self.chunk_size:
log.info(
'Failed to upload %s bytes. Changing chunk size from %s to %s bytes',
old_chunk['size'], self.chunk_size, self.chunk_size / 2
)
self.chunk_size /= 2
if self.chunk_size < 32 * 1024:
log.error('Minimal chunk size failed')
return
new_chunks = self._split_chunk(old_chunk)
for new_chunk in reversed(new_chunks):
self.parts[response['part']]['chunks'].append(new_chunk)
self._feed_uploaders()
continue
if not self.parts[response['part']]['chunks'] and self.parts[response['part']]['currently_processed'] == 0:
log.info('No more chunks for part %s, time to commit', response['part'])
self.commit_q.put({
'apikey': self.apikey,
'uri': self.start_response['uri'],
'region': self.start_response['region'],
'upload_id': self.start_response['upload_id'],
'size': self.filesize,
'part': response['part'],
'store_location': self.storage,
'filename': self.filename,
})
self._feed_uploaders()
elif response['worker'] == 'committer':
log.info('Got commit done message %s', response)
log.info('Removing part %s', response['part'])
self.parts.pop(response['part'])
if self._get_next_chunk()[1] is None:
self._multipart_complete()
def _submit_upload_job(self, part_num, chunk, delay=0):
self.upload_q.put({
'chunk': chunk,
'apikey': self.apikey,
'store_location': self.storage,
'part': part_num,
'seek': self.parts[part_num]['seek'],
'offset': chunk['offset'],
'size': chunk['size'],
'filepath': self.filepath,
'filename': self.filename,
'filesize': self.filesize,
'uri': self.start_response['uri'],
'region': self.start_response['region'],
'upload_id': self.start_response['upload_id'],
'delay': delay
})
self.parts[part_num]['currently_processed'] += 1
self._currently_processed += 1
@staticmethod
def _get_byte_ranges(filesize, part_size, start=0, bytes_to_read=None):
if bytes_to_read is None:
bytes_to_read = filesize
ranges = []
pos = start
while bytes_to_read > 0:
point = {'seek': pos}
if bytes_to_read > part_size:
size = part_size
bytes_to_read -= part_size
pos += part_size
else:
size = bytes_to_read
bytes_to_read = 0
point['size'] = size
ranges.append(point)
return ranges
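        # Worked example of the splitting logic above: with the default 8 MB part size,
        # _get_byte_ranges(20 * MB, 8 * MB) returns
        #   [{'seek': 0 * MB, 'size': 8 * MB},
        #    {'seek': 8 * MB, 'size': 8 * MB},
        #    {'seek': 16 * MB, 'size': 4 * MB}]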
def manage_upload(apikey, filepath, storage, params, security, upload_q, commit_q, response_q):
manager = UploadManager(apikey, filepath, storage, params, security, upload_q, commit_q, response_q)
manager.run()
def consume_upload_job(upload_q, response_q):
log.info('Uploader ready')
while True:
job = upload_q.get(block=True)
if job == 'die':
break # we need a way to stop it in tests (other than terminate())
log.info(
'Uploader got chunk %s for part %s',
job['chunk'], job['part']
)
log.debug('Job details: %s', job)
delay = job.get('delay', 0)
        log.info('Uploader waiting for %s seconds', delay)
        time.sleep(delay)
with open(job['filepath'], 'rb') as f:
f.seek(job['seek'] + job['offset'])
chunk = f.read(job['size'])
success = True
try:
backend_resp = requests.post(
UPLOAD_HOST + '/multipart/upload',
data={
'apikey': job['apikey'],
'part': job['part'],
'size': job['size'],
'md5': b64encode(hashlib.md5(chunk).digest()).strip(),
'uri': job['uri'],
'region': job['region'],
'upload_id': job['upload_id'],
'store_location': job['store_location'],
'multipart': True,
'offset': job['offset']
},
files={'file': (job['filename'], '', None)},
headers=HEADERS
)
if not backend_resp.ok:
raise ResponseNotOk('Incorrect backend response %s', backend_resp)
backend_data = backend_resp.json()
try:
s3_resp = requests.put(
backend_data['url'],
headers=backend_data['headers'],
data=chunk
)
except Exception as e:
log.warning('Upload to S3 failed %s', e)
raise S3UploadException(str(e))
if not s3_resp.ok:
raise ResponseNotOk('Incorrect S3 response %s', s3_resp)
except ResponseNotOk:
delay = delay * 1.3 or 1
success = False
except S3UploadException:
delay = 0
success = False
except Exception as e:
delay = 0
log.error('Request to backend failed %s', e)
success = False
response_q.put({
'worker': 'uploader',
'chunk': job['chunk'],
'part': job['part'],
'offset': job['offset'],
'size': job['size'],
'success': success,
'delay': delay
})
log.info(
'Uploader finished chunk %s for part %s. Success: %s',
job['chunk'], job['part'], success
)
def commit_part(commit_q, response_q):
log.info('Committer ready')
while True:
job = commit_q.get(block=True)
if job == 'die':
break # we need a way to stop it in tests (other than terminate())
log.info('Committer got job for part %s', job['part'])
log.debug('Job details: %s', job)
requests.post(
UPLOAD_HOST + '/multipart/commit',
data={
'apikey': job['apikey'],
'uri': job['uri'],
'region': job['region'],
'upload_id': job['upload_id'],
'size': job['size'],
'part': job['part'],
'store_location': job['store_location']
},
files={'file': (job['filename'], '', None)},
headers=HEADERS
)
response_q.put({
'worker': 'committer',
'success': True,
'part': job['part']
})
log.info('Commit job done')
def upload(apikey, filepath, storage, params=None, security=None):
upload_q = Queue()
commit_q = Queue()
response_q = Queue()
manager_proc = Process(
target=manage_upload,
name='manager',
args=(apikey, filepath, storage, params, security, upload_q, commit_q, response_q)
)
side_processes = [
Process(
target=consume_upload_job,
name='uploader',
args=(upload_q, response_q)
) for _ in range(NUM_OF_UPLOADERS)
]
for _ in range(NUM_OF_COMMITTERS):
side_processes.append(
Process(
target=commit_part,
name='committer',
args=(commit_q, response_q)
)
)
for proc in side_processes:
proc.start()
manager_proc.start()
manager_proc.join()
for proc in side_processes:
proc.terminate()
try:
final_response = response_q.get(block=True, timeout=1)
if not isinstance(final_response, requests.Response):
raise Exception()
return final_response
except Exception:
raise Exception('Upload aborted')
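# Illustrative call (the API key and file path are placeholders):
#   response = upload('APIKEY', '/path/to/video.mp4', 's3')
#   print(response.json())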
|
[
"hashlib.md5",
"os.path.getsize",
"logging.StreamHandler",
"collections.deque",
"time.sleep",
"logging.Formatter",
"filestack.utils.utils.store_params",
"multiprocessing.Queue",
"requests.put",
"collections.OrderedDict",
"requests.post",
"multiprocessing.Process",
"os.path.split",
"logging.getLogger",
"mimetypes.guess_type"
] |
[((305, 332), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (322, 332), False, 'import logging\n'), ((372, 405), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (393, 405), False, 'import logging\n'), ((427, 525), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(processName)s[%(process)d] - %(levelname)s - %(message)s"""'], {}), "(\n '%(asctime)s - %(processName)s[%(process)d] - %(levelname)s - %(message)s')\n", (444, 525), False, 'import logging\n'), ((13529, 13536), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (13534, 13536), False, 'from multiprocessing import Queue, Process\n'), ((13552, 13559), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (13557, 13559), False, 'from multiprocessing import Queue, Process\n'), ((13577, 13584), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (13582, 13584), False, 'from multiprocessing import Queue, Process\n'), ((13605, 13738), 'multiprocessing.Process', 'Process', ([], {'target': 'manage_upload', 'name': '"""manager"""', 'args': '(apikey, filepath, storage, params, security, upload_q, commit_q, response_q)'}), "(target=manage_upload, name='manager', args=(apikey, filepath,\n storage, params, security, upload_q, commit_q, response_q))\n", (13612, 13738), False, 'from multiprocessing import Queue, Process\n'), ((1323, 1348), 'os.path.getsize', 'os.path.getsize', (['filepath'], {}), '(filepath)\n', (1338, 1348), False, 'import os\n'), ((1463, 1476), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1474, 1476), False, 'from collections import deque, OrderedDict\n'), ((2187, 2330), 'requests.post', 'requests.post', (["(UPLOAD_HOST + '/multipart/start')"], {'data': 'data', 'files': "{'file': (self.filename, '', None)}", 'params': 'self.params', 'headers': 'HEADERS'}), "(UPLOAD_HOST + '/multipart/start', data=data, files={'file': (\n self.filename, '', None)}, params=self.params, headers=HEADERS)\n", (2200, 2330), False, 'import requests\n'), ((10128, 10145), 'time.sleep', 'time.sleep', (['delay'], {}), '(delay)\n', (10138, 10145), False, 'import time\n'), ((12791, 13110), 'requests.post', 'requests.post', (["(UPLOAD_HOST + '/multipart/commit')"], {'data': "{'apikey': job['apikey'], 'uri': job['uri'], 'region': job['region'],\n 'upload_id': job['upload_id'], 'size': job['size'], 'part': job['part'],\n 'store_location': job['store_location']}", 'files': "{'file': (job['filename'], '', None)}", 'headers': 'HEADERS'}), "(UPLOAD_HOST + '/multipart/commit', data={'apikey': job[\n 'apikey'], 'uri': job['uri'], 'region': job['region'], 'upload_id': job\n ['upload_id'], 'size': job['size'], 'part': job['part'],\n 'store_location': job['store_location']}, files={'file': (job[\n 'filename'], '', None)}, headers=HEADERS)\n", (12804, 13110), False, 'import requests\n'), ((13797, 13882), 'multiprocessing.Process', 'Process', ([], {'target': 'consume_upload_job', 'name': '"""uploader"""', 'args': '(upload_q, response_q)'}), "(target=consume_upload_job, name='uploader', args=(upload_q, response_q)\n )\n", (13804, 13882), False, 'from multiprocessing import Queue, Process\n'), ((1272, 1295), 'os.path.split', 'os.path.split', (['filepath'], {}), '(filepath)\n', (1285, 1295), False, 'import os\n'), ((1373, 1403), 'mimetypes.guess_type', 'mimetypes.guess_type', (['filepath'], {}), '(filepath)\n', (1393, 1403), False, 'import mimetypes\n'), ((3093, 3238), 'requests.post', 'requests.post', (["(UPLOAD_HOST + '/multipart/complete')"], {'data': 'data', 'files': 
"{'file': (self.filename, '', None)}", 'params': 'self.params', 'headers': 'HEADERS'}), "(UPLOAD_HOST + '/multipart/complete', data=data, files={'file':\n (self.filename, '', None)}, params=self.params, headers=HEADERS)\n", (3106, 3238), False, 'import requests\n'), ((3822, 3829), 'collections.deque', 'deque', ([], {}), '()\n', (3827, 3829), False, 'from collections import deque, OrderedDict\n'), ((14046, 14120), 'multiprocessing.Process', 'Process', ([], {'target': 'commit_part', 'name': '"""committer"""', 'args': '(commit_q, response_q)'}), "(target=commit_part, name='committer', args=(commit_q, response_q))\n", (14053, 14120), False, 'from multiprocessing import Queue, Process\n'), ((1965, 1990), 'filestack.utils.utils.store_params', 'store_params', (['self.params'], {}), '(self.params)\n', (1977, 1990), False, 'from filestack.utils.utils import store_params\n'), ((2961, 2986), 'filestack.utils.utils.store_params', 'store_params', (['self.params'], {}), '(self.params)\n', (2973, 2986), False, 'from filestack.utils.utils import store_params\n'), ((11311, 11389), 'requests.put', 'requests.put', (["backend_data['url']"], {'headers': "backend_data['headers']", 'data': 'chunk'}), "(backend_data['url'], headers=backend_data['headers'], data=chunk)\n", (11323, 11389), False, 'import requests\n'), ((10658, 10676), 'hashlib.md5', 'hashlib.md5', (['chunk'], {}), '(chunk)\n', (10669, 10676), False, 'import hashlib\n')]
|
#https://seaborn.pydata.org/generated/seaborn.pairplot.html
import matplotlib.pyplot as plt
import os
import seaborn as sns; sns.set(style="ticks", color_codes=True)
iris = sns.load_dataset("iris")
#g = sns.pairplot(iris)
g = sns.pairplot(iris, hue="species")
plt.savefig(os.path.join('figures', 'iris-scatterplot.pdf'))
|
[
"seaborn.set",
"os.path.join",
"seaborn.load_dataset",
"seaborn.pairplot"
] |
[((125, 165), 'seaborn.set', 'sns.set', ([], {'style': '"""ticks"""', 'color_codes': '(True)'}), "(style='ticks', color_codes=True)\n", (132, 165), True, 'import seaborn as sns\n'), ((173, 197), 'seaborn.load_dataset', 'sns.load_dataset', (['"""iris"""'], {}), "('iris')\n", (189, 197), True, 'import seaborn as sns\n'), ((226, 259), 'seaborn.pairplot', 'sns.pairplot', (['iris'], {'hue': '"""species"""'}), "(iris, hue='species')\n", (238, 259), True, 'import seaborn as sns\n'), ((272, 319), 'os.path.join', 'os.path.join', (['"""figures"""', '"""iris-scatterplot.pdf"""'], {}), "('figures', 'iris-scatterplot.pdf')\n", (284, 319), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
import urllib
from urlobject import URLObject
from zenqueue import json
from zenqueue.client.common import AbstractQueueClient
class HTTPQueueClient(AbstractQueueClient):
log_name = 'zenq.client.http'
def __init__(self, host='127.0.0.1', port=3080):
super(HTTPQueueClient, self).__init__() # Initializes logging.
self.host = host
self.port = port
def send(self, url, data=''):
raise NotImplementedError
def action(self, action, args, kwargs):
# It's really pathetic, but it's still debugging output.
self.log.debug('Action %r called with %d args', action,
len(args) + len(kwargs))
path = '/' + urllib.quote(action) + '/'
url = URLObject(host=self.host).with_port(self.port).with_path(path)
received_data = self.send(url, data=json.dumps([args, kwargs]))
return self.handle_response(received_data)
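    # Illustrative call on a concrete subclass that implements send() (names are examples):
    #   client.action('push', ('queue_a', 'task'), {})
    # builds the path '/push/' and sends json.dumps([args, kwargs]) to
    # http://<host>:<port>/push/ via self.send().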
|
[
"zenqueue.json.dumps",
"urllib.quote",
"urlobject.URLObject"
] |
[((745, 765), 'urllib.quote', 'urllib.quote', (['action'], {}), '(action)\n', (757, 765), False, 'import urllib\n'), ((901, 927), 'zenqueue.json.dumps', 'json.dumps', (['[args, kwargs]'], {}), '([args, kwargs])\n', (911, 927), False, 'from zenqueue import json\n'), ((794, 819), 'urlobject.URLObject', 'URLObject', ([], {'host': 'self.host'}), '(host=self.host)\n', (803, 819), False, 'from urlobject import URLObject\n')]
|
#!/usr/bin/python2.5
# Until Python 2.6
from dnutils import logs
from pracmln.utils import locs
"""
Converts LaTeX math to png images.
Run latexmath2png.py --help for usage instructions.
"""
"""
Author:
<NAME> <<EMAIL>>
URL: http://www.kamilkisiel.net
Revision History:
2007/04/20 - Initial version
TODO:
- Make handling of bad input more graceful?
---
Some ideas borrowed from Kjell Fauske's article at http://fauskes.net/nb/htmleqII/
Licensed under the MIT License:
Copyright (c) 2007 <NAME> <<EMAIL>>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
import os
import tempfile
from PIL import Image
import base64
logger = logs.getlogger(__name__, logs.DEBUG)
# Default packages to use when generating output
default_packages = [
'amsmath',
'amsthm',
'amssymb',
'bm'
]
def __build_preamble(packages, declarations):
preamble = '\documentclass{article}\n'
for p in packages:
preamble += "\\usepackage{{{}}}\n".format(p)
for d in declarations:
preamble += '{}\n'.format(d)
preamble += "\pagestyle{empty}\n\\begin{document}\n"
return preamble
def __write_output(infile, outdir, workdir='.', filename='', size=1, svg=True):
try:
# Generate the DVI file. NOTE: no output in stdout, as it is piped into /dev/null!
latexcmd = 'latex -halt-on-error -output-directory {} {} >/dev/null'.format(workdir, infile)
rc = os.system(latexcmd)
# Something bad happened, abort
if rc != 0:
raise Exception('latex error')
# Convert the DVI file to PNG's
dvifile = infile.replace('.tex', '.dvi')
outfilename = os.path.join(outdir, filename)
if svg:
dvicmd = "dvisvgm -v 0 -o {}.svg --no-fonts {}".format(outfilename, dvifile)
else:
dvicmd = "dvipng -q* -T tight -x {} -z 9 -bg Transparent -o {}.png {} >/dev/null".format(size * 1000, outfilename, dvifile)
rc = os.system(dvicmd)
if rc != 0:
            raise Exception('{} error'.format('dvisvgm' if svg else 'dvipng'))
finally:
# Cleanup temporaries
basefile = infile.replace('.tex', '')
tempext = ['.aux', '.dvi', '.log']
for te in tempext:
tempfile = basefile + te
if os.path.exists(tempfile):
os.remove(tempfile)
def math2png(content, outdir, packages=default_packages, declarations=[], filename='', size=1, svg=True):
"""
    Generate svg or png images from $$...$$ style math environment equations.
    Parameters:
        content - A string containing latex math environment formulas
        outdir - Output directory for the generated images
        packages - Optional list of packages to include in the LaTeX preamble
        declarations - Optional list of declarations to add to the LaTeX preamble
        filename - Optional filename for output files
        size - Scale factor for output
        svg - If True, produce an svg via dvisvgm; otherwise produce a png via dvipng
    Returns a (file content, width/height ratio) tuple.
"""
outfilename = '/tmp/default.tex'
# Set the working directory
workdir = tempfile.gettempdir()
# Get a temporary file
fd, texfile = tempfile.mkstemp('.tex', 'eq', workdir, True)
try:
content = content.replace('$', r'\$')
# Create the TeX document and save to tempfile
fileContent = '{}$${}$$\n\end{{document}}'.format(__build_preamble(packages, declarations), content)
with os.fdopen(fd, 'w+') as f:
f.write(fileContent)
__write_output(texfile, outdir, workdir=workdir, filename=filename, size=size, svg=svg)
outfilename = os.path.join(outdir, '{}.{}'.format(filename, 'svg' if svg else 'png'))
except:
logger.error('Unable to create image. A reason you encounter '
'this error might be that you are either missing latex '
'packages for generating .dvi files or {} for '
'generating the {} image from the .dvi file.'.format('dvisvgm' if svg else 'dvipng', 'svg' if svg else 'png'))
outfilename = os.path.join(locs.etc, 'default.{}'.format('svg' if svg else 'png'))
finally:
if svg:
with open(outfilename, 'r') as outfile:
filecontent = outfile.read()
ratio = 1
else:
# determine image size
im = Image.open(outfilename)
width, height = im.size
ratio = float(width)/float(height)
# create base64 encoded file content
png = open(outfilename)
filecontent = base64.b64encode(png.read())
# cleanup and delete temporary files
if os.path.exists(texfile) and locs.etc not in outfilename:
os.remove(texfile)
if os.path.exists(outfilename) and locs.etc not in outfilename:
os.remove(outfilename)
return filecontent, ratio
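# Illustrative usage (assumes latex and dvisvgm/dvipng are installed; formulas are examples):
#   svg_markup, ratio = math2png(r'\frac{a}{b}', '/tmp', filename='eq1', svg=True)
#   png_base64, ratio = math2png(r'x^2 + y^2 = r^2', '/tmp', filename='eq2', svg=False)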
|
[
"os.remove",
"tempfile.mkstemp",
"tempfile.gettempdir",
"os.path.exists",
"os.system",
"PIL.Image.open",
"os.fdopen",
"dnutils.logs.getlogger",
"os.path.join"
] |
[((1633, 1669), 'dnutils.logs.getlogger', 'logs.getlogger', (['__name__', 'logs.DEBUG'], {}), '(__name__, logs.DEBUG)\n', (1647, 1669), False, 'from dnutils import logs\n'), ((4051, 4072), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (4070, 4072), False, 'import tempfile\n'), ((4119, 4164), 'tempfile.mkstemp', 'tempfile.mkstemp', (['""".tex"""', '"""eq"""', 'workdir', '(True)'], {}), "('.tex', 'eq', workdir, True)\n", (4135, 4164), False, 'import tempfile\n'), ((2426, 2445), 'os.system', 'os.system', (['latexcmd'], {}), '(latexcmd)\n', (2435, 2445), False, 'import os\n'), ((2662, 2692), 'os.path.join', 'os.path.join', (['outdir', 'filename'], {}), '(outdir, filename)\n', (2674, 2692), False, 'import os\n'), ((2962, 2979), 'os.system', 'os.system', (['dvicmd'], {}), '(dvicmd)\n', (2971, 2979), False, 'import os\n'), ((3295, 3319), 'os.path.exists', 'os.path.exists', (['tempfile'], {}), '(tempfile)\n', (3309, 3319), False, 'import os\n'), ((4399, 4418), 'os.fdopen', 'os.fdopen', (['fd', '"""w+"""'], {}), "(fd, 'w+')\n", (4408, 4418), False, 'import os\n'), ((5321, 5344), 'PIL.Image.open', 'Image.open', (['outfilename'], {}), '(outfilename)\n', (5331, 5344), False, 'from PIL import Image\n'), ((5626, 5649), 'os.path.exists', 'os.path.exists', (['texfile'], {}), '(texfile)\n', (5640, 5649), False, 'import os\n'), ((5695, 5713), 'os.remove', 'os.remove', (['texfile'], {}), '(texfile)\n', (5704, 5713), False, 'import os\n'), ((5725, 5752), 'os.path.exists', 'os.path.exists', (['outfilename'], {}), '(outfilename)\n', (5739, 5752), False, 'import os\n'), ((5798, 5820), 'os.remove', 'os.remove', (['outfilename'], {}), '(outfilename)\n', (5807, 5820), False, 'import os\n'), ((3337, 3356), 'os.remove', 'os.remove', (['tempfile'], {}), '(tempfile)\n', (3346, 3356), False, 'import os\n')]
|
from utils._type import *
import discord
from discord.ext import commands
class Beta(commands.Cog):
"""
    A cog with commands available only to beta-testers
"""
def __init__(self, bot):
self.bot = bot
def cog_check(self, ctx: customContext):
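        # Allow only the bot owner or members who hold the beta-tester role
        # (id 823951076193337384) in the support server.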
member: discord.Member = self.bot.get_guild(int(self.bot.config["SUPPORT_SERVER"])).get_member(ctx.author.id)
if member is None:
return False
check = ctx.author == self.bot.owner or discord.utils.get(member.roles, id=823951076193337384)
return check
def setup(bot):
bot.add_cog(Beta(bot))
|
[
"discord.utils.get"
] |
[((498, 552), 'discord.utils.get', 'discord.utils.get', (['member.roles'], {'id': '(823951076193337384)'}), '(member.roles, id=823951076193337384)\n', (515, 552), False, 'import discord\n')]
|
"""
Django settings for bebotPlatform project.
Generated by 'django-admin startproject' using Django 2.0.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ugettext
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_ROOT = os.path.abspath(os.path.join(BASE_DIR, '..'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['localhost', '127.0.0.1','172.16.31.10']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'django.contrib.sites',
'webPlatform',
'vote',
'actstream',
'notifications',
]
SITE_ID = 1
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.locale.LocaleMiddleware',
]
ROOT_URLCONF = 'bebotPlatform.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.i18n',
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bebotPlatform.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'bebotDB',
'USER': 'bebot',
'PASSWORD': '<PASSWORD>',
'HOST': 'localhost',
'PORT': '',
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
TIME_ZONE = 'America/Santiago'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
# File handler
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Language
LANGUAGE_CODE = 'es'
LANGUAGES = [
('es', _('Spanish'))
]
LOCALE_PATHS = [os.path.join(BASE_DIR, 'locale')]
# Email setting
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 25
EMAIL_HOST_USER = '<EMAIL>'
EMAIL_HOST_PASSWORD = '<PASSWORD>'
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
SMTP_ENABLED = True
EMAIL_HOST_MEDGO = '<EMAIL>'
TEMPLATED_EMAIL_TEMPLATE_DIR = 'templated_email/' #use '' for top level template dir, ensure there is a trailing slash
TEMPLATED_EMAIL_FILE_EXTENSION = 'email'
# Images Avatar
DJANGORESIZED_DEFAULT_KEEP_META = True
DJANGORESIZED_DEFAULT_FORCE_FORMAT = 'JPEG'
# Google
GOOGLE_RECAPTCHA_SECRET_KEY = '6LfuJEAUAAAAAJdnw0LxAKSlMbhEeYt8ijfoUNyl'
# ACTSTREAM
ACTSTREAM_SETTINGS = {
'FETCH_RELATIONS': True,
'USE_PREFETCH': True,
'USE_JSONFIELD': True,
'GFK_FETCH_DEPTH': 1,
}
# Notification
NOTIFICATIONS_SOFT_DELETE=True
|
[
"os.path.abspath",
"django.utils.translation.ugettext_lazy",
"os.path.join"
] |
[((3733, 3764), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""media"""'], {}), "(BASE_DIR, 'media')\n", (3745, 3764), False, 'import os\n'), ((3854, 3886), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""locale"""'], {}), "(BASE_DIR, 'locale')\n", (3866, 3886), False, 'import os\n'), ((601, 629), 'os.path.join', 'os.path.join', (['BASE_DIR', '""".."""'], {}), "(BASE_DIR, '..')\n", (613, 629), False, 'import os\n'), ((3646, 3678), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""static"""'], {}), "(BASE_DIR, 'static')\n", (3658, 3678), False, 'import os\n'), ((542, 567), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (557, 567), False, 'import os\n'), ((3823, 3835), 'django.utils.translation.ugettext_lazy', '_', (['"""Spanish"""'], {}), "('Spanish')\n", (3824, 3835), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1979, 2014), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""templates"""'], {}), "(BASE_DIR, 'templates')\n", (1991, 2014), False, 'import os\n')]
|
# Generated by Django 3.2.13 on 2022-05-17 14:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
("users", "0009_alter_user_filtre_departements"),
]
operations = [
migrations.AddField(
model_name="user",
name="cerbere_login",
field=models.CharField(max_length=255, null=True),
),
]
|
[
"django.db.models.CharField"
] |
[((350, 393), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'null': '(True)'}), '(max_length=255, null=True)\n', (366, 393), False, 'from django.db import migrations, models\n')]
|
#https://machinelearningmastery.com/develop-arch-and-garch-models-for-time-series-forecasting-in-python/
# example of ARCH model
from random import gauss
from random import seed
from matplotlib import pyplot
from arch import arch_model
# seed pseudorandom number generator
seed(1)
# create dataset
data = [gauss(0, i*0.01) for i in range(0,100)]
# split into train/test
n_test = 10
train, test = data[:-n_test], data[-n_test:]
# define model
model = arch_model(train, mean='Zero', vol='ARCH', p=15)
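# p=15: condition today's variance estimate on the last 15 squared residuals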
# fit model
model_fit = model.fit()
# forecast the test set
yhat = model_fit.forecast(horizon=n_test)
# plot the actual variance
var = [i*0.01 for i in range(0,100)]
pyplot.plot(var[-n_test:])
# plot forecast variance
pyplot.plot(yhat.variance.values[-1, :])
pyplot.show()
# define model
model = arch_model(train, mean='Zero', vol='GARCH', p=15, q=15)
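# q=15 additionally conditions on the last 15 forecast variances (GARCH terms)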
# fit model
model_fit = model.fit()
# forecast the test set
yhat = model_fit.forecast(horizon=n_test)
# plot the actual variance
var = [i*0.01 for i in range(0,100)]
pyplot.plot(var[-n_test:])
# plot forecast variance
pyplot.plot(yhat.variance.values[-1, :])
pyplot.show()
|
[
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"arch.arch_model",
"random.seed",
"random.gauss"
] |
[((274, 281), 'random.seed', 'seed', (['(1)'], {}), '(1)\n', (278, 281), False, 'from random import seed\n'), ((451, 499), 'arch.arch_model', 'arch_model', (['train'], {'mean': '"""Zero"""', 'vol': '"""ARCH"""', 'p': '(15)'}), "(train, mean='Zero', vol='ARCH', p=15)\n", (461, 499), False, 'from arch import arch_model\n'), ((666, 692), 'matplotlib.pyplot.plot', 'pyplot.plot', (['var[-n_test:]'], {}), '(var[-n_test:])\n', (677, 692), False, 'from matplotlib import pyplot\n'), ((718, 758), 'matplotlib.pyplot.plot', 'pyplot.plot', (['yhat.variance.values[-1, :]'], {}), '(yhat.variance.values[-1, :])\n', (729, 758), False, 'from matplotlib import pyplot\n'), ((759, 772), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (770, 772), False, 'from matplotlib import pyplot\n'), ((797, 852), 'arch.arch_model', 'arch_model', (['train'], {'mean': '"""Zero"""', 'vol': '"""GARCH"""', 'p': '(15)', 'q': '(15)'}), "(train, mean='Zero', vol='GARCH', p=15, q=15)\n", (807, 852), False, 'from arch import arch_model\n'), ((1019, 1045), 'matplotlib.pyplot.plot', 'pyplot.plot', (['var[-n_test:]'], {}), '(var[-n_test:])\n', (1030, 1045), False, 'from matplotlib import pyplot\n'), ((1071, 1111), 'matplotlib.pyplot.plot', 'pyplot.plot', (['yhat.variance.values[-1, :]'], {}), '(yhat.variance.values[-1, :])\n', (1082, 1111), False, 'from matplotlib import pyplot\n'), ((1112, 1125), 'matplotlib.pyplot.show', 'pyplot.show', ([], {}), '()\n', (1123, 1125), False, 'from matplotlib import pyplot\n'), ((307, 325), 'random.gauss', 'gauss', (['(0)', '(i * 0.01)'], {}), '(0, i * 0.01)\n', (312, 325), False, 'from random import gauss\n')]
|
import numpy as np
def clamp(value, min, max):
return np.clip(value, min, max)
def lerp(a, b, fraction):
fraction = clamp(fraction, 0, 1)
return a * (1 - fraction) + b * fraction
def fit(value, omin, omax, nmin, nmax):
v = (value - omin) / (omax - omin)
return v * (nmax - nmin) + nmin
def fit01(value, min, max):
return value * (max - min) + min
def fit10(value, min, max):
return (1.0 - value) * (max - min) + min
def fit11(value, min, max):
return fit(value, -1, 1, min, max)
def fit_to_01(value, min, max):
return (value - min) / (max - min)
def fit_11_to_01(value):
return (value + 1.0) * 0.5
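# Illustrative values (not part of the original module):
#   fit(5, 0, 10, 100, 200) -> 150.0  (remap 5 from [0, 10] into [100, 200])
#   fit11(0.0, 0, 10)       -> 5.0    (remap from [-1, 1] into [0, 10])
#   fit_11_to_01(-1.0)      -> 0.0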
|
[
"numpy.clip"
] |
[((60, 84), 'numpy.clip', 'np.clip', (['value', 'min', 'max'], {}), '(value, min, max)\n', (67, 84), True, 'import numpy as np\n')]
|
"""
class for handling .bb files
Reads a .bb file and obtains its metadata
"""
# Copyright (C) 2003, 2004 <NAME>
# Copyright (C) 2003, 2004 <NAME>
#
# SPDX-License-Identifier: GPL-2.0-only
#
import re, bb, os
import bb.build, bb.utils
from . import ConfHandler
from .. import resolve_file, ast, logger, ParseError
from .ConfHandler import include, init
# For compatibility
bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
__func_start_regexp__ = re.compile(r"(((?P<py>python(?=(\s|\()))|(?P<fr>fakeroot(?=\s)))\s*)*(?P<func>[\w\.\-\+\{\}\$:]+)?\s*\(\s*\)\s*{$" )
__inherit_regexp__ = re.compile(r"inherit\s+(.+)" )
__export_func_regexp__ = re.compile(r"EXPORT_FUNCTIONS\s+(.+)" )
__addtask_regexp__ = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
__deltask_regexp__ = re.compile(r"deltask\s+(?P<func>\w+)(?P<ignores>.*)")
__addhandler_regexp__ = re.compile(r"addhandler\s+(.+)" )
__def_regexp__ = re.compile(r"def\s+(\w+).*:" )
__python_func_regexp__ = re.compile(r"(\s+.*)|(^$)|(^#)" )
__python_tab_regexp__ = re.compile(r" *\t")
__infunc__ = []
__inpython__ = False
__body__ = []
__classname__ = ""
cached_statements = {}
def supports(fn, d):
"""Return True if fn has a supported extension"""
return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
def inherit(files, fn, lineno, d):
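    # Resolve each named class to a .bbclass file on BBPATH and include it
    # once, recording it in __inherit_cache so repeated inherits are no-ops.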
__inherit_cache = d.getVar('__inherit_cache', False) or []
files = d.expand(files).split()
for file in files:
if not os.path.isabs(file) and not file.endswith(".bbclass"):
file = os.path.join('classes', '%s.bbclass' % file)
if not os.path.isabs(file):
bbpath = d.getVar("BBPATH")
abs_fn, attempts = bb.utils.which(bbpath, file, history=True)
for af in attempts:
if af != abs_fn:
bb.parse.mark_dependency(d, af)
if abs_fn:
file = abs_fn
if not file in __inherit_cache:
logger.debug(1, "Inheriting %s (from %s:%d)" % (file, fn, lineno))
__inherit_cache.append( file )
d.setVar('__inherit_cache', __inherit_cache)
include(fn, file, lineno, d, "inherit")
__inherit_cache = d.getVar('__inherit_cache', False) or []
def get_statements(filename, absolute_filename, base_name):
global cached_statements
try:
return cached_statements[absolute_filename]
except KeyError:
with open(absolute_filename, 'r') as f:
statements = ast.StatementGroup()
lineno = 0
while True:
lineno = lineno + 1
s = f.readline()
if not s: break
s = s.rstrip()
feeder(lineno, s, filename, base_name, statements)
if __inpython__:
# add a blank line to close out any python definition
feeder(lineno, "", filename, base_name, statements, eof=True)
if filename.endswith(".bbclass") or filename.endswith(".inc"):
cached_statements[absolute_filename] = statements
return statements
def handle(fn, d, include):
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__, __classname__
__body__ = []
__infunc__ = []
__classname__ = ""
__residue__ = []
base_name = os.path.basename(fn)
(root, ext) = os.path.splitext(base_name)
init(d)
if ext == ".bbclass":
__classname__ = root
__inherit_cache = d.getVar('__inherit_cache', False) or []
if not fn in __inherit_cache:
__inherit_cache.append(fn)
d.setVar('__inherit_cache', __inherit_cache)
if include != 0:
oldfile = d.getVar('FILE', False)
else:
oldfile = None
abs_fn = resolve_file(fn, d)
# actual loading
statements = get_statements(fn, abs_fn, base_name)
# DONE WITH PARSING... time to evaluate
if ext != ".bbclass" and abs_fn != oldfile:
d.setVar('FILE', abs_fn)
try:
statements.eval(d)
except bb.parse.SkipRecipe:
d.setVar("__SKIPPED", True)
if include == 0:
return { "" : d }
if __infunc__:
raise ParseError("Shell function %s is never closed" % __infunc__[0], __infunc__[1], __infunc__[2])
if __residue__:
raise ParseError("Leftover unparsed (incomplete?) data %s from %s" % __residue__, fn)
if ext != ".bbclass" and include == 0:
return ast.multi_finalize(fn, d)
if ext != ".bbclass" and oldfile and abs_fn != oldfile:
d.setVar("FILE", oldfile)
return d
def feeder(lineno, s, fn, root, statements, eof=False):
global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, bb, __residue__, __classname__
# Check tabs in python functions:
# - def py_funcname(): covered by __inpython__
# - python(): covered by '__anonymous' == __infunc__[0]
# - python funcname(): covered by __infunc__[3]
if __inpython__ or (__infunc__ and ('__anonymous' == __infunc__[0] or __infunc__[3])):
tab = __python_tab_regexp__.match(s)
if tab:
bb.warn('python should use 4 spaces indentation, but found tabs in %s, line %s' % (root, lineno))
if __infunc__:
if s == '}':
__body__.append('')
ast.handleMethod(statements, fn, lineno, __infunc__[0], __body__, __infunc__[3], __infunc__[4])
__infunc__ = []
__body__ = []
else:
__body__.append(s)
return
if __inpython__:
m = __python_func_regexp__.match(s)
if m and not eof:
__body__.append(s)
return
else:
ast.handlePythonMethod(statements, fn, lineno, __inpython__,
root, __body__)
__body__ = []
__inpython__ = False
if eof:
return
if s and s[0] == '#':
if len(__residue__) != 0 and __residue__[0][0] != "#":
bb.fatal("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
if len(__residue__) != 0 and __residue__[0][0] == "#" and (not s or s[0] != "#"):
bb.fatal("There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed." % (lineno, fn, s))
if s and s[-1] == '\\':
__residue__.append(s[:-1])
return
s = "".join(__residue__) + s
__residue__ = []
# Skip empty lines
if s == '':
return
# Skip comments
if s[0] == '#':
return
m = __func_start_regexp__.match(s)
if m:
__infunc__ = [m.group("func") or "__anonymous", fn, lineno, m.group("py") is not None, m.group("fr") is not None]
return
m = __def_regexp__.match(s)
if m:
__body__.append(s)
__inpython__ = m.group(1)
return
m = __export_func_regexp__.match(s)
if m:
ast.handleExportFuncs(statements, fn, lineno, m, __classname__)
return
m = __addtask_regexp__.match(s)
if m:
if len(m.group().split()) == 2:
# Check and warn for "addtask task1 task2"
m2 = re.match(r"addtask\s+(?P<func>\w+)(?P<ignores>.*)", s)
if m2 and m2.group('ignores'):
logger.warning('addtask ignored: "%s"' % m2.group('ignores'))
# Check and warn for "addtask task1 before task2 before task3", the
# similar to "after"
taskexpression = s.split()
for word in ('before', 'after'):
if taskexpression.count(word) > 1:
logger.warning("addtask contained multiple '%s' keywords, only one is supported" % word)
ast.handleAddTask(statements, fn, lineno, m)
return
m = __deltask_regexp__.match(s)
if m:
# Check and warn "for deltask task1 task2"
if m.group('ignores'):
logger.warning('deltask ignored: "%s"' % m.group('ignores'))
ast.handleDelTask(statements, fn, lineno, m)
return
m = __addhandler_regexp__.match(s)
if m:
ast.handleBBHandlers(statements, fn, lineno, m)
return
m = __inherit_regexp__.match(s)
if m:
ast.handleInherit(statements, fn, lineno, m)
return
return ConfHandler.feeder(lineno, s, fn, statements)
# Add us to the handlers list
from .. import handlers
handlers.append({'supports': supports, 'handle': handle, 'init': init})
del handlers
|
[
"os.path.isabs",
"bb.deprecate_import",
"bb.parse.mark_dependency",
"os.path.basename",
"bb.fatal",
"bb.utils.which",
"re.match",
"os.path.splitext",
"os.path.join",
"bb.warn",
"re.compile"
] |
[((390, 451), 'bb.deprecate_import', 'bb.deprecate_import', (['__name__', '"bb.parse"', "['vars_from_file']"], {}), "(__name__, 'bb.parse', ['vars_from_file'])\n", (409, 451), False, 'import re, bb, os\n'), ((480, 620), 're.compile', 're.compile', (['"""(((?P<py>python(?=(\\s|\\()))|(?P<fr>fakeroot(?=\\s)))\\s*)*(?P<func>[\\w\\.\\-\\+\\{\\}\\$:]+)?\\s*\\(\\s*\\)\\s*{$"""'], {}), "(\n '(((?P<py>python(?=(\\s|\\()))|(?P<fr>fakeroot(?=\\s)))\\s*)*(?P<func>[\\w\\.\\-\\+\\{\\}\\$:]+)?\\s*\\(\\s*\\)\\s*{$'\n )\n", (490, 620), False, 'import re, bb, os\n'), ((624, 653), 're.compile', 're.compile', (['"""inherit\\s+(.+)"""'], {}), "('inherit\\s+(.+)')\n", (634, 653), False, 'import re, bb, os\n'), ((682, 720), 're.compile', 're.compile', (['"""EXPORT_FUNCTIONS\\s+(.+)"""'], {}), "('EXPORT_FUNCTIONS\\s+(.+)')\n", (692, 720), False, 'import re, bb, os\n'), ((749, 893), 're.compile', 're.compile', (['"""addtask\\s+(?P<func>\\w+)\\s*((before\\s*(?P<before>((.*(?=after))|(.*))))|(after\\s*(?P<after>((.*(?=before))|(.*)))))*"""'], {}), "(\n 'addtask\\s+(?P<func>\\w+)\\s*((before\\s*(?P<before>((.*(?=after))|(.*))))|(after\\s*(?P<after>((.*(?=before))|(.*)))))*'\n )\n", (759, 893), False, 'import re, bb, os\n'), ((907, 961), 're.compile', 're.compile', (['"""deltask\\s+(?P<func>\\w+)(?P<ignores>.*)"""'], {}), "('deltask\\s+(?P<func>\\w+)(?P<ignores>.*)')\n", (917, 961), False, 'import re, bb, os\n'), ((988, 1020), 're.compile', 're.compile', (['"""addhandler\\s+(.+)"""'], {}), "('addhandler\\s+(.+)')\n", (998, 1020), False, 'import re, bb, os\n'), ((1049, 1079), 're.compile', 're.compile', (['"""def\\s+(\\w+).*:"""'], {}), "('def\\s+(\\w+).*:')\n", (1059, 1079), False, 'import re, bb, os\n'), ((1107, 1139), 're.compile', 're.compile', (['"""(\\s+.*)|(^$)|(^#)"""'], {}), "('(\\s+.*)|(^$)|(^#)')\n", (1117, 1139), False, 'import re, bb, os\n'), ((1168, 1187), 're.compile', 're.compile', (['""" *\\t"""'], {}), "(' *\\t')\n", (1178, 1187), False, 'import re, bb, os\n'), ((3522, 3542), 'os.path.basename', 'os.path.basename', (['fn'], {}), '(fn)\n', (3538, 3542), False, 'import re, bb, os\n'), ((3561, 3588), 'os.path.splitext', 'os.path.splitext', (['base_name'], {}), '(base_name)\n', (3577, 3588), False, 'import re, bb, os\n'), ((6665, 6865), 'bb.fatal', 'bb.fatal', (['("""There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed.""" % (lineno, fn, s))'], {}), '(\n """There is a confusing multiline, partially commented expression on line %s of file %s (%s).\nPlease clarify whether this is all a comment or should be parsed."""\n % (lineno, fn, s))\n', (6673, 6865), False, 'import re, bb, os\n'), ((1372, 1392), 'os.path.splitext', 'os.path.splitext', (['fn'], {}), '(fn)\n', (1388, 1392), False, 'import re, bb, os\n'), ((1675, 1719), 'os.path.join', 'os.path.join', (['"""classes"""', "('%s.bbclass' % file)"], {}), "('classes', '%s.bbclass' % file)\n", (1687, 1719), False, 'import re, bb, os\n'), ((1736, 1755), 'os.path.isabs', 'os.path.isabs', (['file'], {}), '(file)\n', (1749, 1755), False, 'import re, bb, os\n'), ((1828, 1870), 'bb.utils.which', 'bb.utils.which', (['bbpath', 'file'], {'history': '(True)'}), '(bbpath, file, history=True)\n', (1842, 1870), False, 'import re, bb, os\n'), ((5439, 5545), 'bb.warn', 'bb.warn', (["('python should use 4 spaces indentation, but found tabs in %s, line %s' %\n (root, lineno))"], {}), "(\n 'python should use 4 spaces indentation, but found tabs in %s, line %s' %\n (root, lineno))\n", (5446, 5545), False, 'import re, bb, os\n'), ((6317, 6582), 'bb.fatal', 'bb.fatal', (['("""There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change.""" % (lineno, fn, s))'], {}), '(\n """There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change."""\n % (lineno, fn, s))\n', (6325, 6582), False, 'import re, bb, os\n'), ((7705, 7760), 're.match', 're.match', (['"""addtask\\s+(?P<func>\\w+)(?P<ignores>.*)"""', 's'], {}), "('addtask\\s+(?P<func>\\w+)(?P<ignores>.*)', s)\n", (7713, 7760), False, 'import re, bb, os\n'), ((1601, 1620), 'os.path.isabs', 'os.path.isabs', (['file'], {}), '(file)\n', (1614, 1620), False, 'import re, bb, os\n'), ((1956, 1987), 'bb.parse.mark_dependency', 'bb.parse.mark_dependency', (['d', 'af'], {}), '(d, af)\n', (1980, 1987), False, 'import re, bb, os\n')]
|
"""
Unit tests over parameter combinations of the library.
TODO ADD MORE
"""
from __future__ import print_function
from minorminer import find_embedding as find_embedding_orig
from warnings import warn
import os
import sys
import time
# Given that this test is in the tests directory, the calibration data should be
# in a sub directory. Use the path of this source file to find the calibration
calibration_dir = os.path.join(os.path.dirname(
os.path.abspath(__file__)), "calibration")
def find_embedding(Q, A, return_overlap=False, **args):
args['verbose'] = 0
args['tries'] = 1
if return_overlap:
emb, succ = find_embedding_orig(
Q, A, return_overlap=return_overlap, **args)
if not succ:
return emb, succ
elif check_embedding(Q, A, emb, **args):
if check_embedding.warning:
warn(check_embedding.warning, RuntimeWarning)
return emb, succ
else:
raise RuntimeError(
"bad embedding reported as success (%s)" % (check_embedding.errcode))
else:
emb = find_embedding_orig(Q, A, return_overlap=return_overlap, **args)
if emb:
if not check_embedding(Q, A, emb, **args):
raise RuntimeError(
"bad embedding reported as success (%s)" % (check_embedding.errcode))
elif check_embedding.warning:
warn(check_embedding.warning, RuntimeWarning)
return emb
def check_embedding(Q, A, emb, **args):
from networkx import Graph, is_connected
check_embedding.warning = None
Qg = Graph()
Ag = Graph()
Qg.add_edges_from(Q)
Ag.add_edges_from(A)
qubhits = 0
footprint = set()
var = {}
for x in Qg:
try:
embx = emb[x]
except KeyError:
check_embedding.errcode = "missing chain"
return False
for q in embx:
var[q] = x
footprint.update(embx)
qubhits += len(embx)
if not is_connected(Ag.subgraph(embx)):
check_embedding.errcode = "broken chain for %s: (%s)" % (x, embx)
return False
if len(footprint) != qubhits:
check_embedding.errcode = "overlapped chains"
return False
Qv = Graph()
for p, q in Ag.edges():
try:
Qv.add_edge(var[p], var[q])
except KeyError:
continue
for x, y in Qg.edges():
if not Qv.has_edge(x, y):
check_embedding.errcode = "missing edge"
return False
for x, chain in args.get("fixed_chains", {}).items():
if set(chain) != set(emb[x]):
check_embedding.errcode = "fixed chain mismatch"
return False
for x, domain in args.get("restrict_chains", {}).items():
if not set(domain) >= set(emb[x]):
check_embedding.warning = "restrict chain mismatch"
return True
def Path(n):
return [(i, i + 1) for i in range(n - 1)]
def Grid(n):
return [((x, y), (x + dx, y + dy)) for a in range(n) for b in range(n - 1) for (x, y, dx, dy) in [(a, b, 0, 1), (b, a, 1, 0)]]
def Clique(n):
return [(u, v) for u in range(n) for v in range(u)]
def Biclique(n):
return [(u, v) for u in range(n) for v in range(n, 2 * n)]
def Chimera(n, l=4):
return [((x, y, u, k), (x + dx, y + dy, u, k))
for a in range(n)
for b in range(n - 1)
for k in range(l)
for x, y, u, dx, dy in [(b, a, 0, 1, 0), (a, b, 1, 0, 1)]
] + [((x, y, 0, k), (x, y, 1, kk)) for x in range(n) for y in range(n) for k in range(l) for kk in range(l)]
def NAE3SAT(n):
import networkx
from math import ceil
from random import seed, randint
seed(18293447845779813366)
c = int(ceil(sum(randint(1, ceil(n * 4.2)) for _ in range(100)) / 100.))
return networkx.generators.k_random_intersection_graph(c, n, 3).edges()
def ChordalCycle(p):
import networkx
G = networkx.generators.chordal_cycle_graph(p)
G.remove_edges_from(list(G.selfloop_edges()))
return G.edges()
def GeometricGraph(n, pos=None):
import networkx
G = networkx.generators.geometric.random_geometric_graph(
n, n**-.333, dim=2, pos=pos)
if pos is not None:
for g in G:
if len(list(G[g])) == 0:
del pos[g]
return G.edges()
def CartesianProduct(n):
import networkx
K = networkx.generators.complete_graph(n)
return networkx.product.cartesian_product(K, K).edges()
def GridChimeraEmbedding(n):
emb = {}
M = [[0, 2, 2, 0], [1, 3, 3, 1], [1, 3, 3, 1], [0, 2, 2, 0]]
for x in range(n):
for y in range(n):
emb[x, y] = [(x // 2, y // 2, 0, M[x % 4][y % 4]),
(x // 2, y // 2, 1, M[y % 4][x % 4])]
return emb
def mask_wxw(n, w=2, l=4):
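    # Maps each (X // w, Y // w) block coordinate to every qubit in that
    # w-by-w block of Chimera unit cells.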
return {(X // w, Y // w): [(x, y, u, k) for x in range(X, X + w) for y in range(Y, Y + w) for u in (0, 1) for k in range(l)] for X in range(0, n, w) for Y in range(0, n, w)}
success_count_functions = []
def success_count(n, *a, **k):
from functools import wraps
from math import log
def count_successes(f):
global success_count_functions
success_count_functions.append([f, n, a, k])
if os.path.exists(os.path.join(calibration_dir, f.__name__)):
S, N = load_success_count_calibration(f)
N += (S == N)
accept_prob = .0001 # .01% false negative rate
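            # pick tts so that tts consecutive failures at per-trial success
            # rate S/N are rarer than accept_prob (reading of the formula
            # below, treating trials as independent)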
tts = int(log(accept_prob * S / N, 1 - S / N) + 1)
false_prob = (S / N) * (1 - S / N)**tts
@wraps(f)
def test_run():
for i in range(tts):
if f(*a, **k):
break
else:
assert False, "took %d tries without success, this should only happen %.02f%% of the time" % (
tts, false_prob * 100)
else:
def test_run():
raise RuntimeError(
"%s is not calibrated -- run calibrate_all() or calibrate_new()" % (f.__name__))
test_run.original = f
return test_run
return count_successes
def calibrate_success_count(f, n, a, k, directory=calibration_dir, M=None):
succ = 0
if M is None:
M = 10000
N = M * n
print("calibrating %s, %d trials " % (f.__name__, N))
t0 = time.clock()
for i in range(N):
if i % (N / 10) == 0:
print("%d " % (10 * i // N), end='')
sys.stdout.flush()
succ += bool(f(*a, **k))
print()
dt = time.clock() - t0
print("%s: %.04e per trial; success rate %.01f%% " %
(f.__name__, dt / N, succ * 100. / N))
if directory != calibration_dir and os.path.exists(os.path.join(calibration_dir, f.__name__)):
olds, oldn = load_success_count_calibration(f)
print("standard is %.01f%%" % (olds * 100. / oldn))
else:
print()
with open(os.path.join(directory, f.__name__), "w") as cal_f:
cal_f.write(repr((succ, float(N))))
def load_success_count_calibration(f, directory=calibration_dir):
with open(os.path.join(directory, f.__name__)) as cal_f:
return eval(cal_f.read())
def calibrate_all(directory=calibration_dir, M=None):
global success_count_functions
if not os.path.exists(directory):
os.mkdir(directory)
for f, n, a, k in success_count_functions:
calibrate_success_count(f, n, a, k, directory=directory, M=M)
print()
def calibrate_new(directory=calibration_dir, M=None):
for f, n, a, k in success_count_functions:
if os.path.exists(os.path.join(directory, f.__name__)):
continue
else:
calibrate_success_count(f, n, a, k, directory=directory, M=M)
def success_perfect(n, *a, **k):
from functools import wraps
def is_perfect(f):
@wraps(f)
def test_run():
for _ in range(n):
assert bool(f(*a, **k)), "test fail"
test_run.original = f
return test_run
return is_perfect
def success_bounce(n, *a, **k):
from functools import wraps
def is_perfect(f):
@wraps(f)
def test_run():
succs = sum(bool(f(*a, **k)) for _ in range(n))
assert False, "%d successes out of %d trials" % (succs, n)
test_run.original = f
return test_run
return is_perfect
def check_args(prob, hard, initial_chains=None, fixed_chains=None, restrict_chains=None, skip_initialization=False):
import networkx
probg = networkx.Graph()
probg.add_edges_from(prob)
hardg = networkx.Graph()
hardg.add_edges_from(hard)
assert networkx.is_connected(hardg), "hardware graph not connected"
assert networkx.is_connected(probg), "problem graph not connected"
if fixed_chains is not None:
for v, chain in fixed_chains.items():
assert probg.has_node(
v), "fixed_chains vars not contained in problem graph"
for q in chain:
assert hardg.has_node(
q), "fixed_chains chains not contained in hardware graph"
if initial_chains is not None:
for v in fixed_chains:
assert v not in initial_chains, "fixed_chains chains overwrite initial chains"
if restrict_chains is not None:
for v in fixed_chains:
assert v not in restrict_chains, "fixed_chains chains are restricted"
if initial_chains is not None:
for v, chain in initial_chains.items():
assert probg.has_node(
v), "initial vars not contained in problem graph"
for q in chain:
assert hardg.has_node(
q), "initial chains not contained in hardware graph"
if skip_initialization:
for u, v in probg.edges():
edgelord = {z for q in initial_chains[v] for z in hardg.neighbors(
q)} | set(initial_chains[v])
assert set(
initial_chains[u]) & edgelord, "%s and %s are connected as variables but not as initials" % (u, v)
if restrict_chains is not None:
fullset = set(hardg.nodes())
for v, chain in restrict_chains.items():
assert probg.has_node(
v), "restricted vars not contained in problem graph"
for q in chain:
assert hardg.has_node(
q), "restricted chains not contained in hardware graph"
for u, v in probg.edges():
edgelord = {z for q in restrict_chains.get(v, fullset) for z in hardg.neighbors(
q)} | set(restrict_chains.get(v, fullset))
assert set(restrict_chains.get(
u, fullset)) & edgelord, "%s and %s are connected as variables but not as domains" % (u, v)
@success_count(100, 5)
def test_path_label_00(n):
p = Path(n)
return find_embedding(p, p)
@success_count(100, 5)
def test_path_label_01(n):
p = Path(n)
L = [str(i) for i in range(n)]
Lp = [(L[x], L[y]) for x, y in p]
return find_embedding(p, Lp)
@success_count(100, 5)
def test_path_label_10(n):
p = Path(n)
L = [str(i) for i in range(n)]
Lp = [(L[x], L[y]) for x, y in p]
return find_embedding(Lp, p)
@success_count(100, 5)
def test_path_label_11(n):
p = Path(n)
L = [str(i) for i in range(n)]
Lp = [(L[x], L[y]) for x, y in p]
return find_embedding(Lp, Lp)
@success_count(30, 3)
def test_grid_init_restrict(n):
from random import choice
chim = Chimera(n, l=4)
mask = mask_wxw(n, 1, l=4)
grid = Grid(2 * n)
init = {(x, y): [choice(mask[x // 2, y // 2])]
for x in range(2 * n) for y in range(2 * n)}
doms = {(x, y): mask[x // 2, y // 2]
for x in range(2 * n) for y in range(2 * n)}
return find_embedding(grid, chim, initial_chains=init, restrict_chains=doms, skip_initialization=False)
@success_count(30, 3)
def test_grid_init(n):
from random import choice
chim = Chimera(n, l=4)
mask = mask_wxw(n, 1, l=2)
grid = Grid(2 * n)
init = {(x, y): mask[x // 2, y // 2]
for x in range(2 * n) for y in range(2 * n)}
return find_embedding(grid, chim, initial_chains=init, skip_initialization=False)
@success_count(30, 15, 7)
def test_nae3sat(n, m):
from random import choice
chim = Chimera(m)
prob = NAE3SAT(n)
return find_embedding(prob, chim)
@success_count(30, 79, 6)
def test_expander(p, m):
prob = ChordalCycle(p)
chim = Chimera(m)
return find_embedding(prob, chim)
@success_count(30, 5)
def test_cartesian(n):
prob = CartesianProduct(n)
chim = Chimera(n, l=n)
return find_embedding(prob, chim)
@success_count(30, 45, 6)
def test_geometric_nohint(n, m):
prob = GeometricGraph(n)
chim = Chimera(m)
return find_embedding(prob, chim)
@success_count(30, 55, 6)
def test_geometric_hint(n, m):
from random import randint
pos = {}
chains = {}
for i in range(n):
x = randint(0, m - 1)
k1 = randint(0, 3)
y = randint(0, m - 1)
k2 = randint(0, 3)
pos[i] = (4 * x + k2) / 4. / m, (4 * y + k1) / 4. / m
chains[i] = (x, y, 0, k1), (x, y, 1, k2)
prob = GeometricGraph(n, pos)
chim = Chimera(m)
return find_embedding(prob, chim, initial_chains={i: c for i, c in chains.items() if i in pos})
@success_count(30, 3)
def test_grid_restrict(n):
chim = Chimera(n)
mask = mask_wxw(n, 1)
grid = Grid(2 * n)
doms = {(x, y): mask[x // 2, y // 2]
for x in range(2 * n) for y in range(2 * n)}
check_args(grid, chim, restrict_chains=doms)
return find_embedding(grid, chim, restrict_chains=doms)
@success_perfect(100, 4)
def test_grid_with_answer_fast(n):
chim = Chimera(n)
mask = mask_wxw(n, 1)
grid = Grid(2 * n)
init = GridChimeraEmbedding(2 * n)
check_args(grid, chim, initial_chains=init, skip_initialization=True)
return find_embedding(grid, chim, initial_chains=init, skip_initialization=True, chainlength_patience=0)
@success_perfect(100, 2)
def test_grid_with_answer_slow(n):
chim = Chimera(n)
mask = mask_wxw(n, 1)
grid = Grid(2 * n)
init = GridChimeraEmbedding(2 * n)
check_args(grid, chim, initial_chains=init, skip_initialization=True)
return find_embedding(grid, chim, initial_chains=init, skip_initialization=True, chainlength_patience=10)
@success_count(30, 5)
def test_grid_suspend(n):
chim = Chimera(n)
mask = mask_wxw(n, 1)
grid = Grid(2 * n)
suspg = [((x, y), (x // 2, y // 2, 0))
for x in range(2 * n) for y in range(2 * n)]
suspc = [((x, y, 0), m) for x in range(n)
for y in range(n) for m in mask[x, y]]
suspension = {(x, y, 0): [(x, y, 0)] for x in range(n) for y in range(n)}
return find_embedding(grid + suspg, chim + suspc, fixed_chains=suspension, chainlength_patience=0)
@success_count(30, 5)
def test_grid_plant_suspend(n):
chim = Chimera(n)
mask = mask_wxw(n, 1)
grid = Grid(2 * n)
suspg = [((x, y), (x // 2, y // 2, 0))
for x in range(2 * n) for y in range(2 * n)]
suspc = [(m, (x, y, 0)) for x in range(n)
for y in range(n) for m in mask[x, y]]
suspension = {(x, y, 0): [(x, y, 0)] for x in range(n) for y in range(n)}
init = {(x, y): mask[x // 2, y // 2]
for x in range(2 * n) for y in range(2 * n)}
return find_embedding(grid + suspg, chim + suspc, fixed_chains=suspension, initial_chains=init, chainlength_patience=0)
@success_count(30, 5)
def test_grid_suspend_chains(n):
chim = Chimera(n)
mask = mask_wxw(n, 1)
grid = Grid(2 * n)
suspension = {(x, y): [mask[x//2, y//2]]
for x in range(2*n) for y in range(2*n)}
return find_embedding(grid, chim, suspend_chains=suspension, chainlength_patience=0)
@success_count(30, 5)
def test_grid_suspend_domain(n):
chim = Chimera(n)
mask = mask_wxw(n, 1)
grid = Grid(2 * n)
suspg = [((x, y), (x // 2, y // 2, 0))
for x in range(2 * n) for y in range(2 * n)]
suspc = [((x, y, 0), m) for x in range(n)
for y in range(n) for m in mask[x, y]]
suspension = {(x, y, 0): [(x, y, 0)] for x in range(n) for y in range(n)}
doms = {(x, y): mask[x // 2, y // 2]
for x in range(2 * n) for y in range(2 * n)}
check_args(grid + suspg, chim + suspc, fixed_chains=suspension,
skip_initialization=False, restrict_chains=doms)
return find_embedding(grid + suspg, chim + suspc, fixed_chains=suspension, restrict_chains=doms, chainlength_patience=0)
@success_count(30, 5)
def test_grid_cheat_domain(n):
chim = Chimera(n)
grid = Grid(2 * n)
cheat = GridChimeraEmbedding(2 * n)
return find_embedding(grid, chim, restrict_chains=cheat, chainlength_patience=0)
@success_count(30, 2)
def test_biclique_chimera(n):
chim = Chimera(n)
kliq = Biclique(4 * n)
return find_embedding(kliq, chim, chainlength_patience=0)
@success_count(30, 5)
def test_path_cheat_domain(n):
P = Path(n)
cheat = {p: [p] for p in range(n)}
return find_embedding(P, P, restrict_chains=cheat, chainlength_patience=0)
@success_count(30, 6, 25)
def test_clique(n, k):
chim = Chimera(n)
cliq = Clique(k)
return find_embedding(cliq, chim, chainlength_patience=0)
@success_perfect(20, 25, 25)
def test_clique_clique(n, k):
cliq = Clique(k)
return find_embedding(cliq, cliq, chainlength_patience=0)
@success_perfect(3, 16)
def test_clique_large_nosegfault(n):
chim = Chimera(n)
cliq = Clique(4 * n + 2)
return not find_embedding(cliq, chim, chainlength_patience=0, timeout=1)
@success_count(30, 6, 25)
def test_clique_parallel(n, k):
chim = Chimera(n)
cliq = Clique(k)
return find_embedding(cliq, chim, chainlength_patience=0, threads=2)
@success_count(30, 3, 13)
def test_clique_term(n, k):
chim = Chimera(n)
cliq = Clique(k)
cterm = [((n // 2, n // 2, 0, 0), k)]
kterm = [(0, k)]
fix = {k: [k]}
return find_embedding(cliq + kterm, chim + cterm, fixed_chains=fix, chainlength_patience=0)
@success_count(30, 8)
def test_grid_heal_A(n):
from random import randint
grid = Grid(2 * n)
chim = Chimera(n + 2)
breaks = {(x, x, x % 2, randint(0, 3)) for x in range(1, 4)}
chim = [e for e in chim if not breaks.intersection(e)]
emb = GridChimeraEmbedding(2 * n)
i_emb = {}
for v, chain in emb.items():
remainder = {(x + 1, y + 1, u, k)
for x, y, u, k in chain}.difference(breaks)
if remainder:
i_emb[v] = remainder
return find_embedding(grid, chim, initial_chains=i_emb, chainlength_patience=0)
@success_count(30, 4)
def test_grid_heal_B(n):
from random import randint
grid = Grid(2 * n)
chim = Chimera(n + 2)
breaks = {(x, x, x % 2, randint(0, 3)) for x in range(1, 4)}
chim = [e for e in chim]
chimb = [(b, (b, None)) for b in breaks]
gridb = [(b, (b, None)) for b in breaks]
f_emb = {(b, None): [(b, None)] for b in breaks}
emb = GridChimeraEmbedding(2 * n)
return find_embedding(grid + gridb, chim + chimb, initial_chains=emb, fixed_chains=f_emb, chainlength_patience=0)
@success_perfect(1000, 3)
def test_fail_impossible(n):
Kn = Clique(n) # we're gonna try to embed this here clique
Pn = Path(n) # into this here path, and it ain't gonna work
return not find_embedding(Kn, Pn)
@success_perfect(1, 16, .1)
def test_fail_timeout(n, t):
Kn = Clique(4 * n + 1) # we're gonna try to embed this here clique
# into this here chimera, and it might work but we'll time out
Cn = Chimera(n)
return not find_embedding(Kn, Cn, tries=1e6, max_no_improvement=1e6, inner_rounds=1e6, timeout=t, threads=4)
@success_count(30)
def test_chainlength_fast():
C = Chimera(4)
K = Clique(16)
e = find_embedding(K, C, tries=1, chainlength_patience=1)
if not len(e):
return False
return max(len(c) for c in e.values()) <= 7
@success_count(30)
def test_chainlength_slow():
C = Chimera(4)
K = Clique(16)
e = find_embedding(K, C, tries=1, chainlength_patience=10)
if not len(e):
return False
return max(len(c) for c in e.values()) <= 6
def chainlength_diagnostic(n=100, old=False, chainlength_argument=0, verbose=0, m=8):
C = Chimera(m)
K = Clique(4 * m)
if old:
from dwave_sapi2.embedding import find_embedding as find_embedding_dws2
nodes = set(x for e in C for x in e)
trans = {x: i for i, x in enumerate(nodes)}
C = [(trans[x], trans[y]) for x, y in C]
assert 0 <= chainlength_argument <= 1, "sapi2 only supports a chainlength argument of 0 or 1"
embs = [find_embedding_dws2(
K, C, tries=1, fast_embedding=chainlength_argument, verbose=verbose) for _ in range(n)]
else:
embs = [find_embedding_orig(
K, C, tries=1, chainlength_patience=chainlength_argument, verbose=verbose).values() for _ in range(n)]
return sorted(max(map(len, e)) if e else None for e in embs)
def chainlength_rundown(n=100, m=8):
from dwave_sapi2.embedding import find_embedding as find_embedding_dws2
C = Chimera(m)
K = Clique(4 * m)
nodes = set(x for e in C for x in e)
trans = {x: i for i, x in enumerate(nodes)}
C = [(trans[x], trans[y]) for x, y in C]
def trial(f):
t0 = time.clock()
stats = [f() for _ in range(n)]
t = time.clock() - t0
stats = filter(None, stats)
stats = [max(map(len, e)) for e in stats]
print("successes %d, best maxchain %d, avg maxchain %.02f, time %.02fs" % (
len(stats), min(stats), sum(stats) / float(len(stats)), t))
return t
print("sapi fast embedding:", end='')
trial(lambda: find_embedding_dws2(K, C, tries=1, fast_embedding=True))
print("sapi slow embedding:", end='')
basetime = trial(lambda: find_embedding_dws2(
K, C, tries=1, fast_embedding=False))
patience = 0
while 1:
print("minorminer, chainlength_patience %d:" % patience, end='')
t = trial(lambda: find_embedding_orig(K, C, tries=1,
chainlength_patience=patience).values())
if t > basetime:
break
patience += 1
|
[
"os.mkdir",
"minorminer.find_embedding",
"sys.stdout.flush",
"dwave_sapi2.embedding.find_embedding",
"networkx.generators.geometric.random_geometric_graph",
"os.path.join",
"os.path.abspath",
"random.randint",
"networkx.generators.complete_graph",
"os.path.exists",
"time.clock",
"random.seed",
"math.log",
"math.ceil",
"networkx.is_connected",
"networkx.generators.chordal_cycle_graph",
"networkx.generators.k_random_intersection_graph",
"functools.wraps",
"random.choice",
"networkx.product.cartesian_product",
"networkx.Graph",
"warnings.warn"
] |
[((1627, 1634), 'networkx.Graph', 'Graph', ([], {}), '()\n', (1632, 1634), False, 'from networkx import Graph, is_connected\n'), ((1644, 1651), 'networkx.Graph', 'Graph', ([], {}), '()\n', (1649, 1651), False, 'from networkx import Graph, is_connected\n'), ((2290, 2297), 'networkx.Graph', 'Graph', ([], {}), '()\n', (2295, 2297), False, 'from networkx import Graph, is_connected\n'), ((3760, 3786), 'random.seed', 'seed', (['(18293447845779813366)'], {}), '(18293447845779813366)\n', (3764, 3786), False, 'from random import seed, randint\n'), ((3991, 4033), 'networkx.generators.chordal_cycle_graph', 'networkx.generators.chordal_cycle_graph', (['p'], {}), '(p)\n', (4030, 4033), False, 'import networkx\n'), ((4168, 4256), 'networkx.generators.geometric.random_geometric_graph', 'networkx.generators.geometric.random_geometric_graph', (['n', '(n ** -0.333)'], {'dim': '(2)', 'pos': 'pos'}), '(n, n ** -0.333, dim=2,\n pos=pos)\n', (4220, 4256), False, 'import networkx\n'), ((4443, 4480), 'networkx.generators.complete_graph', 'networkx.generators.complete_graph', (['n'], {}), '(n)\n', (4477, 4480), False, 'import networkx\n'), ((6420, 6432), 'time.clock', 'time.clock', ([], {}), '()\n', (6430, 6432), False, 'import time\n'), ((8608, 8624), 'networkx.Graph', 'networkx.Graph', ([], {}), '()\n', (8622, 8624), False, 'import networkx\n'), ((8669, 8685), 'networkx.Graph', 'networkx.Graph', ([], {}), '()\n', (8683, 8685), False, 'import networkx\n'), ((8729, 8757), 'networkx.is_connected', 'networkx.is_connected', (['hardg'], {}), '(hardg)\n', (8750, 8757), False, 'import networkx\n'), ((8801, 8829), 'networkx.is_connected', 'networkx.is_connected', (['probg'], {}), '(probg)\n', (8822, 8829), False, 'import networkx\n'), ((457, 482), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (472, 482), False, 'import os\n'), ((647, 711), 'minorminer.find_embedding', 'find_embedding_orig', (['Q', 'A'], {'return_overlap': 'return_overlap'}), '(Q, A, return_overlap=return_overlap, **args)\n', (666, 711), True, 'from minorminer import find_embedding as find_embedding_orig\n'), ((1111, 1175), 'minorminer.find_embedding', 'find_embedding_orig', (['Q', 'A'], {'return_overlap': 'return_overlap'}), '(Q, A, return_overlap=return_overlap, **args)\n', (1130, 1175), True, 'from minorminer import find_embedding as find_embedding_orig\n'), ((6620, 6632), 'time.clock', 'time.clock', ([], {}), '()\n', (6630, 6632), False, 'import time\n'), ((7360, 7385), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (7374, 7385), False, 'import os\n'), ((7395, 7414), 'os.mkdir', 'os.mkdir', (['directory'], {}), '(directory)\n', (7403, 7414), False, 'import os\n'), ((7925, 7933), 'functools.wraps', 'wraps', (['f'], {}), '(f)\n', (7930, 7933), False, 'from functools import wraps\n'), ((8217, 8225), 'functools.wraps', 'wraps', (['f'], {}), '(f)\n', (8222, 8225), False, 'from functools import wraps\n'), ((13111, 13128), 'random.randint', 'randint', (['(0)', '(m - 1)'], {}), '(0, m - 1)\n', (13118, 13128), False, 'from random import randint\n'), ((13142, 13155), 'random.randint', 'randint', (['(0)', '(3)'], {}), '(0, 3)\n', (13149, 13155), False, 'from random import randint\n'), ((13168, 13185), 'random.randint', 'randint', (['(0)', '(m - 1)'], {}), '(0, m - 1)\n', (13175, 13185), False, 'from random import randint\n'), ((13199, 13212), 'random.randint', 'randint', (['(0)', '(3)'], {}), '(0, 3)\n', (13206, 13212), False, 'from random import randint\n'), ((21581, 21593), 'time.clock', 'time.clock', ([], {}), '()\n', (21591, 21603), False, 'import time\n'), ((3875, 3931), 'networkx.generators.k_random_intersection_graph', 'networkx.generators.k_random_intersection_graph', (['c', 'n', '(3)'], {}), '(c, n, 3)\n', (3922, 3931), False, 'import networkx\n'), ((4492, 4532), 'networkx.product.cartesian_product', 'networkx.product.cartesian_product', (['K', 'K'], {}), '(K, K)\n', (4526, 4532), False, 'import networkx\n'), ((5316, 5357), 'os.path.join', 'os.path.join', (['calibration_dir', 'f.__name__'], {}), '(calibration_dir, f.__name__)\n', (5328, 5357), False, 'import os\n'), ((5628, 5636), 'functools.wraps', 'wraps', (['f'], {}), '(f)\n', (5633, 5636), False, 'from functools import wraps\n'), ((6547, 6565), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (6563, 6565), False, 'import sys\n'), ((6799, 6840), 'os.path.join', 'os.path.join', (['calibration_dir', 'f.__name__'], {}), '(calibration_dir, f.__name__)\n', (6811, 6840), False, 'import os\n'), ((6998, 7033), 'os.path.join', 'os.path.join', (['directory', 'f.__name__'], {}), '(directory, f.__name__)\n', (7010, 7033), False, 'import os\n'), ((7176, 7211), 'os.path.join', 'os.path.join', (['directory', 'f.__name__'], {}), '(directory, f.__name__)\n', (7188, 7211), False, 'import os\n'), ((7678, 7713), 'os.path.join', 'os.path.join', (['directory', 'f.__name__'], {}), '(directory, f.__name__)\n', (7690, 7713), False, 'import os\n'), ((11718, 11746), 'random.choice', 'choice', (['mask[x // 2, y // 2]'], {}), '(mask[x // 2, y // 2])\n', (11724, 11746), False, 'from random import choice\n'), ((18437, 18450), 'random.randint', 'randint', (['(0)', '(3)'], {}), '(0, 3)\n', (18444, 18450), False, 'from random import randint\n'), ((19024, 19037), 'random.randint', 'randint', (['(0)', '(3)'], {}), '(0, 3)\n', (19031, 19037), False, 'from random import randint\n'), ((20911, 21003), 'dwave_sapi2.embedding.find_embedding', 'find_embedding_dws2', (['K', 'C'], {'tries': '(1)', 'fast_embedding': 'chainlength_argument', 'verbose': 'verbose'}), '(K, C, tries=1, fast_embedding=chainlength_argument,\n verbose=verbose)\n', (20930, 21003), True, 'from dwave_sapi2.embedding import find_embedding as find_embedding_dws2\n'), ((21646, 21658), 'time.clock', 'time.clock', ([], {}), '()\n', (21656, 21658), False, 'import time\n'), ((21984, 22039), 'dwave_sapi2.embedding.find_embedding', 'find_embedding_dws2', (['K', 'C'], {'tries': '(1)', 'fast_embedding': '(True)'}), '(K, C, tries=1, fast_embedding=True)\n', (22003, 22039), True, 'from dwave_sapi2.embedding import find_embedding as find_embedding_dws2\n'), ((22112, 22168), 'dwave_sapi2.embedding.find_embedding', 'find_embedding_dws2', (['K', 'C'], {'tries': '(1)', 'fast_embedding': '(False)'}), '(K, C, tries=1, fast_embedding=False)\n', (22131, 22168), True, 'from dwave_sapi2.embedding import find_embedding as find_embedding_dws2\n'), ((880, 925), 'warnings.warn', 'warn', (['check_embedding.warning', 'RuntimeWarning'], {}), '(check_embedding.warning, RuntimeWarning)\n', (884, 925), False, 'from warnings import warn\n'), ((1431, 1476), 'warnings.warn', 'warn', (['check_embedding.warning', 'RuntimeWarning'], {}), '(check_embedding.warning, RuntimeWarning)\n', (1435, 1476), False, 'from warnings import warn\n'), ((5521, 5556), 'math.log', 'log', (['(accept_prob * S / N)', '(1 - S / N)'], {}), '(accept_prob * S / N, 1 - S / N)\n', (5524, 5556), False, 'from math import log\n'), ((21058, 21157), 'minorminer.find_embedding', 'find_embedding_orig', (['K', 'C'], {'tries': '(1)', 'chainlength_patience': 'chainlength_argument', 'verbose': 'verbose'}), '(K, C, tries=1, chainlength_patience=\n chainlength_argument, verbose=verbose)\n', (21077, 21157), True, 'from minorminer import find_embedding as find_embedding_orig\n'), ((22309, 22374), 'minorminer.find_embedding', 'find_embedding_orig', (['K', 'C'], {'tries': '(1)', 'chainlength_patience': 'patience'}), '(K, C, tries=1, chainlength_patience=patience)\n', (22328, 22374), True, 'from minorminer import find_embedding as find_embedding_orig\n'), ((3819, 3832), 'math.ceil', 'ceil', (['(n * 4.2)'], {}), '(n * 4.2)\n', (3823, 3832), False, 'from math import ceil\n')]
|
"""
Run few-shot learning on FashionProductImaes dataset using code from github
repo https://github.com/oscarknagg/few-shot under
MIT License
Copyright (c) 2019 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
reproducing results of
Snell et al Prototypical Networks. In places where substantial changes have
been made to the original code, this is marked with an ADAPTED/BEFORE comment
"""
import torch
from torch.optim import Adam
import torch.nn.parallel
from torch.utils.data import DataLoader
from torchvision import transforms, models
import warnings
import numpy as np
from typing import Callable, Tuple
from few_shot.models import get_few_shot_encoder
from few_shot.core import NShotTaskSampler, create_nshot_task_label
from few_shot.proto import proto_net_episode
from few_shot.train import fit
from few_shot.callbacks import *
from few_shot.utils import setup_dirs
from few_shot.metrics import categorical_accuracy
from few_shot_learning.datasets import FashionProductImages, \
FashionProductImagesSmall
from few_shot_learning.models import Identity
from config import DATA_PATH, PATH
model_names = sorted(name for name in models.__dict__
if name.islower() and not name.startswith("__")
and callable(models.__dict__[name]))
def few_shot_training(
datadir=DATA_PATH,
dataset='fashion',
num_input_channels=3,
drop_lr_every=20,
validation_episodes=200,
evaluation_episodes=1000,
episodes_per_epoch=100,
n_epochs=80,
small_dataset=False,
n_train=1,
n_test=1,
k_train=30,
k_test=5,
q_train=5,
q_test=1,
distance='l2',
pretrained=False,
monitor_validation=False,
n_val_classes=10,
architecture='resnet18',
gpu=None
):
setup_dirs()
if dataset == 'fashion':
dataset_class = FashionProductImagesSmall if small_dataset \
else FashionProductImages
else:
        raise ValueError('Unsupported dataset')
param_str = f'{dataset}_nt={n_train}_kt={k_train}_qt={q_train}_' \
f'nv={n_test}_kv={k_test}_qv={q_test}_small={small_dataset}_' \
f'pretrained={pretrained}_validate={monitor_validation}'
print(param_str)
###################
# Create datasets #
###################
# ADAPTED: data transforms including augmentation
resize = (80, 60) if small_dataset else (400, 300)
background_transform = transforms.Compose([
transforms.RandomResizedCrop(resize, scale=(0.8, 1.0)),
# transforms.RandomGrayscale(),
transforms.RandomPerspective(),
transforms.RandomHorizontalFlip(),
# transforms.Resize(resize),
transforms.ToTensor(),
# transforms.Normalize(mean=[0.485, 0.456, 0.406],
# std=[0.229, 0.224, 0.225])
])
evaluation_transform = transforms.Compose([
transforms.Resize(resize),
# transforms.CenterCrop(224),
transforms.ToTensor(),
# transforms.Normalize(mean=[0.485, 0.456, 0.406],
# std=[0.229, 0.224, 0.225])
])
if monitor_validation:
if not n_val_classes >= k_test:
n_val_classes = k_test
print("Warning: `n_val_classes` < `k_test`. Take a larger number"
" of validation classes next time. Increased to `k_test`"
" classes")
# class structure for background (training), validation (validation),
# evaluation (test): take a random subset of background classes
validation_classes = list(
np.random.choice(dataset_class.background_classes, n_val_classes))
background_classes = list(set(dataset_class.background_classes).difference(
set(validation_classes)))
# use keyword for evaluation classes
evaluation_classes = 'evaluation'
# Meta-validation set
validation = dataset_class(datadir, split='all',
classes=validation_classes,
transform=evaluation_transform)
# ADAPTED: in the original code, `episodes_per_epoch` was provided to
# `NShotTaskSampler` instead of `validation_episodes`.
validation_sampler = NShotTaskSampler(validation, validation_episodes,
n_test, k_test, q_test)
validation_taskloader = DataLoader(
validation,
batch_sampler=validation_sampler,
num_workers=4
)
else:
# use keyword for both background and evaluation classes
background_classes = 'background'
evaluation_classes = 'evaluation'
# Meta-training set
background = dataset_class(datadir, split='all',
classes=background_classes,
transform=background_transform)
background_sampler = NShotTaskSampler(background, episodes_per_epoch,
n_train, k_train, q_train)
background_taskloader = DataLoader(
background,
batch_sampler=background_sampler,
num_workers=4
)
# Meta-test set
evaluation = dataset_class(datadir, split='all',
classes=evaluation_classes,
transform=evaluation_transform)
# ADAPTED: in the original code, `episodes_per_epoch` was provided to
# `NShotTaskSampler` instead of `evaluation_episodes`.
evaluation_sampler = NShotTaskSampler(evaluation, evaluation_episodes,
n_test, k_test, q_test)
evaluation_taskloader = DataLoader(
evaluation,
batch_sampler=evaluation_sampler,
num_workers=4
)
#########
# Model #
#########
if torch.cuda.is_available():
if gpu is not None:
device = torch.device('cuda', gpu)
else:
device = torch.device('cuda')
torch.backends.cudnn.benchmark = True
else:
device = torch.device('cpu')
if not pretrained:
model = get_few_shot_encoder(num_input_channels)
# ADAPTED
model.to(device)
# BEFORE
# model.to(device, dtype=torch.double)
else:
assert torch.cuda.is_available()
model = models.__dict__[architecture](pretrained=True)
model.fc = Identity()
if gpu is not None:
model = model.cuda(gpu)
else:
model = model.cuda()
# TODO this is too risky: I'm not sure that this can work, since in
# the few-shot github repo the batch axis is actually split into
# support and query samples
# model = torch.nn.DataParallel(model).cuda()
def lr_schedule(epoch, lr):
# Drop lr every 2000 episodes
if epoch % drop_lr_every == 0:
return lr / 2
else:
return lr
############
# Training #
############
print(f'Training Prototypical network on {dataset}...')
optimiser = Adam(model.parameters(), lr=1e-3)
loss_fn = torch.nn.NLLLoss().to(device)
callbacks = [
# ADAPTED: this is the test monitoring now - and is only done at the
# end of training.
EvaluateFewShot(
eval_fn=proto_net_episode,
num_tasks=evaluation_episodes, # THIS IS NOT USED
n_shot=n_test,
k_way=k_test,
q_queries=q_test,
taskloader=evaluation_taskloader,
prepare_batch=prepare_nshot_task(n_test, k_test, q_test, device=device),
distance=distance,
on_epoch_end=False,
on_train_end=True,
prefix='test_'
)
]
if monitor_validation:
callbacks.append(
# ADAPTED: this is the validation monitoring now - computed
# after every epoch.
EvaluateFewShot(
eval_fn=proto_net_episode,
num_tasks=evaluation_episodes, # THIS IS NOT USED
n_shot=n_test,
k_way=k_test,
q_queries=q_test,
# BEFORE taskloader=evaluation_taskloader,
taskloader=validation_taskloader, # ADAPTED
prepare_batch=prepare_nshot_task(n_test, k_test, q_test, device=device),
distance=distance,
on_epoch_end=True, # ADAPTED
on_train_end=False, # ADAPTED
prefix='val_'
)
)
callbacks.extend([
ModelCheckpoint(
filepath=PATH + f'/models/proto_nets/{param_str}.pth',
monitor=f'val_{n_test}-shot_{k_test}-way_acc',
verbose=1, # ADAPTED
save_best_only=monitor_validation # ADAPTED
),
LearningRateScheduler(schedule=lr_schedule),
CSVLogger(PATH + f'/logs/proto_nets/{param_str}.csv'),
])
fit(
model,
optimiser,
loss_fn,
epochs=n_epochs,
dataloader=background_taskloader,
prepare_batch=prepare_nshot_task(n_train, k_train, q_train, device=device),
callbacks=callbacks,
metrics=['categorical_accuracy'],
fit_function=proto_net_episode,
fit_function_kwargs={'n_shot': n_train, 'k_way': k_train,
'q_queries': q_train, 'train': True,
'distance': distance},
)
# ADAPTED: the original code used torch.double
def prepare_nshot_task(n: int, k: int, q: int, device=None) -> Callable:
"""Typical n-shot task preprocessing.
# Arguments
n: Number of samples for each class in the n-shot classification task
k: Number of classes in the n-shot classification task
q: Number of query samples for each class in the n-shot classification task
# Returns
prepare_nshot_task_: A Callable that processes a few shot tasks with specified n, k and q
"""
def prepare_nshot_task_(batch: Tuple[torch.Tensor, torch.Tensor]) -> Tuple[
torch.Tensor, torch.Tensor]:
"""Create 0-k label and move to GPU.
TODO: Move to arbitrary device
"""
x, y = batch
        # BEFORE x = x.double().cuda()
        x = x.to(device) # ADAPTED
# Create dummy 0-(num_classes - 1) label
y = create_nshot_task_label(k, q).to(device)
return x, y
return prepare_nshot_task_
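# Added usage sketch (not part of the original source): the closure returned above is what gets
# passed as `prepare_batch` to `fit` and `EvaluateFewShot` earlier in this script, e.g.
#   prepare_batch = prepare_nshot_task(n_test, k_test, q_test, device=device)
#   x, y = prepare_batch(batch)  # batch is a (data, labels) tuple from the n-shot task DataLoader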
class EvaluateFewShot(Callback):
"""Evaluate a network on an n-shot, k-way classification tasks after every epoch.
# Arguments
eval_fn: Callable to perform few-shot classification. Examples include `proto_net_episode`,
`matching_net_episode` and `meta_gradient_step` (MAML).
num_tasks: int. Number of n-shot classification tasks to evaluate the model with.
n_shot: int. Number of samples for each class in the n-shot classification tasks.
k_way: int. Number of classes in the n-shot classification tasks.
        q_queries: int. Number of query samples for each class in the n-shot classification tasks.
task_loader: Instance of NShotWrapper class
prepare_batch: function. The preprocessing function to apply to samples from the dataset.
prefix: str. Prefix to identify dataset.
"""
def __init__(self,
eval_fn: Callable,
num_tasks: int,
n_shot: int,
k_way: int,
q_queries: int,
taskloader: torch.utils.data.DataLoader,
prepare_batch: Callable,
prefix: str = 'val_',
on_epoch_end: bool = True,
on_train_end: bool = False,
**kwargs):
super(EvaluateFewShot, self).__init__()
self.eval_fn = eval_fn
self.num_tasks = num_tasks
self.n_shot = n_shot
self.k_way = k_way
self.q_queries = q_queries
self.taskloader = taskloader
self.prepare_batch = prepare_batch
self.prefix = prefix
self.kwargs = kwargs
self.metric_name = f'{self.prefix}{self.n_shot}-shot_{self.k_way}-way_acc'
# ADAPTED
self._on_epoch_end = on_epoch_end
self._on_train_end = on_train_end
def on_train_begin(self, logs=None):
self.loss_fn = self.params['loss_fn']
self.optimiser = self.params['optimiser']
# ADAPTED
def on_epoch_end(self, epoch, logs=None):
if self._on_epoch_end:
self._validate(epoch, logs=logs)
# ADAPTED
def on_train_end(self, epoch, logs=None):
if self._on_train_end:
self._validate(epoch, logs=logs)
# ADAPTED
def _validate(self, epoch, logs=None):
logs = logs or {}
seen = 0
totals = {'loss': 0, self.metric_name: 0}
for batch_index, batch in enumerate(self.taskloader):
x, y = self.prepare_batch(batch)
loss, y_pred = self.eval_fn(
self.model,
self.optimiser,
self.loss_fn,
x,
y,
n_shot=self.n_shot,
k_way=self.k_way,
q_queries=self.q_queries,
train=False,
**self.kwargs
)
seen += y_pred.shape[0]
totals['loss'] += loss.item() * y_pred.shape[0]
totals[self.metric_name] += categorical_accuracy(y, y_pred) * \
y_pred.shape[0]
logs[self.prefix + 'loss'] = totals['loss'] / seen
logs[self.metric_name] = totals[self.metric_name] / seen
class ModelCheckpoint(Callback):
"""Save the model after every epoch.
    `filepath` can contain named formatting options, which will be filled with the value of `epoch` and keys in `logs`
(passed in `on_epoch_end`).
For example: if `filepath` is `weights.{epoch:02d}-{val_loss:.2f}.hdf5`, then the model checkpoints will be saved
with the epoch number and the validation loss in the filename.
# Arguments
filepath: string, path to save the model file.
monitor: quantity to monitor.
verbose: verbosity mode, 0 or 1.
save_best_only: if `save_best_only=True`,
the latest best model according to
the quantity monitored will not be overwritten.
mode: one of {auto, min, max}.
If `save_best_only=True`, the decision
to overwrite the current save file is made
based on either the maximization or the
minimization of the monitored quantity. For `val_acc`,
this should be `max`, for `val_loss` this should
be `min`, etc. In `auto` mode, the direction is
automatically inferred from the name of the monitored quantity.
save_weights_only: if True, then only the model's weights will be
saved (`model.save_weights(filepath)`), else the full model
is saved (`model.save(filepath)`).
period: Interval (number of epochs) between checkpoints.
"""
def __init__(self, filepath, monitor='val_loss', verbose=0,
save_best_only=False, mode='auto', period=1):
super(ModelCheckpoint, self).__init__()
self.monitor = monitor
self.verbose = verbose
self.filepath = filepath
self.save_best_only = save_best_only
self.period = period
self.epochs_since_last_save = 0
if mode not in ['auto', 'min', 'max']:
raise ValueError('Mode must be one of (auto, min, max).')
if mode == 'min':
self.monitor_op = np.less
self.best = np.Inf
elif mode == 'max':
self.monitor_op = np.greater
self.best = -np.Inf
else:
if 'acc' in self.monitor or self.monitor.startswith('fmeasure'):
self.monitor_op = np.greater
self.best = -np.Inf
else:
self.monitor_op = np.less
# BEFORE: THIS IS A BUG
# self.best = np.Inf
def on_epoch_end(self, epoch, logs=None):
logs = logs or {}
self.epochs_since_last_save += 1
if self.epochs_since_last_save >= self.period:
self.epochs_since_last_save = 0
filepath = self.filepath.format(epoch=epoch + 1, **logs)
if self.save_best_only:
current = logs.get(self.monitor)
if current is None:
warnings.warn(
'Can save best model only with %s available, '
'skipping.' % (self.monitor), RuntimeWarning)
else:
if self.monitor_op(current, self.best):
if self.verbose > 0:
print(
'\nEpoch %05d: %s improved from %0.5f to %0.5f,'
' saving model to %s'
% (epoch + 1, self.monitor, self.best,
current, filepath))
self.best = current
torch.save(self.model.state_dict(), filepath)
else:
if self.verbose > 0:
print(
'\nEpoch %05d: %s did not improve from %0.5f' %
(epoch + 1, self.monitor, self.best))
else:
if self.verbose > 0:
print('\nEpoch %05d: saving model to %s' % (
epoch + 1, filepath))
torch.save(self.model.state_dict(), filepath)
|
[
"few_shot.metrics.categorical_accuracy",
"numpy.random.choice",
"few_shot.core.NShotTaskSampler",
"torch.utils.data.DataLoader",
"torchvision.transforms.RandomHorizontalFlip",
"few_shot.core.create_nshot_task_label",
"few_shot_learning.models.Identity",
"torchvision.transforms.RandomPerspective",
"torch.nn.NLLLoss",
"torch.cuda.is_available",
"torch.device",
"few_shot.models.get_few_shot_encoder",
"few_shot.utils.setup_dirs",
"torchvision.transforms.RandomResizedCrop",
"torchvision.transforms.Resize",
"warnings.warn",
"torchvision.transforms.ToTensor"
] |
[((2831, 2843), 'few_shot.utils.setup_dirs', 'setup_dirs', ([], {}), '()\n', (2841, 2843), False, 'from few_shot.utils import setup_dirs\n'), ((5978, 6053), 'few_shot.core.NShotTaskSampler', 'NShotTaskSampler', (['background', 'episodes_per_epoch', 'n_train', 'k_train', 'q_train'], {}), '(background, episodes_per_epoch, n_train, k_train, q_train)\n', (5994, 6053), False, 'from few_shot.core import NShotTaskSampler, create_nshot_task_label\n'), ((6124, 6195), 'torch.utils.data.DataLoader', 'DataLoader', (['background'], {'batch_sampler': 'background_sampler', 'num_workers': '(4)'}), '(background, batch_sampler=background_sampler, num_workers=4)\n', (6134, 6195), False, 'from torch.utils.data import DataLoader\n'), ((6580, 6653), 'few_shot.core.NShotTaskSampler', 'NShotTaskSampler', (['evaluation', 'evaluation_episodes', 'n_test', 'k_test', 'q_test'], {}), '(evaluation, evaluation_episodes, n_test, k_test, q_test)\n', (6596, 6653), False, 'from few_shot.core import NShotTaskSampler, create_nshot_task_label\n'), ((6724, 6795), 'torch.utils.data.DataLoader', 'DataLoader', (['evaluation'], {'batch_sampler': 'evaluation_sampler', 'num_workers': '(4)'}), '(evaluation, batch_sampler=evaluation_sampler, num_workers=4)\n', (6734, 6795), False, 'from torch.utils.data import DataLoader\n'), ((6881, 6906), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (6904, 6906), False, 'import torch\n'), ((5324, 5397), 'few_shot.core.NShotTaskSampler', 'NShotTaskSampler', (['validation', 'validation_episodes', 'n_test', 'k_test', 'q_test'], {}), '(validation, validation_episodes, n_test, k_test, q_test)\n', (5340, 5397), False, 'from few_shot.core import NShotTaskSampler, create_nshot_task_label\n'), ((5476, 5547), 'torch.utils.data.DataLoader', 'DataLoader', (['validation'], {'batch_sampler': 'validation_sampler', 'num_workers': '(4)'}), '(validation, batch_sampler=validation_sampler, num_workers=4)\n', (5486, 5547), False, 'from torch.utils.data import DataLoader\n'), ((7112, 7131), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (7124, 7131), False, 'import torch\n'), ((7176, 7216), 'few_shot.models.get_few_shot_encoder', 'get_few_shot_encoder', (['num_input_channels'], {}), '(num_input_channels)\n', (7196, 7216), False, 'from few_shot.models import get_few_shot_encoder\n'), ((7349, 7374), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (7372, 7374), False, 'import torch\n'), ((7457, 7467), 'few_shot_learning.models.Identity', 'Identity', ([], {}), '()\n', (7465, 7467), False, 'from few_shot_learning.models import Identity\n'), ((3528, 3582), 'torchvision.transforms.RandomResizedCrop', 'transforms.RandomResizedCrop', (['resize'], {'scale': '(0.8, 1.0)'}), '(resize, scale=(0.8, 1.0))\n', (3556, 3582), False, 'from torchvision import transforms, models\n'), ((3632, 3662), 'torchvision.transforms.RandomPerspective', 'transforms.RandomPerspective', ([], {}), '()\n', (3660, 3662), False, 'from torchvision import transforms, models\n'), ((3672, 3705), 'torchvision.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (3703, 3705), False, 'from torchvision import transforms, models\n'), ((3752, 3773), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (3771, 3773), False, 'from torchvision import transforms, models\n'), ((3955, 3980), 'torchvision.transforms.Resize', 'transforms.Resize', (['resize'], {}), '(resize)\n', (3972, 3980), False, 'from torchvision import transforms, models\n'), ((4028, 
4049), 'torchvision.transforms.ToTensor', 'transforms.ToTensor', ([], {}), '()\n', (4047, 4049), False, 'from torchvision import transforms, models\n'), ((4659, 4724), 'numpy.random.choice', 'np.random.choice', (['dataset_class.background_classes', 'n_val_classes'], {}), '(dataset_class.background_classes, n_val_classes)\n', (4675, 4724), True, 'import numpy as np\n'), ((6957, 6982), 'torch.device', 'torch.device', (['"""cuda"""', 'gpu'], {}), "('cuda', gpu)\n", (6969, 6982), False, 'import torch\n'), ((7018, 7038), 'torch.device', 'torch.device', (['"""cuda"""'], {}), "('cuda')\n", (7030, 7038), False, 'import torch\n'), ((8168, 8186), 'torch.nn.NLLLoss', 'torch.nn.NLLLoss', ([], {}), '()\n', (8184, 8186), False, 'import torch\n'), ((11393, 11422), 'few_shot.core.create_nshot_task_label', 'create_nshot_task_label', (['k', 'q'], {}), '(k, q)\n', (11416, 11422), False, 'from few_shot.core import NShotTaskSampler, create_nshot_task_label\n'), ((14483, 14514), 'few_shot.metrics.categorical_accuracy', 'categorical_accuracy', (['y', 'y_pred'], {}), '(y, y_pred)\n', (14503, 14514), False, 'from few_shot.metrics import categorical_accuracy\n'), ((17561, 17666), 'warnings.warn', 'warnings.warn', (["('Can save best model only with %s available, skipping.' % self.monitor)", 'RuntimeWarning'], {}), "('Can save best model only with %s available, skipping.' %\n self.monitor, RuntimeWarning)\n", (17574, 17666), False, 'import warnings\n')]
|
from django.shortcuts import render,redirect,get_object_or_404
from .models import Student, Event
# Create your views here.
def index(request):
students = Student.objects.all()
events = Event.objects.all().order_by('name','category')
context={'event_len':len(events),'students':len(students),'events':events}
return render(request,'aradhana/details.html',context=context)
def events(request,eventID):
event=get_object_or_404(Event,pk=eventID)
students=event.student_set.all().order_by('name','school')
total=len(students)
context={'event':event,'students':students,'total':total}
return render(request,'aradhana/event.html',context=context)
|
[
"django.shortcuts.render",
"django.shortcuts.get_object_or_404"
] |
[((320, 377), 'django.shortcuts.render', 'render', (['request', '"""aradhana/details.html"""'], {'context': 'context'}), "(request, 'aradhana/details.html', context=context)\n", (326, 377), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((413, 449), 'django.shortcuts.get_object_or_404', 'get_object_or_404', (['Event'], {'pk': 'eventID'}), '(Event, pk=eventID)\n', (430, 449), False, 'from django.shortcuts import render, redirect, get_object_or_404\n'), ((597, 652), 'django.shortcuts.render', 'render', (['request', '"""aradhana/event.html"""'], {'context': 'context'}), "(request, 'aradhana/event.html', context=context)\n", (603, 652), False, 'from django.shortcuts import render, redirect, get_object_or_404\n')]
|
import unittest
import tensorflow as tf
import logging
from ml4ir.base.data.tfrecord_reader import TFRecordSequenceExampleParser
from ml4ir.base.features.feature_config import FeatureConfig
from ml4ir.base.config.keys import TFRecordTypeKey
from ml4ir.base.io.local_io import LocalIO
from ml4ir.base.features.preprocessing import PreprocessingMap
DATASET_PATH = "ml4ir/applications/ranking/tests/data/tfrecord/train/file_0.tfrecord"
FEATURE_CONFIG_PATH = "ml4ir/applications/ranking/tests/data/configs/feature_config.yaml"
MAX_SEQUENCE_SIZE = 25
class SequenceExampleParserTest(unittest.TestCase):
"""
Test class for ml4ir.base.data.tfrecord_reader.TFRecordSequenceExampleParser
"""
def setUp(self):
file_io = LocalIO()
logger = logging.getLogger()
self.dataset = tf.data.TFRecordDataset(DATASET_PATH)
self.proto = next(iter(self.dataset))
self.feature_config = FeatureConfig.get_instance(
tfrecord_type=TFRecordTypeKey.SEQUENCE_EXAMPLE,
feature_config_dict=file_io.read_yaml(FEATURE_CONFIG_PATH),
logger=logger,
)
self.parser = TFRecordSequenceExampleParser(
feature_config=self.feature_config,
preprocessing_map=PreprocessingMap(),
required_fields_only=False,
pad_sequence=True,
max_sequence_size=25,
)
def test_features_spec(self):
"""
Test the feature specification constructed and used to parse the Example proto
"""
features_spec = self.parser.features_spec
assert isinstance(features_spec, tuple)
assert isinstance(features_spec[0], dict)
assert isinstance(features_spec[1], dict)
# Check if the feature specification matches with the feature_config
assert len(set(self.feature_config.get_context_features("name"))) == len(features_spec[0])
assert len(set(self.feature_config.get_sequence_features("name"))) == len(features_spec[1])
for feature in self.feature_config.get_context_features("name"):
assert feature in features_spec[0]
for feature in self.feature_config.get_sequence_features("name"):
assert feature in features_spec[1]
def test_extract_features_from_proto(self):
"""
Test extraction of features from serialized proto
"""
context_features, sequence_features = self.parser.extract_features_from_proto(self.proto)
for feature in self.feature_config.get_context_features("name"):
assert feature in context_features
# Test that all features are sparse tensor
assert isinstance(context_features[feature], tf.sparse.SparseTensor)
feature_tensor = tf.sparse.to_dense(tf.sparse.reset_shape(context_features[feature]))
# Test the shape of each extracted feature
assert context_features[feature].shape == (1,)
for feature in self.feature_config.get_sequence_features("name"):
assert feature in sequence_features
# Test that all features are sparse tensor
assert isinstance(sequence_features[feature], tf.sparse.SparseTensor)
feature_tensor = tf.sparse.to_dense(tf.sparse.reset_shape(sequence_features[feature]))
assert feature_tensor.shape == (2, 1)
# Assert that there is no mask feature
assert "mask" not in sequence_features
def test_get_default_tensor(self):
"""
Test the default tensor used for missing features
"""
default_tensor = self.parser.get_default_tensor(
self.feature_config.get_feature("query_text"), sequence_size=25
)
assert default_tensor.shape == (1,)
default_tensor = self.parser.get_default_tensor(
self.feature_config.get_feature("quality_score"), sequence_size=8
)
assert default_tensor.shape == (8, 1)
def test_get_feature(self):
"""
Test fetching feature tensor from extracted feature dictionary
"""
# Checking context features
feature_tensor = self.parser.get_feature(
self.feature_config.get_feature("query_text"),
extracted_features=({"query_text": tf.zeros((3, 4, 6))}, {}),
sequence_size=10,
)
assert feature_tensor.shape == (3, 4, 6)
# Check missing feature being replaced with default tensor
feature_tensor = self.parser.get_feature(
self.feature_config.get_feature("query_text"),
extracted_features=({}, {}),
sequence_size=10,
)
assert feature_tensor.shape == (1,)
# Checking sequence features
feature_tensor = self.parser.get_feature(
self.feature_config.get_feature("quality_score"),
extracted_features=({}, {"quality_score": tf.zeros((3, 4, 6))}),
sequence_size=10,
)
assert feature_tensor.shape == (3, 4, 6)
# Check missing feature being replaced with default tensor
feature_tensor = self.parser.get_feature(
self.feature_config.get_feature("quality_score"),
extracted_features=({}, {}),
sequence_size=10,
)
assert feature_tensor.shape == (10, 1)
def test_generate_and_add_mask(self):
"""
Test mask generation and addition
"""
rank_tensor = tf.constant([[1], [2], [3], [4], [5]])
indices = tf.where(tf.not_equal(rank_tensor, tf.constant(0)))
values = tf.gather_nd(rank_tensor, indices)
sparse_rank_tensor = tf.SparseTensor(indices, values, rank_tensor.shape)
# Check when pad sequence is set to True
features_dict, sequence_size = self.parser.generate_and_add_mask(
({}, {"rank": sparse_rank_tensor}), {}
)
assert "mask" in features_dict
assert features_dict["mask"].shape == (25, 1)
assert tf.reduce_sum(features_dict["mask"]).numpy() == 5
assert sequence_size == 25
# Check when pad sequence is set to False
self.parser.pad_sequence = False
features_dict, sequence_size = self.parser.generate_and_add_mask(
({}, {"rank": sparse_rank_tensor}), {}
)
assert "mask" in features_dict
assert features_dict["mask"].shape == (5, 1)
assert tf.reduce_sum(features_dict["mask"]).numpy() == 5
assert sequence_size == 5
self.parser.pad_sequence = True
def test_parse_fn(self):
"""
Test the Example parsing function
"""
# Check tensor shapes when pad_sequence is True
features, labels = self.parser.get_parse_fn()(self.proto)
assert isinstance(features, dict)
assert isinstance(labels, tf.Tensor)
for feature in self.feature_config.get_all_features(key="node_name", include_label=False):
assert feature in features
assert features["mask"].shape == (25, 1)
for feature in self.feature_config.get_context_features("node_name"):
assert features[feature].shape == (1,)
for feature in self.feature_config.get_sequence_features("node_name"):
if feature != "clicked":
assert features[feature].shape == (25, 1)
assert labels.shape == (25, 1)
# Check tensor shapes when pad_sequence is False
self.parser.pad_sequence = False
features, labels = self.parser.get_parse_fn()(self.proto)
assert features["mask"].shape == (2, 1)
for feature in self.feature_config.get_context_features("node_name"):
assert features[feature].shape == (1,)
for feature in self.feature_config.get_sequence_features("node_name"):
if feature != "clicked":
assert features[feature].shape == (2, 1)
assert labels.shape == (2, 1)
        self.parser.pad_sequence = True
|
[
"tensorflow.reduce_sum",
"ml4ir.base.features.preprocessing.PreprocessingMap",
"tensorflow.data.TFRecordDataset",
"tensorflow.gather_nd",
"tensorflow.sparse.reset_shape",
"tensorflow.constant",
"tensorflow.zeros",
"tensorflow.SparseTensor",
"ml4ir.base.io.local_io.LocalIO",
"logging.getLogger"
] |
[((739, 748), 'ml4ir.base.io.local_io.LocalIO', 'LocalIO', ([], {}), '()\n', (746, 748), False, 'from ml4ir.base.io.local_io import LocalIO\n'), ((766, 785), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (783, 785), False, 'import logging\n'), ((810, 847), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['DATASET_PATH'], {}), '(DATASET_PATH)\n', (833, 847), True, 'import tensorflow as tf\n'), ((5453, 5491), 'tensorflow.constant', 'tf.constant', (['[[1], [2], [3], [4], [5]]'], {}), '([[1], [2], [3], [4], [5]])\n', (5464, 5491), True, 'import tensorflow as tf\n'), ((5579, 5613), 'tensorflow.gather_nd', 'tf.gather_nd', (['rank_tensor', 'indices'], {}), '(rank_tensor, indices)\n', (5591, 5613), True, 'import tensorflow as tf\n'), ((5643, 5694), 'tensorflow.SparseTensor', 'tf.SparseTensor', (['indices', 'values', 'rank_tensor.shape'], {}), '(indices, values, rank_tensor.shape)\n', (5658, 5694), True, 'import tensorflow as tf\n'), ((1252, 1270), 'ml4ir.base.features.preprocessing.PreprocessingMap', 'PreprocessingMap', ([], {}), '()\n', (1268, 1270), False, 'from ml4ir.base.features.preprocessing import PreprocessingMap\n'), ((2787, 2835), 'tensorflow.sparse.reset_shape', 'tf.sparse.reset_shape', (['context_features[feature]'], {}), '(context_features[feature])\n', (2808, 2835), True, 'import tensorflow as tf\n'), ((3262, 3311), 'tensorflow.sparse.reset_shape', 'tf.sparse.reset_shape', (['sequence_features[feature]'], {}), '(sequence_features[feature])\n', (3283, 3311), True, 'import tensorflow as tf\n'), ((5545, 5559), 'tensorflow.constant', 'tf.constant', (['(0)'], {}), '(0)\n', (5556, 5559), True, 'import tensorflow as tf\n'), ((5989, 6025), 'tensorflow.reduce_sum', 'tf.reduce_sum', (["features_dict['mask']"], {}), "(features_dict['mask'])\n", (6002, 6025), True, 'import tensorflow as tf\n'), ((6409, 6445), 'tensorflow.reduce_sum', 'tf.reduce_sum', (["features_dict['mask']"], {}), "(features_dict['mask'])\n", (6422, 6445), True, 'import tensorflow as tf\n'), ((4280, 4299), 'tensorflow.zeros', 'tf.zeros', (['(3, 4, 6)'], {}), '((3, 4, 6))\n', (4288, 4299), True, 'import tensorflow as tf\n'), ((4902, 4921), 'tensorflow.zeros', 'tf.zeros', (['(3, 4, 6)'], {}), '((3, 4, 6))\n', (4910, 4921), True, 'import tensorflow as tf\n')]
|
import re
import pandas as pd
from CosmOrc.setting import Setting
def chunkit(data: list or tuple = None, n: int = None):
"""
    Splits the input array into N parts (N == n).
    Arguments
    ---------
    data: list or tuple
        Array that will be split into n parts
    n: int
        Number of sub-arrays in the returned array (default: 2)
    Returns
    -------
    list: the list split into parts
Example
-------
>>> l = [1, 2, 3, 4, 5, 6, 7, 8]
>>> chunkit(l)
[[1, 2, 3, 4], [5, 6, 7, 8]]
>>> chunkit(l, n=4)
[[1, 2], [3, 4], [5, 6], [7, 8]]
"""
new_data = []
if not n:
n = 2
avg = len(data) / n
last = 0
while last < len(data):
new_data.append(data[int(last):int(last + avg)])
last += avg
return new_data
def read_data_cosmo(file_path: str = None) -> list:
"""Функция для чтения *.tab файлов CosmoTherm, выбирает строки с
параметрами расчета, единицами измерения и непосредственно результатами
расчета.
Arguments
---------
file_path: str
Путь к *.tab файлу
Return
------
data: list
Двумерный список, len(data) == количеству работ (job) в исходном файле,
в каждый подмассив массива, входят данные о каждой конкретной работы
"""
with open(file_path, "r") as file:
data = []
for line in file:
if line.split():
                # Select the lines with the calculation settings
if "Setting" in line:
jobs_data = []
jobs_data.append(line)
data.append(jobs_data)
                # Select the lines with the units of measurement and the calculation data
elif "job" not in line or "Units" in line:
jobs_data.append(line)
return data
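# Added usage sketch (not part of the original source; the file name is hypothetical):
#   jobs_raw = read_data_cosmo('calc.tab')      # one sub-list per COSMOtherm job
#   settings_line, units_line = jobs_raw[0][0], jobs_raw[0][1]
#   header_line, data_lines = jobs_raw[0][2], jobs_raw[0][3:]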
def compound_nr(some_str: str):
_compound_nr = r"x\(([\d]*)\)="
_compound_nr_string = re.search(_compound_nr, some_str)
if _compound_nr_string:
return _compound_nr_string.group(1)
def setting_pars(settings_str: str):
    # TODO document job_indx
    """Extracts the calculation settings from a string; takes a line of the
    *.tab file that contains the substring
    'Settings'
    Arguments
    ---------
    settings_str: str
        Line of the *.tab file containing the keyword 'Settings'
    Return
    ------
    job_indx: str
    settings_list: tuple
        Tuple containing Setting objects that describe the conditions
        under which the calculation was run
Example
-------
>>> setting_pars('Settings job 2 : T= 223.15 K ; x(1)= 0.1000;')
(2, (T= 223.15 K, x(1)= 0.1 %))
"""
settings_list = []
job_indx, new_line = settings_str.split(":")
job_indx = job_indx.split()[2]
settings = new_line.split(";")
for setting in settings:
new_setting = None
if len(setting.split()) == 3:
settings_list.append(Setting.from_record(setting))
elif len(setting.split()) == 2:
new_setting = Setting.from_record(setting)
new_setting.convert(name=compound_nr(new_setting.name), unit="%")
settings_list.append(new_setting)
elif len(setting.split()) > 3:
            # TODO: problematic spot, fix n in chunkit
for element in chunkit(setting.split(),
n=len(setting.split()) / 2):
new_setting = Setting.from_record(element)
new_setting.convert(name=compound_nr(new_setting.name),
unit="%")
settings_list.append(new_setting)
return int(job_indx), tuple(settings_list)
def columns_pars(head_str: str):
"""Функция для парсинга строки заголовка таблицы,
возвращает массив с названиями всех столбцов
данной таблицы, за исключением 'Compound'
Arguments
---------
head_str: str
Строка - заголовок таблицы
Return
------
Возвращает кортеж со именами колонок в таблице CosmoTherm,
за исключением 'Compound'
Example
-------
>>> columns_pars('Nr Compound H ln(gamma) pv Gsolv pvExp HpvExp GpvExp')
('Nr', 'H', 'ln(gamma)', 'pv', 'Gsolv', 'pvExp', 'HpvExp', 'GpvExp')
"""
return tuple(filter(lambda x: x != "Compound", head_str.split()))
def data_pars(data: list or tuple):
    # TODO documentation
    """Parses the data of a single table
    Arguments
    ---------
    data: list or tuple
        List containing the lines with the CosmoTherm calculation data
    Return
    ------
    Returns the list of compound names defined in the table of the *.tab
    CosmoTherm file, together with a list of their parameter rows
Example
-------
>>> data = ['1 dbunew 7.9345E-10 0.31479727 5.7916E-07 -11.11061250',
... '2 dbu+new 6.3253E-33 2.96259067 3.2692E-31 -33.6383173',
... '3 cosmo1 3.0623E-36 -5.34179718 6.3968E-31 -36.8714363',
... '4 cosmo2 2.3622E-44 -4.50125249 2.1291E-39 -44.7837135',
... '5 cosmo3 1.0057E-48 -2.99155560 2.0031E-44 -49.0465532',
... '6 cosmo4 1.9260E-40 -4.55722446 1.8359E-35 -40.9690089']
>>> data_pars(data)
(['dbunew', 'dbu+new', 'cosmo1', 'cosmo2', 'cosmo3', 'cosmo4'], [['1', '7.9345E-10', '0.31479727', '5.7916E-07', '-11.11061250'], ['2', '6.3253E-33', '2.96259067', '3.2692E-31', '-33.6383173'], ['3', '3.0623E-36', '-5.34179718', '6.3968E-31', '-36.8714363'], ['4', '2.3622E-44', '-4.50125249', '2.1291E-39', '-44.7837135'], ['5', '1.0057E-48', '-2.99155560', '2.0031E-44', '-49.0465532'], ['6', '1.9260E-40', '-4.55722446', '1.8359E-35', '-40.9690089']])
"""
compounds = []
new_parameters = []
for line in data:
_ = line.split()
compounds.append(_[1])
new_parameters.append([_[0]] + _[2:])
return compounds, new_parameters
class Job:
"""
Arguments
---------
data: list or tuple
        Data from a single CosmoTherm "job"
    Attributes
    ---------
    setting:
        Set of settings of this calculation
    units:
        Line with information about some of the units of measurement
    parameters:
        CosmoTherm calculation data
Properties
----------
full_df:
small_df:
settings_df:
"""
__slots__ = ("units", "settings", "compounds", "parameters", "columns",
"job_indx")
def __init__(self, job: list or tuple):
self.units = job[1]
self.job_indx, self.settings = setting_pars(job[0])
self.compounds, self.parameters = data_pars(job[3:])
self.columns = columns_pars(job[2])
self.settings = list(self.settings)
def full_df(self):
"""
        Method that returns the full information about one job; a helper
        method that simplifies working with the Jobs class. Only works if
        the class has been initialised correctly.
        Return
        ------
        pd.Dataframe(): returns a dataframe with the data of one job,
        index -- a multi-index made of the job number and the list of
        calculated compounds.
        columns -- the parameter names,
        data -- the values of the COSMOtherm table
"""
index = list(zip([self.job_indx] * len(self.compounds),
self.compounds))
multiindex = pd.MultiIndex.from_tuples(index,
names=["Job", "Compound"])
return pd.DataFrame(data=self.parameters,
index=multiindex,
columns=self.columns)
def small_df(self, columns: list or tuple):
"""
        A helper method that returns a single table with the specified
        columns. Needed to simplify working with the Jobs class.
        Arguments
        ---------
        columns: list or tuple
            List of columns
"""
_small_df = self.full_df().loc[:, columns].copy()
return _small_df
def settings_df(self, detailed=None):
        # TODO documentation
"""
"""
columns = [self.job_indx]
index = [x.name for x in self.settings]
if 'p=' in index:
pass
else:
index.append('p=')
self.settings.append(Setting(name='p=', value=1, unit='atm'))
if detailed:
data = self.settings
else:
data = [x.value for x in self.settings]
return pd.DataFrame(columns=columns, index=index, data=data)
class Jobs:
"""
    Class that stores the data of one COSMOtherm calculation.
    On initialisation it takes the argument path: str - the path to a *.tab file,
    automatically reads the data from the file and initialises a Job instance
    for every individual job.
    Arguments
    ---------
    path: str
        Path to the *.tab file
Methods
-------
full_df(csv: bool, invert: bool): df
small_df(csv: bool, invert: bool): df
settings_df(csv: bool): df
        returns the specific settings df needed for the calculations
"""
__slots__ = ("path", "data")
def __init__(self, path: str):
self.path = path
self.data = [Job(i) for i in read_data_cosmo(path)]
def full_df(self, invert=None):
        # TODO documentation
"""
"""
df = pd.concat([job.full_df() for job in self.data], sort=True)
df = df.applymap(lambda x: 0 if x == 'NA' else x)
df = df.apply(pd.to_numeric)
df.fillna(0, inplace=True)
if invert:
df.sort_index(axis=0, level=1, inplace=True)
return df.swaplevel(i=-2, j=-1, axis=0)
else:
return df
def small_df(self, columns: list or tuple = None, invert: bool = None):
        # TODO documentation
"""
"""
if columns:
_small_df = self.full_df().loc[:, columns].copy()
if invert:
_small_df.sort_index(axis=0, level=1, inplace=True)
return _small_df.swaplevel(i=-2, j=-1, axis=0)
else:
return _small_df
else:
pass
def settings_df(self, detailed=None):
# TODO
"""[summary]
Returns
-------
[type]
[description]
"""
if detailed:
df = pd.concat([job.settings_df(detailed=1) for job in self.data],
axis=1,
sort=True)
df.fillna(0, inplace=True)
return df
else:
df = pd.concat([job.settings_df() for job in self.data],
axis=1,
sort=True)
df.fillna(0, inplace=True)
return df
def main():
from os import listdir
from os.path import isfile, join
mypath = '/home/anton/Documents/Scamt_projects/Adonin_project/COSMOthermProject/EA_scrf/'
onlyfiles = [f for f in listdir(mypath) if isfile(join(mypath, f))]
files = [i for i in onlyfiles if i.endswith('tab')]
for file in files:
Jobs(mypath + file).small_df(
invert=1, columns=('Gsolv', 'ln(gamma)',
'Nr')).T.to_csv(f'{mypath + file}.csv')
Jobs(mypath +
file).settings_df().T.to_csv(f'{mypath + file}_Settings.csv')
if __name__ == "__main__":
main()
|
[
"pandas.DataFrame",
"CosmOrc.setting.Setting.from_record",
"pandas.MultiIndex.from_tuples",
"os.path.join",
"CosmOrc.setting.Setting",
"re.search",
"os.listdir"
] |
[((1938, 1971), 're.search', 're.search', (['_compound_nr', 'some_str'], {}), '(_compound_nr, some_str)\n', (1947, 1971), False, 'import re\n'), ((7299, 7358), 'pandas.MultiIndex.from_tuples', 'pd.MultiIndex.from_tuples', (['index'], {'names': "['Job', 'Compound']"}), "(index, names=['Job', 'Compound'])\n", (7324, 7358), True, 'import pandas as pd\n'), ((7421, 7495), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': 'self.parameters', 'index': 'multiindex', 'columns': 'self.columns'}), '(data=self.parameters, index=multiindex, columns=self.columns)\n', (7433, 7495), True, 'import pandas as pd\n'), ((8428, 8481), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': 'columns', 'index': 'index', 'data': 'data'}), '(columns=columns, index=index, data=data)\n', (8440, 8481), True, 'import pandas as pd\n'), ((10871, 10886), 'os.listdir', 'listdir', (['mypath'], {}), '(mypath)\n', (10878, 10886), False, 'from os import listdir\n'), ((2936, 2964), 'CosmOrc.setting.Setting.from_record', 'Setting.from_record', (['setting'], {}), '(setting)\n', (2955, 2964), False, 'from CosmOrc.setting import Setting\n'), ((3032, 3060), 'CosmOrc.setting.Setting.from_record', 'Setting.from_record', (['setting'], {}), '(setting)\n', (3051, 3060), False, 'from CosmOrc.setting import Setting\n'), ((8250, 8289), 'CosmOrc.setting.Setting', 'Setting', ([], {'name': '"""p="""', 'value': '(1)', 'unit': '"""atm"""'}), "(name='p=', value=1, unit='atm')\n", (8257, 8289), False, 'from CosmOrc.setting import Setting\n'), ((10897, 10912), 'os.path.join', 'join', (['mypath', 'f'], {}), '(mypath, f)\n', (10901, 10912), False, 'from os.path import isfile, join\n'), ((3430, 3458), 'CosmOrc.setting.Setting.from_record', 'Setting.from_record', (['element'], {}), '(element)\n', (3449, 3458), False, 'from CosmOrc.setting import Setting\n')]
|
from django.db import models
from billing.models import BillingProfile
ADDRESS_TYPES = (
('billing', 'Billing'),
('shipping', 'Shipping')
)
class Address(models.Model):
billing_profile = models.ForeignKey(BillingProfile, null=True, blank=False, on_delete=models.SET_NULL)
address_type = models.CharField(max_length=120, choices=ADDRESS_TYPES)
address_line_1 = models.CharField(max_length=120)
address_line_2 = models.CharField(max_length=120, null=True, blank=True)
city = models.CharField(max_length=120)
country = models.CharField(max_length=120, default='Turkey')
state = models.CharField(max_length=120)
postal_code = models.CharField(max_length=120)
def __str__(self):
return str(self.billing_profile) + ' : ' + str(self.address_type).upper()
def get_address(self):
return f"{self.address_line_1} {self.address_line_2 or ''} / {self.state}, {self.city} {self.postal_code} {self.country}"
|
[
"django.db.models.ForeignKey",
"django.db.models.CharField"
] |
[((203, 292), 'django.db.models.ForeignKey', 'models.ForeignKey', (['BillingProfile'], {'null': '(True)', 'blank': '(False)', 'on_delete': 'models.SET_NULL'}), '(BillingProfile, null=True, blank=False, on_delete=models.\n SET_NULL)\n', (220, 292), False, 'from django.db import models\n'), ((307, 362), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)', 'choices': 'ADDRESS_TYPES'}), '(max_length=120, choices=ADDRESS_TYPES)\n', (323, 362), False, 'from django.db import models\n'), ((384, 416), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (400, 416), False, 'from django.db import models\n'), ((438, 493), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)', 'null': '(True)', 'blank': '(True)'}), '(max_length=120, null=True, blank=True)\n', (454, 493), False, 'from django.db import models\n'), ((505, 537), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (521, 537), False, 'from django.db import models\n'), ((552, 602), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)', 'default': '"""Turkey"""'}), "(max_length=120, default='Turkey')\n", (568, 602), False, 'from django.db import models\n'), ((615, 647), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (631, 647), False, 'from django.db import models\n'), ((666, 698), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(120)'}), '(max_length=120)\n', (682, 698), False, 'from django.db import models\n')]
|
# Author: <NAME> <<EMAIL>>
#
# Based on the Adafruit BMP280 Driver C++ driver and the BMP085 python lib.
# - https://github.com/adafruit/Adafruit_BMP280_Library
# - https://github.com/adafruit/Adafruit_Python_BMP
#
# Datasheet: https://www.adafruit.com/datasheets/BST-BMP280-DS001-11.pdf
from __future__ import division
import logging
# BMP280 default address.
BMP280_I2CADDR = 0x77
BMP280_CHIPID = 0xD0
# BMP280 Registers
BMP280_DIG_T1 = 0x88 # R Unsigned Calibration data (16 bits)
BMP280_DIG_T2 = 0x8A # R Signed Calibration data (16 bits)
BMP280_DIG_T3 = 0x8C # R Signed Calibration data (16 bits)
BMP280_DIG_P1 = 0x8E # R Unsigned Calibration data (16 bits)
BMP280_DIG_P2 = 0x90 # R Signed Calibration data (16 bits)
BMP280_DIG_P3 = 0x92 # R Signed Calibration data (16 bits)
BMP280_DIG_P4 = 0x94 # R Signed Calibration data (16 bits)
BMP280_DIG_P5 = 0x96 # R Signed Calibration data (16 bits)
BMP280_DIG_P6 = 0x98 # R Signed Calibration data (16 bits)
BMP280_DIG_P7 = 0x9A # R Signed Calibration data (16 bits)
BMP280_DIG_P8 = 0x9C # R Signed Calibration data (16 bits)
BMP280_DIG_P9 = 0x9E # R Signed Calibration data (16 bits)
BMP280_CONTROL = 0xF4
BMP280_RESET = 0xE0
BMP280_CONFIG = 0xF5
BMP280_PRESSUREDATA = 0xF7
BMP280_TEMPDATA = 0xFA
class BMP280(object):
def __init__(self, address=BMP280_I2CADDR, i2c=None, **kwargs):
self._logger = logging.getLogger('Adafruit_BMP.BMP280')
# Create I2C device.
if i2c is None:
import Adafruit_GPIO.I2C as I2C
i2c = I2C
self._device = i2c.get_i2c_device(address, **kwargs)
if self._device.readU8(BMP280_CHIPID) != 0x58:
raise Exception('Unsupported chip')
# Load calibration values.
self._load_calibration()
self._device.write8(BMP280_CONTROL, 0x3F)
def _load_calibration(self):
self.cal_t1 = int(self._device.readU16(BMP280_DIG_T1)) # UINT16
self.cal_t2 = int(self._device.readS16(BMP280_DIG_T2)) # INT16
self.cal_t3 = int(self._device.readS16(BMP280_DIG_T3)) # INT16
self.cal_p1 = int(self._device.readU16(BMP280_DIG_P1)) # UINT16
self.cal_p2 = int(self._device.readS16(BMP280_DIG_P2)) # INT16
self.cal_p3 = int(self._device.readS16(BMP280_DIG_P3)) # INT16
self.cal_p4 = int(self._device.readS16(BMP280_DIG_P4)) # INT16
self.cal_p5 = int(self._device.readS16(BMP280_DIG_P5)) # INT16
self.cal_p6 = int(self._device.readS16(BMP280_DIG_P6)) # INT16
self.cal_p7 = int(self._device.readS16(BMP280_DIG_P7)) # INT16
self.cal_p8 = int(self._device.readS16(BMP280_DIG_P8)) # INT16
self.cal_p9 = int(self._device.readS16(BMP280_DIG_P9)) # INT16
self._logger.debug('T1 = {0:6d}'.format(self.cal_t1))
self._logger.debug('T2 = {0:6d}'.format(self.cal_t2))
self._logger.debug('T3 = {0:6d}'.format(self.cal_t3))
self._logger.debug('P1 = {0:6d}'.format(self.cal_p1))
self._logger.debug('P2 = {0:6d}'.format(self.cal_p2))
self._logger.debug('P3 = {0:6d}'.format(self.cal_p3))
self._logger.debug('P4 = {0:6d}'.format(self.cal_p4))
self._logger.debug('P5 = {0:6d}'.format(self.cal_p5))
self._logger.debug('P6 = {0:6d}'.format(self.cal_p6))
self._logger.debug('P7 = {0:6d}'.format(self.cal_p7))
self._logger.debug('P8 = {0:6d}'.format(self.cal_p8))
self._logger.debug('P9 = {0:6d}'.format(self.cal_p9))
def _load_datasheet_calibration(self):
# Set calibration from values in the datasheet example. Useful for debugging the
# temp and pressure calculation accuracy.
self.cal_t1 = 27504
self.cal_t2 = 26435
self.cal_t3 = -1000
self.cal_p1 = 36477
self.cal_p2 = -10685
self.cal_p3 = 3024
self.cal_p4 = 2855
self.cal_p5 = 140
self.cal_p6 = -7
self.cal_p7 = 15500
self.cal_p8 = -14500
self.cal_p9 = 6000
def read_raw(self, register):
"""Reads the raw (uncompensated) temperature or pressure from the sensor."""
raw = self._device.readU16BE(register)
raw <<= 8
raw = raw | self._device.readU8(register + 2)
raw >>= 4
self._logger.debug('Raw value 0x{0:X} ({1})'.format(raw & 0xFFFF, raw))
return raw
def _compensate_temp(self, raw_temp):
""" Compensate temperature """
t1 = (((raw_temp >> 3) - (self.cal_t1 << 1)) *
(self.cal_t2)) >> 11
t2 = (((((raw_temp >> 4) - (self.cal_t1)) *
((raw_temp >> 4) - (self.cal_t1))) >> 12) *
(self.cal_t3)) >> 14
return t1 + t2
def read_temperature(self):
"""Gets the compensated temperature in degrees celsius."""
raw_temp = self.read_raw(BMP280_TEMPDATA)
compensated_temp = self._compensate_temp(raw_temp)
temp = float(((compensated_temp * 5 + 128) >> 8)) // 100
self._logger.debug('Calibrated temperature {0}'.format(temp))
return temp
def read_pressure(self):
"""Gets the compensated pressure in Pascals."""
raw_temp = self.read_raw(BMP280_TEMPDATA)
compensated_temp = self._compensate_temp(raw_temp)
raw_pressure = self.read_raw(BMP280_PRESSUREDATA)
p1 = compensated_temp - 128000
p2 = p1 * p1 * self.cal_p6
p2 += (p1 * self.cal_p5) << 17
p2 += self.cal_p4 << 35
p1 = ((p1 * p1 * self.cal_p3) >> 8) + ((p1 * self.cal_p2) << 12)
p1 = ((1 << 47) + p1) * (self.cal_p1) >> 33
if 0 == p1:
return 0
p = 1048576 - raw_pressure
p = (((p << 31) - p2) * 3125) // p1
p1 = (self.cal_p9 * (p >> 13) * (p >> 13)) >> 25
p2 = (self.cal_p8 * p) >> 19
p = ((p + p1 + p2) >> 8) + ((self.cal_p7) << 4)
return float(p // 256)
def read_altitude(self, sealevel_pa=101325.0):
"""Calculates the altitude in meters."""
# Calculation taken straight from section 3.6 of the datasheet.
pressure = float(self.read_pressure())
# altitude = 44330.0 * (1.0 - pow(pressure // sealevel_pa, (1.0 // 5.255))) # nlsn DEL
altitude = 44330.0 * (1.0 - pow(pressure // sealevel_pa, (1.0 / 5.255))) / 100 # nlsn INS
self._logger.debug('Altitude {0} m'.format(altitude))
return altitude
def read_sealevel_pressure(self, altitude_m=0.0):
"""Calculates the pressure at sealevel when given a known altitude in
meters. Returns a value in Pascals."""
pressure = float(self.read_pressure())
p0 = pressure // pow(1.0 - altitude_m // 44330.0, 5.255)
self._logger.debug('Sealevel pressure {0} Pa'.format(p0))
return p0
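# Added usage sketch (not part of the original source; assumes an I2C bus with a wired BMP280):
#   sensor = BMP280()                       # default I2C address 0x77
#   temp_c = sensor.read_temperature()     # degrees Celsius
#   pressure_pa = sensor.read_pressure()   # Pascals
#   altitude_m = sensor.read_altitude()    # metres, relative to the given sea-level pressure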
|
[
"logging.getLogger"
] |
[((1405, 1445), 'logging.getLogger', 'logging.getLogger', (['"""Adafruit_BMP.BMP280"""'], {}), "('Adafruit_BMP.BMP280')\n", (1422, 1445), False, 'import logging\n')]
|
import automaton
def automaton1():
a = automaton.Automaton()
a.add_string('label1L a? b! label2L')
return a
def process_example():
process = automaton.Automaton()
process.add_edge('p0', 'p1', label='a!')
return process
def environment_example():
environment = automaton.Automaton()
environment.add_edge('e0', 'e1', label='a?')
environment.add_edge('e1', 'e0', label='b!')
return environment
|
[
"automaton.Automaton"
] |
[((44, 65), 'automaton.Automaton', 'automaton.Automaton', ([], {}), '()\n', (63, 65), False, 'import automaton\n'), ((160, 181), 'automaton.Automaton', 'automaton.Automaton', ([], {}), '()\n', (179, 181), False, 'import automaton\n'), ((293, 314), 'automaton.Automaton', 'automaton.Automaton', ([], {}), '()\n', (312, 314), False, 'import automaton\n')]
|
import os
import util
# Server configuration
DATA_SERVER = 'http://localhost:3030'
TEMP_FOLDER = 'tmp'
STAGING_FOLDER = 'staging'
AUTOPROCESS = True
# General paths to binaries
SCRIPT_DIR = util.getScriptPath()
SOURCE_DIR = os.path.join(SCRIPT_DIR, '..')
DATA_DIR = 'staging/'
COLOR_FOLDER = 'color'
DEPTH_FOLDER = 'depth'
RECONS_RESULT_DIR = ''
PHOTOGRAMMETRY_RESULT_DIR = ''
# System specific paths for processing server binaries
TOOLS_DIR = '../'
DECODE_DIR = 'scripts'
RECONS_DIR = 'reconstruction'
PHOTOGRAMMETRY_DIR = 'meshroom'
# where scan data is stored under as subdirs with unique ids
# STAGING_FOLDER_LOCAL = os.path.join(DATA_DIR, 'scans', 'staging')
|
[
"os.path.join",
"util.getScriptPath"
] |
[((192, 212), 'util.getScriptPath', 'util.getScriptPath', ([], {}), '()\n', (210, 212), False, 'import util\n'), ((226, 256), 'os.path.join', 'os.path.join', (['SCRIPT_DIR', '""".."""'], {}), "(SCRIPT_DIR, '..')\n", (238, 256), False, 'import os\n')]
|
import numpy as np
import pandas as pd
import pytest
from pandas.testing import assert_series_equal
from visions import StandardSet
from compressio.compress import compress_func
from compressio.type_compressor import DefaultCompressor
bool_dtype = "boolean" if int(pd.__version__.split(".")[0]) >= 1 else "Bool"
@pytest.mark.parametrize(
"series,before,expected",
[
(
pd.Series([10.0, 100.0, np.iinfo(np.int16).max * 1.0], dtype=np.float64),
np.float64,
"int16",
),
(pd.Series([np.nan, 1], dtype=np.float64), np.float64, "Int8"),
(
pd.Series([True, False, None, None, None, None, True, False] * 1000),
np.object,
bool_dtype,
),
],
)
def test_compress_series(series, before, expected):
assert series.dtype == before
compressed_series = compress_func(
series,
typeset=StandardSet(),
compressor=DefaultCompressor(),
with_inference=True,
inplace=False,
)
assert str(compressed_series.dtype) == expected
assert_series_equal(series, compressed_series, check_dtype=False)
|
[
"pandas.__version__.split",
"numpy.iinfo",
"visions.StandardSet",
"pandas.Series",
"compressio.type_compressor.DefaultCompressor",
"pandas.testing.assert_series_equal"
] |
[((1089, 1154), 'pandas.testing.assert_series_equal', 'assert_series_equal', (['series', 'compressed_series'], {'check_dtype': '(False)'}), '(series, compressed_series, check_dtype=False)\n', (1108, 1154), False, 'from pandas.testing import assert_series_equal\n'), ((918, 931), 'visions.StandardSet', 'StandardSet', ([], {}), '()\n', (929, 931), False, 'from visions import StandardSet\n'), ((952, 971), 'compressio.type_compressor.DefaultCompressor', 'DefaultCompressor', ([], {}), '()\n', (969, 971), False, 'from compressio.type_compressor import DefaultCompressor\n'), ((539, 579), 'pandas.Series', 'pd.Series', (['[np.nan, 1]'], {'dtype': 'np.float64'}), '([np.nan, 1], dtype=np.float64)\n', (548, 579), True, 'import pandas as pd\n'), ((624, 692), 'pandas.Series', 'pd.Series', (['([True, False, None, None, None, None, True, False] * 1000)'], {}), '([True, False, None, None, None, None, True, False] * 1000)\n', (633, 692), True, 'import pandas as pd\n'), ((267, 292), 'pandas.__version__.split', 'pd.__version__.split', (['"""."""'], {}), "('.')\n", (287, 292), True, 'import pandas as pd\n'), ((424, 442), 'numpy.iinfo', 'np.iinfo', (['np.int16'], {}), '(np.int16)\n', (432, 442), True, 'import numpy as np\n')]
|
import torch, os, argparse
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
import sypt_dataset, sypt_utils
from torch.utils.data import DataLoader
from sypt_utils import *
from sypt_dataset import create_pt_pan2018
US = "\x1f" # unit separator => sentence separator
soh = "\x02"
class PTFAttenPRNN(nn.Module):
def __init__(self, vocab_size, embedding_dim, hidden_dim, embedding_martix, batch_size, iscuda= True):
super(PTFAttenPRNN, self).__init__()
self.batch_size = batch_size
self.ptf_hidden_size = hidden_dim
self.ptf_embed_dim = embedding_dim
self.iscuda = iscuda
self.ptf_embed = nn.Embedding(vocab_size, embedding_dim)
self.ptf_embed.weight.data.copy_(torch.from_numpy(embedding_martix))
self.lstm = nn.LSTM(embedding_dim, hidden_dim)
self.ptf_context_vector = self.init_ptf_contx_vector()
self.ptf_hidden = self.init_ptf_hidden()
self.lin_attention = nn.Linear(self.ptf_hidden_size, self.ptf_hidden_size)
def init_ptf_hidden(self):
if self.iscuda:
return Variable(torch.zeros(1, self.batch_size, self.ptf_hidden_size)).cuda(),\
Variable(torch.zeros(1, self.batch_size, self.ptf_hidden_size)).cuda()
else:
return Variable(torch.zeros(1, self.batch_size, self.ptf_hidden_size)), \
Variable(torch.zeros(1, self.batch_size, self.ptf_hidden_size))
def init_ptf_contx_vector(self):
return nn.Parameter(torch.Tensor(self.ptf_hidden_size, 1).uniform_(-0.1, 0.1)) # changed
def get_ptf_attention(self, ptf_encoded):
u = F.tanh(self.lin_attention(ptf_encoded))
mul = torch.matmul(u, self.ptf_context_vector.squeeze())
assert mul.size() == torch.Size([ptf_encoded.size(0), self.batch_size])
alpha = F.softmax(mul, dim=0).unsqueeze(2)# (seq_length, batch_size)->(seq_length,batch_size,1)
return alpha * ptf_encoded
def forward(self, ptf_sequence, ptf_hidden_state):
embeded_ptfs = self.ptf_embed(ptf_sequence).view(len(ptf_sequence), self.batch_size, -1)
(ptf_output, ptf_hidden_state) = self.lstm(embeded_ptfs, ptf_hidden_state)
ptf_attention = self.get_ptf_attention(ptf_output)
s_i = torch.sum(ptf_attention, dim=0).unsqueeze(0)
return s_i, ptf_hidden_state
class PTSentAttenRNN(nn.Module):
def __init__(self, batch_size, sent_hidden_size, ptf_hidden_size, class_no, drop_rate, iscuda=True, fuse=True):
super(PTSentAttenRNN, self).__init__()
self.batch_size = batch_size
self.ptf_hidden_size = ptf_hidden_size
self.sent_hidden_size = sent_hidden_size
self.drop_rate = drop_rate
self.iscuda = iscuda
self.fuse = fuse
self.sent_lstm_l = nn.LSTM(ptf_hidden_size, sent_hidden_size)
self.sent_context_vector_l = self.init_sent_contx_vector()
self.sent_hidden_l = self.init_sent_hidden()
self.lin_attention_l = nn.Linear(self.sent_hidden_size, self.sent_hidden_size)
self.sent_lstm_r = nn.LSTM(ptf_hidden_size, sent_hidden_size)
self.sent_context_vector_r = self.init_sent_contx_vector()
self.sent_hidden_r = self.init_sent_hidden()
self.lin_attention_r = nn.Linear(self.sent_hidden_size, self.sent_hidden_size)
self.lin = nn.Linear(7, class_no) if self.fuse else nn.Linear(2*self.sent_hidden_size, class_no)
def forward(self, ptf_atten_sequence, sent_hidden_state):
ptf_atten_seq_l, ptf_atten_seq_r = ptf_atten_sequence[0], ptf_atten_sequence[1]
sent_hidden_state_l, sent_hidden_state_r = sent_hidden_state[0], sent_hidden_state[1]
(sent_output_l, sent_hidden_state_l) = self.sent_lstm_l(ptf_atten_seq_l, sent_hidden_state_l)
sent_attention_l = self.get_sent_attention_l(sent_output_l)
l_hidden = torch.sum(sent_attention_l, dim=0)
(sent_output_r, sent_hidden_state_r) = self.sent_lstm_r(ptf_atten_seq_r, sent_hidden_state_r)
sent_attention_r = self.get_sent_attention_r(sent_output_r)
r_hidden = torch.sum(sent_attention_r, dim=0)
sent_hidden_state = [sent_hidden_state_l, sent_hidden_state_r]
merged = PTSentAttenRNN.get_last_layer(l_hidden, r_hidden, self.fuse)
merged = F.dropout(merged, p=self.drop_rate, training=self.training)
merged = self.lin(merged)
return F.log_softmax(merged, dim=1), sent_hidden_state
def get_sent_attention_l(self, sent_encoded):
u = F.tanh(self.lin_attention_l(sent_encoded))
mul = torch.matmul(u, self.sent_context_vector_l.squeeze())
assert mul.size() == torch.Size([sent_encoded.size(0), self.batch_size])
alpha = F.softmax(mul, dim=0).unsqueeze(2) # (sent_no, batch_size)->(sent_no,batch_size,1)
return alpha * sent_encoded
def get_sent_attention_r(self, sent_encoded):
u = F.tanh(self.lin_attention_r(sent_encoded))
mul = torch.matmul(u, self.sent_context_vector_r.squeeze())
assert mul.size() == torch.Size([sent_encoded.size(0), self.batch_size])
alpha = F.softmax(mul, dim=0).unsqueeze(2) # (sent_no, batch_size)->(sent_no,batch_size,1)
return alpha * sent_encoded
def init_sent_contx_vector(self):
return nn.Parameter(torch.Tensor(self.sent_hidden_size, 1).uniform_(-0.1, 0.1)) ## changed
@staticmethod
def get_last_layer(l_hidden, r_hidden, fuse=True):
if fuse:
cos = F.cosine_similarity(l_hidden, r_hidden, dim=1).view(1, -1)
euc = sypt_utils.euclidean_distance(l_hidden, r_hidden, dim=1).view(1, -1)
dot_dis = sypt_utils.dot(l_hidden, r_hidden, dim=1).view(1, -1)
mean_l1 = sypt_utils.mean_of_l1(l_hidden, r_hidden, dim=1).view(1, -1)
sig = sypt_utils.sigmoid_kernel(l_hidden, r_hidden, dim=1).view(1, -1)
chi = sypt_utils.chi_squared(l_hidden, r_hidden, dim=1).view(1, -1)
rbf = sypt_utils.rbf_kernel(l_hidden, r_hidden, dim=1).view(1, -1)
return torch.cat([cos, euc, dot_dis, mean_l1, sig, chi, rbf], dim=0).view(1, -1)
else:
return torch.cat([l_hidden, r_hidden], dim=1).view(1, -1)
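    # Added note (not part of the original source): with `fuse` enabled the classifier input is a
    # 7-dimensional vector of pairwise similarity/distance features between the left and right
    # document encodings (cosine, Euclidean, dot product, mean of L1, sigmoid kernel, chi-squared,
    # RBF), matching nn.Linear(7, class_no) above; otherwise the two hidden vectors are concatenated.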
def init_sent_hidden(self):
if self.iscuda:
return Variable(torch.zeros(1, self.batch_size, self.sent_hidden_size)).cuda(),\
Variable(torch.zeros(1, self.batch_size, self.sent_hidden_size)).cuda()
else:
return Variable(torch.zeros(1, self.batch_size, self.sent_hidden_size)),\
Variable(torch.zeros(1, self.batch_size, self.sent_hidden_size))
def make_context_vector(context, ptf_index): # ok
return [ptf_index[word] for word in context if word in ptf_index]
def train_data(x_train, y_target, ptf_attn_model, sent_attn_model, ptf_optimizer, sent_optimizer, criterion):
ptf_attn_model_l, ptf_attn_model_r = ptf_attn_model[0], ptf_attn_model[1]
ptf_optimizer_l, ptf_optimizer_r = ptf_optimizer[0], ptf_optimizer[1]
state_ptf_l, state_ptf_r = ptf_attn_model_l.init_ptf_hidden(), ptf_attn_model_r.init_ptf_hidden()
state_sent = [sent_attn_model.init_sent_hidden(), sent_attn_model.init_sent_hidden()]
y_target = Variable(torch.LongTensor(y_target))
ptf_optimizer_l.zero_grad()
ptf_optimizer_r.zero_grad()
sent_optimizer.zero_grad()
s_l, s_r = None, None
for i in range(len(x_train[0])):
ptf_idx_seq = Variable(torch.LongTensor(x_train[0][i])).cuda()
_s, state_ptf_l = ptf_attn_model_l(ptf_idx_seq, state_ptf_l)
if s_l is None:
s_l = _s
else:
s_l = torch.cat((s_l, _s), 0)
assert len(x_train[0]) == len(s_l)
for i in range(len(x_train[1])):
ptf_idx_seq = Variable(torch.LongTensor(x_train[1][i])).cuda()
_s, state_ptf_r = ptf_attn_model_r(ptf_idx_seq, state_ptf_r)
if s_r is None:
s_r = _s
else:
s_r = torch.cat((s_r, _s), 0)
assert len(x_train[1]) == len(s_r)
y_pred, state_sent = sent_attn_model([s_l, s_r], state_sent)
loss_train = criterion(y_pred.cuda(), y_target.cuda())
loss_train.backward()
# `clip_grad_norm_` helps prevent the exploding gradient problem in LSTMs
torch.nn.utils.clip_grad_norm_(ptf_attn_model_l.parameters(), 0.25)
torch.nn.utils.clip_grad_norm_(ptf_attn_model_r.parameters(), 0.25)
torch.nn.utils.clip_grad_norm_(sent_attn_model.parameters(), 0.25)
ptf_optimizer_l.step()
ptf_optimizer_r.step()
sent_optimizer.step()
return loss_train.data.item()
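# Added note (not part of the original source): `train_data` first encodes every sentence of the
# left and right documents with the word-level attention RNNs (one attended vector `_s` per
# sentence), stacks them into `s_l` / `s_r`, and then feeds both sequences to the sentence-level
# attention RNN, whose log-probability output is scored by the loss criterion passed in.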
def tst_data(x_test, y_target, ptf_attn_model, sent_attn_model, criterion, iscuda):
ptf_attn_model_l, ptf_attn_model_r = ptf_attn_model[0], ptf_attn_model[1]
state_ptf_l, state_ptf_r = ptf_attn_model_l.init_ptf_hidden(), ptf_attn_model_r.init_ptf_hidden()
state_sent = [sent_attn_model.init_sent_hidden(), sent_attn_model.init_sent_hidden()]
s_l, s_r = None, None
for i in range(len(x_test[0])):
ptf_idx_seq = Variable(torch.LongTensor(x_test[0][i]))
if iscuda:
ptf_idx_seq = ptf_idx_seq.cuda()
_s, state_ptf_l = ptf_attn_model_l(ptf_idx_seq, state_ptf_l)
if s_l is None:
s_l = _s
else:
s_l = torch.cat((s_l, _s), 0)
assert len(x_test[0]) == len(s_l)
for i in range(len(x_test[1])):
ptf_idx_seq = Variable(torch.LongTensor(x_test[1][i]))
if iscuda:
ptf_idx_seq = ptf_idx_seq.cuda()
_s, state_ptf_r = ptf_attn_model_r(ptf_idx_seq, state_ptf_r)
if s_r is None:
s_r = _s
else:
s_r = torch.cat((s_r, _s), 0)
assert len(x_test[1]) == len(s_r)
y_pred, state_sent = sent_attn_model([s_l, s_r], state_sent)
if iscuda:
loss_test = criterion(y_pred.cuda(), y_target.cuda())
else:
loss_test = criterion(y_pred, y_target)
return y_pred, loss_test.data.item()
def eval(dataloader, ptf_index, criterion, return_json=False, models=None, iscuda=True):
for mdl in models.values():
mdl.eval()
ptf_model_l = models["ptf_model_l"]
ptf_model_r = models["ptf_model_r"]
sent_model = models["sent_model"]
total, correct = 0, 0
total_loss = torch.Tensor([0])
if iscuda:
total_loss = total_loss.cuda()
if return_json:
json={}
for itr, d in enumerate(dataloader):
l_doc = d["doc"][0]
l_doc = l_doc.split(US)
target = d["label"]
l_vec = []
for e in l_doc:
cv = make_context_vector(e.split(soh), ptf_index)
if len(cv) != 0:
l_vec.append(cv)
r_vec = backward(l_vec)
l_vec = list_of_list_to_long_tensor(l_vec)
r_vec = list_of_list_to_long_tensor(r_vec)
target = Variable(torch.LongTensor(target))
if iscuda:
target = target.cuda()
data_test = [l_vec, r_vec]
ptf_model = [ptf_model_l, ptf_model_r]
outputs, loss = tst_data(data_test, target, ptf_model, sent_model, criterion, iscuda)
_, predicted = torch.max(outputs.data, 1)
total += target.size(0)
if return_json:
json[d["id"][0]] = bool(predicted.cpu().numpy()[0])
correct += (predicted == target.data).sum()
total_loss += loss
if return_json:
        return (100 * correct / total), (total_loss / len(dataloader))[0], json
else:
return (100 * correct / total), (total_loss / len(dataloader))[0]
def backward(doc):
rdoc = list(reversed(doc))
return [list(reversed(e)) for e in rdoc]
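# Illustrative (not from the original file):
#   backward([[1, 2], [3, 4]]) == [[4, 3], [2, 1]]
# i.e. both the sentence order and the token order within each sentence are
# reversed, producing the input for the right-to-left ("_r") models above.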
def list_of_list_to_long_tensor(src_list):
des_list = [torch.LongTensor(e) for e in src_list]
return des_list
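# Illustrative (not from the original file):
#   list_of_list_to_long_tensor([[1, 2], [3]]) -> [tensor([1, 2]), tensor([3])]
# Ragged sequences stay as separate 1-D tensors rather than being padded into
# one batch, matching the batch_size = 1 assumption elsewhere in this script.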
def train_epoch(dataloader, ptf_index, models, optmzrs, loss_func):
ptf_optim_l = optmzrs["ptf_optim_l"]
ptf_optim_r = optmzrs["ptf_optim_r"]
sent_optim = optmzrs["sent_optim"]
for mdl in models.values():
mdl.train()
ptf_model_l = models["ptf_model_l"]
ptf_model_r = models["ptf_model_r"]
sent_model = models["sent_model"]
total_loss = torch.Tensor([0]).cuda()
for itr, d in enumerate(dataloader):
l_doc = d["doc"][0]
l_doc = l_doc.split(US)
l_vec = []
for e in l_doc:
cv = make_context_vector(e.split(soh), ptf_index)
if len(cv) != 0:
l_vec.append(cv)
r_vec = backward(l_vec)
l_vec = list_of_list_to_long_tensor(l_vec)
r_vec = list_of_list_to_long_tensor(r_vec)
x_train = [l_vec, r_vec]
ptf_model = [ptf_model_l, ptf_model_r]
ptf_optim = [ptf_optim_l, ptf_optim_r]
loss = train_data(x_train, d["label"], ptf_model, sent_model, ptf_optim, sent_optim, loss_func)
total_loss += loss
return (total_loss/len(dataloader))[0]
def get_params():
params = dict()
params["EMBEDDING_DIM"] = 100
params["ptf_HIDDEN_DIM"] = 8
params["SENT_HIDDEN_DIM"] = 8
params["EPOCHS"] = 30
params["dropout_rate"] = 0.3
params["CLASS_NO"] = 2
params["fuse"] = True
params["iscuda"] = True
return params
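# Note: params["iscuda"] is defined here, but train_model and train_data call
# .cuda() unconditionally, so a CUDA device is required for training regardless
# of this flag; only the tst_data/eval path honours it.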
def save_checkpoint(models, is_best, model_name):
"""Save checkpoint if a new best is achieved"""
if is_best:
        print("=> Saving a new best")
torch.save(models['ptf_model_l'].state_dict(), 'ptf_model_l' + model_name)
torch.save(models['ptf_model_r'].state_dict(), 'ptf_model_r' + model_name)
torch.save(models['sent_model'].state_dict(), 'sent_model' + model_name)
else:
        print("=> Validation Accuracy did not improve")
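# Restoring a saved checkpoint later (a minimal sketch, assuming the models are
# rebuilt with the same hyperparameters as in train_model below):
#   models['ptf_model_l'].load_state_dict(torch.load('ptf_model_l' + model_name))
#   models['ptf_model_r'].load_state_dict(torch.load('ptf_model_r' + model_name))
#   models['sent_model'].load_state_dict(torch.load('sent_model' + model_name))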
def train_model(train_path, val_path, model_name):
'''
train the model.
:param train_path:
:param val_path:
:param model_name:
:return:
'''
params = get_params()
EMBEDDING_DIM = params["EMBEDDING_DIM"]
ptf_HIDDEN_DIM = params["ptf_HIDDEN_DIM"]
SENT_HIDDEN_DIM = params["SENT_HIDDEN_DIM"]
EPOCHS = params["EPOCHS"]
dropout_rate = params["dropout_rate"]
batch_size = 1 # code should change a bit for batch size > 1
CLASS_NO = params["CLASS_NO"]
fuse = params["fuse"]
for p,v in params.items():
print('param %s = %s' % (p, str(v)))
ds_files = dict()
ds_files['train'] = train_path
    datasets, ptf_index, embd_matrix, index_word = \
        sypt_dataset.load_dataset_and_pt_embedding(ds_files, EMBEDDING_DIM)
datasets["val"] = sypt_dataset.PAN_Dataset(val_path, None)
train_dataloader = DataLoader(datasets["train"], 1, True)
val_dataloader = DataLoader(datasets["val"], 1, True)
VOCAB_SIZE = len(ptf_index)
print('Vocab Size %d' % VOCAB_SIZE)
print('train = %s , val = %s' % (train_path, val_path))
# model definition
ptf_model_l = PTFAttenPRNN(VOCAB_SIZE, EMBEDDING_DIM, ptf_HIDDEN_DIM, embd_matrix, batch_size).cuda()
ptf_model_r = PTFAttenPRNN(VOCAB_SIZE, EMBEDDING_DIM, ptf_HIDDEN_DIM, embd_matrix, batch_size).cuda()
pt_sent_model = PTSentAttenRNN(batch_size, SENT_HIDDEN_DIM, ptf_HIDDEN_DIM, CLASS_NO, dropout_rate, True, fuse=fuse).cuda()
models = dict()
models["ptf_model_l"] = ptf_model_l
models["ptf_model_r"] = ptf_model_r
models["sent_model"] = pt_sent_model
# optimizers
ptf_optim_l = optim.RMSprop(ptf_model_l.parameters(), lr=1e-03)
ptf_optim_r = optim.RMSprop(ptf_model_r.parameters(), lr=1e-03)
sent_optim = optim.RMSprop(pt_sent_model.parameters(), lr=1e-03)
optmzrs = dict()
optmzrs["ptf_optim_l"] = ptf_optim_l
optmzrs["ptf_optim_r"] = ptf_optim_r
optmzrs["sent_optim"] = sent_optim
# loss function
loss_func = nn.NLLLoss()
# training and evaluation
best_accuracy = 0.0
for epoch in range(1, EPOCHS + 1):
train_loss = train_epoch(train_dataloader, ptf_index, models, optmzrs, loss_func)
val_acc, val_loss = eval(val_dataloader, ptf_index, loss_func, False, models)
        print('Epoch: %d and train loss: %.4f val loss: %.4f val acc: %.4f' % (epoch, train_loss, val_loss, val_acc))
# Get bool not ByteTensor
is_best = bool(val_acc > best_accuracy)
# Get greater Tensor to keep track best acc
best_accuracy = max(val_acc, best_accuracy)
# Save checkpoint if is a new best
save_checkpoint(models, is_best, model_name)
# show the final results
train_acc, train_loss = eval(train_dataloader, ptf_index, loss_func, False, models)
    print('train acc: %.4f train loss: %.10f' % (train_acc, train_loss))
    val_acc, val_loss = eval(val_dataloader, ptf_index, loss_func, False, models)
    print('val acc: %.4f val loss: %.10f' % (val_acc, val_loss))
def get_args():
'''
get arguments from command line
:return: a dic of all arguments
'''
parser = argparse.ArgumentParser()
parser.add_argument('-c', action='store', default='data/', help='source path')
parser.add_argument('-o', action='store', default='data/', help='destination path')
results = parser.parse_args()
print(results)
return vars(results)
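# Example invocation (script name hypothetical; both flags default to 'data/'):
#   python train_sypt.py -c data/csv/ -o data/pt/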
if __name__ == "__main__":
# param setting
params = get_args()
csv_path = params["c"]
pt_path = params["o"]
model_name = ''
server = 'corenlp'
train = f'{pt_path}train.{server}.pt'
val = f'{pt_path}val.{server}.pt'
train_csv = f'{csv_path}train.csv'
val_csv = f'{csv_path}val.csv'
# create ptf of train and val dataset
if not os.path.exists(pt_path):
os.mkdir(pt_path)
if not os.path.exists(train):
create_pt_pan2018(train_csv, train, root='', server_type=server)
if not os.path.exists(val):
create_pt_pan2018(val_csv, val, root='', server_type=server)
# train the model
train_model(train, val, model_name)
|
[
"os.mkdir",
"argparse.ArgumentParser",
"torch.nn.Embedding",
"torch.nn.functional.dropout",
"torch.cat",
"torch.nn.NLLLoss",
"sypt_dataset.PAN_Dataset",
"sypt_utils.rbf_kernel",
"torch.utils.data.DataLoader",
"sypt_dataset.load_dataset_and_pt_embedding",
"os.path.exists",
"torch.Tensor",
"torch.nn.functional.log_softmax",
"torch.nn.Linear",
"torch.zeros",
"torch.nn.LSTM",
"sypt_utils.sigmoid_kernel",
"sypt_utils.dot",
"torch.max",
"torch.nn.functional.cosine_similarity",
"torch.sum",
"sypt_dataset.create_pt_pan2018",
"torch.from_numpy",
"sypt_utils.euclidean_distance",
"torch.LongTensor",
"torch.nn.functional.softmax",
"sypt_utils.mean_of_l1",
"sypt_utils.chi_squared"
] |
[((10313, 10330), 'torch.Tensor', 'torch.Tensor', (['[0]'], {}), '([0])\n', (10325, 10330), False, 'import torch, os, argparse\n'), ((14386, 14453), 'sypt_dataset.load_dataset_and_pt_embedding', 'sypt_dataset.load_dataset_and_pt_embedding', (['ds_files', 'EMBEDDING_DIM'], {}), '(ds_files, EMBEDDING_DIM)\n', (14428, 14453), False, 'import sypt_dataset, sypt_utils\n'), ((14486, 14526), 'sypt_dataset.PAN_Dataset', 'sypt_dataset.PAN_Dataset', (['val_path', 'None'], {}), '(val_path, None)\n', (14510, 14526), False, 'import sypt_dataset, sypt_utils\n'), ((14550, 14588), 'torch.utils.data.DataLoader', 'DataLoader', (["datasets['train']", '(1)', '(True)'], {}), "(datasets['train'], 1, True)\n", (14560, 14588), False, 'from torch.utils.data import DataLoader\n'), ((14610, 14646), 'torch.utils.data.DataLoader', 'DataLoader', (["datasets['val']", '(1)', '(True)'], {}), "(datasets['val'], 1, True)\n", (14620, 14646), False, 'from torch.utils.data import DataLoader\n'), ((15686, 15698), 'torch.nn.NLLLoss', 'nn.NLLLoss', ([], {}), '()\n', (15696, 15698), True, 'import torch.nn as nn\n'), ((16830, 16855), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (16853, 16855), False, 'import torch, os, argparse\n'), ((712, 751), 'torch.nn.Embedding', 'nn.Embedding', (['vocab_size', 'embedding_dim'], {}), '(vocab_size, embedding_dim)\n', (724, 751), True, 'import torch.nn as nn\n'), ((849, 883), 'torch.nn.LSTM', 'nn.LSTM', (['embedding_dim', 'hidden_dim'], {}), '(embedding_dim, hidden_dim)\n', (856, 883), True, 'import torch.nn as nn\n'), ((1025, 1078), 'torch.nn.Linear', 'nn.Linear', (['self.ptf_hidden_size', 'self.ptf_hidden_size'], {}), '(self.ptf_hidden_size, self.ptf_hidden_size)\n', (1034, 1078), True, 'import torch.nn as nn\n'), ((2855, 2897), 'torch.nn.LSTM', 'nn.LSTM', (['ptf_hidden_size', 'sent_hidden_size'], {}), '(ptf_hidden_size, sent_hidden_size)\n', (2862, 2897), True, 'import torch.nn as nn\n'), ((3049, 3104), 'torch.nn.Linear', 'nn.Linear', (['self.sent_hidden_size', 'self.sent_hidden_size'], {}), '(self.sent_hidden_size, self.sent_hidden_size)\n', (3058, 3104), True, 'import torch.nn as nn\n'), ((3133, 3175), 'torch.nn.LSTM', 'nn.LSTM', (['ptf_hidden_size', 'sent_hidden_size'], {}), '(ptf_hidden_size, sent_hidden_size)\n', (3140, 3175), True, 'import torch.nn as nn\n'), ((3327, 3382), 'torch.nn.Linear', 'nn.Linear', (['self.sent_hidden_size', 'self.sent_hidden_size'], {}), '(self.sent_hidden_size, self.sent_hidden_size)\n', (3336, 3382), True, 'import torch.nn as nn\n'), ((3923, 3957), 'torch.sum', 'torch.sum', (['sent_attention_l'], {'dim': '(0)'}), '(sent_attention_l, dim=0)\n', (3932, 3957), False, 'import torch, os, argparse\n'), ((4148, 4182), 'torch.sum', 'torch.sum', (['sent_attention_r'], {'dim': '(0)'}), '(sent_attention_r, dim=0)\n', (4157, 4182), False, 'import torch, os, argparse\n'), ((4358, 4417), 'torch.nn.functional.dropout', 'F.dropout', (['merged'], {'p': 'self.drop_rate', 'training': 'self.training'}), '(merged, p=self.drop_rate, training=self.training)\n', (4367, 4417), True, 'import torch.nn.functional as F\n'), ((7293, 7319), 'torch.LongTensor', 'torch.LongTensor', (['y_target'], {}), '(y_target)\n', (7309, 7319), False, 'import torch, os, argparse\n'), ((11160, 11186), 'torch.max', 'torch.max', (['outputs.data', '(1)'], {}), '(outputs.data, 1)\n', (11169, 11186), False, 'import torch, os, argparse\n'), ((11726, 11745), 'torch.LongTensor', 'torch.LongTensor', (['e'], {}), '(e)\n', (11742, 11745), False, 'import torch, os, argparse\n'), ((17486, 
17509), 'os.path.exists', 'os.path.exists', (['pt_path'], {}), '(pt_path)\n', (17500, 17509), False, 'import torch, os, argparse\n'), ((17519, 17536), 'os.mkdir', 'os.mkdir', (['pt_path'], {}), '(pt_path)\n', (17527, 17536), False, 'import torch, os, argparse\n'), ((17548, 17569), 'os.path.exists', 'os.path.exists', (['train'], {}), '(train)\n', (17562, 17569), False, 'import torch, os, argparse\n'), ((17579, 17643), 'sypt_dataset.create_pt_pan2018', 'create_pt_pan2018', (['train_csv', 'train'], {'root': '""""""', 'server_type': 'server'}), "(train_csv, train, root='', server_type=server)\n", (17596, 17643), False, 'from sypt_dataset import create_pt_pan2018\n'), ((17655, 17674), 'os.path.exists', 'os.path.exists', (['val'], {}), '(val)\n', (17669, 17674), False, 'import torch, os, argparse\n'), ((17684, 17744), 'sypt_dataset.create_pt_pan2018', 'create_pt_pan2018', (['val_csv', 'val'], {'root': '""""""', 'server_type': 'server'}), "(val_csv, val, root='', server_type=server)\n", (17701, 17744), False, 'from sypt_dataset import create_pt_pan2018\n'), ((793, 827), 'torch.from_numpy', 'torch.from_numpy', (['embedding_martix'], {}), '(embedding_martix)\n', (809, 827), False, 'import torch, os, argparse\n'), ((3402, 3424), 'torch.nn.Linear', 'nn.Linear', (['(7)', 'class_no'], {}), '(7, class_no)\n', (3411, 3424), True, 'import torch.nn as nn\n'), ((3443, 3489), 'torch.nn.Linear', 'nn.Linear', (['(2 * self.sent_hidden_size)', 'class_no'], {}), '(2 * self.sent_hidden_size, class_no)\n', (3452, 3489), True, 'import torch.nn as nn\n'), ((4467, 4495), 'torch.nn.functional.log_softmax', 'F.log_softmax', (['merged'], {'dim': '(1)'}), '(merged, dim=1)\n', (4480, 4495), True, 'import torch.nn.functional as F\n'), ((7697, 7720), 'torch.cat', 'torch.cat', (['(s_l, _s)', '(0)'], {}), '((s_l, _s), 0)\n', (7706, 7720), False, 'import torch, os, argparse\n'), ((8015, 8038), 'torch.cat', 'torch.cat', (['(s_r, _s)', '(0)'], {}), '((s_r, _s), 0)\n', (8024, 8038), False, 'import torch, os, argparse\n'), ((9089, 9119), 'torch.LongTensor', 'torch.LongTensor', (['x_test[0][i]'], {}), '(x_test[0][i])\n', (9105, 9119), False, 'import torch, os, argparse\n'), ((9332, 9355), 'torch.cat', 'torch.cat', (['(s_l, _s)', '(0)'], {}), '((s_l, _s), 0)\n', (9341, 9355), False, 'import torch, os, argparse\n'), ((9462, 9492), 'torch.LongTensor', 'torch.LongTensor', (['x_test[1][i]'], {}), '(x_test[1][i])\n', (9478, 9492), False, 'import torch, os, argparse\n'), ((9704, 9727), 'torch.cat', 'torch.cat', (['(s_r, _s)', '(0)'], {}), '((s_r, _s), 0)\n', (9713, 9727), False, 'import torch, os, argparse\n'), ((10879, 10903), 'torch.LongTensor', 'torch.LongTensor', (['target'], {}), '(target)\n', (10895, 10903), False, 'import torch, os, argparse\n'), ((12163, 12180), 'torch.Tensor', 'torch.Tensor', (['[0]'], {}), '([0])\n', (12175, 12180), False, 'import torch, os, argparse\n'), ((1892, 1913), 'torch.nn.functional.softmax', 'F.softmax', (['mul'], {'dim': '(0)'}), '(mul, dim=0)\n', (1901, 1913), True, 'import torch.nn.functional as F\n'), ((2325, 2356), 'torch.sum', 'torch.sum', (['ptf_attention'], {'dim': '(0)'}), '(ptf_attention, dim=0)\n', (2334, 2356), False, 'import torch, os, argparse\n'), ((4786, 4807), 'torch.nn.functional.softmax', 'F.softmax', (['mul'], {'dim': '(0)'}), '(mul, dim=0)\n', (4795, 4807), True, 'import torch.nn.functional as F\n'), ((5176, 5197), 'torch.nn.functional.softmax', 'F.softmax', (['mul'], {'dim': '(0)'}), '(mul, dim=0)\n', (5185, 5197), True, 'import torch.nn.functional as F\n'), ((1355, 1408), 
'torch.zeros', 'torch.zeros', (['(1)', 'self.batch_size', 'self.ptf_hidden_size'], {}), '(1, self.batch_size, self.ptf_hidden_size)\n', (1366, 1408), False, 'import torch, os, argparse\n'), ((1441, 1494), 'torch.zeros', 'torch.zeros', (['(1)', 'self.batch_size', 'self.ptf_hidden_size'], {}), '(1, self.batch_size, self.ptf_hidden_size)\n', (1452, 1494), False, 'import torch, os, argparse\n'), ((1562, 1599), 'torch.Tensor', 'torch.Tensor', (['self.ptf_hidden_size', '(1)'], {}), '(self.ptf_hidden_size, 1)\n', (1574, 1599), False, 'import torch, os, argparse\n'), ((5362, 5400), 'torch.Tensor', 'torch.Tensor', (['self.sent_hidden_size', '(1)'], {}), '(self.sent_hidden_size, 1)\n', (5374, 5400), False, 'import torch, os, argparse\n'), ((5541, 5587), 'torch.nn.functional.cosine_similarity', 'F.cosine_similarity', (['l_hidden', 'r_hidden'], {'dim': '(1)'}), '(l_hidden, r_hidden, dim=1)\n', (5560, 5587), True, 'import torch.nn.functional as F\n'), ((5618, 5674), 'sypt_utils.euclidean_distance', 'sypt_utils.euclidean_distance', (['l_hidden', 'r_hidden'], {'dim': '(1)'}), '(l_hidden, r_hidden, dim=1)\n', (5647, 5674), False, 'import sypt_dataset, sypt_utils\n'), ((5709, 5750), 'sypt_utils.dot', 'sypt_utils.dot', (['l_hidden', 'r_hidden'], {'dim': '(1)'}), '(l_hidden, r_hidden, dim=1)\n', (5723, 5750), False, 'import sypt_dataset, sypt_utils\n'), ((5785, 5833), 'sypt_utils.mean_of_l1', 'sypt_utils.mean_of_l1', (['l_hidden', 'r_hidden'], {'dim': '(1)'}), '(l_hidden, r_hidden, dim=1)\n', (5806, 5833), False, 'import sypt_dataset, sypt_utils\n'), ((5864, 5916), 'sypt_utils.sigmoid_kernel', 'sypt_utils.sigmoid_kernel', (['l_hidden', 'r_hidden'], {'dim': '(1)'}), '(l_hidden, r_hidden, dim=1)\n', (5889, 5916), False, 'import sypt_dataset, sypt_utils\n'), ((5947, 5996), 'sypt_utils.chi_squared', 'sypt_utils.chi_squared', (['l_hidden', 'r_hidden'], {'dim': '(1)'}), '(l_hidden, r_hidden, dim=1)\n', (5969, 5996), False, 'import sypt_dataset, sypt_utils\n'), ((6027, 6075), 'sypt_utils.rbf_kernel', 'sypt_utils.rbf_kernel', (['l_hidden', 'r_hidden'], {'dim': '(1)'}), '(l_hidden, r_hidden, dim=1)\n', (6048, 6075), False, 'import sypt_dataset, sypt_utils\n'), ((6107, 6168), 'torch.cat', 'torch.cat', (['[cos, euc, dot_dis, mean_l1, sig, chi, rbf]'], {'dim': '(0)'}), '([cos, euc, dot_dis, mean_l1, sig, chi, rbf], dim=0)\n', (6116, 6168), False, 'import torch, os, argparse\n'), ((6214, 6252), 'torch.cat', 'torch.cat', (['[l_hidden, r_hidden]'], {'dim': '(1)'}), '([l_hidden, r_hidden], dim=1)\n', (6223, 6252), False, 'import torch, os, argparse\n'), ((6548, 6602), 'torch.zeros', 'torch.zeros', (['(1)', 'self.batch_size', 'self.sent_hidden_size'], {}), '(1, self.batch_size, self.sent_hidden_size)\n', (6559, 6602), False, 'import torch, os, argparse\n'), ((6634, 6688), 'torch.zeros', 'torch.zeros', (['(1)', 'self.batch_size', 'self.sent_hidden_size'], {}), '(1, self.batch_size, self.sent_hidden_size)\n', (6645, 6688), False, 'import torch, os, argparse\n'), ((7511, 7542), 'torch.LongTensor', 'torch.LongTensor', (['x_train[0][i]'], {}), '(x_train[0][i])\n', (7527, 7542), False, 'import torch, os, argparse\n'), ((7829, 7860), 'torch.LongTensor', 'torch.LongTensor', (['x_train[1][i]'], {}), '(x_train[1][i])\n', (7845, 7860), False, 'import torch, os, argparse\n'), ((1163, 1216), 'torch.zeros', 'torch.zeros', (['(1)', 'self.batch_size', 'self.ptf_hidden_size'], {}), '(1, self.batch_size, self.ptf_hidden_size)\n', (1174, 1216), False, 'import torch, os, argparse\n'), ((1251, 1304), 'torch.zeros', 'torch.zeros', (['(1)', 
'self.batch_size', 'self.ptf_hidden_size'], {}), '(1, self.batch_size, self.ptf_hidden_size)\n', (1262, 1304), False, 'import torch, os, argparse\n'), ((6350, 6404), 'torch.zeros', 'torch.zeros', (['(1)', 'self.batch_size', 'self.sent_hidden_size'], {}), '(1, self.batch_size, self.sent_hidden_size)\n', (6361, 6404), False, 'import torch, os, argparse\n'), ((6443, 6497), 'torch.zeros', 'torch.zeros', (['(1)', 'self.batch_size', 'self.sent_hidden_size'], {}), '(1, self.batch_size, self.sent_hidden_size)\n', (6454, 6497), False, 'import torch, os, argparse\n')]
|
# Generated by Django 3.2.2 on 2021-11-01 20:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0005_remove_linkedaccount_unique_identifer_on_community_platform'),
]
operations = [
migrations.AddField(
model_name='plugin',
name='community_platform_id',
field=models.CharField(blank=True, help_text='Optional identifier for this instance. If multiple instances are allowed per community, this field must be set to a unique value for each instance.', max_length=100, null=True),
),
migrations.AlterUniqueTogether(
name='plugin',
unique_together={('name', 'community', 'community_platform_id')},
),
]
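# Net effect: Plugin rows are now unique on (name, community,
# community_platform_id), so several instances of the same plugin can coexist
# in one community as long as each carries a distinct community_platform_id
# (see the field's help_text above).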
|
[
"django.db.models.CharField",
"django.db.migrations.AlterUniqueTogether"
] |
[((624, 739), 'django.db.migrations.AlterUniqueTogether', 'migrations.AlterUniqueTogether', ([], {'name': '"""plugin"""', 'unique_together': "{('name', 'community', 'community_platform_id')}"}), "(name='plugin', unique_together={('name',\n 'community', 'community_platform_id')})\n", (654, 739), False, 'from django.db import migrations, models\n'), ((387, 613), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""Optional identifier for this instance. If multiple instances are allowed per community, this field must be set to a unique value for each instance."""', 'max_length': '(100)', 'null': '(True)'}), "(blank=True, help_text=\n 'Optional identifier for this instance. If multiple instances are allowed per community, this field must be set to a unique value for each instance.'\n , max_length=100, null=True)\n", (403, 613), False, 'from django.db import migrations, models\n')]
|
import asyncio
import httpx
from blackbull.logger import get_logger_set
logger, log = get_logger_set()
async def main():
async with httpx.AsyncClient(http2=True, verify=False) as c:
res = await c.get('https://localhost:8000/json', headers={'key': 'value'})
assert res.status_code == 200
assert res.content == b'{"a": "b"}'
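# Note: verify=False disables TLS certificate verification; that is only
# appropriate here because the target is a local development server.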
if __name__ == '__main__':
asyncio.run(
asyncio.wait_for(
main(), timeout=0.5
)
)
|
[
"httpx.AsyncClient",
"blackbull.logger.get_logger_set"
] |
[((87, 103), 'blackbull.logger.get_logger_set', 'get_logger_set', ([], {}), '()\n', (101, 103), False, 'from blackbull.logger import get_logger_set\n'), ((139, 182), 'httpx.AsyncClient', 'httpx.AsyncClient', ([], {'http2': '(True)', 'verify': '(False)'}), '(http2=True, verify=False)\n', (156, 182), False, 'import httpx\n')]
|
import pytc
import string
from random import choice
DBNAME = "../mental_cache.hdb"
db = pytc.HDB()
db.open(DBNAME, pytc.HDBOWRITER | pytc.HDBOCREAT)
chars = string.letters.lower() + string.digits
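# Note: string.letters and xrange (below) exist only on Python 2; the Python 3
# equivalents would be string.ascii_lowercase and range.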
x = 1
while x < 1000000:
page_name = ''.join([choice(chars) for i in xrange(8)])
db.put(page_name,'{"order": "","name": "Untitled","components": {},"last_id": 0}')
    x += 1
#print db.get('2')
|
[
"string.letters.lower",
"random.choice",
"pytc.HDB"
] |
[((88, 98), 'pytc.HDB', 'pytc.HDB', ([], {}), '()\n', (96, 98), False, 'import pytc\n'), ((158, 180), 'string.letters.lower', 'string.letters.lower', ([], {}), '()\n', (178, 180), False, 'import string\n'), ((247, 260), 'random.choice', 'choice', (['chars'], {}), '(chars)\n', (253, 260), False, 'from random import choice\n')]
|
from datetime import datetime, timedelta
from django.contrib.auth.models import User
from django.db.models import Q, Count
from django.http import HttpResponseRedirect
from django.utils.html import format_html
from rest_framework import status, viewsets
from rest_framework.renderers import TemplateHTMLRenderer, JSONRenderer
from rest_framework.response import Response
from rest_framework.views import APIView
# UPDATES PAGE VIEWS
from employee.models import Employee
from fileupload.models import ChequeFile
from restapi.helper_api import verify_pod_data, my_uploaded_pod_data, manual_booking_id_list, check_booking_status, \
get_booking_status_mapping_object
from restapi.models import BookingStatusesMapping, BookingStatusChain
from restapi.serializers.employee import EmployeeSerializer
from restapi.serializers.file_upload import ChequeFileSerializer
from restapi.serializers.team import InvoiceSerializer
from restapi.serializers.team import ManualBookingSerializer
from restapi.serializers.utils import IfscDetailSerializer
from restapi.service.booking import detailed_full_booking_page_data, \
detailed_commission_booking_page_data
from restapi.service.credit_debit_note import approve_credit_note_customer_data, approve_debit_note_customer_data, \
approve_credit_note_supplier_data, approve_debit_note_supplier_data, \
approve_credit_note_customer_direct_advance_data
from restapi.service.invoices import get_invoice_data, get_comment_list, get_amount_data, \
full_booking_invoice_data
from restapi.service.payments import pending_payments_data, pending_payment_adjustment_data
from restapi.service.trackvehicle import track_vehicles_data, track_vehicle_data
from restapi.utils import get_or_none
from sme.models import Sme
from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, \
DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice
from utils.models import VehicleCategory, IfscDetail
class DownloadPaymentFilePage(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get(self, request):
return Response(status=status.HTTP_200_OK, template_name='team/download_outward_payment_file.html')
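# Most of the page views below share this pattern: TemplateHTMLRenderer tells
# DRF to render the template named in the Response (or in the class-level
# template_name), and any `data` passed to Response becomes the template
# context.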
class ManualBookingCreatePageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get_basic_full_booking(self, request):
return Response(template_name='team/booking/fetch_full_booking_data_page.html')
def get_confirm_booking(self, request):
return Response(template_name='team/booking/confirm_booking_page.html')
def get_detailed_full_booking(self, request):
json_data = {k: request.GET.get(k) for k in request.GET.keys()}
return Response(template_name='team/booking/full-booking.html', data=detailed_full_booking_page_data(json_data),
status=status.HTTP_200_OK)
def get_detailed_full_booking_mb_id_based(self, request, pk):
try:
manual_booking = ManualBooking.objects.get(id=pk)
except ManualBooking.DoesNotExist:
return Response({"status": "failure",
"msg": "ManualBooking Doesn't exists",
"status_code": status.HTTP_400_BAD_REQUEST,
"data": {}}, status=status.HTTP_400_BAD_REQUEST)
serializer = ManualBookingSerializer(instance=manual_booking)
return Response(template_name='team/booking/detailed_lr_generation.html',
data=serializer.data, status=status.HTTP_200_OK)
def get_basic_commission_booking(self, request):
return Response(template_name='team/booking/fetch-commission-booking-data.html')
def get_detailed_commission_booking(self, request):
data = request.GET
json_data = {k: data.get(k) for k in data.keys()}
return Response(template_name='team/booking/commission-booking.html',
data=detailed_commission_booking_page_data(json_data))
class OutwardPaymentListPageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get(self, request):
return Response(template_name='team/payments/outward_payment_history.html')
def get_payment_receipt(self, request):
return Response(template_name='team/payments/supplier_payment_receipt.html', status=status.HTTP_200_OK)
class EmployeeProfilePageView(viewsets.ViewSet):
renderer_classes = (JSONRenderer, TemplateHTMLRenderer)
def get(self, request):
emp = get_or_none(Employee, username=User.objects.get(username=request.user.username))
employee_serializer = EmployeeSerializer(instance=emp)
return Response(template_name='team/employee/emp-profile.html', data=employee_serializer.data,
status=status.HTTP_200_OK)
class ChangePasswordPageView(viewsets.ViewSet):
renderer_classes = (JSONRenderer, TemplateHTMLRenderer)
def get(self, request):
return Response(template_name='team/employee/change-password.html', status=status.HTTP_200_OK)
class InwardPaymentListPageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get(self, request):
return Response(template_name='team/payments/inward_payment_history.html')
class OutwardPaymentPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
return Response(status=status.HTTP_200_OK, template_name='team/payments/add_outward_payment.html')
class BookingStatusesMonitoringPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
return Response(status=status.HTTP_200_OK, template_name='team/monitoring/senior_mgmt_booking_status.html')
class TaskStatusesMonitoringPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
return Response(status=status.HTTP_200_OK, template_name='team/monitoring/senior_mgmt_task_status.html')
class PendingInwardPageView(viewsets.ViewSet):
renderer_classes = (JSONRenderer, TemplateHTMLRenderer,)
def get(self, request):
return Response(template_name='team/payments/add_received_payment.html')
def unadjusted_list(self, request):
return Response(template_name='team/payments/pending-payment-list.html', data={
'pending_payments': pending_payments_data(),
})
def payment_adjustment(self, request):
response = pending_payment_adjustment_data(data={
'accept_choice': request.GET.get('accept_choice'),
'payment_id': request.GET.get('payment_id'),
'customer': request.GET.get('customer'),
'tds': request.GET.get('tds'),
'username': request.user.username,
})
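        # The helper returns a dict with 'status', 'msg' and 'data'; any
        # non-200 status is passed back as a plain error payload below instead
        # of rendering the adjustment template.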
if response['status'] != 200:
return Response(status=response['status'], data={'msg': response['msg']})
return Response(template_name='team/payments/payment-adjustment-page.html', status=status.HTTP_200_OK,
data=response['data'])
class ChequePageView(viewsets.ViewSet):
renderer_classes = (JSONRenderer, TemplateHTMLRenderer,)
def create(self, request):
return Response(template_name='', status=status.HTTP_200_OK)
def uncredited_cheque_list(self, request):
cheques = ChequeFile.objects.filter(resolved=False).order_by('cheque_date').values(
'cheque_number', 'cheque_date', 'customer_name', 'amount', 'remarks').annotate(Count('cheque_number'))
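        # A single physical cheque can have several ChequeFile rows (one per
        # scanned image), so rows are grouped by cheque_number above and their
        # ids are joined below for the front end to resolve together.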
data = []
for cheque in cheques:
cheque_number = cheque['cheque_number']
data.append({
'id': ','.join([str(row.id) for row in ChequeFile.objects.filter(cheque_number=cheque_number)]),
'cheque_number': cheque_number,
'cheque_date': cheque['cheque_date'],
'customer_name': cheque['customer_name'],
'amount': cheque['amount'],
'remarks': cheque['remarks'],
'images': [{'url': row.s3_upload.public_url(), 'filename': row.cheque_number, } for row in
ChequeFile.objects.filter(cheque_number=cheque_number)]
})
return Response(template_name='team/payments/uncredited-cheques.html', status=status.HTTP_200_OK,
data={'cheques': data})
class InvoicePageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def list(self, request):
return Response(template_name='team/invoices/invoice_list.html', status=status.HTTP_200_OK)
def summary(self, request):
return Response(template_name='team/invoices/invoice_summary_statement.html', status=status.HTTP_200_OK)
def fetch_full_booking_invoice(self, request):
return Response(template_name='team/invoices/fetch_full_booking_invoice_data.html', status=status.HTTP_200_OK)
def full_booking_invoice(self, request):
customer = get_or_none(Sme, id=request.GET.get('customer_to_be_billed'))
return Response(template_name='team/invoices/full_booking_invoices.html',
data=full_booking_invoice_data(customer=customer), status=status.HTTP_200_OK)
def fetch_commission_booking_invoice(self, request):
return Response(template_name='team/invoices/fetch-commission-invoice.html', status=status.HTTP_200_OK)
def commission_booking_invoice(self, request):
customer = get_or_none(Sme, id=request.GET.get('customer_to_be_billed'))
bookings = ManualBooking.objects.filter(id__in=request.GET.getlist('booking_id[]'))
if not bookings.exists() or not isinstance(customer, Sme):
return HttpResponseRedirect('/team/commission-invoice-data-page/')
invoice_data = get_invoice_data(bookings, 'commission')
comment_list = get_comment_list(bookings, invoice_data)
return Response(template_name='team/invoices/commission_booking_invoice.html', status=status.HTTP_200_OK,
data={'booking_data': invoice_data, 'customer': customer,
'gst_liability': bookings.last().gst_liability,
'booking_ids': ','.join(map(str, bookings.values_list('id', flat=True))),
'comment_list': comment_list,
'invoice_amount_data': get_amount_data(bookings=bookings, booking_type='full'),
})
class LrNumberPageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def list(self, request):
return Response(template_name='team/booking/download-lr.html', status=status.HTTP_200_OK)
class PODPageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def upload(self, request):
return Response(template_name='', status=status.HTTP_200_OK)
def list(self, request):
return Response(template_name='team/booking/pod-list.html', status=status.HTTP_200_OK)
def unverified_pod(self, request):
return Response(template_name='team/documents/verify_pod.html', data={'bookings_data': verify_pod_data()},
status=status.HTTP_200_OK)
def td_unverified_pod(self, request):
return Response(template_name='team/documents/td_verify_pod.html', data={'bookings_data': verify_pod_data()},
status=status.HTTP_200_OK)
def my_uploaded_pod(self, request):
return Response(template_name='team/documents/uploaded-pod.html',
data={'bookings_data': my_uploaded_pod_data(user=request.user)}, status=status.HTTP_200_OK)
class AccountingSummaryPageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get_placed_order_customer_summary(self, request):
return Response(template_name='team/accounting/placed-order-customer-summary.html', status=status.HTTP_200_OK)
def get_billed_customer_summary(self, request):
return Response(template_name='team/accounting/billed-customer-summary.html', status=status.HTTP_200_OK)
def get_supplier_summary(self, request):
return Response(template_name='team/accounting/supplier-summary.html', status=status.HTTP_200_OK)
def get_vehicle_summary(self, request):
return Response(template_name='team/accounting/vehicle-summary.html', status=status.HTTP_200_OK)
class BankAccountPageView(viewsets.ViewSet):
renderer_classes = (JSONRenderer, TemplateHTMLRenderer)
def fetch_ifsc(self, request):
return Response(template_name='team/registrations/fetch-bank-details-using-ifsc.html',
status=status.HTTP_200_OK)
def create(self, request):
ifsc = get_or_none(IfscDetail, ifsc_code__iexact=request.GET.get('fetch_ifsc'))
if isinstance(ifsc, IfscDetail):
data = IfscDetailSerializer(ifsc).data
else:
data = {}
return Response(template_name='team/registrations/register_beneficiary_bank_account.html',
status=status.HTTP_200_OK, data=data)
def list(self, request):
return Response(template_name='team/payments/beneficiary_list.html', status=status.HTTP_200_OK)
class TrackVehiclePageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def track_vehicles(self, request):
return Response(template_name='team/track/track_vehicles.html', status=status.HTTP_200_OK,
data=track_vehicles_data())
def track_vehicle(self, request):
return Response(template_name='team/track/track_individual_vehicle.html', status=status.HTTP_200_OK,
data=track_vehicle_data(device_id=request.GET.get('gps_log_id')))
# FILE UPLOAD
class PODUploadPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
        lr_numbers = LrNumber.objects.filter(
            Q(datetime__date__gte=datetime.now().date() - timedelta(days=180)) &
            (Q(booking__pod_status='pending') | Q(booking__pod_status='rejected') |
             Q(booking__pod_status='unverified'))
        ).order_by('-datetime').values('id', 'lr_number')
        bookings = []
        for booking in ManualBooking.objects.filter(
            (Q(pod_status__iexact='pending') | Q(pod_status__iexact='rejected')) &
            (Q(booking_id__istartswith='BROKER') | Q(booking_id__istartswith='AB'))
        ).exclude(Q(booking_status='cancelled') | Q(deleted=True)):
bookings.append({'booking_id': booking.booking_id})
return Response({'lr_numbers': lr_numbers, 'bookings': bookings}, template_name='fileupload/pod_upload.html',
status=status.HTTP_200_OK)
class ChequeFilePageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
cheques = ChequeFile.objects.filter(resolved=False).exclude(deleted=True).order_by('-cheque_date')
cheques_serializer = ChequeFileSerializer(cheques, many=True)
return Response({"data": cheques_serializer.data}, status=status.HTTP_200_OK,
template_name="team/payments/uncredited-cheques.html")
class ManualBookingListPage(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get_partial_booking(self, request):
return Response(status=status.HTTP_200_OK, template_name='team/booking/partial_booking.html')
def get_full_booking(self, request):
return Response(status=status.HTTP_200_OK, template_name='team/booking/booking-archive.html')
def get_generate_lr(self, request):
return Response(status=status.HTTP_200_OK, template_name='team/booking/booking_status_loaded.html')
def get_bookings_pay_advance(self, request):
return Response(status=status.HTTP_200_OK, template_name='team/booking/bookings_pay_advance.html')
class BookingMISPage(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get(self, request):
return Response(template_name='team/booking/mis-booking.html', status=status.HTTP_200_OK)
class UpdateContractBookingPage(viewsets.ViewSet):
renderer_classes = (JSONRenderer, TemplateHTMLRenderer,)
def get(self, request):
bookings = ManualBooking.objects.filter(Q(total_amount_to_company=0)).filter(billing_type='contract').exclude(
Q(deleted=True) | Q(booking_status='cancelled'))
data = []
for booking in bookings:
data.append({
'id': booking.id,
'booking_id': booking.booking_id,
'shipment_date': booking.shipment_date.strftime('%d-%b-%Y') if booking.shipment_date else '',
'lr_numbers': '\n'.join(booking.lr_numbers.values_list('lr_number', flat=True)),
'customer_name': booking.company.get_name() if booking.company else '',
'origin': booking.from_city,
'destination': booking.to_city,
'weight': booking.charged_weight,
'rate_id': '{}_{}'.format('rate', booking.booking_id),
'amount_id': '{}_{}'.format('amount', booking.booking_id)
})
return Response(template_name='team/booking/update-contract-bookings-rate.html', status=status.HTTP_200_OK,
data={'bookings': data, 'id': ','.join(map(str, bookings.values_list('id', flat=True)))})
# UPDATE PAGE VIEWS
class PayBalanceBookingHistoryPage(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get(self, request):
return Response(template_name='team/payments/pay_balance_booking_history.html', status=status.HTTP_200_OK)
class RaiseInvoiceBookingHistoryPage(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get(self, request):
return Response(template_name='team/invoices/raise_invoice_booking_history.html', status=status.HTTP_200_OK)
class UploadInvoiceSentReceiptPage(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get(self, request):
booking_ids = manual_booking_id_list(user=request.user)
invoice_raised_bookings = BookingStatusesMapping.objects.filter(
booking_status_chain__booking_status__status__iexact='invoice_raised').exclude(
Q(deleted=True) | Q(booking_stage='reverted')). \
values_list('manual_booking_id', flat=True)
party_invoice_sent_bookings = BookingStatusesMapping.objects.filter(
booking_status_chain__booking_status__status__iexact='party_invoice_sent').exclude(
Q(deleted=True) | Q(booking_stage='reverted')). \
values_list('manual_booking_id', flat=True)
invoice_not_sent_bookings = [x for x in invoice_raised_bookings if x not in party_invoice_sent_bookings]
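        # i.e. bookings whose status chain says the invoice was raised but not
        # yet marked party_invoice_sent; the matching invoices are then limited
        # to the last 365 days below.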
bookings = ManualBooking.objects.filter(id__in=booking_ids).filter(id__in=invoice_not_sent_bookings). \
filter(invoice_status='invoice_raised').exclude(billing_type='contract')
invoices = Invoice.objects.filter(bookings__in=bookings,
date__gte=datetime.now().date() - timedelta(days=365)).distinct()
# invoices = Invoice.objects.filter(date__gte=datetime.now() - timedelta(days=3)).exclude(deleted=True)
serializer = InvoiceSerializer(instance=invoices, many=True)
return Response(template_name='team/invoices/invoice_sent_receipt.html', status=status.HTTP_200_OK,
data={'data': serializer.data})
class ConfirmInvoiceSentPage(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get(self, request):
booking_ids = manual_booking_id_list(user=request.user)
party_invoice_sent_bookings = BookingStatusesMapping.objects.filter(
booking_status_chain__booking_status__status__iexact='party_invoice_sent').exclude(
Q(deleted=True) | Q(booking_stage='reverted')). \
values_list('manual_booking_id', flat=True)
invoice_confirmed_bookings = BookingStatusesMapping.objects.filter(
booking_status_chain__booking_status__status__iexact='invoice_confirmed').exclude(
Q(deleted=True) | Q(booking_stage='reverted')). \
values_list('manual_booking_id', flat=True)
invoice_not_confirmed_bookings = [x for x in party_invoice_sent_bookings if x not in invoice_confirmed_bookings]
bookings = ManualBooking.objects.filter(id__in=booking_ids).filter(id__in=invoice_not_confirmed_bookings). \
filter(invoice_status='invoice_sent').exclude(billing_type='contract')
invoices = Invoice.objects.filter(bookings__in=bookings,
date__gte=datetime.now().date() - timedelta(days=365)).distinct()
# invoices = Invoice.objects.filter(date__gte=datetime.now() - timedelta(days=3)).exclude(deleted=True)
serializer = InvoiceSerializer(instance=invoices, many=True)
data = self.add_booking_status_mapping_info(serializer.data)
return Response(template_name='team/invoices/confirm_sent_invoice.html', status=status.HTTP_200_OK,
data={'data': data})
def add_booking_status_mapping_info(self, data):
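        # Annotates each serialized invoice with an 'invoice_booking_details'
        # list: one entry per booking on the invoice, carrying the booking id
        # plus the party_invoice_sent status-mapping id, status-chain id and
        # booking stage (None where that status is absent).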
for inv in data:
inv['invoice_booking_details'] = []
inv_bookings = Invoice.objects.get(id=inv['id']).bookings.all()
for booking in inv_bookings:
bsm_details = {}
booking_invoice_confirmed = check_booking_status(booking, 'party_invoice_sent')
booking_status_mapping_id = None
booking_status_chain_id = None
booking_status_mapping_booking_stage = None
if booking_invoice_confirmed:
booking_status_mapping_object = get_booking_status_mapping_object(booking, 'party_invoice_sent')
try:
booking_status_chain_id = BookingStatusChain.objects.get(
booking_status__status='party_invoice_sent').id
except BookingStatusChain.DoesNotExist:
booking_status_chain_id = None
if booking_status_mapping_object:
booking_status_mapping_id = booking_status_mapping_object.id
booking_status_mapping_booking_stage = booking_status_mapping_object.booking_stage
bsm_details['booking_id'] = booking.id
bsm_details['booking_status_mapping_id'] = booking_status_mapping_id
bsm_details['booking_status_chain_id'] = booking_status_chain_id
bsm_details['booking_status_mapping_booking_stage'] = booking_status_mapping_booking_stage
inv['invoice_booking_details'].append(bsm_details)
return data
class ProcessPaymentEnetPage(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def get(self, request):
return Response(template_name='team/payments/process_payment_page.html', status=status.HTTP_200_OK)
class ReconcilePaymentPage(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
return Response(template_name='team/payments/reconcile_payment_page.html', status=status.HTTP_200_OK)
class OwnerListPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/registrations/owner_list.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class OwnerVehicleListPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/registrations/vehicle-list.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class SmeListPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/registrations/customer-archive.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class SupplierListPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/registrations/supplier-list.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class DriverListPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/registrations/driver-list-page.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
# REGISTER PAGE VIEWS
class VehicleRegisterPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
vehicle_categories = [
{'id': vehicle_category.id, 'vehicle_type': vehicle_category.vehicle_type,
'capacity': vehicle_category.capacity}
for vehicle_category in VehicleCategory.objects.all()
]
body_type_choices = (
('open', 'Open'),
('closed', 'Closed'),
('semi', 'Semi'),
('half', 'Half'),
('containerized', 'Containerized'),
)
gps_enable_choices = (
('yes', 'Yes'),
('no', 'No')
)
return Response({
'vehicle_categories': vehicle_categories,
'body_type_choices': body_type_choices,
'gps_enable_choices': gps_enable_choices
}, template_name='team/registrations/register_vehicle.html', status=status.HTTP_200_OK)
class OwnerRegisterPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/registrations/register_owner.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class SmeRegisterPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/registrations/register-customer.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class SupplierRegisterPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/registrations/register-supplier.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class DriverRegisterPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/registrations/register-driver.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
# CREDIT DEBIT NOTE
class IssueCreditDebitNotePageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/credit_debit_note/issue/issue-credit-debit-note.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class IssueCreditNoteCustomerPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/credit_debit_note/issue/issue_cnc.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class IssueCreditNoteSupplierPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/credit_debit_note/issue/issue_cns.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class IssueDebitNoteCustomerPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/credit_debit_note/issue/issue_dnc.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class IssueDebitNoteSupplierPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/credit_debit_note/issue/issue_dns.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class IssueCreditNoteCustomerDirectAdvancePageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/credit_debit_note/issue/issue_cnca.html'
def get(self, request):
return Response(status=status.HTTP_200_OK)
class ApproveCreditDebitNotePageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/credit_debit_note/approve/approve_credit_debit_note_page.html'
def get(self, request):
return Response(status=status.HTTP_200_OK, data={
'cnc': approve_credit_note_customer_data(),
'dnc': approve_debit_note_customer_data(),
'cns': approve_credit_note_supplier_data(),
'dns': approve_debit_note_supplier_data(),
'cnca': approve_credit_note_customer_direct_advance_data(),
})
class ApproveCreditNoteCustomerPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
data = []
for row in CreditNoteCustomer.objects.filter(status='pending').exclude(deleted=True).order_by('created_on'):
data.append({
'cnc_id': row.id,
'customer': row.customer.get_name() if row.customer else '-',
'bookings': '\n'.join(
[format_html('''<a href="/team/booking-edit/?booking_id={}">{}</a>''', booking.id,
booking.booking_id)
for booking in row.bookings.all()]),
'invoice': row.invoice.invoice_number if row.invoice else '-',
'amount': row.credit_amount,
'created_on': row.created_on.strftime('%d-%b-%Y') if row.created_on else '-',
'credit_note_number': row.credit_note_number,
'created_by': row.created_by.username if row.created_by else '-',
'credit_note_reason': row.reason.name if row.reason else '-',
'remarks': row.remarks,
'approve_cnc_form': 'approve_cnc_form_{}'.format(row.id),
'approve_cnc_btn': 'approve_cnc_btn_{}'.format(row.id),
'reject_cnc_btn': 'reject_cnc_btn_{}'.format(row.id),
'input_reject_cnc_remarks': 'input_reject_cnc_remarks_{}'.format(row.id),
'btn_status': 'btn_status_{}'.format(row.id),
'div_rejection_remarks': 'div_rejection_remarks_{}'.format(row.id),
'div_rejection_line': 'div_rejection_line_{}'.format(row.id),
})
return Response({'data': data}, template_name='team/credit_debit_note/approve/cnc.html',
status=status.HTTP_200_OK)
class ApproveCreditNoteSupplierPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
data = []
for row in CreditNoteSupplier.objects.filter(status='pending').exclude(deleted=True).order_by('created_on'):
data.append({
'cnc_id': row.id,
'broker': row.broker.get_name() if row.broker else '-',
'bookings': '\n'.join(
[format_html('''<a href="/team/booking-edit/?booking_id={}">{}</a>''', booking.id,
booking.booking_id)
for booking in row.bookings.all()]),
'invoice': row.invoice.invoice_number if row.invoice else '-',
'amount': row.credit_amount,
'created_on': row.created_on.strftime('%d-%b-%Y') if row.created_on else '-',
'credit_note_number': row.credit_note_number,
'created_by': row.created_by.username if row.created_by else '-',
'credit_note_reason': row.reason.name if row.reason else '-',
'remarks': row.remarks,
'approve_cns_form': 'approve_cns_form_{}'.format(row.id),
'approve_cns_btn': 'approve_cns_btn_{}'.format(row.id),
'reject_cns_btn': 'reject_cns_btn_{}'.format(row.id),
'input_reject_cns_remarks': 'input_reject_cns_remarks_{}'.format(row.id),
'btn_status': 'btn_status_{}'.format(row.id),
'div_rejection_remarks': 'div_rejection_remarks_{}'.format(row.id),
'div_rejection_line': 'div_rejection_line_{}'.format(row.id),
})
return Response({'data': data}, template_name='team/credit_debit_note/approve/cns.html',
status=status.HTTP_200_OK)
class ApproveDebitNoteCustomerPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
def get(self, request):
data = []
for row in DebitNoteCustomer.objects.filter(status='pending').exclude(deleted=True).order_by('created_on'):
data.append({
'dnc_id': row.id,
'customer': row.customer.get_name() if row.customer else '-',
'bookings': '\n'.join(
[format_html('''<a href="/team/booking-edit/?booking_id={}">{}</a>''', booking.id,
booking.booking_id)
for booking in row.bookings.all()]),
'invoice': row.invoice.invoice_number if row.invoice else '-',
'amount': row.debit_amount,
'created_on': row.created_on.strftime('%d-%b-%Y') if row.created_on else '-',
'debit_note_number': row.debit_note_number,
'created_by': row.created_by.username if row.created_by else '-',
'debit_note_reason': row.reason.name if row.reason else '-',
'remarks': row.remarks,
'approve_dnc_form': 'approve_dnc_form_{}'.format(row.id),
'approve_dnc_btn': 'approve_dnc_btn_{}'.format(row.id),
'reject_dnc_btn': 'reject_dnc_btn_{}'.format(row.id),
'input_reject_dnc_remarks': 'input_reject_dnc_remarks_{}'.format(row.id),
'btn_status': 'btn_status_{}'.format(row.id),
'div_rejection_remarks': 'div_rejection_remarks_{}'.format(row.id),
'div_rejection_line': 'div_rejection_line_{}'.format(row.id),
})
return Response({'data': data}, template_name='team/credit_debit_note/approve/dnc.html',
status=status.HTTP_200_OK)
class ApproveDebitNoteSupplierPageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/credit_debit_note/issue/issue_dns.html'
def get(self, request):
data = []
for row in DebitNoteSupplier.objects.filter(status='pending').exclude(deleted=True).order_by('created_on'):
data.append({
'cnc_id': row.id,
'broker': row.broker.get_name() if row.broker else '-',
'bookings': '\n'.join(
[format_html('''<a href="/team/booking-edit/?booking_id={}">{}</a>''', booking.id,
booking.booking_id)
for booking in row.bookings.all()]),
'invoice': row.invoice.invoice_number if row.invoice else '-',
'amount': row.debit_amount,
'created_on': row.created_on.strftime('%d-%b-%Y') if row.created_on else '-',
'credit_note_number': row.debit_note_number,
'created_by': row.created_by.username if row.created_by else '-',
'credit_note_reason': row.reason.name if row.reason else '-',
'remarks': row.remarks,
'approve_dns_form': 'approve_dns_form_{}'.format(row.id),
'approve_dns_btn': 'approve_dns_btn_{}'.format(row.id),
'reject_dns_btn': 'reject_dns_btn_{}'.format(row.id),
'input_reject_dns_remarks': 'input_reject_dns_remarks_{}'.format(row.id),
'btn_status': 'btn_status_{}'.format(row.id),
'div_rejection_remarks': 'div_rejection_remarks_{}'.format(row.id),
'div_rejection_line': 'div_rejection_line_{}'.format(row.id),
})
return Response({'data': data}, template_name='team/credit_debit_note/approve/dns.html',
status=status.HTTP_200_OK)
class ApproveCreditNoteCustomerDirectAdvancePageView(APIView):
renderer_classes = (TemplateHTMLRenderer, JSONRenderer)
template_name = 'team/credit_debit_note/issue/issue_cnca.html'
def get(self, request):
data = []
for row in CreditNoteCustomerDirectAdvance.objects.filter(status='pending').exclude(deleted=True).order_by(
'created_on'):
data.append({
'cnc_id': row.id,
'broker': row.broker.get_name() if row.broker else '-',
'customer': row.customer.get_name() if row.customer else '-',
'bookings': '\n'.join(
[format_html('''<a href="/team/booking-edit/?booking_id={}">{}</a>''', booking.id,
booking.booking_id)
for booking in row.bookings.all()]),
'invoice': row.invoice.invoice_number if row.invoice else '-',
'amount': row.credit_amount,
'created_on': row.created_on.strftime('%d-%b-%Y') if row.created_on else '-',
'credit_note_number': row.credit_note_number,
'created_by': row.created_by.username if row.created_by else '-',
'credit_note_reason': row.reason.name if row.reason else '-',
'remarks': row.remarks,
'approve_cnca_form': 'approve_cnca_form_{}'.format(row.id),
'approve_cnca_btn': 'approve_cnca_btn_{}'.format(row.id),
'reject_cnca_btn': 'reject_cnca_btn_{}'.format(row.id),
'input_reject_cnca_remarks': 'input_reject_cnca_remarks_{}'.format(row.id),
'btn_status': 'btn_status_{}'.format(row.id),
'div_rejection_remarks': 'div_rejection_remarks_{}'.format(row.id),
'div_rejection_line': 'div_rejection_line_{}'.format(row.id),
})
return Response({'data': data}, template_name='team/credit_debit_note/approve/cnca.html',
status=status.HTTP_200_OK)
class MobilePageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def dashboard(self, request):
return Response(template_name='mobile/dashboard.html', status=status.HTTP_200_OK)
class DocumentUploadPageView(viewsets.ViewSet):
renderer_classes = (TemplateHTMLRenderer,)
def pod(self, request):
        lr_numbers = LrNumber.objects.filter(
            Q(datetime__date__gte=datetime.now().date() - timedelta(days=180)) &
            (Q(booking__pod_status='pending') | Q(booking__pod_status='rejected') |
             Q(booking__pod_status='unverified'))
        ).order_by('-datetime').values('id', 'lr_number')
        bookings = []
        for booking in ManualBooking.objects.filter(
            (Q(pod_status__iexact='pending') | Q(pod_status__iexact='rejected')) &
            (Q(booking_id__istartswith='BROKER') | Q(booking_id__istartswith='AB'))
        ).exclude(Q(booking_status='cancelled') | Q(deleted=True)):
bookings.append({'booking_id': booking.booking_id})
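        # Note: unlike PODUploadPageView.get above, lr_numbers and bookings are
        # computed here but never passed into the Response below, so the
        # template is rendered without this context.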
return Response(template_name='fileupload/pod_upload.html', status=status.HTTP_200_OK)
def vehicle(self, request):
return Response(template_name='fileupload/upload_vehicle_documents.html', status=status.HTTP_200_OK)
def supplier(self, request):
return Response(template_name='fileupload/upload_supplier_documents.html', status=status.HTTP_200_OK)
def weighing_slip(self, request):
return Response(template_name='fileupload/weighing_slip_upload.html', status=status.HTTP_200_OK)
def owner(self, request):
return Response(template_name='fileupload/upload_owner_documents.html', status=status.HTTP_200_OK)
def driver(self, request):
return Response(template_name='fileupload/upload_driver_documents.html', status=status.HTTP_200_OK)
def cheque(self, request):
return Response(template_name='fileupload/upload_cheque.html', status=status.HTTP_200_OK)
def invoice_receipt(self, request):
return Response(template_name='fileupload/invoice_receipt.html', status=status.HTTP_200_OK)
|
[
"restapi.service.credit_debit_note.approve_credit_note_supplier_data",
"restapi.serializers.employee.EmployeeSerializer",
"restapi.models.BookingStatusesMapping.objects.filter",
"restapi.service.trackvehicle.track_vehicles_data",
"restapi.helper_api.check_booking_status",
"team.models.Invoice.objects.get",
"rest_framework.response.Response",
"utils.models.VehicleCategory.objects.all",
"django.http.HttpResponseRedirect",
"fileupload.models.ChequeFile.objects.filter",
"restapi.helper_api.manual_booking_id_list",
"restapi.service.credit_debit_note.approve_debit_note_customer_data",
"restapi.service.credit_debit_note.approve_debit_note_supplier_data",
"django.contrib.auth.models.User.objects.get",
"restapi.serializers.file_upload.ChequeFileSerializer",
"restapi.service.credit_debit_note.approve_credit_note_customer_data",
"datetime.timedelta",
"restapi.helper_api.get_booking_status_mapping_object",
"restapi.serializers.utils.IfscDetailSerializer",
"team.models.ManualBooking.objects.filter",
"restapi.service.invoices.get_amount_data",
"datetime.datetime.now",
"team.models.ManualBooking.objects.get",
"restapi.service.invoices.get_comment_list",
"team.models.CreditNoteCustomer.objects.filter",
"restapi.service.credit_debit_note.approve_credit_note_customer_direct_advance_data",
"restapi.service.invoices.full_booking_invoice_data",
"restapi.helper_api.verify_pod_data",
"restapi.service.payments.pending_payments_data",
"restapi.models.BookingStatusChain.objects.get",
"team.models.CreditNoteSupplier.objects.filter",
"team.models.DebitNoteSupplier.objects.filter",
"restapi.service.invoices.get_invoice_data",
"team.models.CreditNoteCustomerDirectAdvance.objects.filter",
"restapi.serializers.team.InvoiceSerializer",
"restapi.service.booking.detailed_full_booking_page_data",
"team.models.DebitNoteCustomer.objects.filter",
"django.db.models.Q",
"restapi.helper_api.my_uploaded_pod_data",
"django.db.models.Count",
"restapi.service.booking.detailed_commission_booking_page_data",
"restapi.serializers.team.ManualBookingSerializer",
"django.utils.html.format_html"
] |
[((2126, 2223), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK', 'template_name': '"""team/download_outward_payment_file.html"""'}), "(status=status.HTTP_200_OK, template_name=\n 'team/download_outward_payment_file.html')\n", (2134, 2223), False, 'from rest_framework.response import Response\n'), ((2384, 2456), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/booking/fetch_full_booking_data_page.html"""'}), "(template_name='team/booking/fetch_full_booking_data_page.html')\n", (2392, 2456), False, 'from rest_framework.response import Response\n'), ((2517, 2581), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/booking/confirm_booking_page.html"""'}), "(template_name='team/booking/confirm_booking_page.html')\n", (2525, 2581), False, 'from rest_framework.response import Response\n'), ((3352, 3400), 'restapi.serializers.team.ManualBookingSerializer', 'ManualBookingSerializer', ([], {'instance': 'manual_booking'}), '(instance=manual_booking)\n', (3375, 3400), False, 'from restapi.serializers.team import ManualBookingSerializer\n'), ((3416, 3536), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/booking/detailed_lr_generation.html"""', 'data': 'serializer.data', 'status': 'status.HTTP_200_OK'}), "(template_name='team/booking/detailed_lr_generation.html', data=\n serializer.data, status=status.HTTP_200_OK)\n", (3424, 3536), False, 'from rest_framework.response import Response\n'), ((3625, 3698), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/booking/fetch-commission-booking-data.html"""'}), "(template_name='team/booking/fetch-commission-booking-data.html')\n", (3633, 3698), False, 'from rest_framework.response import Response\n'), ((4143, 4211), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/outward_payment_history.html"""'}), "(template_name='team/payments/outward_payment_history.html')\n", (4151, 4211), False, 'from rest_framework.response import Response\n'), ((4272, 4372), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/supplier_payment_receipt.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/payments/supplier_payment_receipt.html',\n status=status.HTTP_200_OK)\n", (4280, 4372), False, 'from rest_framework.response import Response\n'), ((4634, 4666), 'restapi.serializers.employee.EmployeeSerializer', 'EmployeeSerializer', ([], {'instance': 'emp'}), '(instance=emp)\n', (4652, 4666), False, 'from restapi.serializers.employee import EmployeeSerializer\n'), ((4682, 4801), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/employee/emp-profile.html"""', 'data': 'employee_serializer.data', 'status': 'status.HTTP_200_OK'}), "(template_name='team/employee/emp-profile.html', data=\n employee_serializer.data, status=status.HTTP_200_OK)\n", (4690, 4801), False, 'from rest_framework.response import Response\n'), ((4975, 5067), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/employee/change-password.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/employee/change-password.html', status=status.\n HTTP_200_OK)\n", (4983, 5067), False, 'from rest_framework.response import Response\n'), ((5207, 5274), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/inward_payment_history.html"""'}), "(template_name='team/payments/inward_payment_history.html')\n", (5215, 5274), False, 'from rest_framework.response import Response\n'), ((5420, 5516), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK', 'template_name': '"""team/payments/add_outward_payment.html"""'}), "(status=status.HTTP_200_OK, template_name=\n 'team/payments/add_outward_payment.html')\n", (5428, 5516), False, 'from rest_framework.response import Response\n'), ((5668, 5773), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK', 'template_name': '"""team/monitoring/senior_mgmt_booking_status.html"""'}), "(status=status.HTTP_200_OK, template_name=\n 'team/monitoring/senior_mgmt_booking_status.html')\n", (5676, 5773), False, 'from rest_framework.response import Response\n'), ((5922, 6024), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK', 'template_name': '"""team/monitoring/senior_mgmt_task_status.html"""'}), "(status=status.HTTP_200_OK, template_name=\n 'team/monitoring/senior_mgmt_task_status.html')\n", (5930, 6024), False, 'from rest_framework.response import Response\n'), ((6174, 6239), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/add_received_payment.html"""'}), "(template_name='team/payments/add_received_payment.html')\n", (6182, 6239), False, 'from rest_framework.response import Response\n'), ((6952, 7075), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/payment-adjustment-page.html"""', 'status': 'status.HTTP_200_OK', 'data': "response['data']"}), "(template_name='team/payments/payment-adjustment-page.html', status\n =status.HTTP_200_OK, data=response['data'])\n", (6960, 7075), False, 'from rest_framework.response import Response\n'), ((7245, 7298), 'rest_framework.response.Response', 'Response', ([], {'template_name': '""""""', 'status': 'status.HTTP_200_OK'}), "(template_name='', status=status.HTTP_200_OK)\n", (7253, 7298), False, 'from rest_framework.response import Response\n'), ((8264, 8383), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/uncredited-cheques.html"""', 'status': 'status.HTTP_200_OK', 'data': "{'cheques': data}"}), "(template_name='team/payments/uncredited-cheques.html', status=\n status.HTTP_200_OK, data={'cheques': data})\n", (8272, 8383), False, 'from rest_framework.response import Response\n'), ((8538, 8627), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/invoices/invoice_list.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/invoices/invoice_list.html', status=status.\n HTTP_200_OK)\n", (8546, 8627), False, 'from rest_framework.response import Response\n'), ((8671, 8772), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/invoices/invoice_summary_statement.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/invoices/invoice_summary_statement.html',\n status=status.HTTP_200_OK)\n", (8679, 8772), False, 'from rest_framework.response import Response\n'), ((8836, 8943), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/invoices/fetch_full_booking_invoice_data.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/invoices/fetch_full_booking_invoice_data.html',\n status=status.HTTP_200_OK)\n", (8844, 8943), False, 'from rest_framework.response import Response\n'), ((9324, 9424), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/invoices/fetch-commission-invoice.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/invoices/fetch-commission-invoice.html',\n status=status.HTTP_200_OK)\n", (9332, 9424), False, 'from rest_framework.response import Response\n'), ((9815, 9855), 'restapi.service.invoices.get_invoice_data', 'get_invoice_data', (['bookings', '"""commission"""'], {}), "(bookings, 'commission')\n", (9831, 9855), False, 'from restapi.service.invoices import get_invoice_data, get_comment_list, get_amount_data, full_booking_invoice_data\n'), ((9879, 9919), 'restapi.service.invoices.get_comment_list', 'get_comment_list', (['bookings', 'invoice_data'], {}), '(bookings, invoice_data)\n', (9895, 9919), False, 'from restapi.service.invoices import get_invoice_data, get_comment_list, get_amount_data, full_booking_invoice_data\n'), ((10637, 10724), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/booking/download-lr.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/booking/download-lr.html', status=status.\n HTTP_200_OK)\n", (10645, 10724), False, 'from rest_framework.response import Response\n'), ((10853, 10906), 'rest_framework.response.Response', 'Response', ([], {'template_name': '""""""', 'status': 'status.HTTP_200_OK'}), "(template_name='', status=status.HTTP_200_OK)\n", (10861, 10906), False, 'from rest_framework.response import Response\n'), ((10952, 11031), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/booking/pod-list.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/booking/pod-list.html', status=status.HTTP_200_OK)\n", (10960, 11031), False, 'from rest_framework.response import Response\n'), ((11855, 11962), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/accounting/placed-order-customer-summary.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/accounting/placed-order-customer-summary.html',\n status=status.HTTP_200_OK)\n", (11863, 11962), False, 'from rest_framework.response import Response\n'), ((12027, 12128), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/accounting/billed-customer-summary.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/accounting/billed-customer-summary.html',\n status=status.HTTP_200_OK)\n", (12035, 12128), False, 'from rest_framework.response import Response\n'), ((12186, 12281), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/accounting/supplier-summary.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/accounting/supplier-summary.html', status=\n status.HTTP_200_OK)\n", (12194, 12281), False, 'from rest_framework.response import Response\n'), ((12337, 12431), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/accounting/vehicle-summary.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/accounting/vehicle-summary.html', status=\n status.HTTP_200_OK)\n", (12345, 12431), False, 'from rest_framework.response import Response\n'), ((12585, 12701), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/registrations/fetch-bank-details-using-ifsc.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name=\n 'team/registrations/fetch-bank-details-using-ifsc.html', status=status.\n HTTP_200_OK)\n", (12593, 12701), False, 'from rest_framework.response import Response\n'), ((12979, 13110), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/registrations/register_beneficiary_bank_account.html"""', 'status': 'status.HTTP_200_OK', 'data': 'data'}), "(template_name=\n 'team/registrations/register_beneficiary_bank_account.html', status=\n status.HTTP_200_OK, data=data)\n", (12987, 13110), False, 'from rest_framework.response import Response\n'), ((13170, 13263), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/beneficiary_list.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/payments/beneficiary_list.html', status=status\n .HTTP_200_OK)\n", (13178, 13263), False, 'from rest_framework.response import Response\n'), ((14647, 14781), 'rest_framework.response.Response', 'Response', (["{'lr_numbers': lr_numbers, 'bookings': bookings}"], {'template_name': '"""fileupload/pod_upload.html"""', 'status': 'status.HTTP_200_OK'}), "({'lr_numbers': lr_numbers, 'bookings': bookings}, template_name=\n 'fileupload/pod_upload.html', status=status.HTTP_200_OK)\n", (14655, 14781), False, 'from rest_framework.response import Response\n'), ((15063, 15103), 'restapi.serializers.file_upload.ChequeFileSerializer', 'ChequeFileSerializer', (['cheques'], {'many': '(True)'}), '(cheques, many=True)\n', (15083, 15103), False, 'from restapi.serializers.file_upload import ChequeFileSerializer\n'), ((15119, 15248), 'rest_framework.response.Response', 'Response', (["{'data': cheques_serializer.data}"], {'status': 'status.HTTP_200_OK', 'template_name': '"""team/payments/uncredited-cheques.html"""'}), "({'data': cheques_serializer.data}, status=status.HTTP_200_OK,\n template_name='team/payments/uncredited-cheques.html')\n", (15127, 15248), False, 'from rest_framework.response import Response\n'), ((15438, 15529), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK', 'template_name': '"""team/booking/partial_booking.html"""'}), "(status=status.HTTP_200_OK, template_name=\n 'team/booking/partial_booking.html')\n", (15446, 15529), False, 'from rest_framework.response import Response\n'), ((15582, 15673), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK', 'template_name': '"""team/booking/booking-archive.html"""'}), "(status=status.HTTP_200_OK, template_name=\n 'team/booking/booking-archive.html')\n", (15590, 15673), False, 'from rest_framework.response import Response\n'), ((15725, 15822), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK', 'template_name': '"""team/booking/booking_status_loaded.html"""'}), "(status=status.HTTP_200_OK, template_name=\n 'team/booking/booking_status_loaded.html')\n", (15733, 15822), False, 'from rest_framework.response import Response\n'), ((15883, 15979), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK', 'template_name': '"""team/booking/bookings_pay_advance.html"""'}), "(status=status.HTTP_200_OK, template_name=\n 'team/booking/bookings_pay_advance.html')\n", (15891, 15979), False, 'from rest_framework.response import Response\n'), ((16108, 16195), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/booking/mis-booking.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/booking/mis-booking.html', status=status.\n HTTP_200_OK)\n", (16116, 16195), False, 'from rest_framework.response import Response\n'), ((17672, 17775), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/pay_balance_booking_history.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/payments/pay_balance_booking_history.html',\n status=status.HTTP_200_OK)\n", (17680, 17775), False, 'from rest_framework.response import Response\n'), ((17921, 18026), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/invoices/raise_invoice_booking_history.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/invoices/raise_invoice_booking_history.html',\n status=status.HTTP_200_OK)\n", (17929, 18026), False, 'from rest_framework.response import Response\n'), ((18177, 18218), 'restapi.helper_api.manual_booking_id_list', 'manual_booking_id_list', ([], {'user': 'request.user'}), '(user=request.user)\n', (18199, 18218), False, 'from restapi.helper_api import verify_pod_data, my_uploaded_pod_data, manual_booking_id_list, check_booking_status, get_booking_status_mapping_object\n'), ((19409, 19456), 'restapi.serializers.team.InvoiceSerializer', 'InvoiceSerializer', ([], {'instance': 'invoices', 'many': '(True)'}), '(instance=invoices, many=True)\n', (19426, 19456), False, 'from restapi.serializers.team import InvoiceSerializer\n'), ((19472, 19601), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/invoices/invoice_sent_receipt.html"""', 'status': 'status.HTTP_200_OK', 'data': "{'data': serializer.data}"}), "(template_name='team/invoices/invoice_sent_receipt.html', status=\n status.HTTP_200_OK, data={'data': serializer.data})\n", (19480, 19601), False, 'from rest_framework.response import Response\n'), ((19769, 19810), 'restapi.helper_api.manual_booking_id_list', 'manual_booking_id_list', ([], {'user': 'request.user'}), '(user=request.user)\n', (19791, 19810), False, 'from restapi.helper_api import verify_pod_data, my_uploaded_pod_data, manual_booking_id_list, check_booking_status, get_booking_status_mapping_object\n'), ((21018, 21065), 'restapi.serializers.team.InvoiceSerializer', 'InvoiceSerializer', ([], {'instance': 'invoices', 'many': '(True)'}), '(instance=invoices, many=True)\n', (21035, 21065), False, 'from restapi.serializers.team import InvoiceSerializer\n'), ((21150, 21268), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/invoices/confirm_sent_invoice.html"""', 'status': 'status.HTTP_200_OK', 'data': "{'data': data}"}), "(template_name='team/invoices/confirm_sent_invoice.html', status=\n status.HTTP_200_OK, data={'data': data})\n", (21158, 21268), False, 'from rest_framework.response import Response\n'), ((23080, 23177), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/process_payment_page.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/payments/process_payment_page.html', status=\n status.HTTP_200_OK)\n", (23088, 23177), False, 'from rest_framework.response import Response\n'), ((23325, 23424), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""team/payments/reconcile_payment_page.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='team/payments/reconcile_payment_page.html', status=\n status.HTTP_200_OK)\n", (23333, 23424), False, 'from rest_framework.response import Response\n'), ((23617, 23652), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (23625, 23652), False, 'from rest_framework.response import Response\n'), ((23859, 23894), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (23867, 23894), False, 'from rest_framework.response import Response\n'), ((24096, 24131), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (24104, 24131), False, 'from rest_framework.response import Response\n'), ((24335, 24370), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (24343, 24370), False, 'from rest_framework.response import Response\n'), ((24575, 24610), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (24583, 24610), False, 'from rest_framework.response import Response\n'), ((25334, 25566), 'rest_framework.response.Response', 'Response', (["{'vehicle_categories': vehicle_categories, 'body_type_choices':\n body_type_choices, 'gps_enable_choices': gps_enable_choices}"], {'template_name': '"""team/registrations/register_vehicle.html"""', 'status': 'status.HTTP_200_OK'}), "({'vehicle_categories': vehicle_categories, 'body_type_choices':\n body_type_choices, 'gps_enable_choices': gps_enable_choices},\n template_name='team/registrations/register_vehicle.html', status=status\n .HTTP_200_OK)\n", (25342, 25566), False, 'from rest_framework.response import Response\n'), ((25805, 25840), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (25813, 25840), False, 'from rest_framework.response import Response\n'), ((26047, 26082), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (26055, 26082), False, 'from rest_framework.response import Response\n'), ((26294, 26329), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (26302, 26329), False, 'from rest_framework.response import Response\n'), ((26537, 26572), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (26545, 26572), False, 'from rest_framework.response import Response\n'), ((26826, 26861), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (26834, 26861), False, 'from rest_framework.response import Response\n'), ((27082, 27117), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (27090, 27117), False, 'from rest_framework.response import Response\n'), ((27338, 27373), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (27346, 27373), False, 'from rest_framework.response import Response\n'), ((27593, 27628), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (27601, 27628), False, 'from rest_framework.response import Response\n'), ((27848, 27883), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (27856, 27883), False, 'from rest_framework.response import Response\n'), ((28118, 28153), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_200_OK'}), '(status=status.HTTP_200_OK)\n', (28126, 28153), False, 'from rest_framework.response import Response\n'), ((30451, 30564), 'rest_framework.response.Response', 'Response', (["{'data': data}"], {'template_name': '"""team/credit_debit_note/approve/cnc.html"""', 'status': 'status.HTTP_200_OK'}), "({'data': data}, template_name=\n 'team/credit_debit_note/approve/cnc.html', status=status.HTTP_200_OK)\n", (30459, 30564), False, 'from rest_framework.response import Response\n'), ((32285, 32398), 'rest_framework.response.Response', 'Response', (["{'data': data}"], {'template_name': '"""team/credit_debit_note/approve/cns.html"""', 'status': 'status.HTTP_200_OK'}), "({'data': data}, template_name=\n 'team/credit_debit_note/approve/cns.html', status=status.HTTP_200_OK)\n", (32293, 32398), False, 'from rest_framework.response import Response\n'), ((34120, 34233), 'rest_framework.response.Response', 'Response', (["{'data': data}"], {'template_name': '"""team/credit_debit_note/approve/dnc.html"""', 'status': 'status.HTTP_200_OK'}), "({'data': data}, template_name=\n 'team/credit_debit_note/approve/dnc.html', status=status.HTTP_200_OK)\n", (34128, 34233), False, 'from rest_framework.response import Response\n'), ((36016, 36129), 'rest_framework.response.Response', 'Response', (["{'data': data}"], {'template_name': '"""team/credit_debit_note/approve/dns.html"""', 'status': 'status.HTTP_200_OK'}), "({'data': data}, template_name=\n 'team/credit_debit_note/approve/dns.html', status=status.HTTP_200_OK)\n", (36024, 36129), False, 'from rest_framework.response import Response\n'), ((38046, 38160), 'rest_framework.response.Response', 'Response', (["{'data': data}"], {'template_name': '"""team/credit_debit_note/approve/cnca.html"""', 'status': 'status.HTTP_200_OK'}), "({'data': data}, template_name=\n 'team/credit_debit_note/approve/cnca.html', status=status.HTTP_200_OK)\n", (38054, 38160), False, 'from rest_framework.response import Response\n'), ((38319, 38393), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""mobile/dashboard.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='mobile/dashboard.html', status=status.HTTP_200_OK)\n", (38327, 38393), False, 'from rest_framework.response import Response\n'), ((39245, 39324), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""fileupload/pod_upload.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='fileupload/pod_upload.html', status=status.HTTP_200_OK)\n", (39253, 39324), False, 'from rest_framework.response import Response\n'), ((39373, 39471), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""fileupload/upload_vehicle_documents.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='fileupload/upload_vehicle_documents.html', status=\n status.HTTP_200_OK)\n", (39381, 39471), False, 'from rest_framework.response import Response\n'), ((39516, 39615), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""fileupload/upload_supplier_documents.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='fileupload/upload_supplier_documents.html', status=\n status.HTTP_200_OK)\n", (39524, 39615), False, 'from rest_framework.response import Response\n'), ((39665, 39759), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""fileupload/weighing_slip_upload.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='fileupload/weighing_slip_upload.html', status=\n status.HTTP_200_OK)\n", (39673, 39759), False, 'from rest_framework.response import Response\n'), ((39801, 39897), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""fileupload/upload_owner_documents.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='fileupload/upload_owner_documents.html', status=\n status.HTTP_200_OK)\n", (39809, 39897), False, 'from rest_framework.response import Response\n'), ((39940, 40037), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""fileupload/upload_driver_documents.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='fileupload/upload_driver_documents.html', status=\n status.HTTP_200_OK)\n", (39948, 40037), False, 'from rest_framework.response import Response\n'), ((40080, 40167), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""fileupload/upload_cheque.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='fileupload/upload_cheque.html', status=status.\n HTTP_200_OK)\n", (40088, 40167), False, 'from rest_framework.response import Response\n'), ((40219, 40308), 'rest_framework.response.Response', 'Response', ([], {'template_name': '"""fileupload/invoice_receipt.html"""', 'status': 'status.HTTP_200_OK'}), "(template_name='fileupload/invoice_receipt.html', status=status.\n HTTP_200_OK)\n", (40227, 40308), False, 'from rest_framework.response import Response\n'), ((2986, 3018), 'team.models.ManualBooking.objects.get', 'ManualBooking.objects.get', ([], {'id': 'pk'}), '(id=pk)\n', (3011, 3018), False, 'from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice\n'), ((6870, 6936), 'rest_framework.response.Response', 'Response', ([], {'status': "response['status']", 'data': "{'msg': response['msg']}"}), "(status=response['status'], data={'msg': response['msg']})\n", (6878, 6936), False, 'from rest_framework.response import Response\n'), ((7530, 7552), 'django.db.models.Count', 'Count', (['"""cheque_number"""'], {}), "('cheque_number')\n", (7535, 7552), False, 'from django.db.models import Q, Count\n'), ((9732, 9791), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (['"""/team/commission-invoice-data-page/"""'], {}), "('/team/commission-invoice-data-page/')\n", (9752, 9791), False, 'from django.http import HttpResponseRedirect\n'), ((2782, 2824), 'restapi.service.booking.detailed_full_booking_page_data', 'detailed_full_booking_page_data', (['json_data'], {}), '(json_data)\n', (2813, 2824), False, 'from restapi.service.booking import detailed_full_booking_page_data, detailed_commission_booking_page_data\n'), ((3081, 3252), 'rest_framework.response.Response', 'Response', (['{\'status\': \'failure\', \'msg\': "ManualBooking Doesn\'t exists", \'status_code\':\n status.HTTP_400_BAD_REQUEST, \'data\': {}}'], {'status': 'status.HTTP_400_BAD_REQUEST'}), '({\'status\': \'failure\', \'msg\': "ManualBooking Doesn\'t exists",\n \'status_code\': status.HTTP_400_BAD_REQUEST, \'data\': {}}, status=status.\n HTTP_400_BAD_REQUEST)\n', (3089, 3252), False, 'from rest_framework.response import Response\n'), ((3948, 3996), 'restapi.service.booking.detailed_commission_booking_page_data', 'detailed_commission_booking_page_data', (['json_data'], {}), '(json_data)\n', (3985, 3996), False, 'from restapi.service.booking import detailed_full_booking_page_data, detailed_commission_booking_page_data\n'), ((4554, 4602), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'request.user.username'}), '(username=request.user.username)\n', (4570, 4602), False, 'from django.contrib.auth.models import User\n'), ((9178, 9222), 'restapi.service.invoices.full_booking_invoice_data', 'full_booking_invoice_data', ([], {'customer': 'customer'}), '(customer=customer)\n', (9203, 9222), False, 'from restapi.service.invoices import get_invoice_data, get_comment_list, get_amount_data, full_booking_invoice_data\n'), ((12896, 12922), 'restapi.serializers.utils.IfscDetailSerializer', 'IfscDetailSerializer', (['ifsc'], {}), '(ifsc)\n', (12916, 12922), False, 'from restapi.serializers.utils import IfscDetailSerializer\n'), ((13522, 13543), 'restapi.service.trackvehicle.track_vehicles_data', 'track_vehicles_data', ([], {}), '()\n', (13541, 13543), False, 'from restapi.service.trackvehicle import track_vehicles_data, track_vehicle_data\n'), ((14518, 14547), 'django.db.models.Q', 'Q', ([], {'booking_status': '"""cancelled"""'}), "(booking_status='cancelled')\n", (14519, 14547), False, 'from django.db.models import Q, Count\n'), ((14550, 14565), 'django.db.models.Q', 'Q', ([], {'deleted': '(True)'}), '(deleted=True)\n', (14551, 14565), False, 'from django.db.models import Q, Count\n'), ((16465, 16480), 'django.db.models.Q', 'Q', ([], {'deleted': '(True)'}), '(deleted=True)\n', (16466, 16480), False, 'from django.db.models import Q, Count\n'), ((16483, 16512), 'django.db.models.Q', 'Q', ([], {'booking_status': '"""cancelled"""'}), "(booking_status='cancelled')\n", (16484, 16512), False, 'from django.db.models import Q, Count\n'), ((21609, 21660), 'restapi.helper_api.check_booking_status', 'check_booking_status', (['booking', '"""party_invoice_sent"""'], {}), "(booking, 'party_invoice_sent')\n", (21629, 21660), False, 'from restapi.helper_api import verify_pod_data, my_uploaded_pod_data, manual_booking_id_list, check_booking_status, get_booking_status_mapping_object\n'), ((24972, 25001), 'utils.models.VehicleCategory.objects.all', 'VehicleCategory.objects.all', ([], {}), '()\n', (24999, 25001), False, 'from utils.models import VehicleCategory, IfscDetail\n'), ((39116, 39145), 'django.db.models.Q', 'Q', ([], {'booking_status': '"""cancelled"""'}), "(booking_status='cancelled')\n", (39117, 39145), False, 'from django.db.models import Q, Count\n'), ((39148, 39163), 'django.db.models.Q', 'Q', ([], {'deleted': '(True)'}), '(deleted=True)\n', (39149, 39163), False, 'from django.db.models import Q, Count\n'), ((6401, 6424), 'restapi.service.payments.pending_payments_data', 'pending_payments_data', ([], {}), '()\n', (6422, 6424), False, 'from restapi.service.payments import pending_payments_data, pending_payment_adjustment_data\n'), ((10411, 10466), 'restapi.service.invoices.get_amount_data', 'get_amount_data', ([], {'bookings': 'bookings', 'booking_type': '"""full"""'}), "(bookings=bookings, booking_type='full')\n", (10426, 10466), False, 'from restapi.service.invoices import get_invoice_data, get_comment_list, get_amount_data, full_booking_invoice_data\n'), ((11167, 11184), 'restapi.helper_api.verify_pod_data', 'verify_pod_data', ([], {}), '()\n', (11182, 11184), False, 'from restapi.helper_api import verify_pod_data, my_uploaded_pod_data, manual_booking_id_list, check_booking_status, get_booking_status_mapping_object\n'), ((11379, 11396), 'restapi.helper_api.verify_pod_data', 'verify_pod_data', ([], {}), '()\n', (11394, 11396), False, 'from restapi.helper_api import verify_pod_data, my_uploaded_pod_data, manual_booking_id_list, check_booking_status, get_booking_status_mapping_object\n'), ((11612, 11651), 'restapi.helper_api.my_uploaded_pod_data', 'my_uploaded_pod_data', ([], {'user': 'request.user'}), '(user=request.user)\n', (11632, 11651), False, 'from restapi.helper_api import verify_pod_data, my_uploaded_pod_data, manual_booking_id_list, check_booking_status, get_booking_status_mapping_object\n'), ((21915, 21979), 'restapi.helper_api.get_booking_status_mapping_object', 'get_booking_status_mapping_object', (['booking', '"""party_invoice_sent"""'], {}), "(booking, 'party_invoice_sent')\n", (21948, 21979), False, 'from restapi.helper_api import verify_pod_data, my_uploaded_pod_data, manual_booking_id_list, check_booking_status, get_booking_status_mapping_object\n'), ((28458, 28493), 'restapi.service.credit_debit_note.approve_credit_note_customer_data', 'approve_credit_note_customer_data', ([], {}), '()\n', (28491, 28493), False, 'from restapi.service.credit_debit_note import approve_credit_note_customer_data, approve_debit_note_customer_data, approve_credit_note_supplier_data, approve_debit_note_supplier_data, approve_credit_note_customer_direct_advance_data\n'), ((28514, 28548), 'restapi.service.credit_debit_note.approve_debit_note_customer_data', 'approve_debit_note_customer_data', ([], {}), '()\n', (28546, 28548), False, 'from restapi.service.credit_debit_note import approve_credit_note_customer_data, approve_debit_note_customer_data, approve_credit_note_supplier_data, approve_debit_note_supplier_data, approve_credit_note_customer_direct_advance_data\n'), ((28569, 28604), 'restapi.service.credit_debit_note.approve_credit_note_supplier_data', 'approve_credit_note_supplier_data', ([], {}), '()\n', (28602, 28604), False, 'from restapi.service.credit_debit_note import approve_credit_note_customer_data, approve_debit_note_customer_data, approve_credit_note_supplier_data, approve_debit_note_supplier_data, approve_credit_note_customer_direct_advance_data\n'), ((28625, 28659), 'restapi.service.credit_debit_note.approve_debit_note_supplier_data', 'approve_debit_note_supplier_data', ([], {}), '()\n', (28657, 28659), False, 'from restapi.service.credit_debit_note import approve_credit_note_customer_data, approve_debit_note_customer_data, approve_credit_note_supplier_data, approve_debit_note_supplier_data, approve_credit_note_customer_direct_advance_data\n'), ((28681, 28731), 'restapi.service.credit_debit_note.approve_credit_note_customer_direct_advance_data', 'approve_credit_note_customer_direct_advance_data', ([], {}), '()\n', (28729, 28731), False, 'from restapi.service.credit_debit_note import approve_credit_note_customer_data, approve_debit_note_customer_data, approve_credit_note_supplier_data, approve_debit_note_supplier_data, approve_credit_note_customer_direct_advance_data\n'), ((14945, 14986), 'fileupload.models.ChequeFile.objects.filter', 'ChequeFile.objects.filter', ([], {'resolved': '(False)'}), '(resolved=False)\n', (14970, 14986), False, 'from fileupload.models import ChequeFile\n'), ((18253, 18366), 'restapi.models.BookingStatusesMapping.objects.filter', 'BookingStatusesMapping.objects.filter', ([], {'booking_status_chain__booking_status__status__iexact': '"""invoice_raised"""'}), "(\n booking_status_chain__booking_status__status__iexact='invoice_raised')\n", (18290, 18366), False, 'from restapi.models import BookingStatusesMapping, BookingStatusChain\n'), ((18396, 18411), 'django.db.models.Q', 'Q', ([], {'deleted': '(True)'}), '(deleted=True)\n', (18397, 18411), False, 'from django.db.models import Q, Count\n'), ((18414, 18441), 'django.db.models.Q', 'Q', ([], {'booking_stage': '"""reverted"""'}), "(booking_stage='reverted')\n", (18415, 18441), False, 'from django.db.models import Q, Count\n'), ((18540, 18657), 'restapi.models.BookingStatusesMapping.objects.filter', 'BookingStatusesMapping.objects.filter', ([], {'booking_status_chain__booking_status__status__iexact': '"""party_invoice_sent"""'}), "(\n booking_status_chain__booking_status__status__iexact='party_invoice_sent')\n", (18577, 18657), False, 'from restapi.models import BookingStatusesMapping, BookingStatusChain\n'), ((18687, 18702), 'django.db.models.Q', 'Q', ([], {'deleted': '(True)'}), '(deleted=True)\n', (18688, 18702), False, 'from django.db.models import Q, Count\n'), ((18705, 18732), 'django.db.models.Q', 'Q', ([], {'booking_stage': '"""reverted"""'}), "(booking_stage='reverted')\n", (18706, 18732), False, 'from django.db.models import Q, Count\n'), ((19849, 19966), 'restapi.models.BookingStatusesMapping.objects.filter', 'BookingStatusesMapping.objects.filter', ([], {'booking_status_chain__booking_status__status__iexact': '"""party_invoice_sent"""'}), "(\n booking_status_chain__booking_status__status__iexact='party_invoice_sent')\n", (19886, 19966), False, 'from restapi.models import BookingStatusesMapping, BookingStatusChain\n'), ((19996, 20011), 'django.db.models.Q', 'Q', ([], {'deleted': '(True)'}), '(deleted=True)\n', (19997, 20011), False, 'from django.db.models import Q, Count\n'), ((20014, 20041), 'django.db.models.Q', 'Q', ([], {'booking_stage': '"""reverted"""'}), "(booking_stage='reverted')\n", (20015, 20041), False, 'from django.db.models import Q, Count\n'), ((20139, 20255), 'restapi.models.BookingStatusesMapping.objects.filter', 'BookingStatusesMapping.objects.filter', ([], {'booking_status_chain__booking_status__status__iexact': '"""invoice_confirmed"""'}), "(\n booking_status_chain__booking_status__status__iexact='invoice_confirmed')\n", (20176, 20255), False, 'from restapi.models import BookingStatusesMapping, BookingStatusChain\n'), ((20285, 20300), 'django.db.models.Q', 'Q', ([], {'deleted': '(True)'}), '(deleted=True)\n', (20286, 20300), False, 'from django.db.models import Q, Count\n'), ((20303, 20330), 'django.db.models.Q', 'Q', ([], {'booking_stage': '"""reverted"""'}), "(booking_stage='reverted')\n", (20304, 20330), False, 'from django.db.models import Q, Count\n'), ((21442, 21475), 'team.models.Invoice.objects.get', 'Invoice.objects.get', ([], {'id': "inv['id']"}), "(id=inv['id'])\n", (21461, 21475), False, 'from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice\n'), ((28922, 28973), 'team.models.CreditNoteCustomer.objects.filter', 'CreditNoteCustomer.objects.filter', ([], {'status': '"""pending"""'}), "(status='pending')\n", (28955, 28973), False, 'from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice\n'), ((30762, 30813), 'team.models.CreditNoteSupplier.objects.filter', 'CreditNoteSupplier.objects.filter', ([], {'status': '"""pending"""'}), "(status='pending')\n", (30795, 30813), False, 'from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice\n'), ((32595, 32645), 'team.models.DebitNoteCustomer.objects.filter', 'DebitNoteCustomer.objects.filter', ([], {'status': '"""pending"""'}), "(status='pending')\n", (32627, 32645), False, 'from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice\n'), ((34496, 34546), 'team.models.DebitNoteSupplier.objects.filter', 'DebitNoteSupplier.objects.filter', ([], {'status': '"""pending"""'}), "(status='pending')\n", (34528, 34546), False, 'from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice\n'), ((36407, 36471), 'team.models.CreditNoteCustomerDirectAdvance.objects.filter', 'CreditNoteCustomerDirectAdvance.objects.filter', ([], {'status': '"""pending"""'}), "(status='pending')\n", (36453, 36471), False, 'from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice\n'), ((8178, 8232), 'fileupload.models.ChequeFile.objects.filter', 'ChequeFile.objects.filter', ([], {'cheque_number': 'cheque_number'}), '(cheque_number=cheque_number)\n', (8203, 8232), False, 'from fileupload.models import ChequeFile\n'), ((14329, 14360), 'django.db.models.Q', 'Q', ([], {'pod_status__iexact': '"""pending"""'}), "(pod_status__iexact='pending')\n", (14330, 14360), False, 'from django.db.models import Q, Count\n'), ((14363, 14395), 'django.db.models.Q', 'Q', ([], {'pod_status__iexact': '"""rejected"""'}), "(pod_status__iexact='rejected')\n", (14364, 14395), False, 'from django.db.models import Q, Count\n'), ((14425, 14460), 'django.db.models.Q', 'Q', ([], {'booking_id__istartswith': '"""BROKER"""'}), "(booking_id__istartswith='BROKER')\n", (14426, 14460), False, 'from django.db.models import Q, Count\n'), ((14463, 14494), 'django.db.models.Q', 'Q', ([], {'booking_id__istartswith': '"""AB"""'}), "(booking_id__istartswith='AB')\n", (14464, 14494), False, 'from django.db.models import Q, Count\n'), ((16382, 16410), 'django.db.models.Q', 'Q', ([], {'total_amount_to_company': '(0)'}), '(total_amount_to_company=0)\n', (16383, 16410), False, 'from django.db.models import Q, Count\n'), ((19244, 19263), 'datetime.timedelta', 'timedelta', ([], {'days': '(365)'}), '(days=365)\n', (19253, 19263), False, 'from datetime import datetime, timedelta\n'), ((20853, 20872), 'datetime.timedelta', 'timedelta', ([], {'days': '(365)'}), '(days=365)\n', (20862, 20872), False, 'from datetime import datetime, timedelta\n'), ((22055, 22130), 'restapi.models.BookingStatusChain.objects.get', 'BookingStatusChain.objects.get', ([], {'booking_status__status': '"""party_invoice_sent"""'}), "(booking_status__status='party_invoice_sent')\n", (22085, 22130), False, 'from restapi.models import BookingStatusesMapping, BookingStatusChain\n'), ((29218, 29320), 'django.utils.html.format_html', 'format_html', (['\'<a href="/team/booking-edit/?booking_id={}">{}</a>\'', 'booking.id', 'booking.booking_id'], {}), '(\'<a href="/team/booking-edit/?booking_id={}">{}</a>\', booking.\n id, booking.booking_id)\n', (29229, 29320), False, 'from django.utils.html import format_html\n'), ((31052, 31154), 'django.utils.html.format_html', 'format_html', (['\'<a href="/team/booking-edit/?booking_id={}">{}</a>\'', 'booking.id', 'booking.booking_id'], {}), '(\'<a href="/team/booking-edit/?booking_id={}">{}</a>\', booking.\n id, booking.booking_id)\n', (31063, 31154), False, 'from django.utils.html import format_html\n'), ((32890, 32992), 'django.utils.html.format_html', 'format_html', (['\'<a href="/team/booking-edit/?booking_id={}">{}</a>\'', 'booking.id', 'booking.booking_id'], {}), '(\'<a href="/team/booking-edit/?booking_id={}">{}</a>\', booking.\n id, booking.booking_id)\n', (32901, 32992), False, 'from django.utils.html import format_html\n'), ((34785, 34887), 'django.utils.html.format_html', 'format_html', (['\'<a href="/team/booking-edit/?booking_id={}">{}</a>\'', 'booking.id', 'booking.booking_id'], {}), '(\'<a href="/team/booking-edit/?booking_id={}">{}</a>\', booking.\n id, booking.booking_id)\n', (34796, 34887), False, 'from django.utils.html import format_html\n'), ((36805, 36907), 'django.utils.html.format_html', 'format_html', (['\'<a href="/team/booking-edit/?booking_id={}">{}</a>\'', 'booking.id', 'booking.booking_id'], {}), '(\'<a href="/team/booking-edit/?booking_id={}">{}</a>\', booking.\n id, booking.booking_id)\n', (36816, 36907), False, 'from django.utils.html import format_html\n'), ((38927, 38958), 'django.db.models.Q', 'Q', ([], {'pod_status__iexact': '"""pending"""'}), "(pod_status__iexact='pending')\n", (38928, 38958), False, 'from django.db.models import Q, Count\n'), ((38961, 38993), 'django.db.models.Q', 'Q', ([], {'pod_status__iexact': '"""rejected"""'}), "(pod_status__iexact='rejected')\n", (38962, 38993), False, 'from django.db.models import Q, Count\n'), ((39023, 39058), 'django.db.models.Q', 'Q', ([], {'booking_id__istartswith': '"""BROKER"""'}), "(booking_id__istartswith='BROKER')\n", (39024, 39058), False, 'from django.db.models import Q, Count\n'), ((39061, 39092), 'django.db.models.Q', 'Q', ([], {'booking_id__istartswith': '"""AB"""'}), "(booking_id__istartswith='AB')\n", (39062, 39092), False, 'from django.db.models import Q, Count\n'), ((7365, 7406), 'fileupload.models.ChequeFile.objects.filter', 'ChequeFile.objects.filter', ([], {'resolved': '(False)'}), '(resolved=False)\n', (7390, 7406), False, 'from fileupload.models import ChequeFile\n'), ((7736, 7790), 'fileupload.models.ChequeFile.objects.filter', 'ChequeFile.objects.filter', ([], {'cheque_number': 'cheque_number'}), '(cheque_number=cheque_number)\n', (7761, 7790), False, 'from fileupload.models import ChequeFile\n'), ((18925, 18973), 'team.models.ManualBooking.objects.filter', 'ManualBooking.objects.filter', ([], {'id__in': 'booking_ids'}), '(id__in=booking_ids)\n', (18953, 18973), False, 'from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice\n'), ((20531, 20579), 'team.models.ManualBooking.objects.filter', 'ManualBooking.objects.filter', ([], {'id__in': 'booking_ids'}), '(id__in=booking_ids)\n', (20559, 20579), False, 'from team.models import LrNumber, ManualBooking, CreditNoteCustomer, CreditNoteSupplier, DebitNoteCustomer, DebitNoteSupplier, CreditNoteCustomerDirectAdvance, Invoice\n'), ((14125, 14160), 'django.db.models.Q', 'Q', ([], {'booking__pod_status': '"""unverified"""'}), "(booking__pod_status='unverified')\n", (14126, 14160), False, 'from django.db.models import Q, Count\n'), ((19220, 19234), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (19232, 19234), False, 'from datetime import datetime, timedelta\n'), ((20829, 20843), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (20841, 20843), False, 'from datetime import datetime, timedelta\n'), ((38723, 38758), 'django.db.models.Q', 'Q', ([], {'booking__pod_status': '"""unverified"""'}), "(booking__pod_status='unverified')\n", (38724, 38758), False, 'from django.db.models import Q, Count\n'), ((14054, 14086), 'django.db.models.Q', 'Q', ([], {'booking__pod_status': '"""pending"""'}), "(booking__pod_status='pending')\n", (14055, 14086), False, 'from django.db.models import Q, Count\n'), ((14089, 14122), 'django.db.models.Q', 'Q', ([], {'booking__pod_status': '"""rejected"""'}), "(booking__pod_status='rejected')\n", (14090, 14122), False, 'from django.db.models import Q, Count\n'), ((38652, 38684), 'django.db.models.Q', 'Q', ([], {'booking__pod_status': '"""pending"""'}), "(booking__pod_status='pending')\n", (38653, 38684), False, 'from django.db.models import Q, Count\n'), ((38687, 38720), 'django.db.models.Q', 'Q', ([], {'booking__pod_status': '"""rejected"""'}), "(booking__pod_status='rejected')\n", (38688, 38720), False, 'from django.db.models import Q, Count\n'), ((14013, 14032), 'datetime.timedelta', 'timedelta', ([], {'days': '(180)'}), '(days=180)\n', (14022, 14032), False, 'from datetime import datetime, timedelta\n'), ((38611, 38630), 'datetime.timedelta', 'timedelta', ([], {'days': '(180)'}), '(days=180)\n', (38620, 38630), False, 'from datetime import datetime, timedelta\n'), ((13989, 14003), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (14001, 14003), False, 'from datetime import datetime, timedelta\n'), ((38587, 38601), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (38599, 38601), False, 'from datetime import datetime, timedelta\n')]
|
from hail2 import f
class TestHailStones:
    def test_f(self):
        # Expected hailstone (Collatz) step counts for n = 1..10;
        # index 0 is an unused placeholder so ans[i] lines up with i.
        ans = [0, 0, 1, 7, 2, 5, 8, 16, 3, 19, 6]
        for i in range(1, 11):
            print(i)  # shown by pytest on failure, which aids debugging
            assert f(i) == ans[i]
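# For reference, a hypothetical implementation consistent with the expected
# values above (an assumption about what hail2.f computes, not its actual
# source): the Collatz "hailstone" step count needed to reach 1 from n.
def _reference_f(n):
    steps = 0
    while n != 1:
        n = n // 2 if n % 2 == 0 else 3 * n + 1
        steps += 1
    return steps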
|
[
"hail2.f"
] |
[((189, 193), 'hail2.f', 'f', (['i'], {}), '(i)\n', (190, 193), False, 'from hail2 import f\n')]
|
from math import floor, ceil
from decimal import Decimal
from django import template
from django.utils.safestring import mark_safe
from .customformatting import ubrdecimal
register = template.Library()
def _make_context(
context,
css,
obj,
field,
bold="gross",
has_form=False,
comment=None,
select_if_equal=None,
):
    """Build the template context for one amount cell (view or form input)."""
    ctx = {
"TAX_RATE": context["TAX_RATE"],
"css_class": css,
"amount": getattr(obj, field + "_amount"),
"diff": getattr(obj, field + "_diff_amount", None),
"percent": getattr(obj, field + "_percent", None),
"has_form": has_form,
"bold": bold,
}
if select_if_equal == ctx["amount"]:
ctx["css_class"] += " selected"
if has_form:
ctx.update(
{"net": obj[field + "_net"], "tax": obj[field + "_tax"], "comment": comment}
)
for field_name in ["net", "tax", "comment"]:
field_obj = ctx[field_name]
if field_obj is not None and field_obj.errors:
ctx["css_class"] += " has-error bg-danger"
break
return ctx
@register.inclusion_tag("accounting/amount_view_cell.html", takes_context=True)
def amount_view(context, *args, **kwargs):
return _make_context(context, *args, **kwargs)
@register.inclusion_tag("accounting/amount_view_cell.html", takes_context=True)
def amount_stateful(context, *args, **kwargs):
return _make_context(context, *args, has_form=True, **kwargs)
@register.inclusion_tag("accounting/amount_input_cell.html", takes_context=True)
def amount_input(context, *args, **kwargs):
return _make_context(context, *args, has_form=True, **kwargs)
@register.simple_tag
def amount_diff_part(amount, part):
color = ""
value = getattr(amount, part, Decimal(0))
if value > 0:
color = "green"
elif value < 0:
color = "red"
str_value = ""
if value != 0:
str_value = ubrdecimal(value, 2)
if value > 0:
str_value = "+" + str_value
return mark_safe('<span class="amount-diff %s">%s</span>' % (color, str_value))
@register.simple_tag
def amount_value_part(amount, part):
value = getattr(amount, part, Decimal(0))
str_value = ubrdecimal(value, 2)
return mark_safe('<span class="amount-value">%s</span>' % (str_value,))
@register.simple_tag
def amount_percent(percent):
color = ""
str_value = ""
if percent is not None:
if percent == 100:
color = "green"
elif percent > 100:
color = "red"
percent = ceil(percent)
elif percent < 100:
color = "blue"
percent = floor(percent)
str_value = str(percent) + "%"
return mark_safe('<div class="amount-percent %s">%s</div>' % (color, str_value))
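# Illustrative only (not part of the original module): register.simple_tag
# returns the plain function, so the colour/rounding rules of amount_percent
# can be exercised directly, outside any template.
def _demo_amount_percent():
    assert amount_percent(Decimal("100")) == '<div class="amount-percent green">100%</div>'
    assert amount_percent(Decimal("105.4")) == '<div class="amount-percent red">106%</div>'
    assert amount_percent(Decimal("97.2")) == '<div class="amount-percent blue">97%</div>'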
|
[
"django.template.Library",
"math.ceil",
"decimal.Decimal",
"math.floor",
"django.utils.safestring.mark_safe"
] |
[((185, 203), 'django.template.Library', 'template.Library', ([], {}), '()\n', (201, 203), False, 'from django import template\n'), ((2042, 2114), 'django.utils.safestring.mark_safe', 'mark_safe', (['(\'<span class="amount-diff %s">%s</span>\' % (color, str_value))'], {}), '(\'<span class="amount-diff %s">%s</span>\' % (color, str_value))\n', (2051, 2114), False, 'from django.utils.safestring import mark_safe\n'), ((2269, 2333), 'django.utils.safestring.mark_safe', 'mark_safe', (['(\'<span class="amount-value">%s</span>\' % (str_value,))'], {}), '(\'<span class="amount-value">%s</span>\' % (str_value,))\n', (2278, 2333), False, 'from django.utils.safestring import mark_safe\n'), ((2735, 2808), 'django.utils.safestring.mark_safe', 'mark_safe', (['(\'<div class="amount-percent %s">%s</div>\' % (color, str_value))'], {}), '(\'<div class="amount-percent %s">%s</div>\' % (color, str_value))\n', (2744, 2808), False, 'from django.utils.safestring import mark_safe\n'), ((1802, 1812), 'decimal.Decimal', 'Decimal', (['(0)'], {}), '(0)\n', (1809, 1812), False, 'from decimal import Decimal\n'), ((2209, 2219), 'decimal.Decimal', 'Decimal', (['(0)'], {}), '(0)\n', (2216, 2219), False, 'from decimal import Decimal\n'), ((2579, 2592), 'math.ceil', 'ceil', (['percent'], {}), '(percent)\n', (2583, 2592), False, 'from math import floor, ceil\n'), ((2670, 2684), 'math.floor', 'floor', (['percent'], {}), '(percent)\n', (2675, 2684), False, 'from math import floor, ceil\n')]
|
from __future__ import absolute_import
from zoomus import util
from zoomus.components import base
class LiveStreamComponentV2(base.BaseComponent):
def update(self, **kwargs):
"""
Use this API to update the meeting's stream information.
Expects:
- meeting_id: int
- stream_url: string (URL)
- stream_key: string
- page_url: string (URL)
"""
util.require_keys(kwargs, "meeting_id")
return self.patch_request(
"/meetings/{}/livestream".format(kwargs.get("meeting_id")), data=kwargs
)
def update_status(self, **kwargs):
"""
Use this API to update the status of a meeting's live stream.
Expects:
- meeting_id: int
        - action: string (start|stop)
- settings: dict
"""
util.require_keys(kwargs, "meeting_id")
return self.patch_request(
"/meetings/{}/livestream/status".format(kwargs.get("meeting_id")),
data=kwargs,
)
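# Illustrative usage sketch (assumptions: key/secret-style auth and that the
# client exposes this component as `live_stream`; both may differ between
# zoomus versions):
def _example_live_stream_update():
    from zoomus import ZoomClient
    client = ZoomClient('API_KEY', 'API_SECRET')
    client.live_stream.update(
        meeting_id=123456789,
        stream_url='rtmp://example.invalid/live',
        stream_key='STREAM_KEY',
        page_url='https://example.invalid/watch',
    )
    client.live_stream.update_status(meeting_id=123456789, action='start')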
|
[
"zoomus.util.require_keys"
] |
[((419, 458), 'zoomus.util.require_keys', 'util.require_keys', (['kwargs', '"""meeting_id"""'], {}), "(kwargs, 'meeting_id')\n", (436, 458), False, 'from zoomus import util\n'), ((828, 867), 'zoomus.util.require_keys', 'util.require_keys', (['kwargs', '"""meeting_id"""'], {}), "(kwargs, 'meeting_id')\n", (845, 867), False, 'from zoomus import util\n')]
|
import numpy as np
import matplotlib.pyplot as plt
from scipy import interpolate
plt.rc('text', usetex=True)
plt.rc('text.latex', preamble=r'\usepackage{amsmath}\usepackage{amssymb}\usepackage{siunitx}')
# Colours
col_b16agss09 = '#A50026'
col_b16gs98 = '#D73027'
col_agss09 = '#F46D43'
col_agss09ph = '#FDAE61'
col_ags05 = '#fEE090'
col_bs05agsop = '#FFFFBF'
col_bs05op = '#E0F3F8'
col_bp04 = '#ABD9E9'
col_bp00 = '#74ADD1'
col_bp98 = '#4575B4'
col_gs98 = '#313695'
def plot_setup(size=6, ratio=0.618):
    # Styles the module-level fig/ax created by plt.subplots() just before
    # each call; nothing is passed in, so those globals must already exist.
    fig.set_size_inches(size, ratio*size)
    ax.tick_params(which='both', direction='in', bottom=True, top=True, left=True, right=True)
    ax.tick_params(which='major', length=6)
    ax.tick_params(which='minor', length=4)
    #plt.minorticks_on()
conversion = 365.0*24.0*60.0*60.0*1.0e4*1.0e-20
res1 = np.genfromtxt("primakoff.dat")
res2 = np.genfromtxt("compton.dat")
res3 = np.genfromtxt("all_ff.dat")
res4 = np.genfromtxt("all_gaee.dat")
res5 = np.genfromtxt("metals.dat")
res6 = np.genfromtxt("TP.dat")
res7 = np.genfromtxt("LP.dat")
res8 = np.genfromtxt("TP_Rosseland.dat")
res9 = np.genfromtxt("LP_Rosseland.dat")
#corr = np.genfromtxt("weighted_compton.dat")
#weighted_compton = interpolate.interp1d(corr[:,0], corr[:,1], bounds_error=False, fill_value=0)
common_path = "../data/benchmarks/"
ref1 = np.genfromtxt(common_path+"2013_redondo_primakoff.dat")
ref2 = np.genfromtxt(common_path+"2013_redondo_compton.dat")
compton = interpolate.interp1d(ref2[:,0], ref2[:,1], bounds_error=False, fill_value=0)
ref3 = np.genfromtxt(common_path+"2013_redondo_ff.dat")
ref4 = np.genfromtxt(common_path+"2013_redondo_all.dat")
ref5 = np.genfromtxt(common_path+"2020_giannotti_TP.dat")
ref6 = np.genfromtxt(common_path+"2020_giannotti_LP.dat")
ref7 = np.genfromtxt(common_path+"2020-o'hare.dat")
ref8 = np.genfromtxt(common_path+"2020_caputo_LP.dat")
conv_fac = 1.0e-4/(365.0*24.0*60.0*60.0*1.0e10)
## Validation plots for axion-photon interactions
# Primakoff approximation [hep-ex/0702006] based on [astro-ph/0402114]
omega = np.linspace(0,10,300)
fig, ax = plt.subplots()
plot_setup()
plt.plot(omega, 6.02*omega**2.481*np.exp(-omega/1.205),':', color=col_agss09, label=r'Primakoff approx. (BP04)')
plt.plot(ref1[:,0], conv_fac*(1.0e4/50.0)*ref1[:,1], '-', color=col_b16agss09, label=r'Primakoff (Redondo)')
plt.plot(res1[:,0], res1[:,1]/1.0e10, 'k--', label=r'Primakoff (AGSS09)')
plt.plot(res6[:,0], res6[:,1]/1.0e10, 'k--', label=r'TP (AGSS09)')
plt.title(r'Axion-photon interactions, $g_{a\gamma\gamma} = \SI{e-10}{\GeV^{-1}}$, OP opacities')
plt.xlabel(r'Energy $\omega$ [keV]')
plt.ylabel(r'Axion flux $\mathrm{d}\Phi_a/\mathrm{d}\omega$ [\SI{e10}{\per\cm\squared\per\keV\per\s}]')
plt.xlim([0,10])
#plt.ylim([0,8])
plt.legend(frameon=False)
plt.savefig("validation_gagg.pdf", bbox_inches='tight')
#plt.show()
plt.close()
fig, ax = plt.subplots()
plot_setup()
plt.plot(omega, 6.02*omega**2.481*np.exp(-omega/1.205),':', color=col_agss09, label=r'Primakoff approx. (BP04)')
plt.plot(ref1[:,0], conv_fac*(1.0e4/50.0)*ref1[:,1], '-', color=col_b16agss09, label=r'Primakoff (Redondo)')
plt.plot(res1[:,0], res1[:,1]/1.0e10, 'k--', label=r'Primakoff (AGSS09)')
plt.plot(res6[:,0], res6[:,1]/1.0e10, 'k-', label=r'TP (AGSS09)')
plt.plot(res8[:,0], res8[:,1]/1.0e10, 'k--', label=r'TP Rosseland (AGSS09)')
plt.plot(ref5[:,0], ref5[:,1]*4.0*1.4995, '-', color='green', label=r'TP (Giannotti)')#correct B conversion in giannotti result and adjust coupling constant
plt.title(r'Axion-photon interactions, $g_{a\gamma\gamma} = \SI{e-10}{\GeV^{-1}}$, OP opacities')
plt.xlabel(r'Energy $\omega$ [keV]')
plt.ylabel(r'Axion flux $\mathrm{d}\Phi_a/\mathrm{d}\omega$ [\SI{e10}{\per\cm\squared\per\keV\per\s}]')
plt.xlim([0.1,10])
plt.yscale('log')
plt.xscale('log')
#plt.ylim([0,8])
plt.legend(frameon=False)
plt.savefig("validation_Tplasmon.pdf", bbox_inches='tight')
plt.show()
plt.close()
fig, ax = plt.subplots()
plot_setup()
plt.plot(omega, 6.02*omega**2.481*np.exp(-omega/1.205),':', color=col_agss09, label=r'Primakoff approx. (BP04)')
plt.plot(ref1[:,0], conv_fac*(1.0e4/50.0)*ref1[:,1], '-', color=col_b16agss09, label=r'Primakoff (Redondo)')
plt.plot(res1[:,0], res1[:,1]/1.0e10, 'k--', label=r'Primakoff (AGSS09)')
plt.plot(res7[:,0], res7[:,1]/1.0e10, 'k-', label=r'LP (AGSS09)')
plt.plot(res9[:,0], res9[:,1]/1.0e10, 'k--', label=r'LP Rosseland (AGSS09)')
plt.plot(ref6[:,0], ref6[:,1]*4.0, '--', color='green', label=r'LP (Giannotti)') # correct coupling
plt.plot(ref7[:,0], ref7[:,1]/1.0e10*4.0/1.7856, '--', color='orange', label=r'LP (O´Hare)') # correct coupling and angular average
plt.plot(ref8[:,0], ref8[:,1]/1.0e10*(3.0/5.0)**2, '--', color='gold', label=r'LP (Caputo)') #correct field values
plt.title(r'Axion-photon interactions, $g_{a\gamma\gamma} = \SI{e-10}{\GeV^{-1}}$, OP opacities')
plt.xlabel(r'Energy $\omega$ [keV]')
plt.ylabel(r'Axion flux $\mathrm{d}\Phi_a/\mathrm{d}\omega$ [\SI{e10}{\per\cm\squared\per\keV\per\s}]')
plt.xlim([0.001,0.4])
plt.yscale('log')
plt.xscale('log')
plt.ylim([0.0,37])
plt.legend(frameon=False)
plt.savefig("validation_Lplasmon.pdf", bbox_inches='tight')
plt.show()
plt.close()
fig, ax = plt.subplots()
## Validation plots for axion-electron interactions
plot_setup()
plt.plot(ref2[:,0], 100.0*conv_fac*(0.5*ref2[:,1]), 'b-', label=r'Compton (Redondo)')
plt.plot(ref3[:,0], 100.0*conv_fac*ref3[:,1], 'm-', label=r'FF (Redondo)')
plt.plot(ref4[:,0], 1.0e11*ref4[:,1]*(1.0e-13/0.511e-10)**2/(24.0*60.0*60.0) - 100.0*conv_fac*(0.5*compton(ref4[:,0])), 'g-', label=r'All')
plt.plot(res2[:,0], res2[:,1]/1.0e8, 'k--', label=r'Compton (B16-AGSS09)')
plt.plot(res3[:,0], res3[:,1]/1.0e8, 'k--', label=r'FF (B16-AGSS09)')
plt.plot(res4[:,0], res4[:,1]/1.0e8, 'k--', label=r'All (B16-AGSS09)')
plt.plot(res5[:,0], res5[:,1]/1.0e8, 'k--', label=r'Metals (B16-AGSS09)')
plt.title(r'Axion-electron interactions, $g_{aee} = \num{e-13}$, OP opacities')
plt.xlabel(r'Energy $\omega$ [keV]')
plt.ylabel(r'Axion flux $\mathrm{d}\Phi_a/\mathrm{d}\omega$ [\SI{e8}{\per\cm\squared\per\keV\per\s}]')
plt.xlim([0,10])
plt.ylim([0,12])
plt.legend(ncol=2, frameon=False)
plt.savefig("validation_gaee.pdf")
#plt.show()
plt.close()
|
[
"matplotlib.pyplot.title",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.yscale",
"matplotlib.pyplot.xscale",
"matplotlib.pyplot.show",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.close",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.ylabel",
"numpy.genfromtxt",
"matplotlib.pyplot.rc",
"numpy.linspace",
"numpy.exp",
"scipy.interpolate.interp1d",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.savefig"
] |
[((83, 110), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text"""'], {'usetex': '(True)'}), "('text', usetex=True)\n", (89, 110), True, 'import matplotlib.pyplot as plt\n'), ((111, 212), 'matplotlib.pyplot.rc', 'plt.rc', (['"""text.latex"""'], {'preamble': '"""\\\\usepackage{amsmath}\\\\usepackage{amssymb}\\\\usepackage{siunitx}"""'}), "('text.latex', preamble=\n '\\\\usepackage{amsmath}\\\\usepackage{amssymb}\\\\usepackage{siunitx}')\n", (117, 212), True, 'import matplotlib.pyplot as plt\n'), ((812, 842), 'numpy.genfromtxt', 'np.genfromtxt', (['"""primakoff.dat"""'], {}), "('primakoff.dat')\n", (825, 842), True, 'import numpy as np\n'), ((850, 878), 'numpy.genfromtxt', 'np.genfromtxt', (['"""compton.dat"""'], {}), "('compton.dat')\n", (863, 878), True, 'import numpy as np\n'), ((886, 913), 'numpy.genfromtxt', 'np.genfromtxt', (['"""all_ff.dat"""'], {}), "('all_ff.dat')\n", (899, 913), True, 'import numpy as np\n'), ((921, 950), 'numpy.genfromtxt', 'np.genfromtxt', (['"""all_gaee.dat"""'], {}), "('all_gaee.dat')\n", (934, 950), True, 'import numpy as np\n'), ((958, 985), 'numpy.genfromtxt', 'np.genfromtxt', (['"""metals.dat"""'], {}), "('metals.dat')\n", (971, 985), True, 'import numpy as np\n'), ((993, 1016), 'numpy.genfromtxt', 'np.genfromtxt', (['"""TP.dat"""'], {}), "('TP.dat')\n", (1006, 1016), True, 'import numpy as np\n'), ((1024, 1047), 'numpy.genfromtxt', 'np.genfromtxt', (['"""LP.dat"""'], {}), "('LP.dat')\n", (1037, 1047), True, 'import numpy as np\n'), ((1055, 1088), 'numpy.genfromtxt', 'np.genfromtxt', (['"""TP_Rosseland.dat"""'], {}), "('TP_Rosseland.dat')\n", (1068, 1088), True, 'import numpy as np\n'), ((1096, 1129), 'numpy.genfromtxt', 'np.genfromtxt', (['"""LP_Rosseland.dat"""'], {}), "('LP_Rosseland.dat')\n", (1109, 1129), True, 'import numpy as np\n'), ((1318, 1375), 'numpy.genfromtxt', 'np.genfromtxt', (["(common_path + '2013_redondo_primakoff.dat')"], {}), "(common_path + '2013_redondo_primakoff.dat')\n", (1331, 1375), True, 'import numpy as np\n'), ((1381, 1436), 'numpy.genfromtxt', 'np.genfromtxt', (["(common_path + '2013_redondo_compton.dat')"], {}), "(common_path + '2013_redondo_compton.dat')\n", (1394, 1436), True, 'import numpy as np\n'), ((1445, 1523), 'scipy.interpolate.interp1d', 'interpolate.interp1d', (['ref2[:, 0]', 'ref2[:, 1]'], {'bounds_error': '(False)', 'fill_value': '(0)'}), '(ref2[:, 0], ref2[:, 1], bounds_error=False, fill_value=0)\n', (1465, 1523), False, 'from scipy import interpolate\n'), ((1529, 1579), 'numpy.genfromtxt', 'np.genfromtxt', (["(common_path + '2013_redondo_ff.dat')"], {}), "(common_path + '2013_redondo_ff.dat')\n", (1542, 1579), True, 'import numpy as np\n'), ((1585, 1636), 'numpy.genfromtxt', 'np.genfromtxt', (["(common_path + '2013_redondo_all.dat')"], {}), "(common_path + '2013_redondo_all.dat')\n", (1598, 1636), True, 'import numpy as np\n'), ((1642, 1694), 'numpy.genfromtxt', 'np.genfromtxt', (["(common_path + '2020_giannotti_TP.dat')"], {}), "(common_path + '2020_giannotti_TP.dat')\n", (1655, 1694), True, 'import numpy as np\n'), ((1700, 1752), 'numpy.genfromtxt', 'np.genfromtxt', (["(common_path + '2020_giannotti_LP.dat')"], {}), "(common_path + '2020_giannotti_LP.dat')\n", (1713, 1752), True, 'import numpy as np\n'), ((1758, 1804), 'numpy.genfromtxt', 'np.genfromtxt', (['(common_path + "2020-o\'hare.dat")'], {}), '(common_path + "2020-o\'hare.dat")\n', (1771, 1804), True, 'import numpy as np\n'), ((1810, 1859), 'numpy.genfromtxt', 'np.genfromtxt', (["(common_path + '2020_caputo_LP.dat')"], {}), "(common_path + 
'2020_caputo_LP.dat')\n", (1823, 1859), True, 'import numpy as np\n'), ((2038, 2061), 'numpy.linspace', 'np.linspace', (['(0)', '(10)', '(300)'], {}), '(0, 10, 300)\n', (2049, 2061), True, 'import numpy as np\n'), ((2071, 2085), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2083, 2085), True, 'import matplotlib.pyplot as plt\n'), ((2212, 2334), 'matplotlib.pyplot.plot', 'plt.plot', (['ref1[:, 0]', '(conv_fac * (10000.0 / 50.0) * ref1[:, 1])', '"""-"""'], {'color': 'col_b16agss09', 'label': '"""Primakoff (Redondo)"""'}), "(ref1[:, 0], conv_fac * (10000.0 / 50.0) * ref1[:, 1], '-', color=\n col_b16agss09, label='Primakoff (Redondo)')\n", (2220, 2334), True, 'import matplotlib.pyplot as plt\n'), ((2321, 2409), 'matplotlib.pyplot.plot', 'plt.plot', (['res1[:, 0]', '(res1[:, 1] / 10000000000.0)', '"""k--"""'], {'label': '"""Primakoff (AGSS09)"""'}), "(res1[:, 0], res1[:, 1] / 10000000000.0, 'k--', label=\n 'Primakoff (AGSS09)')\n", (2329, 2409), True, 'import matplotlib.pyplot as plt\n'), ((2395, 2471), 'matplotlib.pyplot.plot', 'plt.plot', (['res6[:, 0]', '(res6[:, 1] / 10000000000.0)', '"""k--"""'], {'label': '"""TP (AGSS09)"""'}), "(res6[:, 0], res6[:, 1] / 10000000000.0, 'k--', label='TP (AGSS09)')\n", (2403, 2471), True, 'import matplotlib.pyplot as plt\n'), ((2463, 2573), 'matplotlib.pyplot.title', 'plt.title', (['"""Axion-photon interactions, $g_{a\\\\gamma\\\\gamma} = \\\\SI{e-10}{\\\\GeV^{-1}}$, OP opacities"""'], {}), "(\n 'Axion-photon interactions, $g_{a\\\\gamma\\\\gamma} = \\\\SI{e-10}{\\\\GeV^{-1}}$, OP opacities'\n )\n", (2472, 2573), True, 'import matplotlib.pyplot as plt\n'), ((2561, 2597), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy $\\\\omega$ [keV]"""'], {}), "('Energy $\\\\omega$ [keV]')\n", (2571, 2597), True, 'import matplotlib.pyplot as plt\n'), ((2598, 2722), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Axion flux $\\\\mathrm{d}\\\\Phi_a/\\\\mathrm{d}\\\\omega$ [\\\\SI{e10}{\\\\per\\\\cm\\\\squared\\\\per\\\\keV\\\\per\\\\s}]"""'], {}), "(\n 'Axion flux $\\\\mathrm{d}\\\\Phi_a/\\\\mathrm{d}\\\\omega$ [\\\\SI{e10}{\\\\per\\\\cm\\\\squared\\\\per\\\\keV\\\\per\\\\s}]'\n )\n", (2608, 2722), True, 'import matplotlib.pyplot as plt\n'), ((2702, 2719), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0, 10]'], {}), '([0, 10])\n', (2710, 2719), True, 'import matplotlib.pyplot as plt\n'), ((2737, 2762), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'frameon': '(False)'}), '(frameon=False)\n', (2747, 2762), True, 'import matplotlib.pyplot as plt\n'), ((2764, 2819), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""validation_gagg.pdf"""'], {'bbox_inches': '"""tight"""'}), "('validation_gagg.pdf', bbox_inches='tight')\n", (2775, 2819), True, 'import matplotlib.pyplot as plt\n'), ((2832, 2843), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (2841, 2843), True, 'import matplotlib.pyplot as plt\n'), ((2856, 2870), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (2868, 2870), True, 'import matplotlib.pyplot as plt\n'), ((2997, 3119), 'matplotlib.pyplot.plot', 'plt.plot', (['ref1[:, 0]', '(conv_fac * (10000.0 / 50.0) * ref1[:, 1])', '"""-"""'], {'color': 'col_b16agss09', 'label': '"""Primakoff (Redondo)"""'}), "(ref1[:, 0], conv_fac * (10000.0 / 50.0) * ref1[:, 1], '-', color=\n col_b16agss09, label='Primakoff (Redondo)')\n", (3005, 3119), True, 'import matplotlib.pyplot as plt\n'), ((3106, 3194), 'matplotlib.pyplot.plot', 'plt.plot', (['res1[:, 0]', '(res1[:, 1] / 10000000000.0)', '"""k--"""'], {'label': '"""Primakoff 
(AGSS09)"""'}), "(res1[:, 0], res1[:, 1] / 10000000000.0, 'k--', label=\n 'Primakoff (AGSS09)')\n", (3114, 3194), True, 'import matplotlib.pyplot as plt\n'), ((3180, 3255), 'matplotlib.pyplot.plot', 'plt.plot', (['res6[:, 0]', '(res6[:, 1] / 10000000000.0)', '"""k-"""'], {'label': '"""TP (AGSS09)"""'}), "(res6[:, 0], res6[:, 1] / 10000000000.0, 'k-', label='TP (AGSS09)')\n", (3188, 3255), True, 'import matplotlib.pyplot as plt\n'), ((3246, 3337), 'matplotlib.pyplot.plot', 'plt.plot', (['res8[:, 0]', '(res8[:, 1] / 10000000000.0)', '"""k--"""'], {'label': '"""TP Rosseland (AGSS09)"""'}), "(res8[:, 0], res8[:, 1] / 10000000000.0, 'k--', label=\n 'TP Rosseland (AGSS09)')\n", (3254, 3337), True, 'import matplotlib.pyplot as plt\n'), ((3323, 3419), 'matplotlib.pyplot.plot', 'plt.plot', (['ref5[:, 0]', '(ref5[:, 1] * 4.0 * 1.4995)', '"""-"""'], {'color': '"""green"""', 'label': '"""TP (Giannotti)"""'}), "(ref5[:, 0], ref5[:, 1] * 4.0 * 1.4995, '-', color='green', label=\n 'TP (Giannotti)')\n", (3331, 3419), True, 'import matplotlib.pyplot as plt\n'), ((3481, 3591), 'matplotlib.pyplot.title', 'plt.title', (['"""Axion-photon interactions, $g_{a\\\\gamma\\\\gamma} = \\\\SI{e-10}{\\\\GeV^{-1}}$, OP opacities"""'], {}), "(\n 'Axion-photon interactions, $g_{a\\\\gamma\\\\gamma} = \\\\SI{e-10}{\\\\GeV^{-1}}$, OP opacities'\n )\n", (3490, 3591), True, 'import matplotlib.pyplot as plt\n'), ((3579, 3615), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy $\\\\omega$ [keV]"""'], {}), "('Energy $\\\\omega$ [keV]')\n", (3589, 3615), True, 'import matplotlib.pyplot as plt\n'), ((3616, 3740), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Axion flux $\\\\mathrm{d}\\\\Phi_a/\\\\mathrm{d}\\\\omega$ [\\\\SI{e10}{\\\\per\\\\cm\\\\squared\\\\per\\\\keV\\\\per\\\\s}]"""'], {}), "(\n 'Axion flux $\\\\mathrm{d}\\\\Phi_a/\\\\mathrm{d}\\\\omega$ [\\\\SI{e10}{\\\\per\\\\cm\\\\squared\\\\per\\\\keV\\\\per\\\\s}]'\n )\n", (3626, 3740), True, 'import matplotlib.pyplot as plt\n'), ((3720, 3739), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0.1, 10]'], {}), '([0.1, 10])\n', (3728, 3739), True, 'import matplotlib.pyplot as plt\n'), ((3739, 3756), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (3749, 3756), True, 'import matplotlib.pyplot as plt\n'), ((3757, 3774), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (3767, 3774), True, 'import matplotlib.pyplot as plt\n'), ((3793, 3818), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'frameon': '(False)'}), '(frameon=False)\n', (3803, 3818), True, 'import matplotlib.pyplot as plt\n'), ((3820, 3879), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""validation_Tplasmon.pdf"""'], {'bbox_inches': '"""tight"""'}), "('validation_Tplasmon.pdf', bbox_inches='tight')\n", (3831, 3879), True, 'import matplotlib.pyplot as plt\n'), ((3880, 3890), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3888, 3890), True, 'import matplotlib.pyplot as plt\n'), ((3891, 3902), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (3900, 3902), True, 'import matplotlib.pyplot as plt\n'), ((3915, 3929), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (3927, 3929), True, 'import matplotlib.pyplot as plt\n'), ((4056, 4178), 'matplotlib.pyplot.plot', 'plt.plot', (['ref1[:, 0]', '(conv_fac * (10000.0 / 50.0) * ref1[:, 1])', '"""-"""'], {'color': 'col_b16agss09', 'label': '"""Primakoff (Redondo)"""'}), "(ref1[:, 0], conv_fac * (10000.0 / 50.0) * ref1[:, 1], '-', color=\n col_b16agss09, label='Primakoff (Redondo)')\n", (4064, 
4178), True, 'import matplotlib.pyplot as plt\n'), ((4165, 4253), 'matplotlib.pyplot.plot', 'plt.plot', (['res1[:, 0]', '(res1[:, 1] / 10000000000.0)', '"""k--"""'], {'label': '"""Primakoff (AGSS09)"""'}), "(res1[:, 0], res1[:, 1] / 10000000000.0, 'k--', label=\n 'Primakoff (AGSS09)')\n", (4173, 4253), True, 'import matplotlib.pyplot as plt\n'), ((4239, 4314), 'matplotlib.pyplot.plot', 'plt.plot', (['res7[:, 0]', '(res7[:, 1] / 10000000000.0)', '"""k-"""'], {'label': '"""LP (AGSS09)"""'}), "(res7[:, 0], res7[:, 1] / 10000000000.0, 'k-', label='LP (AGSS09)')\n", (4247, 4314), True, 'import matplotlib.pyplot as plt\n'), ((4305, 4396), 'matplotlib.pyplot.plot', 'plt.plot', (['res9[:, 0]', '(res9[:, 1] / 10000000000.0)', '"""k--"""'], {'label': '"""LP Rosseland (AGSS09)"""'}), "(res9[:, 0], res9[:, 1] / 10000000000.0, 'k--', label=\n 'LP Rosseland (AGSS09)')\n", (4313, 4396), True, 'import matplotlib.pyplot as plt\n'), ((4382, 4470), 'matplotlib.pyplot.plot', 'plt.plot', (['ref6[:, 0]', '(ref6[:, 1] * 4.0)', '"""--"""'], {'color': '"""green"""', 'label': '"""LP (Giannotti)"""'}), "(ref6[:, 0], ref6[:, 1] * 4.0, '--', color='green', label=\n 'LP (Giannotti)')\n", (4390, 4470), True, 'import matplotlib.pyplot as plt\n'), ((4482, 4593), 'matplotlib.pyplot.plot', 'plt.plot', (['ref7[:, 0]', '(ref7[:, 1] / 10000000000.0 * 4.0 / 1.7856)', '"""--"""'], {'color': '"""orange"""', 'label': '"""LP (O´Hare)"""'}), "(ref7[:, 0], ref7[:, 1] / 10000000000.0 * 4.0 / 1.7856, '--', color\n ='orange', label='LP (O´Hare)')\n", (4490, 4593), True, 'import matplotlib.pyplot as plt\n'), ((4614, 4726), 'matplotlib.pyplot.plot', 'plt.plot', (['ref8[:, 0]', '(ref8[:, 1] / 10000000000.0 * (3.0 / 5.0) ** 2)', '"""--"""'], {'color': '"""gold"""', 'label': '"""LP (Caputo)"""'}), "(ref8[:, 0], ref8[:, 1] / 10000000000.0 * (3.0 / 5.0) ** 2, '--',\n color='gold', label='LP (Caputo)')\n", (4622, 4726), True, 'import matplotlib.pyplot as plt\n'), ((4731, 4841), 'matplotlib.pyplot.title', 'plt.title', (['"""Axion-photon interactions, $g_{a\\\\gamma\\\\gamma} = \\\\SI{e-10}{\\\\GeV^{-1}}$, OP opacities"""'], {}), "(\n 'Axion-photon interactions, $g_{a\\\\gamma\\\\gamma} = \\\\SI{e-10}{\\\\GeV^{-1}}$, OP opacities'\n )\n", (4740, 4841), True, 'import matplotlib.pyplot as plt\n'), ((4829, 4865), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy $\\\\omega$ [keV]"""'], {}), "('Energy $\\\\omega$ [keV]')\n", (4839, 4865), True, 'import matplotlib.pyplot as plt\n'), ((4866, 4990), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Axion flux $\\\\mathrm{d}\\\\Phi_a/\\\\mathrm{d}\\\\omega$ [\\\\SI{e10}{\\\\per\\\\cm\\\\squared\\\\per\\\\keV\\\\per\\\\s}]"""'], {}), "(\n 'Axion flux $\\\\mathrm{d}\\\\Phi_a/\\\\mathrm{d}\\\\omega$ [\\\\SI{e10}{\\\\per\\\\cm\\\\squared\\\\per\\\\keV\\\\per\\\\s}]'\n )\n", (4876, 4990), True, 'import matplotlib.pyplot as plt\n'), ((4970, 4992), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0.001, 0.4]'], {}), '([0.001, 0.4])\n', (4978, 4992), True, 'import matplotlib.pyplot as plt\n'), ((4992, 5009), 'matplotlib.pyplot.yscale', 'plt.yscale', (['"""log"""'], {}), "('log')\n", (5002, 5009), True, 'import matplotlib.pyplot as plt\n'), ((5010, 5027), 'matplotlib.pyplot.xscale', 'plt.xscale', (['"""log"""'], {}), "('log')\n", (5020, 5027), True, 'import matplotlib.pyplot as plt\n'), ((5028, 5047), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0.0, 37]'], {}), '([0.0, 37])\n', (5036, 5047), True, 'import matplotlib.pyplot as plt\n'), ((5048, 5073), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'frameon': 
'(False)'}), '(frameon=False)\n', (5058, 5073), True, 'import matplotlib.pyplot as plt\n'), ((5075, 5134), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""validation_Lplasmon.pdf"""'], {'bbox_inches': '"""tight"""'}), "('validation_Lplasmon.pdf', bbox_inches='tight')\n", (5086, 5134), True, 'import matplotlib.pyplot as plt\n'), ((5135, 5145), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5143, 5145), True, 'import matplotlib.pyplot as plt\n'), ((5146, 5157), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (5155, 5157), True, 'import matplotlib.pyplot as plt\n'), ((5169, 5183), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (5181, 5183), True, 'import matplotlib.pyplot as plt\n'), ((5249, 5346), 'matplotlib.pyplot.plot', 'plt.plot', (['ref2[:, 0]', '(100.0 * conv_fac * (0.5 * ref2[:, 1]))', '"""b-"""'], {'label': '"""Compton (Redondo)"""'}), "(ref2[:, 0], 100.0 * conv_fac * (0.5 * ref2[:, 1]), 'b-', label=\n 'Compton (Redondo)')\n", (5257, 5346), True, 'import matplotlib.pyplot as plt\n'), ((5335, 5414), 'matplotlib.pyplot.plot', 'plt.plot', (['ref3[:, 0]', '(100.0 * conv_fac * ref3[:, 1])', '"""m-"""'], {'label': '"""FF (Redondo)"""'}), "(ref3[:, 0], 100.0 * conv_fac * ref3[:, 1], 'm-', label='FF (Redondo)')\n", (5343, 5414), True, 'import matplotlib.pyplot as plt\n'), ((5550, 5638), 'matplotlib.pyplot.plot', 'plt.plot', (['res2[:, 0]', '(res2[:, 1] / 100000000.0)', '"""k--"""'], {'label': '"""Compton (B16-AGSS09)"""'}), "(res2[:, 0], res2[:, 1] / 100000000.0, 'k--', label=\n 'Compton (B16-AGSS09)')\n", (5558, 5638), True, 'import matplotlib.pyplot as plt\n'), ((5625, 5703), 'matplotlib.pyplot.plot', 'plt.plot', (['res3[:, 0]', '(res3[:, 1] / 100000000.0)', '"""k--"""'], {'label': '"""FF (B16-AGSS09)"""'}), "(res3[:, 0], res3[:, 1] / 100000000.0, 'k--', label='FF (B16-AGSS09)')\n", (5633, 5703), True, 'import matplotlib.pyplot as plt\n'), ((5695, 5774), 'matplotlib.pyplot.plot', 'plt.plot', (['res4[:, 0]', '(res4[:, 1] / 100000000.0)', '"""k--"""'], {'label': '"""All (B16-AGSS09)"""'}), "(res4[:, 0], res4[:, 1] / 100000000.0, 'k--', label='All (B16-AGSS09)')\n", (5703, 5774), True, 'import matplotlib.pyplot as plt\n'), ((5766, 5853), 'matplotlib.pyplot.plot', 'plt.plot', (['res5[:, 0]', '(res5[:, 1] / 100000000.0)', '"""k--"""'], {'label': '"""Metals (B16-AGSS09)"""'}), "(res5[:, 0], res5[:, 1] / 100000000.0, 'k--', label=\n 'Metals (B16-AGSS09)')\n", (5774, 5853), True, 'import matplotlib.pyplot as plt\n'), ((5841, 5920), 'matplotlib.pyplot.title', 'plt.title', (['"""Axion-electron interactions, $g_{aee} = \\\\num{e-13}$, OP opacities"""'], {}), "('Axion-electron interactions, $g_{aee} = \\\\num{e-13}$, OP opacities')\n", (5850, 5920), True, 'import matplotlib.pyplot as plt\n'), ((5921, 5957), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Energy $\\\\omega$ [keV]"""'], {}), "('Energy $\\\\omega$ [keV]')\n", (5931, 5957), True, 'import matplotlib.pyplot as plt\n'), ((5958, 6081), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Axion flux $\\\\mathrm{d}\\\\Phi_a/\\\\mathrm{d}\\\\omega$ [\\\\SI{e8}{\\\\per\\\\cm\\\\squared\\\\per\\\\keV\\\\per\\\\s}]"""'], {}), "(\n 'Axion flux $\\\\mathrm{d}\\\\Phi_a/\\\\mathrm{d}\\\\omega$ [\\\\SI{e8}{\\\\per\\\\cm\\\\squared\\\\per\\\\keV\\\\per\\\\s}]'\n )\n", (5968, 6081), True, 'import matplotlib.pyplot as plt\n'), ((6061, 6078), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[0, 10]'], {}), '([0, 10])\n', (6069, 6078), True, 'import matplotlib.pyplot as plt\n'), ((6078, 6095), 'matplotlib.pyplot.ylim', 'plt.ylim', 
(['[0, 12]'], {}), '([0, 12])\n', (6086, 6095), True, 'import matplotlib.pyplot as plt\n'), ((6096, 6129), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'ncol': '(2)', 'frameon': '(False)'}), '(ncol=2, frameon=False)\n', (6106, 6129), True, 'import matplotlib.pyplot as plt\n'), ((6131, 6165), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""validation_gaee.pdf"""'], {}), "('validation_gaee.pdf')\n", (6142, 6165), True, 'import matplotlib.pyplot as plt\n'), ((6178, 6189), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (6187, 6189), True, 'import matplotlib.pyplot as plt\n'), ((2133, 2155), 'numpy.exp', 'np.exp', (['(-omega / 1.205)'], {}), '(-omega / 1.205)\n', (2139, 2155), True, 'import numpy as np\n'), ((2918, 2940), 'numpy.exp', 'np.exp', (['(-omega / 1.205)'], {}), '(-omega / 1.205)\n', (2924, 2940), True, 'import numpy as np\n'), ((3977, 3999), 'numpy.exp', 'np.exp', (['(-omega / 1.205)'], {}), '(-omega / 1.205)\n', (3983, 3999), True, 'import numpy as np\n')]
|
from __future__ import division
"""
Workflow Maker
==============
Handy function to build dynamic workflows using BIDS formatted data files.
"""
# These imports are not nipype-dependent so we can import them early; see config notes below
import matplotlib
matplotlib.use('Agg')
import nibabel as nib
import os
from .utils import get_resource_path
import six
from bids.grabbids import BIDSLayout
def wfmaker(project_dir, raw_dir, subject_id, task_name='', apply_trim=False, apply_dist_corr=False, apply_smooth=False, apply_filter=False, mni_template='2mm', apply_n4=True, ants_threads=8, readable_crash_files=False):
"""
    This function returns a "standard" workflow based on requested settings. Assumes data is organized in a BIDS-formatted directory structure under project_dir/raw_dir.
*Work flow steps*:
1) EPI Distortion Correction (FSL; optional)
2) Trimming (nipy)
3) Realignment/Motion Correction (FSL)
4) Artifact Detection (rapidART/python)
5) Brain Extraction + N4 Bias Correction (ANTs)
6) Coregistration (rigid) (ANTs)
7) Normalization to MNI (non-linear) (ANTs)
        8) Low-pass filtering (nilearn; optional)
        9) Smoothing (FSL; optional)
        10) Downsampling to INT16 precision to save space (nibabel)
Args:
        project_dir (str): full path to the root of project folder, e.g. /my/data/myproject. All preprocessed data will be placed under this folder and the raw_dir folder will be searched for under this folder
raw_dir (str): folder name for raw data, e.g. 'raw' which would be automatically converted to /my/data/myproject/raw
subject_id (str/int): subject ID to process. Can be either a subject ID string e.g. 'sid-0001' or an integer to index the entire list of subjects in raw_dir, e.g. 0, which would process the first subject
        apply_trim (int/bool; optional): number of volumes to trim from the beginning of each functional run; default False
task_name (str; optional): which functional task runs to process; default is all runs
apply_dist_corr (bool; optional): look for fmap files and perform distortion correction; default False
        apply_smooth (int/list; optional): smoothing to perform in FWHM mm; if a list is provided will create outputs for each smoothing kernel separately; default False
        apply_filter (float/list; optional): low-pass/high-freq filtering cut-offs in Hz; if a list is provided will create outputs for each filter cut-off separately. With high temporal resolution scans .25Hz is a decent value to capture respiratory artifacts; default False
mni_template (str; optional): which mm resolution template to use, e.g. '3mm'; default '2mm'
        apply_n4 (bool; optional): perform N4 Bias Field correction on the anatomical image; default True
ants_threads (int; optional): number of threads ANTs should use for its processes; default 8
readable_crash_files (bool; optional): should nipype crash files be saved as txt? This makes them easily readable, but sometimes interferes with nipype's ability to use cached results of successfully run nodes (i.e. picking up where it left off after bugs are fixed); default False
Examples:
>>> from cosanlab_preproc.wfmaker import wfmaker
        >>> # Create workflow that performs no distortion correction, trims first 5 TRs, no filtering, no smoothing, and normalizes to 2mm MNI space. Run it with 16 cores.
>>>
>>> workflow = wfmaker(
project_dir = '/data/project',
raw_dir = 'raw',
apply_trim = 5)
>>>
>>> workflow.run('MultiProc',plugin_args = {'n_procs': 16})
>>>
>>> # Create workflow that performs distortion correction, trims first 25 TRs, no filtering and filtering .25hz, 6mm and 8mm smoothing, and normalizes to 3mm MNI space. Run it serially (will be super slow!).
>>>
>>> workflow = wfmaker(
project_dir = '/data/project',
raw_dir = 'raw',
apply_trim = 25,
apply_dist_corr = True,
apply_filter = [0, .25],
apply_smooth = [6.0, 8.0],
                        mni_template = '3mm')
>>>
>>> workflow.run()
"""
##################
### PATH SETUP ###
##################
if mni_template not in ['1mm','2mm','3mm']:
raise ValueError("MNI template must be: 1mm, 2mm, or 3mm")
data_dir = os.path.join(project_dir,raw_dir)
output_dir = os.path.join(project_dir,'preprocessed')
output_final_dir = os.path.join(output_dir,'final')
output_interm_dir = os.path.join(output_dir,'intermediate')
log_dir = os.path.join(project_dir,'logs','nipype')
if not os.path.exists(output_final_dir):
os.makedirs(output_final_dir)
if not os.path.exists(output_interm_dir):
os.makedirs(output_interm_dir)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
# Set MNI template
MNItemplate = os.path.join(get_resource_path(),'MNI152_T1_' + mni_template + '_brain.nii.gz')
MNImask = os.path.join(get_resource_path(),'MNI152_T1_' + mni_template + '_brain_mask.nii.gz')
MNItemplatehasskull = os.path.join(get_resource_path(),'MNI152_T1_' + mni_template + '.nii.gz')
# Set ANTs files
bet_ants_template = os.path.join(get_resource_path(),'OASIS_template.nii.gz')
bet_ants_prob_mask = os.path.join(get_resource_path(),'OASIS_BrainCerebellumProbabilityMask.nii.gz')
bet_ants_registration_mask = os.path.join(get_resource_path(),'OASIS_BrainCerebellumRegistrationMask.nii.gz')
#################################
### NIPYPE IMPORTS AND CONFIG ###
#################################
    # Update nipype global config because workflow.config[] = ... doesn't seem to work
    # Can't store nipype config/rc file in container anyway so set them globally before importing and setting up workflow as suggested here: http://nipype.readthedocs.io/en/latest/users/config_file.html#config-file
from nipype import config
if readable_crash_files:
cfg = dict(execution={'crashfile_format':'txt'})
config.update_config(cfg)
config.update_config({'logging':{'log_directory':log_dir,'log_to_file':True}})
from nipype import logging
logging.update_logging(config)
# Now import everything else
from nipype.interfaces.io import DataSink
from nipype.interfaces.utility import Merge, IdentityInterface
from nipype.pipeline.engine import Node, Workflow
from nipype.interfaces.nipy.preprocess import ComputeMask
from nipype.algorithms.rapidart import ArtifactDetect
from nipype.interfaces.ants.segmentation import BrainExtraction, N4BiasFieldCorrection
from nipype.interfaces.ants import Registration, ApplyTransforms
from nipype.interfaces.fsl import MCFLIRT, TOPUP, ApplyTOPUP
from nipype.interfaces.fsl.maths import MeanImage
from nipype.interfaces.fsl import Merge as MERGE
from nipype.interfaces.fsl.utils import Smooth
from nipype.interfaces.nipy.preprocess import Trim
from .interfaces import Plot_Coregistration_Montage,Plot_Quality_Control,Plot_Realignment_Parameters,Create_Covariates,Down_Sample_Precision,Create_Encoding_File, Filter_In_Mask
##################
### INPUT NODE ###
##################
layout = BIDSLayout(data_dir)
    # Dartmouth subjects are named with the sub- prefix; handle whether we receive an integer index into the subject list or the full subject id with prefix
if isinstance(subject_id, six.string_types):
subId = subject_id[4:]
elif isinstance(subject_id, int):
subId = layout.get_subjects()[subject_id]
subject_id = 'sub-' + subId
else:
raise TypeError("subject_id should be a string or integer")
#Get anat file location
anat = layout.get(subject=subId,type='T1w',extensions='.nii.gz')[0].filename
#Get functional file locations
if task_name:
funcs = [f.filename for f in layout.get(subject=subId,type='bold',task=task_name,extensions='.nii.gz')]
else:
funcs = [f.filename for f in layout.get(subject=subId,type='bold',extensions='.nii.gz')]
#Turn functional file list into interable Node
func_scans = Node(IdentityInterface(fields=['scan']),name='func_scans')
func_scans.iterables = ('scan',funcs)
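    # 'scan' is set as an iterable, so nipype expands the downstream graph once per functional run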
#Get TR for use in filtering below; we're assuming all BOLD runs have the same TR
tr_length = layout.get_metadata(funcs[0])['RepetitionTime']
#####################################
## TRIM ##
#####################################
if apply_trim:
trim = Node(Trim(),name = 'trim')
trim.inputs.begin_index = apply_trim
#####################################
## DISTORTION CORRECTION ##
#####################################
if apply_dist_corr:
#Get fmap file locations
fmaps = [f.filename for f in layout.get(subject=subId,modality='fmap',extensions='.nii.gz')]
if not fmaps:
raise IOError("Distortion Correction requested but field map scans not found...")
#Get fmap metadata
totalReadoutTimes, measurements, fmap_pes = [],[],[]
for i, fmap in enumerate(fmaps):
# Grab total readout time for each fmap
totalReadoutTimes.append(layout.get_metadata(fmap)['TotalReadoutTime'])
            # Grab measurements (for some reason pyBIDS doesn't grab dcm_meta... fields from the side-car json file and json.load doesn't either; so instead just read the header using nibabel to determine the number of scans)
measurements.append(nib.load(fmap).header['dim'][4])
# Get phase encoding direction
fmap_pe = layout.get_metadata(fmap)["PhaseEncodingDirection"]
fmap_pes.append(fmap_pe)
encoding_file_writer = Node(interface=Create_Encoding_File(),name='create_encoding')
encoding_file_writer.inputs.totalReadoutTimes = totalReadoutTimes
encoding_file_writer.inputs.fmaps = fmaps
encoding_file_writer.inputs.fmap_pes = fmap_pes
encoding_file_writer.inputs.measurements = measurements
encoding_file_writer.inputs.file_name='encoding_file.txt'
merge_to_file_list = Node(interface=Merge(2), infields = ['in1','in2'],name='merge_to_file_list')
merge_to_file_list.inputs.in1 = fmaps[0]
        merge_to_file_list.inputs.in2 = fmaps[1]
#Merge AP and PA distortion correction scans
merger = Node(interface=MERGE(dimension='t'),name='merger')
merger.inputs.output_type = 'NIFTI_GZ'
merger.inputs.in_files = fmaps
merger.inputs.merged_file = 'merged_epi.nii.gz'
#Create distortion correction map
topup = Node(interface=TOPUP(),name='topup')
topup.inputs.output_type = 'NIFTI_GZ'
#Apply distortion correction to other scans
apply_topup = Node(interface=ApplyTOPUP(),name='apply_topup')
apply_topup.inputs.output_type = 'NIFTI_GZ'
apply_topup.inputs.method = 'jac'
apply_topup.inputs.interp = 'spline'
###################################
### REALIGN ###
###################################
realign_fsl = Node(MCFLIRT(),name="realign")
realign_fsl.inputs.cost = 'mutualinfo'
realign_fsl.inputs.mean_vol = True
realign_fsl.inputs.output_type = 'NIFTI_GZ'
realign_fsl.inputs.save_mats = True
realign_fsl.inputs.save_rms = True
realign_fsl.inputs.save_plots = True
###################################
### MEAN EPIs ###
###################################
#For coregistration after realignment
mean_epi = Node(MeanImage(),name='mean_epi')
mean_epi.inputs.dimension = 'T'
#For after normalization is done to plot checks
mean_norm_epi = Node(MeanImage(),name='mean_norm_epi')
mean_norm_epi.inputs.dimension = 'T'
###################################
### MASK, ART, COV CREATION ###
###################################
compute_mask = Node(ComputeMask(), name='compute_mask')
compute_mask.inputs.m = .05
art = Node(ArtifactDetect(),name='art')
art.inputs.use_differences = [True, False]
art.inputs.use_norm = True
art.inputs.norm_threshold = 1
art.inputs.zintensity_threshold = 3
art.inputs.mask_type = 'file'
art.inputs.parameter_source = 'FSL'
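    # With these settings rapidART flags volumes exceeding the composite motion norm threshold (1) or the global intensity z-threshold (3) as outliers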
make_cov = Node(Create_Covariates(),name='make_cov')
################################
### N4 BIAS FIELD CORRECTION ###
################################
if apply_n4:
n4_correction = Node(N4BiasFieldCorrection(), name='n4_correction')
n4_correction.inputs.copy_header = True
n4_correction.inputs.save_bias = False
n4_correction.inputs.num_threads = ants_threads
n4_correction.inputs.input_image = anat
###################################
### BRAIN EXTRACTION ###
###################################
brain_extraction_ants = Node(BrainExtraction(),name='brain_extraction')
brain_extraction_ants.inputs.dimension = 3
brain_extraction_ants.inputs.use_floatingpoint_precision = 1
brain_extraction_ants.inputs.num_threads = ants_threads
brain_extraction_ants.inputs.brain_probability_mask = bet_ants_prob_mask
brain_extraction_ants.inputs.keep_temporary_files = 1
brain_extraction_ants.inputs.brain_template = bet_ants_template
brain_extraction_ants.inputs.extraction_registration_mask = bet_ants_registration_mask
brain_extraction_ants.inputs.out_prefix = 'bet'
###################################
### COREGISTRATION ###
###################################
coregistration = Node(Registration(), name='coregistration')
coregistration.inputs.float = False
coregistration.inputs.output_transform_prefix = "meanEpi2highres"
coregistration.inputs.transforms = ['Rigid']
coregistration.inputs.transform_parameters = [(0.1,), (0.1,)]
coregistration.inputs.number_of_iterations = [[1000,500,250,100]]
coregistration.inputs.dimension = 3
coregistration.inputs.num_threads = ants_threads
coregistration.inputs.write_composite_transform = True
coregistration.inputs.collapse_output_transforms = True
coregistration.inputs.metric = ['MI']
coregistration.inputs.metric_weight = [1]
coregistration.inputs.radius_or_number_of_bins = [32]
coregistration.inputs.sampling_strategy = ['Regular']
coregistration.inputs.sampling_percentage = [0.25]
coregistration.inputs.convergence_threshold = [1e-08]
coregistration.inputs.convergence_window_size = [10]
coregistration.inputs.smoothing_sigmas = [[3,2,1,0]]
coregistration.inputs.sigma_units = ['mm']
coregistration.inputs.shrink_factors = [[4,3,2,1]]
coregistration.inputs.use_estimate_learning_rate_once = [True]
coregistration.inputs.use_histogram_matching = [False]
coregistration.inputs.initial_moving_transform_com = True
coregistration.inputs.output_warped_image = True
coregistration.inputs.winsorize_lower_quantile = 0.01
coregistration.inputs.winsorize_upper_quantile = 0.99
###################################
### NORMALIZATION ###
###################################
# Settings Explanations
# Only a few key settings are worth adjusting and most others relate to how ANTs optimizer starts or iterates and won't make a ton of difference
# B<NAME> referred to these settings as the last "best tested" when he was aligning fMRI data: https://github.com/ANTsX/ANTsRCore/blob/master/R/antsRegistration.R#L275
# Things that matter the most:
# smoothing_sigmas:
# how much gaussian smoothing to apply when performing registration, probably want the upper limit of this to match the resolution that the data is collected at e.g. 3mm
# Old settings [[3,2,1,0]]*3
# shrink_factors
# The coarseness with which to do registration
# Old settings [[8,4,2,1]] * 3
# >= 8 may result is some problems causing big chunks of cortex with little fine grain spatial structure to be moved to other parts of cortex
# Other settings
# transform_parameters:
# how much regularization to do for fitting that transformation
# for syn this pertains to both the gradient regularization term, and the flow, and elastic terms. Leave the syn settings alone as they seem to be the most well tested across published data sets
# radius_or_number_of_bins
# This is the bin size for MI metrics and 32 is probably adequate for most use cases. Increasing this might increase precision (e.g. to 64) but takes exponentially longer
# use_histogram_matching
# Use image intensity distribution to guide registration
# Leave it on for within modality registration (e.g. T1 -> MNI), but off for between modality registration (e.g. EPI -> T1)
# convergence_threshold
# threshold for optimizer
# convergence_window_size
# how many samples should optimizer average to compute threshold?
# sampling_strategy
# what strategy should ANTs use to initialize the transform. Regular here refers to approximately random sampling around the center of the image mass
normalization = Node(Registration(),name='normalization')
normalization.inputs.float = False
normalization.inputs.collapse_output_transforms=True
normalization.inputs.convergence_threshold=[1e-06,1e-06,1e-07]
normalization.inputs.convergence_window_size=[10]
normalization.inputs.dimension = 3
normalization.inputs.fixed_image = MNItemplate
normalization.inputs.initial_moving_transform_com=True
normalization.inputs.metric=['MI', 'MI', 'CC']
normalization.inputs.metric_weight=[1.0]*3
normalization.inputs.number_of_iterations=[[1000, 500, 250, 100],
[1000, 500, 250, 100],
[100, 70, 50, 20]]
normalization.inputs.num_threads= ants_threads
normalization.inputs.output_transform_prefix = 'anat2template'
normalization.inputs.output_inverse_warped_image=True
normalization.inputs.output_warped_image = True
normalization.inputs.radius_or_number_of_bins=[32, 32, 4]
normalization.inputs.sampling_percentage=[0.25, 0.25, 1]
normalization.inputs.sampling_strategy=['Regular',
'Regular',
'None']
normalization.inputs.shrink_factors=[[4, 3, 2, 1]]*3
normalization.inputs.sigma_units=['vox']*3
normalization.inputs.smoothing_sigmas=[[2,1],[2,1],[3, 2, 1, 0]]
normalization.inputs.transforms = ['Rigid','Affine','SyN']
normalization.inputs.transform_parameters=[(0.1,),
(0.1,),
(0.1, 3.0, 0.0)]
normalization.inputs.use_histogram_matching=True
normalization.inputs.winsorize_lower_quantile=0.005
normalization.inputs.winsorize_upper_quantile=0.995
normalization.inputs.write_composite_transform=True
###################################
### APPLY TRANSFORMS AND SMOOTH ###
###################################
merge_transforms = Node(Merge(2), iterfield=['in2'], name ='merge_transforms')
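    # in1 carries the anat -> MNI warp and in2 the mean EPI -> anat rigid transform (see the connections below), so the merged transform list takes realigned EPIs directly to MNI space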
# Used for epi -> mni, via (coreg + norm)
apply_transforms = Node(ApplyTransforms(),iterfield=['input_image'],name='apply_transforms')
apply_transforms.inputs.input_image_type = 3
apply_transforms.inputs.float = False
apply_transforms.inputs.num_threads = 12
apply_transforms.inputs.environ = {}
apply_transforms.inputs.interpolation = 'BSpline'
apply_transforms.inputs.invert_transform_flags = [False, False]
apply_transforms.inputs.reference_image = MNItemplate
# Used for t1 segmented -> mni, via (norm)
apply_transform_seg = Node(ApplyTransforms(),name='apply_transform_seg')
apply_transform_seg.inputs.input_image_type = 3
apply_transform_seg.inputs.float = False
apply_transform_seg.inputs.num_threads = 12
apply_transform_seg.inputs.environ = {}
apply_transform_seg.inputs.interpolation = 'MultiLabel'
apply_transform_seg.inputs.invert_transform_flags = [False]
apply_transform_seg.inputs.reference_image = MNItemplate
###################################
### PLOTS ###
###################################
plot_realign = Node(Plot_Realignment_Parameters(),name="plot_realign")
plot_qa = Node(Plot_Quality_Control(),name="plot_qa")
plot_normalization_check = Node(Plot_Coregistration_Montage(),name="plot_normalization_check")
plot_normalization_check.inputs.canonical_img = MNItemplatehasskull
############################################
### FILTER, SMOOTH, DOWNSAMPLE PRECISION ###
############################################
#Use cosanlab_preproc for down sampling
down_samp = Node(Down_Sample_Precision(),name="down_samp")
#Use FSL for smoothing
if apply_smooth:
smooth = Node(Smooth(),name='smooth')
if isinstance(apply_smooth, list):
smooth.iterables = ("fwhm",apply_smooth)
elif isinstance(apply_smooth, int) or isinstance(apply_smooth, float):
smooth.inputs.fwhm = apply_smooth
else:
raise ValueError("apply_smooth must be a list or int/float")
#Use cosanlab_preproc for low-pass filtering
if apply_filter:
lp_filter = Node(Filter_In_Mask(),name='lp_filter')
lp_filter.inputs.mask = MNImask
lp_filter.inputs.sampling_rate = tr_length
lp_filter.inputs.high_pass_cutoff = 0
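        # Only low-pass filtering is applied (high-pass cut-off of 0); the low-pass cut-off(s) are set or iterated just below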
if isinstance(apply_filter,list):
lp_filter.iterables = ("low_pass_cutoff",apply_filter)
elif isinstance(apply_filter, int) or isinstance(apply_filter, float):
lp_filter.inputs.low_pass_cutoff = apply_filter
else:
raise ValueError("apply_filter must be a list or int/float")
###################
### OUTPUT NODE ###
###################
#Collect all final outputs in the output dir and get rid of file name additions
datasink = Node(DataSink(),name='datasink')
datasink.inputs.base_directory = output_final_dir
datasink.inputs.container = subject_id
# Remove substitutions
data_dir_parts = data_dir.split('/')[1:]
prefix = ['_scan_'] + data_dir_parts + [subject_id] + ['func']
func_scan_names = [os.path.split(elem)[-1] for elem in funcs]
to_replace = []
for elem in func_scan_names:
bold_name = elem.split(subject_id + '_')[-1]
bold_name = bold_name.split('.nii.gz')[0]
to_replace.append(('..'.join(prefix + [elem]), bold_name))
datasink.inputs.substitutions = to_replace
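    # These substitutions map nipype's auto-generated '_scan_...' output paths back to the original BOLD run names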
#####################
### INIT WORKFLOW ###
#####################
workflow = Workflow(name=subId)
workflow.base_dir = output_interm_dir
############################
######### PART (1a) #########
# func -> discorr -> trim -> realign
# OR
# func -> trim -> realign
# OR
# func -> discorr -> realign
# OR
# func -> realign
############################
if apply_dist_corr:
workflow.connect([
(encoding_file_writer, topup,[('encoding_file','encoding_file')]),
(encoding_file_writer, apply_topup,[('encoding_file','encoding_file')]),
(merger,topup,[('merged_file','in_file')]),
(func_scans,apply_topup,[('scan','in_files')]),
(topup,apply_topup,[('out_fieldcoef','in_topup_fieldcoef'),
('out_movpar','in_topup_movpar')])
])
if apply_trim:
# Dist Corr + Trim
workflow.connect([
(apply_topup,trim,[('out_corrected','in_file')]),
(trim, realign_fsl, [('out_file','in_file')])
])
else:
# Dist Corr + No Trim
workflow.connect([
(apply_topup,realign_fsl,[('out_corrected','in_file')])
])
else:
if apply_trim:
# No Dist Corr + Trim
workflow.connect([
(func_scans, trim, [('scan','in_file')]),
(trim, realign_fsl, [('out_file','in_file')])
])
else:
# No Dist Corr + No Trim
workflow.connect([
(func_scans, realign_fsl, [('scan','in_file')]),
])
############################
######### PART (1n) #########
# anat -> N4 -> bet
# OR
# anat -> bet
############################
if apply_n4:
workflow.connect([
(n4_correction, brain_extraction_ants, [('output_image','anatomical_image')])
])
else:
brain_extraction_ants.inputs.anatomical_image = anat
##########################################
############### PART (2) #################
# realign -> coreg -> mni (via t1)
# t1 -> mni
# covariate creation
# plot creation
###########################################
workflow.connect([
(realign_fsl, plot_realign, [('par_file','realignment_parameters')]),
(realign_fsl, plot_qa, [('out_file','dat_img')]),
(realign_fsl, art, [('out_file','realigned_files'),
('par_file','realignment_parameters')]),
(realign_fsl, mean_epi, [('out_file','in_file')]),
(realign_fsl, make_cov, [('par_file','realignment_parameters')]),
(mean_epi, compute_mask, [('out_file','mean_volume')]),
(compute_mask, art, [('brain_mask','mask_file')]),
(art, make_cov, [('outlier_files','spike_id')]),
(art, plot_realign, [('outlier_files','outliers')]),
(plot_qa, make_cov, [('fd_outliers','fd_outliers')]),
(brain_extraction_ants, coregistration, [('BrainExtractionBrain','fixed_image')]),
(mean_epi, coregistration, [('out_file','moving_image')]),
(brain_extraction_ants, normalization, [('BrainExtractionBrain','moving_image')]),
(coregistration, merge_transforms, [('composite_transform','in2')]),
(normalization, merge_transforms, [('composite_transform','in1')]),
(merge_transforms, apply_transforms, [('out','transforms')]),
(realign_fsl, apply_transforms, [('out_file','input_image')]),
(apply_transforms, mean_norm_epi, [('output_image','in_file')]),
(normalization, apply_transform_seg, [('composite_transform','transforms')]),
(brain_extraction_ants, apply_transform_seg, [('BrainExtractionSegmentation','input_image')]),
(mean_norm_epi, plot_normalization_check, [('out_file','wra_img')])
])
##################################################
################### PART (3) #####################
# epi (in mni) -> filter -> smooth -> down sample
# OR
# epi (in mni) -> filter -> down sample
# OR
# epi (in mni) -> smooth -> down sample
# OR
# epi (in mni) -> down sample
###################################################
if apply_filter:
workflow.connect([
(apply_transforms, lp_filter, [('output_image','in_file')])
])
if apply_smooth:
# Filtering + Smoothing
workflow.connect([
(lp_filter, smooth, [('out_file','in_file')]),
(smooth, down_samp, [('smoothed_file','in_file')])
])
else:
# Filtering + No Smoothing
workflow.connect([
(lp_filter, down_samp, [('out_file','in_file')])
])
else:
if apply_smooth:
# No Filtering + Smoothing
workflow.connect([
(apply_transforms, smooth, [('output_image', 'in_file')]),
(smooth, down_samp, [('smoothed_file','in_file')])
])
else:
# No Filtering + No Smoothing
workflow.connect([
(apply_transforms, down_samp, [('output_image', 'in_file')])
])
##########################################
############### PART (4) #################
# down sample -> save
# plots -> save
# covs -> save
# t1 (in mni) -> save
# t1 segmented masks (in mni) -> save
##########################################
workflow.connect([
(down_samp, datasink, [('out_file','functional.@down_samp')]),
(plot_realign, datasink, [('plot','functional.@plot_realign')]),
(plot_qa, datasink, [('plot','functional.@plot_qa')]),
(plot_normalization_check, datasink, [('plot','functional.@plot_normalization')]),
(make_cov, datasink, [('covariates','functional.@covariates')]),
(normalization, datasink, [('warped_image','structural.@normanat')]),
(apply_transform_seg, datasink,[('output_image','structural.@normanatseg')])
])
if not os.path.exists(os.path.join(output_dir,'pipeline.png')):
workflow.write_graph(dotfilename=os.path.join(output_dir,'pipeline'),format='png')
print(f"Creating workflow for subject: {subject_id}")
if ants_threads == 8:
print(f"ANTs will utilize the default of {ants_threads} threads for parallel processing.")
else:
print(f"ANTs will utilize the user-requested {ants_threads} threads for parallel processing.")
return workflow
|
[
"nipype.interfaces.fsl.ApplyTOPUP",
"nipype.interfaces.utility.IdentityInterface",
"nipype.interfaces.ants.Registration",
"nipype.interfaces.ants.ApplyTransforms",
"nipype.interfaces.fsl.Merge",
"os.path.join",
"bids.grabbids.BIDSLayout",
"os.path.exists",
"nipype.interfaces.fsl.maths.MeanImage",
"nipype.interfaces.fsl.MCFLIRT",
"nipype.pipeline.engine.Workflow",
"nipype.interfaces.ants.segmentation.BrainExtraction",
"nipype.interfaces.fsl.TOPUP",
"nipype.interfaces.fsl.utils.Smooth",
"nipype.config.update_config",
"nipype.interfaces.utility.Merge",
"matplotlib.use",
"nipype.interfaces.io.DataSink",
"nipype.algorithms.rapidart.ArtifactDetect",
"nipype.interfaces.nipy.preprocess.ComputeMask",
"nipype.interfaces.nipy.preprocess.Trim",
"os.makedirs",
"nibabel.load",
"nipype.logging.update_logging",
"nipype.interfaces.ants.segmentation.N4BiasFieldCorrection",
"os.path.split"
] |
[((260, 281), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (274, 281), False, 'import matplotlib\n'), ((4507, 4541), 'os.path.join', 'os.path.join', (['project_dir', 'raw_dir'], {}), '(project_dir, raw_dir)\n', (4519, 4541), False, 'import os\n'), ((4558, 4599), 'os.path.join', 'os.path.join', (['project_dir', '"""preprocessed"""'], {}), "(project_dir, 'preprocessed')\n", (4570, 4599), False, 'import os\n'), ((4622, 4655), 'os.path.join', 'os.path.join', (['output_dir', '"""final"""'], {}), "(output_dir, 'final')\n", (4634, 4655), False, 'import os\n'), ((4679, 4719), 'os.path.join', 'os.path.join', (['output_dir', '"""intermediate"""'], {}), "(output_dir, 'intermediate')\n", (4691, 4719), False, 'import os\n'), ((4733, 4776), 'os.path.join', 'os.path.join', (['project_dir', '"""logs"""', '"""nipype"""'], {}), "(project_dir, 'logs', 'nipype')\n", (4745, 4776), False, 'import os\n'), ((6224, 6311), 'nipype.config.update_config', 'config.update_config', (["{'logging': {'log_directory': log_dir, 'log_to_file': True}}"], {}), "({'logging': {'log_directory': log_dir, 'log_to_file': \n True}})\n", (6244, 6311), False, 'from nipype import config\n'), ((6338, 6368), 'nipype.logging.update_logging', 'logging.update_logging', (['config'], {}), '(config)\n', (6360, 6368), False, 'from nipype import logging\n'), ((7394, 7414), 'bids.grabbids.BIDSLayout', 'BIDSLayout', (['data_dir'], {}), '(data_dir)\n', (7404, 7414), False, 'from bids.grabbids import BIDSLayout\n'), ((22798, 22818), 'nipype.pipeline.engine.Workflow', 'Workflow', ([], {'name': 'subId'}), '(name=subId)\n', (22806, 22818), False, 'from nipype.pipeline.engine import Node, Workflow\n'), ((4787, 4819), 'os.path.exists', 'os.path.exists', (['output_final_dir'], {}), '(output_final_dir)\n', (4801, 4819), False, 'import os\n'), ((4829, 4858), 'os.makedirs', 'os.makedirs', (['output_final_dir'], {}), '(output_final_dir)\n', (4840, 4858), False, 'import os\n'), ((4870, 4903), 'os.path.exists', 'os.path.exists', (['output_interm_dir'], {}), '(output_interm_dir)\n', (4884, 4903), False, 'import os\n'), ((4913, 4943), 'os.makedirs', 'os.makedirs', (['output_interm_dir'], {}), '(output_interm_dir)\n', (4924, 4943), False, 'import os\n'), ((4955, 4978), 'os.path.exists', 'os.path.exists', (['log_dir'], {}), '(log_dir)\n', (4969, 4978), False, 'import os\n'), ((4988, 5008), 'os.makedirs', 'os.makedirs', (['log_dir'], {}), '(log_dir)\n', (4999, 5008), False, 'import os\n'), ((6194, 6219), 'nipype.config.update_config', 'config.update_config', (['cfg'], {}), '(cfg)\n', (6214, 6219), False, 'from nipype import config\n'), ((8308, 8342), 'nipype.interfaces.utility.IdentityInterface', 'IdentityInterface', ([], {'fields': "['scan']"}), "(fields=['scan'])\n", (8325, 8342), False, 'from nipype.interfaces.utility import Merge, IdentityInterface\n'), ((11264, 11273), 'nipype.interfaces.fsl.MCFLIRT', 'MCFLIRT', ([], {}), '()\n', (11271, 11273), False, 'from nipype.interfaces.fsl import MCFLIRT, TOPUP, ApplyTOPUP\n'), ((11705, 11716), 'nipype.interfaces.fsl.maths.MeanImage', 'MeanImage', ([], {}), '()\n', (11714, 11716), False, 'from nipype.interfaces.fsl.maths import MeanImage\n'), ((11848, 11859), 'nipype.interfaces.fsl.maths.MeanImage', 'MeanImage', ([], {}), '()\n', (11857, 11859), False, 'from nipype.interfaces.fsl.maths import MeanImage\n'), ((12064, 12077), 'nipype.interfaces.nipy.preprocess.ComputeMask', 'ComputeMask', ([], {}), '()\n', (12075, 12077), False, 'from nipype.interfaces.nipy.preprocess import ComputeMask\n'), ((12148, 
12164), 'nipype.algorithms.rapidart.ArtifactDetect', 'ArtifactDetect', ([], {}), '()\n', (12162, 12164), False, 'from nipype.algorithms.rapidart import ArtifactDetect\n'), ((13008, 13025), 'nipype.interfaces.ants.segmentation.BrainExtraction', 'BrainExtraction', ([], {}), '()\n', (13023, 13025), False, 'from nipype.interfaces.ants.segmentation import BrainExtraction, N4BiasFieldCorrection\n'), ((13703, 13717), 'nipype.interfaces.ants.Registration', 'Registration', ([], {}), '()\n', (13715, 13717), False, 'from nipype.interfaces.ants import Registration, ApplyTransforms\n'), ((17267, 17281), 'nipype.interfaces.ants.Registration', 'Registration', ([], {}), '()\n', (17279, 17281), False, 'from nipype.interfaces.ants import Registration, ApplyTransforms\n'), ((19207, 19215), 'nipype.interfaces.utility.Merge', 'Merge', (['(2)'], {}), '(2)\n', (19212, 19215), False, 'from nipype.interfaces.utility import Merge, IdentityInterface\n'), ((19337, 19354), 'nipype.interfaces.ants.ApplyTransforms', 'ApplyTransforms', ([], {}), '()\n', (19352, 19354), False, 'from nipype.interfaces.ants import Registration, ApplyTransforms\n'), ((19842, 19859), 'nipype.interfaces.ants.ApplyTransforms', 'ApplyTransforms', ([], {}), '()\n', (19857, 19859), False, 'from nipype.interfaces.ants import Registration, ApplyTransforms\n'), ((22103, 22113), 'nipype.interfaces.io.DataSink', 'DataSink', ([], {}), '()\n', (22111, 22113), False, 'from nipype.interfaces.io import DataSink\n'), ((8694, 8700), 'nipype.interfaces.nipy.preprocess.Trim', 'Trim', ([], {}), '()\n', (8698, 8700), False, 'from nipype.interfaces.nipy.preprocess import Trim\n'), ((12619, 12642), 'nipype.interfaces.ants.segmentation.N4BiasFieldCorrection', 'N4BiasFieldCorrection', ([], {}), '()\n', (12640, 12642), False, 'from nipype.interfaces.ants.segmentation import BrainExtraction, N4BiasFieldCorrection\n'), ((20991, 20999), 'nipype.interfaces.fsl.utils.Smooth', 'Smooth', ([], {}), '()\n', (20997, 20999), False, 'from nipype.interfaces.fsl.utils import Smooth\n'), ((22391, 22410), 'os.path.split', 'os.path.split', (['elem'], {}), '(elem)\n', (22404, 22410), False, 'import os\n'), ((28824, 28864), 'os.path.join', 'os.path.join', (['output_dir', '"""pipeline.png"""'], {}), "(output_dir, 'pipeline.png')\n", (28836, 28864), False, 'import os\n'), ((10311, 10319), 'nipype.interfaces.utility.Merge', 'Merge', (['(2)'], {}), '(2)\n', (10316, 10319), False, 'from nipype.interfaces.utility import Merge, IdentityInterface\n'), ((10557, 10577), 'nipype.interfaces.fsl.Merge', 'MERGE', ([], {'dimension': '"""t"""'}), "(dimension='t')\n", (10562, 10577), True, 'from nipype.interfaces.fsl import Merge as MERGE\n'), ((10809, 10816), 'nipype.interfaces.fsl.TOPUP', 'TOPUP', ([], {}), '()\n', (10814, 10816), False, 'from nipype.interfaces.fsl import MCFLIRT, TOPUP, ApplyTOPUP\n'), ((10967, 10979), 'nipype.interfaces.fsl.ApplyTOPUP', 'ApplyTOPUP', ([], {}), '()\n', (10977, 10979), False, 'from nipype.interfaces.fsl import MCFLIRT, TOPUP, ApplyTOPUP\n'), ((28907, 28943), 'os.path.join', 'os.path.join', (['output_dir', '"""pipeline"""'], {}), "(output_dir, 'pipeline')\n", (28919, 28943), False, 'import os\n'), ((9672, 9686), 'nibabel.load', 'nib.load', (['fmap'], {}), '(fmap)\n', (9680, 9686), True, 'import nibabel as nib\n')]
|
from datetime import datetime
from scrapy import FormRequest, Request, Spider
from tcmba.items import ProcessItem
class ProcessesSpider(Spider):
name = "processos"
    allowed_domains = ["www.tcm.ba.gov.br"]
start_urls = [
"https://www.tcm.ba.gov.br/consulta/jurisprudencia/consulta-ementario-juridico/#todos/" # noqa
]
handle_httpstatus_list = [302]
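    # Pass 302 responses through to our callbacks instead of letting the redirect middleware consume them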
def parse(self, response):
descriptions = response.css("table#tabela tr td a span::text").extract()
file_urls = response.css("table#tabela tr td a::attr(href)").extract()
process_numbers = response.css(
"table#tabela tr td:nth-child(1)::text"
).extract()
assert len(descriptions) == len(file_urls) == len(process_numbers)
for process_number, description, file_url in zip(
process_numbers, descriptions, file_urls
):
item = ProcessItem(
process_number=process_number,
description=description,
file_url=file_url,
crawled_at=datetime.now(),
)
yield Request(
"https://www.tcm.ba.gov.br/consulta-processual/",
dont_filter=True,
callback=self.parse_process,
meta={"item": item},
)
def parse_process(self, response):
yield FormRequest.from_response(
response,
method="POST",
dont_filter=True,
formxpath='.//form[@name="formProtocolo"]',
formdata={
"proc": response.meta["item"]["process_number"],
"consulta": "ok",
"B1": "+Consultar+",
},
callback=self.parse_details,
meta={"item": response.meta["item"]},
)
def get_history(self, table):
units = table.css("td:nth-child(1)")
entry_dates = table.css("td:nth-child(2)")
statuses = table.css("td:nth-child(3)")
notes = table.css("td:nth-child(4)")
history = []
for unit, entry_date, status, note in zip(units, entry_dates, statuses, notes):
unit_str = unit.css("::text").get()
entry_date_str = entry_date.css("::text").get()
status_str = status.css("::text").get()
note_str = note.css("::text").get()
history.append(
{
"unity": unit_str.strip() if unit_str else "",
"entry_date": entry_date_str.strip() if entry_date_str else "",
"situation": status_str.strip() if status_str else "",
"notes": note_str.strip() if note_str else "",
}
)
return history
def get_field(self, response, label):
field_str = response.xpath(
f"//label[contains(text(),'{label}')]/following-sibling::span/text()"
).get()
if field_str:
return field_str.strip()
return ""
def parse_details(self, response):
item = response.meta["item"]
item["process_at"] = response.css("div.subtitle span::text").get()
item["entry_at"] = self.get_field(response, "Data de Entrada:")
item["nature"] = self.get_field(response, "Natureza:")
item["complement"] = self.get_field(response, "Complemento:")
item["city"] = self.get_field(response, "Município:")
item["author"] = self.get_field(response, "Interessado/Autor:")
item["received"] = self.get_field(response, "Recebido(S/N):")
item["last_update_at"] = self.get_field(response, "Data:")
item["unit"] = self.get_field(response, "Unidade:")
item["history"] = self.get_history(response.css("table#tabelaResultado"))
item["number_of_origin_document"] = self.get_field(
response, "Nº Doc.de Origem:"
)
item["entrance"] = self.get_field(response, "Meio:")
item["document_date"] = self.get_field(response, "Data do Documento:")
item["attachments"] = self.get_field(response, "Anexos:")
item["notes"] = self.get_field(response, "Observações:")
item["place_of_origin"] = self.get_field(response, "Local de Origem:")
yield item
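if __name__ == "__main__":
    # Illustrative only -- a minimal sketch of running the spider
    # programmatically with scrapy's CrawlerProcess; the output file name
    # ("processos.jl") is an assumption, not part of the original project.
    from scrapy.crawler import CrawlerProcess
    process = CrawlerProcess(settings={"FEEDS": {"processos.jl": {"format": "jsonlines"}}})
    process.crawl(ProcessesSpider)
    process.start()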
|
[
"datetime.datetime.now",
"scrapy.FormRequest.from_response",
"scrapy.Request"
] |
[((1372, 1663), 'scrapy.FormRequest.from_response', 'FormRequest.from_response', (['response'], {'method': '"""POST"""', 'dont_filter': '(True)', 'formxpath': '""".//form[@name="formProtocolo"]"""', 'formdata': "{'proc': response.meta['item']['process_number'], 'consulta': 'ok', 'B1':\n '+Consultar+'}", 'callback': 'self.parse_details', 'meta': "{'item': response.meta['item']}"}), '(response, method=\'POST\', dont_filter=True,\n formxpath=\'.//form[@name="formProtocolo"]\', formdata={\'proc\': response.\n meta[\'item\'][\'process_number\'], \'consulta\': \'ok\', \'B1\': \'+Consultar+\'},\n callback=self.parse_details, meta={\'item\': response.meta[\'item\']})\n', (1397, 1663), False, 'from scrapy import FormRequest, Request, Spider\n'), ((1113, 1242), 'scrapy.Request', 'Request', (['"""https://www.tcm.ba.gov.br/consulta-processual/"""'], {'dont_filter': '(True)', 'callback': 'self.parse_process', 'meta': "{'item': item}"}), "('https://www.tcm.ba.gov.br/consulta-processual/', dont_filter=True,\n callback=self.parse_process, meta={'item': item})\n", (1120, 1242), False, 'from scrapy import FormRequest, Request, Spider\n'), ((1065, 1079), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1077, 1079), False, 'from datetime import datetime\n')]
|
import props.graph_representation.node
from props import graph_representation  # bind the short name used below (e.g. graph_representation.node.Node)
from props.graph_representation.graph_wrapper import GraphWrapper
from props.graph_representation.word import Word, NO_INDEX
from props.graph_representation.node import Node,CopularNode,PossessiveNode,PropNode,\
AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode,\
TimeNode, isTime, LocationNode, isLocation
from props.dependency_tree.definitions import adjectival_mod_dependencies, labels_ban,\
filter_labels_ban, condition_outcome_markers, reason_outcome_markers,\
comp_markers
from props import graph_utils  # bind the short name used in parsePossessive below
from props.proposition_structure import syntactic_item
from props.graph_representation import word
from time_annotator.timex_wrapper import timexWrapper
from mx.DateTime.ISO import ParseTime
from location_annotator.textual_location_annotator import textualLocationAnnotator
FIRST_ENTITY_LABEL = "entity"#"first_entity"
SECOND_ENTITY_LABEL = "entity"#"second_entity"
POSSESSOR_LABEL = "possessor"
POSSESSED_LABEL = "possessed"
COMP_LABEL = "comp"
DISCOURSE_LABEL = "discourse"
OUTCOME_LABEL = "outcome"
CONDITION_LABEL = "condition"
REASON_LABEL = "reason"
ADV_LABEL = "adverb"
SORUCE_LABEL = "source"
#types for appendix:
APPENDIX_PREP = "Prepositions"
APPENDIX_COP = "Copular"
APPENDIX_POSS = "Possessives"
APPENDIX_APPOS = "Appositions"
APPENDIX_ADJ = "Adjectives"
APPENDIX_VERB = "Verbal Predicates"
APPENDIX_COND = "Conditionals and Temporals"
APPENDIX_COMPLEMENT = "Clausal Complements"
APPENDIX_RCMOD = "Relative Clauses"
APPENDIX_CONJUNCTION = "Conjunctions"
APPENDIX_NEGATION = "Negation"
APPENDIX_PASSIVE = "Passive Voice"
APPENDIX_LEMMA = "Lemma"
APPENDIX_LOCATION = "Locations"
APPENDIX_MODAL = "Modal"
APPENDIX_EXISTENSIALS = "Existensials"
APPENDIX_TENSE = "Tense"
APPENDIX_TIME = "Time"
APPENDIX_RANGE = "Ranges"
APPENDIX_KEYS = (APPENDIX_ADJ,
APPENDIX_APPOS,
APPENDIX_COND,
APPENDIX_CONJUNCTION,
APPENDIX_COMPLEMENT,
APPENDIX_COP,
APPENDIX_EXISTENSIALS,
APPENDIX_LEMMA,
APPENDIX_LOCATION,
APPENDIX_MODAL,
APPENDIX_NEGATION,
APPENDIX_PASSIVE,
APPENDIX_POSS,
APPENDIX_PREP,
APPENDIX_RANGE,
APPENDIX_RCMOD,
APPENDIX_TENSE,
APPENDIX_TIME,
APPENDIX_VERB
)
class ParseGraph:
"""
class to bunch together all function of conversion from DepTree to digraph
Mainly in order to store the graph as a member which all these functions can edit.
"""
def __init__(self,t,locationAnnotator):
"""
initialize a graph class, followed by converting a tree
@type t: Tree
@param tree: syntactic tree to be converted
@type id: int
@param id: a unique id for current Tree
@type gr: digraph
@var gr: the graph representing t
"""
if not t.id: # meaning this is the ROOT element
self.tree = t.children[0]
else:
self.tree = t
self.gr = GraphWrapper(t.get_original_sentence())
self.locationAnnotator = locationAnnotator
# maintain an appendix for easier browsing
self.types = appendix_types()
self.parse(self.tree)
def parse(self,t):
"""
Get the graph representation from a syntactic representation
Returns through the graph parameter.
@type t: DepTree
@param tree: syntactic tree to be converted
@rtype: Node
@return: the node in the graph corresponding to the top node in t
"""
#order matters!
if t.is_conditional_predicate():
self.types.add(APPENDIX_COND)
return self.parseConditional(outcome = t._CONDITIONAL_PREDICATE_FEATURE_Outcome()["Value"],
condList = t.condPred)
if t._VERBAL_PREDICATE_SUBTREE_Adv():
advChildren = t.adverb_children
advSubj = t.adverb_subj
return self.parseAdverb(subj=advSubj,
advChildren=advChildren)
if t.is_conjunction_predicate():
self.types.add(APPENDIX_CONJUNCTION)
return self.parseConjunction(baseElm = t.baseElm,
conjResult = t.conjResult)
if t.is_appositional_predicate():
self.types.add(APPENDIX_APPOS)
firstEntity = t._APPOSITIONAL_PREDICATE_FEATURE_Left_Side()["Value"]
secondEntity = t._APPOSITIONAL_PREDICATE_FEATURE_Right_Side()["Value"]
return self.parseApposition(index = t.id,
first_entity=firstEntity,
second_entity=secondEntity)
if t.is_relative_clause():
self.types.add(APPENDIX_RCMOD)
return self.parseRcmod(np = t._RELCLAUSE_PREDICATE_FEATURE_Rest()['Value'],
modList = t.rcmodPred)
if t.is_prepositional_predicate():
self.types.add(APPENDIX_PREP)
return self.parsePreposition(psubj=t._PREPOSITIONAL_PREDICATE_FEATURE_psubj()["Value"],
prepChildList=t.prepChildList)
if t.is_copular_predicate():
self.types.add(APPENDIX_COP)
firstEntity = t._COPULAR_PREDICATE_FEATURE_Copular_Predicate()["Value"]
secondEntity = t._COPULAR_PREDICATE_FEATURE_Copular_Object()["Value"]
return self.parseCopular(index = t.id,
first_entity=firstEntity,
second_entity=secondEntity,
features = syntactic_item.get_verbal_features(t))
if t.is_possesive_predicate():
self.types.add(APPENDIX_POSS)
possessor = t._POSSESSIVE_PREDICATE_FEATURE_Possessor()["Value"]
possessed = t._POSSESSIVE_PREDICATE_FEATURE_Possessed()["Value"]
possessive = t._POSSESSIVE_PREDICATE_FEATURE_Possessive()["Value"]
return self.parsePossessive(possessor = possessor,
possessed = possessed,
possessive = possessive)
if t.is_adjectival_predicate():
self.types.add(APPENDIX_ADJ)
return self.parseProp(subject = t._ADJECTIVAL_PREDICATE_FEATURE_Subject()["Value"],
copulaIndex = NO_INDEX,
adjectiveChildList = t.adjectivalChildList,
propAsHead=False)
if t.is_clausal_complement():
self.types.add(APPENDIX_COMPLEMENT)
return self.parseComplement(compSubj = t.compSubj,
compChildren = t.compChildList)
if t.unhandled_advcl():
# put each unhandled advcl as a disconnected subgraph
for c in t.advcl:
self.parse(c)
return self.parse(t)
if t.is_verbal_predicate():
self.types.add(APPENDIX_VERB)
head_ret = t._VERBAL_PREDICATE_SUBTREE_Head()
return self.parseVerbal(indexes = head_ret["Span"],
verbs = head_ret["Value"].split(" "),
arguments = t.collect_arguments(),
tree = t)
else:
# fall back - pack all the tree in a single node
if len(t.children)==1:
if (t.children[0].parent_relation == "nn") and (t.word.endswith(",")) and (t.children[0].word.endswith(",")):
#conjunction in disguise
child = t.children[0]
t.children = []
                    # call updated to match parseConjunction's (baseElm, conjResult) signature
                    ret = self.parseConjunction(baseElm = t,
                                                conjResult = [([(t.id,"and")], [child])])
t.children = [child]
return ret
nodes = t._get_subtree(filter_labels_ban)
text = [Word(index=index,
word=nodes[index]) for index in sorted(nodes.keys())]
topNode = self.parseBottom(text = sorted(text,key=lambda x:x.index),
features = syntactic_item.get_verbal_features(t))
return topNode
def parseBottom(self,text,features):
"""
        Parse a node for which all other construction tests have failed;
        no tree structure is assumed over the input text.
        @type text: list[Word]
        @param text: words to appear at the node, ordered by index
@type features: dict{string:string}
@param features: features of the node
@rtype Node
@return the node which was inserted into the graph
"""
time_res = timexWrapper(text)
if time_res[0]:
self.types.add(APPENDIX_TIME)
time_node = self.parseTime(time_res[0])
else:
time_node = False
s = " ".join([w.word for w in text])
if self.locationAnnotator.is_location(s):
locNode = LocationNode.init(features={})
self.gr.add_node(locNode)
bottomNode = Node(isPredicate=False,
text = text,
features = features,
valid=True)
self.gr.add_node(bottomNode)
self.gr.add_edge((locNode,bottomNode),
label="loc")
self.types.add(APPENDIX_LOCATION)
return locNode
left_text = time_res[1]
if left_text:
topNode = Node(isPredicate=False,
text = left_text,
features = features,
valid=True)
if not topNode.str:
time_node.features.update(topNode.features)
topNode = time_node
else:
self.gr.add_node(topNode)
if time_node:
self.gr.add_edge((topNode,time_node))
else:
if not time_node:
#TODO: probably not good, but happens
topNode = Node(isPredicate=False,
text = [],
features = features,
valid=True)
self.gr.add_node(topNode)
else:
topNode = time_node
return topNode
def parseTime(self,time_res):
"""
Add a time node to the graph, given the results of the automated tool.
@type time_res: list[TimeExpression]
@param time_res: Time Expressions to be added to the graph, all as single nodes, and under the same "time" node
@rtype Node
@return the top node (time node)
"""
topNode = TimeNode.init(features={})
self.gr.add_node(topNode)
for timeExpression in time_res:
curNode = Node(isPredicate = False,
text = timeExpression.text,
features = {"Time Value":timeExpression.value},
valid = True)
self.gr.add_node(curNode)
self.gr.add_edge((topNode,curNode))
return topNode
def parseComplement(self,compSubj,compChildren):
"""
add a complement subgraph to the graph
@type compSubj: DepTree
@param compSubj: the subject of all following complements
@type compChildren: list [depTree]
@param compChildren: all subclauses
"""
topNode = self.parse(compSubj)
for child in compChildren:
curNode = self.parse(child)
self.gr.add_edge(edge=(topNode,curNode),
label=child.parent_relation)
return topNode
def parseConjunction(self,baseElm,conjResult):
"""
add a conjunction subgraph to the graph
        @type baseElm: DepTree
        @param baseElm: the base element to which the conjunctions attach
        @type conjResult: list [(list [(int,string)], list [DepTree])]
        @param conjResult: pairs of a connecting element (index, word) and the subtrees it joins in conjunction
"""
retNode = self.parse(baseElm)
for cc,conjElements in conjResult:
if not conjElements:
# discourse marker
discourseNode = Node(isPredicate = False,
text = [Word(ind,word) for ind,word in cc],
features = {},
valid=True)
self.gr.add_node(discourseNode)
self.gr.add_edge(edge =(retNode,discourseNode),
label= DISCOURSE_LABEL)
else:
# generate top conjunction node
conjNode = ConjunctionNode.init(text = [Word(ind,word) for ind,word in cc],
features = {})
self.gr.add_node(conjNode)
#connect cc to base element
self.gr.add_edge((conjNode,retNode))
#generate node for each element and connect to topNode
for elm in conjElements:
curNode = self.parse(elm)
self.gr.add_edge(edge = (conjNode,curNode))
return retNode
def parseRcmod(self,np,modList):
"""
add a relative clause subgraph to the graph
@type np: DepTree
@param np: the entity being modified by the relative clause
        @type modList: a list of DepTrees,
@param modList: trees modifying np
"""
topNode = self.parse(np)
for temp_t in modList:
# add nodes
rcmodNode = self.parse(temp_t._RELCLAUSE_PREDICATE_FEATURE_Relclause()["Value"])
propNode = RCMODPropNode.init(features={},
valid=True)
self.gr.add_node(propNode)
#add edges
self.gr.add_edge(edge=(topNode,propNode))
self.gr.add_edge(edge=(propNode,rcmodNode))
if rcmodNode.isPredicate:
                # this will create a cycle; the label is a heuristic guess at the connection between the relative clause and the top node
self.gr.add_edge(edge=(rcmodNode,topNode), label=temp_t.rcmodRel)
# record that this construction came from rcmod
topNode.rcmod = [propNode,rcmodNode]
return topNode
def parseConditional(self,outcome,condList):
"""
add a conditional subgraph to the graph
@type outcome: DepTree
@param outcome: the outcome of all following conditions
@type condList: a list of DepTrees,
@param condList: all conditionals regarding outcome
"""
outcomeNode = self.parse(outcome)
for temp_t in condList:
mark = temp_t._CONDITIONAL_PREDICATE_FEATURE_Mark()
markValue = mark["Value"]
markIndex = mark["Span"][0]
conditionNode = self.parse(temp_t._CONDITIONAL_PREDICATE_FEATURE_Condition()["Value"])
#create nodes
markNode = CondNode.init(index = markIndex,
condType = markValue,
features = {},
valid=True)
self.gr.add_node(markNode)
markValue = markValue.lower()
# add edges according to the type of conditional
if markValue in condition_outcome_markers:
self.gr.add_edge(edge = (markNode,outcomeNode),
label = OUTCOME_LABEL)
self.gr.add_edge(edge = (markNode,conditionNode),
label = CONDITION_LABEL)
elif markValue in reason_outcome_markers:
self.gr.add_edge(edge = (markNode,outcomeNode),
label = OUTCOME_LABEL)
self.gr.add_edge(edge = (markNode,conditionNode),
label = REASON_LABEL)
elif markValue in comp_markers:
self.gr.add_edge(edge = (conditionNode,outcomeNode),
label = COMP_LABEL)
else:
#add edges
self.gr.add_edge((outcomeNode,markNode))
self.gr.add_edge((markNode,conditionNode))
#return top node
return outcomeNode
def parsePreposition(self,psubj,prepChildList):
"""
add a preposition subgraph to the graph
@type psubj: DepTree
@param psubj: the subject of all following prepositions
@type prepChildList: a list of DepTrees,
@param prepChildList: all prepositions regarding nsubj
"""
#create top nodes:
topNode = self.parse(psubj)
for temp_t in prepChildList:
#generate bottom node and connect to prep
pobj = temp_t._PREPOSITIONAL_PREDICATE_FEATURE_pobj()["Value"]
if not pobj: # e.g., #460
continue
bottomNode = self.parse(pobj)
#generate prep node and connect to top node
prepNode = PrepNode.init(index=temp_t.prepInd,
prepType=temp_t.prepType,
features={},
valid = True)
# self.gr.add_node(prepNode)
#self.gr.add_edge(edge = (prepNode,bottomNode))
self.gr.add_edge(edge = (topNode,bottomNode),
label = " ".join([w.word for w in prepNode.str]))
return topNode
def parseVerbal(self,indexes,verbs,arguments,tree):
"""
add a verbal subgraph to the graph
@type indexes: list [int]
@param indexes: the index(es) of the verb in the sentence
@type verbs: list [string]
@param verbs: the string(s) representing the verb
@type tree: DepTree
@param tree: tree object from which to extract various features
@type arguments: list
@param arguments: list of DepTrees of arguments
"""
# create verbal head node
# start by extracting features
feats = syntactic_item.get_verbal_features(tree)
if feats['Lemma'] == verbs[0]:
del(feats['Lemma'])
for k in feats:
self.types.add(k)
verbNode = graph_representation.node.Node(isPredicate=True,
text = [Word(index=index,
word=verb) for index,verb in zip(indexes,verbs)],
features=feats,
valid=True)
self.gr.add_node(verbNode)
# handle arguments
for arg_t in arguments:
curNode = self.parse(arg_t)
#curNode.features = syntactic_item.get_verbal_features(arg_t)
self.gr.add_edge((verbNode,curNode), arg_t.parent_relation)
# handle time expressions
(timeSubtree,_) = tree._VERBAL_PREDICATE_SUBTREE_Time()
if timeSubtree:
timeNode = graph_representation.node.TimeNode.init(features = {})
self.gr.add_node(timeNode)
timeSubGraph = self.parse(timeSubtree)
self.gr.add_edge((verbNode,timeNode))
self.gr.add_edge((timeNode,timeSubGraph))
return verbNode
def parseAdverb(self,subj,advChildren):
topNode = self.parse(subj)
for advChild,mwe in advChildren:
# advTopNode = advNode.init(features = {})
# self.gr.add_node(advTopNode)
# self.gr.add_edge(edge = (topNode,advTopNode))
if mwe:
# in case this is a complex adverb ("as long as")
curAdvNode = Node(isPredicate = False,
text = [Word(ind,word) for ind,word in mwe],
features = {},
valid = True)
self.gr.add_node(curAdvNode)
curChildNode = self.parse(advChild)
self.gr.add_edge(edge=(topNode,curAdvNode),
label = ADV_LABEL)
self.gr.add_edge(edge = (curAdvNode,curChildNode),
label = advChild.parent_relation)
else:
curChildNode = self.parse(advChild)
self.gr.add_edge(edge = (topNode,curChildNode),
label = ADV_LABEL)
return topNode
def parseCopular(self,index,first_entity,second_entity,features):
"""
add a copular subgraph to the graph
@type index: int
@param index: the index of the copula in the sentence
@type first_entity: DepTree
@param first_entity: the syntax tree of the first entity
@type second_entity: DepTree
@param second_entity: the syntax tree of the second entity
@rtype: Node
@return: the top node of the copula subgraph
"""
if (second_entity.parent_relation in adjectival_mod_dependencies) \
or (not second_entity.is_definite()):
# reduce to prop construction when the second element in the copula is an adjective
# e.g., Rabbit is white -> white rabbit
# or when the second element is indefinite
second_entity.adjectivalChild = [second_entity]
second_entity.relative_adj = False #TODO: calculate this
second_entity.parent_relation = "copular" #TODO: this might be dangerous :\
return self.parseProp(subject = first_entity,
copulaIndex = index,
adjectiveChildList = [second_entity],
features=features,
propAsHead = True)
# generate the top node and add to the graph
topNode = CopularNode.init(index=index,
features=features,
valid=True)
self.gr.add_node(topNode)
# generate both entities subgraphs
firstEntityNode = self.parse(first_entity)
secondEntityNode = self.parse(second_entity)
#propagate properties between the two nodes
graph_representation.node.addSymmetricPropogation(firstEntityNode,
secondEntityNode)
#add labeled edges
self.gr.add_edge(edge=(topNode,firstEntityNode),
label=FIRST_ENTITY_LABEL)
self.gr.add_edge(edge=(topNode,secondEntityNode),
label=SECOND_ENTITY_LABEL)
return topNode
def parseApposition(self,index,first_entity,second_entity):
"""
add an apposition subgraph to the graph
@type index: int
@param index: the index of the apposition in the sentence
@type first_entity: DepTree
@param first_entity: the syntax tree of the first entity
@type second_entity: DepTree
@param second_entity: the syntax tree of the second entity
@rtype: Node
@return: the top node of the apposition subgraph
"""
#copied from copular, interesting to see if this happens
if (second_entity.parent_relation in adjectival_mod_dependencies) \
or (not second_entity.is_definite()):
            # reduce to prop construction when the second element in the copula is an adjective
# e.g., Rabbit is white -> white rabbit
second_entity.adjectivalChild = [second_entity]
second_entity.relative_adj = False #TODO - calculate this
second_entity.parent_relation = "appos" #TODO: this might be dangerous :\
return self.parseProp(subject = first_entity,
copulaIndex = NO_INDEX,
adjectiveChildList = [second_entity],
propAsHead = True)
# generate the top node and add to the graph
topNode = AppositionNode.init(index=index,
features={})
self.gr.add_node(topNode)
# generate both entities subgraphs
firstEntityNode = self.parse(first_entity)
secondEntityNode = self.parse(second_entity)
# remember first and second entities in apposition's node
# topNode.entities = [firstEntityNode,secondEntityNode]
# propagate properties between the two nodes
graph_representation.node.addSymmetricPropogation(firstEntityNode,
secondEntityNode)
#add labeled edges
self.gr.add_edge(edge=(topNode,firstEntityNode),
label=FIRST_ENTITY_LABEL)
self.gr.add_edge(edge=(topNode,secondEntityNode),
label=SECOND_ENTITY_LABEL)
return topNode
def parsePossessive(self,possessor,possessed,possessive):
"""
add a possessive subgraph to the graph
@type possessor: DepTree
@param possessor: the syntax tree of the possessor
@type possessed: DepTree
@param possessed: the syntax tree of the possessed
@type possessive: DepTree
@param possessive: the syntax tree of the possessive - e.g - 's
@rtype: Node
@return: the top node of the possessive subgraph
"""
if not possessive:
index = graph_representation.word.NO_INDEX
else:
index = possessive.id
# generate nodes
possessorNode = self.parse(possessor)
possessedNode = self.parse(possessed)
if isTime(possessorNode) or isLocation(possessorNode):
#possessive construction to indicate time
self.gr.add_edge((possessedNode,possessorNode))
return possessedNode
#otherwise - proper possessive:
hasNode = PossessiveNode.init(index=index,
features={},
valid=True)
self.gr.add_node(hasNode)
# add edges to graph
self.gr.add_edge(edge=(hasNode,possessorNode),
label=POSSESSOR_LABEL)
self.gr.add_edge(edge=(hasNode,possessedNode),
label=POSSESSED_LABEL)
# create top node
# get list of all relevant nodes
nodeLs = [possessorNode,possessedNode]
if possessive: # in some cases there's no possessive marker (e.g., "their woman")
possessiveNode = graph_representation.node.Node(isPredicate=False,
text = [Word(possessive.id,
possessive.get_original_sentence(root=False))],
features = {},
valid=True)
nodeLs.append(possessiveNode)
# create possessive top node, add to graph, and return it
topNode = graph_utils.generate_possessive_top_node(graph=self.gr, nodeLs=nodeLs)
self.gr.add_node(topNode)
#mark that features and neighbours should propagate from the top node to the possessed
# John's results were low -> features should propogate between (John's results) and (results)
graph_representation.node.addSymmetricPropogation(topNode, possessedNode)
return topNode
    def parseProp(self,subject,copulaIndex,adjectiveChildList,propAsHead,features=None):
"""
add a prop subgraph to the graph
@type adjective: DepTree
@param adjective: the syntax tree of the adjective
@type subject: DepTree
@param subject: the syntax tree of the subject
@rtype: Node
@return: the top node of the copula subgraph
"""
        if features is None:
            features = {}
        # parse top node
subjectNode = self.parse(subject)
topNode = subjectNode
#parse each property and connect to top node
for temp_t in adjectiveChildList:
adjective = temp_t._ADJECTIVAL_PREDICATE_FEATURE_Adjective()["Value"]
adjectiveNode = self.parse(adjective)
if "Lemma" in features:
del(features["Lemma"])
adjectiveNode.features.update(features)
# generate the top node and add to the graph
propNode = PropNode.init(features={"relative":temp_t.relative_adj},
index = copulaIndex,
valid=True,
parent_relation = adjective.parent_relation)
self.gr.add_node(propNode)
if propAsHead:
topNode = propNode
#add labeled edges
self.gr.add_edge(edge=(subjectNode,propNode),
label="")
self.gr.add_edge(edge=(propNode,adjectiveNode),
label="")
return topNode
class appendix_types:
def __init__(self):
self.d = {}
def add(self,obj):
self._update(obj, add=+1)
def getSet(self):
return set([k for k in self.d.keys() if self.d[k]>0])
def union(self,other):
for k in other.d:
self._update(obj=k, add=other.d[k])
def remove(self,obj):
self._update(obj, add=-1)
def _update(self,obj,add):
if obj not in self.d:
self.d[obj] = 0
self.d[obj]+=add
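if __name__ == "__main__":
    # Minimal sanity check of the appendix bookkeeping above (illustrative only):
    # add() and remove() keep per-type counts, and getSet() reports the types
    # whose count is still positive.
    types = appendix_types()
    types.add(APPENDIX_PREP)
    types.add(APPENDIX_PREP)
    types.remove(APPENDIX_PREP)
    assert types.getSet() == set([APPENDIX_PREP])
    types.remove(APPENDIX_PREP)
    assert types.getSet() == set()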
|
[
"time_annotator.timex_wrapper.timexWrapper",
"props.proposition_structure.syntactic_item.get_verbal_features",
"props.graph_representation.node.CondNode.init",
"props.graph_representation.node.RCMODPropNode.init",
"props.graph_representation.node.isTime",
"props.graph_representation.node.isLocation",
"props.graph_representation.node.CopularNode.init",
"props.graph_representation.node.PropNode.init",
"props.graph_representation.node.AppositionNode.init",
"props.graph_representation.node.PrepNode.init",
"props.graph_representation.word.Word",
"props.graph_representation.node.PossessiveNode.init",
"props.graph_representation.node.LocationNode.init",
"props.graph_representation.node.TimeNode.init",
"props.graph_representation.node.Node"
] |
[((9495, 9513), 'time_annotator.timex_wrapper.timexWrapper', 'timexWrapper', (['text'], {}), '(text)\n', (9507, 9513), False, 'from time_annotator.timex_wrapper import timexWrapper\n'), ((11739, 11765), 'props.graph_representation.node.TimeNode.init', 'TimeNode.init', ([], {'features': '{}'}), '(features={})\n', (11752, 11765), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((19991, 20031), 'props.proposition_structure.syntactic_item.get_verbal_features', 'syntactic_item.get_verbal_features', (['tree'], {}), '(tree)\n', (20025, 20031), False, 'from props.proposition_structure import syntactic_item\n'), ((24126, 24186), 'props.graph_representation.node.CopularNode.init', 'CopularNode.init', ([], {'index': 'index', 'features': 'features', 'valid': '(True)'}), '(index=index, features=features, valid=True)\n', (24142, 24186), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((26440, 26485), 'props.graph_representation.node.AppositionNode.init', 'AppositionNode.init', ([], {'index': 'index', 'features': '{}'}), '(index=index, features={})\n', (26459, 26485), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((28633, 28690), 'props.graph_representation.node.PossessiveNode.init', 'PossessiveNode.init', ([], {'index': 'index', 'features': '{}', 'valid': '(True)'}), '(index=index, features={}, valid=True)\n', (28652, 28690), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((10421, 10491), 'props.graph_representation.node.Node', 'Node', ([], {'isPredicate': '(False)', 'text': 'left_text', 'features': 'features', 'valid': '(True)'}), '(isPredicate=False, text=left_text, features=features, valid=True)\n', (10425, 10491), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((11875, 11987), 'props.graph_representation.node.Node', 'Node', ([], {'isPredicate': '(False)', 'text': 'timeExpression.text', 'features': "{'Time Value': timeExpression.value}", 'valid': '(True)'}), "(isPredicate=False, text=timeExpression.text, features={'Time Value':\n timeExpression.value}, valid=True)\n", (11879, 11987), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((15009, 15052), 'props.graph_representation.node.RCMODPropNode.init', 'RCMODPropNode.init', ([], {'features': '{}', 'valid': '(True)'}), '(features={}, valid=True)\n', (15027, 15052), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((16464, 16539), 
'props.graph_representation.node.CondNode.init', 'CondNode.init', ([], {'index': 'markIndex', 'condType': 'markValue', 'features': '{}', 'valid': '(True)'}), '(index=markIndex, condType=markValue, features={}, valid=True)\n', (16477, 16539), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((18744, 18834), 'props.graph_representation.node.PrepNode.init', 'PrepNode.init', ([], {'index': 'temp_t.prepInd', 'prepType': 'temp_t.prepType', 'features': '{}', 'valid': '(True)'}), '(index=temp_t.prepInd, prepType=temp_t.prepType, features={},\n valid=True)\n', (18757, 18834), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((28361, 28382), 'props.graph_representation.node.isTime', 'isTime', (['possessorNode'], {}), '(possessorNode)\n', (28367, 28382), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((28386, 28411), 'props.graph_representation.node.isLocation', 'isLocation', (['possessorNode'], {}), '(possessorNode)\n', (28396, 28411), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((31319, 31454), 'props.graph_representation.node.PropNode.init', 'PropNode.init', ([], {'features': "{'relative': temp_t.relative_adj}", 'index': 'copulaIndex', 'valid': '(True)', 'parent_relation': 'adjective.parent_relation'}), "(features={'relative': temp_t.relative_adj}, index=copulaIndex,\n valid=True, parent_relation=adjective.parent_relation)\n", (31332, 31454), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((8679, 8715), 'props.graph_representation.word.Word', 'Word', ([], {'index': 'index', 'word': 'nodes[index]'}), '(index=index, word=nodes[index])\n', (8683, 8715), False, 'from props.graph_representation.word import Word, NO_INDEX\n'), ((9813, 9843), 'props.graph_representation.node.LocationNode.init', 'LocationNode.init', ([], {'features': '{}'}), '(features={})\n', (9830, 9843), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((9917, 9982), 'props.graph_representation.node.Node', 'Node', ([], {'isPredicate': '(False)', 'text': 'text', 'features': 'features', 'valid': '(True)'}), '(isPredicate=False, text=text, features=features, valid=True)\n', (9921, 9982), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((11024, 11087), 'props.graph_representation.node.Node', 'Node', ([], {'isPredicate': '(False)', 'text': '[]', 'features': 'features', 'valid': '(True)'}), '(isPredicate=False, text=[], features=features, 
valid=True)\n', (11028, 11087), False, 'from props.graph_representation.node import Node, CopularNode, PossessiveNode, PropNode, AppositionNode, PrepNode, CondNode, ConjunctionNode, advNode, RCMODPropNode, TimeNode, isTime, LocationNode, isLocation\n'), ((6178, 6215), 'props.proposition_structure.syntactic_item.get_verbal_features', 'syntactic_item.get_verbal_features', (['t'], {}), '(t)\n', (6212, 6215), False, 'from props.proposition_structure import syntactic_item\n'), ((8896, 8933), 'props.proposition_structure.syntactic_item.get_verbal_features', 'syntactic_item.get_verbal_features', (['t'], {}), '(t)\n', (8930, 8933), False, 'from props.proposition_structure import syntactic_item\n'), ((20327, 20355), 'props.graph_representation.word.Word', 'Word', ([], {'index': 'index', 'word': 'verb'}), '(index=index, word=verb)\n', (20331, 20355), False, 'from props.graph_representation.word import Word, NO_INDEX\n'), ((13470, 13485), 'props.graph_representation.word.Word', 'Word', (['ind', 'word'], {}), '(ind, word)\n', (13474, 13485), False, 'from props.graph_representation.word import Word, NO_INDEX\n'), ((13910, 13925), 'props.graph_representation.word.Word', 'Word', (['ind', 'word'], {}), '(ind, word)\n', (13914, 13925), False, 'from props.graph_representation.word import Word, NO_INDEX\n'), ((21839, 21854), 'props.graph_representation.word.Word', 'Word', (['ind', 'word'], {}), '(ind, word)\n', (21843, 21854), False, 'from props.graph_representation.word import Word, NO_INDEX\n')]
|
#!/usr/bin/env python
# Copyright (C) 2014 Open Data ("Open Data" refers to
# one or more of the following companies: Open Data Partners LLC,
# Open Data Research LLC, or Open Data Capital LLC.)
#
# This file is part of Hadrian.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import unittest
import numpy
from titus.genpy import PFAEngine
from titus.producer.tools import look
from titus.producer.cart import *
class TestProducerCart(unittest.TestCase):
@staticmethod
def data():
while True:
x = random.uniform(0, 10)
y = random.uniform(0, 10)
if x < 4.0:
if y < 6.0:
z = random.gauss(5, 1)
else:
z = random.gauss(8, 1)
else:
if y < 2.0:
z = random.gauss(1, 1)
else:
z = random.gauss(2, 1)
if z < 0.0:
z = 0.0
elif z >= 10.0:
z = 9.99999
a = "A" + str(int(x))
b = "B" + str(int(y/2) * 2)
c = "C" + str(int(z/3) * 3)
yield (x, y, z, a, b, c)
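    # Note on the synthetic data above: z is drawn from four regimes -- mean ~5
    # (x<4, y<6), ~8 (x<4, y>=6), ~1 (x>=4, y<2) and ~2 otherwise -- so the
    # tests below expect a first split at x=4.0 followed by splits at y=6.0 or y=2.0.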
def testCartMustBuildNumericalNumerical(self):
random.seed(12345)
numpy.seterr(divide="ignore", invalid="ignore")
dataset = Dataset.fromIterable(((x, y, z) for (x, y, z, a, b, c) in TestProducerCart.data()), 100000, ("x", "y", "z"))
tree = TreeNode.fromWholeDataset(dataset, "z")
tree.splitMaxDepth(2)
doc = tree.pfaDocument({"type": "record", "name": "Datum", "fields": [{"name": "x", "type": "double"}, {"name": "y", "type": "double"}]}, "TreeNode")
# look(doc, maxDepth=8)
self.assertEqual(doc["cells"]["tree"]["init"]["field"], "x")
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["value"], 4.00, places=2)
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["field"], "y")
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["value"], 6.00, places=2)
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["pass"]["double"], 5.00, places=2)
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["fail"]["double"], 8.02, places=2)
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["field"], "y")
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["value"], 2.00, places=2)
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["pass"]["double"], 1.09, places=2)
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["fail"]["double"], 2.00, places=2)
engine, = PFAEngine.fromJson(doc)
self.assertAlmostEqual(engine.action({"x": 2.0, "y": 3.0}), 5.00, places=2)
self.assertAlmostEqual(engine.action({"x": 2.0, "y": 8.0}), 8.02, places=2)
self.assertAlmostEqual(engine.action({"x": 7.0, "y": 1.0}), 1.09, places=2)
self.assertAlmostEqual(engine.action({"x": 7.0, "y": 5.0}), 2.00, places=2)
doc = tree.pfaDocument(
{"type": "record", "name": "Datum", "fields": [{"name": "x", "type": "double"}, {"name": "y", "type": "double"}]},
"TreeNode",
nodeScores=True, datasetSize=True, predictandUnique=True, nTimesVariance=True, gain=True)
# look(doc, maxDepth=8)
engine, = PFAEngine.fromJson(doc)
def testCartMustBuildNumericalCategorical(self):
random.seed(12345)
numpy.seterr(divide="ignore", invalid="ignore")
dataset = Dataset.fromIterable(((x, y, c) for (x, y, z, a, b, c) in TestProducerCart.data()), 100000, ("x", "y", "c"))
tree = TreeNode.fromWholeDataset(dataset, "c")
tree.splitMaxDepth(2)
doc = tree.pfaDocument({"type": "record", "name": "Datum", "fields": [{"name": "x", "type": "double"}, {"name": "y", "type": "double"}]}, "TreeNode")
# look(doc, maxDepth=8)
self.assertEqual(doc["cells"]["tree"]["init"]["field"], "x")
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["value"], 4.00, places=2)
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["field"], "y")
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["value"], 6.00, places=2)
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["pass"]["string"], "C3")
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["fail"]["string"], "C6")
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["field"], "y")
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["value"], 2.00, places=2)
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["pass"]["string"], "C0")
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["fail"]["string"], "C0")
engine, = PFAEngine.fromJson(doc)
self.assertEqual(engine.action({"x": 2.0, "y": 3.0}), "C3")
self.assertEqual(engine.action({"x": 2.0, "y": 8.0}), "C6")
self.assertEqual(engine.action({"x": 7.0, "y": 1.0}), "C0")
self.assertEqual(engine.action({"x": 7.0, "y": 5.0}), "C0")
doc = tree.pfaDocument(
{"type": "record", "name": "Datum", "fields": [{"name": "x", "type": "double"}, {"name": "y", "type": "double"}]},
"TreeNode",
nodeScores=True, datasetSize=True, predictandDistribution=True, predictandUnique=True, entropy=True, gain=True)
# look(doc, maxDepth=8)
engine, = PFAEngine.fromJson(doc)
def testCartMustBuildCategoricalNumerical(self):
random.seed(12345)
numpy.seterr(divide="ignore", invalid="ignore")
dataset = Dataset.fromIterable(((a, b, z) for (x, y, z, a, b, c) in TestProducerCart.data()), 100000, ("a", "b", "z"))
tree = TreeNode.fromWholeDataset(dataset, "z")
tree.splitMaxDepth(2)
doc = tree.pfaDocument({"type": "record", "name": "Datum", "fields": [{"name": "a", "type": "string"}, {"name": "b", "type": "string"}]}, "TreeNode")
# look(doc, maxDepth=8)
self.assertEqual(doc["cells"]["tree"]["init"]["field"], "a")
self.assertEqual(doc["cells"]["tree"]["init"]["value"], ["A0", "A1", "A2", "A3"])
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["field"], "b")
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["value"], ["B6", "B8"])
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["pass"]["double"], 8.02, places=2)
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["fail"]["double"], 5.00, places=2)
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["field"], "b")
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["value"], ["B0"])
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["pass"]["double"], 1.09, places=2)
self.assertAlmostEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["fail"]["double"], 2.00, places=2)
engine, = PFAEngine.fromJson(doc)
self.assertAlmostEqual(engine.action({"a": "A1", "b": "B6"}), 8.02, places=2)
self.assertAlmostEqual(engine.action({"a": "A1", "b": "B2"}), 5.00, places=2)
self.assertAlmostEqual(engine.action({"a": "A5", "b": "B0"}), 1.09, places=2)
self.assertAlmostEqual(engine.action({"a": "A5", "b": "B4"}), 2.00, places=2)
doc = tree.pfaDocument(
{"type": "record", "name": "Datum", "fields": [{"name": "a", "type": "string"}, {"name": "b", "type": "string"}]},
"TreeNode",
nodeScores=True, datasetSize=True, predictandUnique=True, nTimesVariance=True, gain=True)
# look(doc, maxDepth=8)
engine, = PFAEngine.fromJson(doc)
def testCartMustBuildCategoricalCategorical(self):
random.seed(12345)
numpy.seterr(divide="ignore", invalid="ignore")
dataset = Dataset.fromIterable(((a, b, c) for (x, y, z, a, b, c) in TestProducerCart.data()), 100000, ("a", "b", "c"))
tree = TreeNode.fromWholeDataset(dataset, "c")
tree.splitMaxDepth(2)
doc = tree.pfaDocument({"type": "record", "name": "Datum", "fields": [{"name": "a", "type": "string"}, {"name": "b", "type": "string"}]}, "TreeNode")
# look(doc, maxDepth=8)
self.assertEqual(doc["cells"]["tree"]["init"]["field"], "a")
self.assertEqual(doc["cells"]["tree"]["init"]["value"], ["A0", "A1", "A2", "A3"])
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["field"], "b")
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["value"], ["B6", "B8"])
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["pass"]["string"], "C6")
self.assertEqual(doc["cells"]["tree"]["init"]["pass"]["TreeNode"]["fail"]["string"], "C3")
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["field"], "b")
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["value"], ["B0"])
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["pass"]["string"], "C0")
self.assertEqual(doc["cells"]["tree"]["init"]["fail"]["TreeNode"]["fail"]["string"], "C0")
engine, = PFAEngine.fromJson(doc)
self.assertEqual(engine.action({"a": "A1", "b": "B6"}), "C6")
self.assertEqual(engine.action({"a": "A1", "b": "B2"}), "C3")
self.assertEqual(engine.action({"a": "A5", "b": "B0"}), "C0")
self.assertEqual(engine.action({"a": "A5", "b": "B4"}), "C0")
doc = tree.pfaDocument(
{"type": "record", "name": "Datum", "fields": [{"name": "a", "type": "string"}, {"name": "b", "type": "string"}]},
"TreeNode",
nodeScores=True, datasetSize=True, predictandDistribution=True, predictandUnique=True, entropy=True, gain=True)
# look(doc, maxDepth=8)
engine, = PFAEngine.fromJson(doc)
if __name__ == "__main__":
unittest.main()
|
[
"unittest.main",
"random.uniform",
"numpy.seterr",
"random.seed",
"random.gauss",
"titus.genpy.PFAEngine.fromJson"
] |
[((10616, 10631), 'unittest.main', 'unittest.main', ([], {}), '()\n', (10629, 10631), False, 'import unittest\n'), ((1732, 1750), 'random.seed', 'random.seed', (['(12345)'], {}), '(12345)\n', (1743, 1750), False, 'import random\n'), ((1759, 1806), 'numpy.seterr', 'numpy.seterr', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""'}), "(divide='ignore', invalid='ignore')\n", (1771, 1806), False, 'import numpy\n'), ((3236, 3259), 'titus.genpy.PFAEngine.fromJson', 'PFAEngine.fromJson', (['doc'], {}), '(doc)\n', (3254, 3259), False, 'from titus.genpy import PFAEngine\n'), ((3932, 3955), 'titus.genpy.PFAEngine.fromJson', 'PFAEngine.fromJson', (['doc'], {}), '(doc)\n', (3950, 3955), False, 'from titus.genpy import PFAEngine\n'), ((4018, 4036), 'random.seed', 'random.seed', (['(12345)'], {}), '(12345)\n', (4029, 4036), False, 'import random\n'), ((4045, 4092), 'numpy.seterr', 'numpy.seterr', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""'}), "(divide='ignore', invalid='ignore')\n", (4057, 4092), False, 'import numpy\n'), ((5458, 5481), 'titus.genpy.PFAEngine.fromJson', 'PFAEngine.fromJson', (['doc'], {}), '(doc)\n', (5476, 5481), False, 'from titus.genpy import PFAEngine\n'), ((6112, 6135), 'titus.genpy.PFAEngine.fromJson', 'PFAEngine.fromJson', (['doc'], {}), '(doc)\n', (6130, 6135), False, 'from titus.genpy import PFAEngine\n'), ((6198, 6216), 'random.seed', 'random.seed', (['(12345)'], {}), '(12345)\n', (6209, 6216), False, 'import random\n'), ((6225, 6272), 'numpy.seterr', 'numpy.seterr', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""'}), "(divide='ignore', invalid='ignore')\n", (6237, 6272), False, 'import numpy\n'), ((7684, 7707), 'titus.genpy.PFAEngine.fromJson', 'PFAEngine.fromJson', (['doc'], {}), '(doc)\n', (7702, 7707), False, 'from titus.genpy import PFAEngine\n'), ((8388, 8411), 'titus.genpy.PFAEngine.fromJson', 'PFAEngine.fromJson', (['doc'], {}), '(doc)\n', (8406, 8411), False, 'from titus.genpy import PFAEngine\n'), ((8476, 8494), 'random.seed', 'random.seed', (['(12345)'], {}), '(12345)\n', (8487, 8494), False, 'import random\n'), ((8503, 8550), 'numpy.seterr', 'numpy.seterr', ([], {'divide': '"""ignore"""', 'invalid': '"""ignore"""'}), "(divide='ignore', invalid='ignore')\n", (8515, 8550), False, 'import numpy\n'), ((9898, 9921), 'titus.genpy.PFAEngine.fromJson', 'PFAEngine.fromJson', (['doc'], {}), '(doc)\n', (9916, 9921), False, 'from titus.genpy import PFAEngine\n'), ((10560, 10583), 'titus.genpy.PFAEngine.fromJson', 'PFAEngine.fromJson', (['doc'], {}), '(doc)\n', (10578, 10583), False, 'from titus.genpy import PFAEngine\n'), ((1041, 1062), 'random.uniform', 'random.uniform', (['(0)', '(10)'], {}), '(0, 10)\n', (1055, 1062), False, 'import random\n'), ((1079, 1100), 'random.uniform', 'random.uniform', (['(0)', '(10)'], {}), '(0, 10)\n', (1093, 1100), False, 'import random\n'), ((1177, 1195), 'random.gauss', 'random.gauss', (['(5)', '(1)'], {}), '(5, 1)\n', (1189, 1195), False, 'import random\n'), ((1242, 1260), 'random.gauss', 'random.gauss', (['(8)', '(1)'], {}), '(8, 1)\n', (1254, 1260), False, 'import random\n'), ((1331, 1349), 'random.gauss', 'random.gauss', (['(1)', '(1)'], {}), '(1, 1)\n', (1343, 1349), False, 'import random\n'), ((1396, 1414), 'random.gauss', 'random.gauss', (['(2)', '(1)'], {}), '(2, 1)\n', (1408, 1414), False, 'import random\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Color scheme management."""
from __future__ import print_function, unicode_literals, absolute_import
import os, shutil
from . import helpers, paths, log
from .lnp import lnp
from .dfraw import DFRaw
_df_colors = (
'BLACK', 'BLUE', 'GREEN', 'CYAN',
'RED', 'MAGENTA', 'BROWN', 'LGRAY',
'DGRAY', 'LBLUE', 'LGREEN', 'LCYAN',
'LRED', 'LMAGENTA', 'YELLOW', 'WHITE'
)
def read_colors():
"""Returns a sorted tuple of color scheme basenames, in LNP/Colors."""
return tuple(sorted(
[os.path.splitext(os.path.basename(p))[0] for p in
helpers.get_text_files(paths.get('colors'))],
key=helpers.key_from_underscore_prefixed_string))
def get_colors(colorscheme=None):
"""
Returns RGB tuples for all 16 colors in <colorscheme>.txt, or
data/init/colors.txt if no scheme is provided. On errors, returns an empty
list."""
# pylint:disable=bare-except
try:
if colorscheme is not None:
f = colorscheme
if not f.endswith('.txt'):
f = f + '.txt'
if os.path.dirname(f) == '':
f = paths.get('colors', f)
else:
if lnp.df_info.version <= '0.31.03':
f = paths.get('init', 'init.txt')
else:
f = paths.get('init', 'colors.txt')
color_fields = [(c+'_R', c+'_G', c+'_B') for c in _df_colors]
result = DFRaw(f).get_values(*color_fields)
return [tuple(int(x) for x in t) for t in result]
except:
if colorscheme:
log.e('Unable to read colorscheme %s', colorscheme, stack=True)
else:
log.e('Unable to read current colors', stack=True)
return []
def load_colors(filename):
"""
Replaces the current DF color scheme.
Args:
filename: The name of the new colorscheme to apply (extension optional).
If no path is specified, file is assumed to be in LNP/Colors.
"""
log.i('Loading colorscheme ' + filename)
if not filename.endswith('.txt'):
filename = filename + '.txt'
if os.path.dirname(filename) == '':
filename = paths.get('colors', filename)
if lnp.df_info.version <= '0.31.03':
colors = ([c+'_R' for c in _df_colors] + [c+'_G' for c in _df_colors] +
[c+'_B' for c in _df_colors])
lnp.settings.read_file(filename, colors, False)
lnp.settings.write_settings()
else:
shutil.copyfile(filename, paths.get('init', 'colors.txt'))
def save_colors(filename):
"""
    Save the current color scheme to a file.
Args:
filename: the name of the new color scheme file.
"""
log.i('Saving colorscheme ' + filename)
if not filename.endswith('.txt'):
filename = filename + '.txt'
filename = paths.get('colors', filename)
if lnp.df_info.version <= '0.31.03':
colors = ([c+'_R' for c in _df_colors] + [c+'_G' for c in _df_colors] +
[c+'_B' for c in _df_colors])
lnp.settings.create_file(filename, colors)
else:
shutil.copyfile(paths.get('init', 'colors.txt'), filename)
def color_exists(filename):
"""
Returns whether or not a color scheme already exists.
Args:
filename: the filename to check.
"""
if not filename.endswith('.txt'):
filename = filename + '.txt'
return os.access(paths.get('colors', filename), os.F_OK)
def delete_colors(filename):
"""
Deletes a color scheme file.
Args:
filename: the filename to delete.
"""
log.i('Deleting colorscheme ' + filename)
if not filename.endswith('.txt'):
filename = filename + '.txt'
os.remove(paths.get('colors', filename))
def get_installed_file():
"""Returns the name of the currently installed color scheme, or None."""
files = helpers.get_text_files(paths.get('colors'))
current_scheme = get_colors()
for scheme in files:
if get_colors(scheme) == current_scheme:
return os.path.splitext(os.path.basename(scheme))[0]
return None
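# Illustrative usage (not part of the original module): from inside the
# application package -- the exact package name here is an assumption --
#     from core import colors
#     colors.read_colors()         # bundled scheme basenames
#     colors.get_installed_file()  # scheme matching the current init colors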
|
[
"os.path.dirname",
"os.path.basename"
] |
[((2130, 2155), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (2145, 2155), False, 'import os, shutil\n'), ((1112, 1130), 'os.path.dirname', 'os.path.dirname', (['f'], {}), '(f)\n', (1127, 1130), False, 'import os, shutil\n'), ((4058, 4082), 'os.path.basename', 'os.path.basename', (['scheme'], {}), '(scheme)\n', (4074, 4082), False, 'import os, shutil\n'), ((574, 593), 'os.path.basename', 'os.path.basename', (['p'], {}), '(p)\n', (590, 593), False, 'import os, shutil\n')]
|
#! /usr/bin/env python3
import argparse
from argparse import RawTextHelpFormatter
from collections import Counter, defaultdict
import yaml
from Bio.Seq import Seq
from Bio.Alphabet import generic_dna
import pysam
nucleotide_alphabet = {'A', 'T', 'C', 'G'}
def reverse_complement(sequence):
return str(Seq(sequence, generic_dna).reverse_complement())
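# e.g. reverse_complement('ATCG') returns 'CGAT' (illustrative check, not in the original script)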
def calculate_new_variant_definition(left_read, right_read, ref_fasta, original_vcf_rec):
"""
Resolve the variant definition from the flanking region alignment and old variant definition
TODO: Link to algorithm description once public
"""
# Flag to highlight low confidence in an event detected
failure_reason = None
old_ref = original_vcf_rec[3]
old_alts = original_vcf_rec[4].split(',')
operations = {}
# Define new ref and new pos
new_ref = fetch_bases(ref_fasta, left_read.reference_name, left_read.reference_end + 1,
right_read.reference_start - left_read.reference_end).upper()
    if len(set(new_ref).difference(nucleotide_alphabet)) != 0:
failure_reason = 'Reference Allele not in ACGT'
new_pos = left_read.reference_end + 1
# 1. Handle reference strand change
if not left_read.is_reverse and not right_read.is_reverse:
# Forward strand alignment
old_ref_conv = old_ref
old_alt_conv = old_alts
operations['st'] = '+'
elif left_read.is_reverse and right_read.is_reverse:
# Reverse strand alignment
old_ref_conv = reverse_complement(old_ref)
old_alt_conv = [reverse_complement(alt) for alt in old_alts]
operations['st'] = '-'
else:
# This case should be handled by the filtering but raise just in case...
error_msg = (f'Impossible read configuration: '
f'read1 is_reverse: {left_read.is_reverse}, '
f'read2 is_reverse: {right_read.is_reverse}, '
f'read1 position: {left_read.pos}, '
f'read2 position: {right_read.pos}')
raise ValueError(error_msg)
# 2. Assign new allele sequences
if new_ref == old_ref_conv:
new_alts = old_alt_conv
elif new_ref in old_alt_conv:
old_alt_conv.remove(new_ref)
new_alts = old_alt_conv
new_alts.append(old_ref_conv)
operations['rac'] = old_ref_conv + '-' + new_ref
if len(old_ref_conv) != len(new_ref):
failure_reason = 'Reference Allele length change'
else:
new_alts = old_alt_conv
new_alts.append(old_ref_conv)
operations['rac'] = old_ref_conv + '-' + new_ref
operations['nra'] = None
if len(old_ref_conv) != len(new_ref):
failure_reason = 'Novel Reference Allele length change'
# 3. Correct zero-length reference sequence
if len(new_ref) == 0:
new_pos -= 1
new_ref = fetch_bases(ref_fasta, left_read.reference_name, new_pos, 1).upper()
new_alts = [new_ref + alt for alt in new_alts]
operations['zlr'] = None
return new_pos, new_ref, new_alts, operations, failure_reason
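# Worked example (illustrative, not in the original): for a forward-strand
# alignment with old REF 'A' and old ALT 'G', if the new assembly shows 'G'
# between the flanks, the alleles swap -- new REF 'G', new ALT ['A'] -- and the
# change is recorded as rac=A-G; equal allele lengths, so no failure_reason.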
def update_vcf_record(reference_name, varpos, new_ref, new_alts, operations, original_vcf_rec):
"""
Update the original vcf record with the different fields and use the operations to modify the info and genotypes
fields.
"""
original_vcf_rec[0] = reference_name
original_vcf_rec[1] = str(varpos)
original_vcf_rec[3] = new_ref
original_vcf_rec[4] = ','.join(new_alts)
# Update The INFO field by appending operations
operation_list = [op if operations[op] is None else '%s=%s' % (op, operations[op]) for op in operations]
if original_vcf_rec[7] != '.':
original_vcf_rec[7] = ';'.join(original_vcf_rec[7].strip(';').split(';') + operation_list)
else:
original_vcf_rec[7] = ';'.join(operation_list)
# If required Update SAMPLE fields by changing the Genotypes
if 'rac' in operations and len(original_vcf_rec) > 8 and 'GT' in original_vcf_rec[8]:
gt_index = original_vcf_rec[8].split(':').index('GT')
for genotype_i in range(9, len(original_vcf_rec)):
genotype_str_list = original_vcf_rec[genotype_i].split(':')
if genotype_str_list[gt_index] == '1/1':
genotype_str_list[gt_index] = '0/0'
elif 'nra' in operations and genotype_str_list[gt_index] == '0/1':
genotype_str_list[gt_index] = '1/2'
original_vcf_rec[genotype_i] = ':'.join(genotype_str_list)
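# Worked example (illustrative): under a reference-allele change ('rac'), a
# sample that was homozygous ALT (1/1) against the old reference is homozygous
# REF (0/0) against the new one; with a novel reference allele ('nra'), an old
# 0/1 call now pairs two non-reference alleles and is rewritten as 1/2.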
def fetch_bases(fasta, contig, start, length):
"""
Returns a subsection from a specified FASTA contig. The start coordinate is 1-based.
"""
zero_base_start = start - 1
end = zero_base_start + length
new_ref = fasta.fetch(reference=contig, start=zero_base_start, end=end)
return new_ref
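# e.g. fetch_bases(fasta, 'chr1', 100, 3) returns the three bases at 1-based
# positions 100-102 (illustrative; the contig name is an assumption).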
def group_reads(bam_file_path):
"""
This function assumes that the reads are sorted by query name.
It will group reads by query name and create three subgroups of primary, supplementary and secondary aligned reads.
    It returns an iterator where each element is a tuple of the three lists.
:param bam_file_path: the name sorted bam file
:return: iterator of tuples containing three lists
"""
with pysam.AlignmentFile(bam_file_path, 'rb') as inbam:
current_read_name = None
primary_group = None
secondary_group = None
supplementary_group = None
for read in inbam:
            if read.query_name != current_read_name:
                if current_read_name:
                    yield primary_group, supplementary_group, secondary_group
                primary_group = []
                secondary_group = []
                supplementary_group = []
if read.is_secondary:
secondary_group.append(read)
elif read.is_supplementary:
supplementary_group.append(read)
else:
primary_group.append(read)
current_read_name = read.query_name
if primary_group:
yield primary_group, supplementary_group, secondary_group
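# Illustrative usage (the BAM path is hypothetical):
#     for primary, supplementary, secondary in group_reads('flanks.namesorted.bam'):
#         ...  # one tuple per query name, in file order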
def order_reads(primary_group, primary_to_supplementary):
"""
    Order the reads and return the most 5' (smallest coordinate) one first.
    If a supplementary read exists and is closer to the other read, it is used in place of the primary.
"""
read1, read2 = primary_group
suppl_read1 = suppl_read2 = None
if read1 in primary_to_supplementary:
suppl_read1 = primary_to_supplementary.get(read1)[0]
if read2 in primary_to_supplementary:
suppl_read2 = primary_to_supplementary.get(read2)[0]
if read1.reference_start <= read2.reference_start:
if suppl_read1 and suppl_read1.reference_start > read1.reference_start:
read1 = suppl_read1
if suppl_read2 and suppl_read2.reference_start < read2.reference_start:
read2 = suppl_read2
return read1, read2
else:
if suppl_read1 and suppl_read1.reference_start < read1.reference_start:
read1 = suppl_read1
if suppl_read2 and suppl_read2.reference_start > read2.reference_start:
read2 = suppl_read2
return read2, read1
def pass_basic_filtering(primary_group, secondary_group, primary_to_supplementary, counter, filter_align_with_secondary):
"""
    Test whether the alignment passes basic filtering: presence of secondary alignments (when filtered),
    unmapped primaries, primaries mapped on different chromosomes, or too many supplementary alignments.
"""
if filter_align_with_secondary and len(secondary_group):
counter['Too many alignments'] += 1
elif len(primary_group) < 2 or any(read.is_unmapped for read in primary_group):
counter['Flank unmapped'] += 1
elif len(set(read.reference_name for read in primary_group)) != 1:
counter['Different chromosomes'] += 1
elif any(len(suppl) > 1 for suppl in primary_to_supplementary.values()):
counter['Too many supplementary'] += 1
else:
return True
return False
def pass_aligned_filtering(left_read, right_read, counter):
"""
    Test if the two reads pass the additional filters: a soft-clipped end next to the variant region,
    an overlap between the two alignments, or inconsistent orientations all cause rejection.
:param left_read: the left (or 5') most read
:param right_read: the right (or 3') most read
:param counter: Counter to report the number of reads filtered.
:return: True or False
"""
# in CIGAR tuples the operation is coded as an integer
# https://pysam.readthedocs.io/en/latest/api.html#pysam.AlignedSegment.cigartuples
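    # e.g. cigartuples == [(4, 10), (0, 140)] encodes "10S140M", so
    # cigartuples[0][0] == pysam.CSOFT_CLIP (4) flags a soft-clipped 5' end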
if left_read.cigartuples[-1][0] == pysam.CSOFT_CLIP or right_read.cigartuples[0][0] == pysam.CSOFT_CLIP:
counter['Soft-clipped alignments'] += 1
elif left_read.reference_end > right_read.reference_start:
counter['Overlapping alignment'] += 1
elif left_read.is_reverse != right_read.is_reverse:
counter['Unexpected orientation'] += 1
else:
return True
return False
def output_alignment(original_vcf_rec, outfile):
"""
Output the original or updated VCF entry to the provided output file.
"""
print('\t'.join(original_vcf_rec), file=outfile)
def link_supplementary(primary_group, supplementary_group):
"""Link supplementary alignments to their primary."""
if not supplementary_group:
# No supplementary so no linking required
return {}
supplementary_dict = {}
primary_to_supplementary = defaultdict(list)
for supplementary_read in supplementary_group:
supplementary_dict[supplementary_read.reference_name + str(supplementary_read.reference_start + 1)] = supplementary_read
for primary in primary_group:
# chr2,808117,+,1211M790S,60,1;
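        # (each SA entry is "rname,pos,strand,CIGAR,mapQ,NM;" with a 1-based pos,
        #  so rname + pos matches the keys built for supplementary_dict above)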
if primary.has_tag('SA'):
for other_alignment in primary.get_tag('SA').split(';'):
if other_alignment:
rname, pos = other_alignment.split(',')[:2]
primary_to_supplementary[primary].append(
supplementary_dict[rname + pos]
)
return dict(primary_to_supplementary)
def process_bam_file(bam_file_paths, output_file, out_failed_file, new_genome,
filter_align_with_secondary, flank_length, summary_file):
counter = Counter()
fasta = pysam.FastaFile(new_genome)
with open(output_file, 'w') as outfile, open(out_failed_file, 'w') as out_failed:
for bam_file_path in bam_file_paths:
for primary_group, supplementary_group, secondary_group in group_reads(bam_file_path):
counter['total'] += 1
primary_to_supplementary = link_supplementary(primary_group, supplementary_group)
# Retrieve the full VCF record from the bam vr tag
original_vcf_rec = primary_group[0].get_tag('vr').split('|^')
if pass_basic_filtering(primary_group, secondary_group, primary_to_supplementary, counter, filter_align_with_secondary):
left_read, right_read = order_reads(primary_group, primary_to_supplementary)
if pass_aligned_filtering(left_read, right_read, counter):
varpos, new_ref, new_alts, ops, failure_reason = \
calculate_new_variant_definition(left_read, right_read, fasta, original_vcf_rec)
if not failure_reason:
counter['Remapped'] += 1
update_vcf_record(left_read.reference_name, varpos, new_ref, new_alts, ops, original_vcf_rec)
output_alignment(original_vcf_rec, outfile)
else:
# Currently the alignment is not precise enough to ensure that the allele change for INDEL and
# novel reference allele are correct. So we skip them.
# TODO: add realignment confirmation see #14 and EVA-2417
counter[failure_reason] += 1
output_alignment(original_vcf_rec, out_failed)
else:
output_alignment(original_vcf_rec, out_failed)
else:
output_alignment(original_vcf_rec, out_failed)
with open(summary_file, 'w') as open_summary:
yaml.safe_dump({f'Flank_{flank_length}': dict(counter)}, open_summary)
def main():
description = ('Process alignment results in bam format to determine the location of the variant in the new genome.'
' Each variant will be either output in the new genome VCF or the old VCF will be output in a '
'separate file.')
parser = argparse.ArgumentParser(description=description, formatter_class=RawTextHelpFormatter)
parser.add_argument('-i', '--bams', type=str, required=True, nargs='+',
help='Input BAM file with remapped flanking regions')
parser.add_argument('-o', '--outfile', type=str, required=True,
help='Output VCF file with remapped variants')
parser.add_argument('--out_failed_file', type=str, required=True,
help='Name of the file containing reads that did not align correctly')
parser.add_argument('--flank_length', type=int, required=True,
help='Length of the flanking region used.')
parser.add_argument('--summary', type=str, required=True,
                        help='YAML file containing the summary metrics')
parser.add_argument('-f', '--filter_align_with_secondary', action='store_true', default=False,
help='Filter out alignments that have one or several secondary alignments.')
parser.add_argument('-n', '--newgenome', required=True, help='FASTA file of the target genome')
args = parser.parse_args()
process_bam_file(
bam_file_paths=args.bams,
output_file=args.outfile,
out_failed_file=args.out_failed_file,
new_genome=args.newgenome,
filter_align_with_secondary=args.filter_align_with_secondary,
flank_length=args.flank_length,
summary_file=args.summary
)
if __name__ == '__main__':
main()
|
[
"Bio.Seq.Seq",
"pysam.FastaFile",
"argparse.ArgumentParser",
"pysam.AlignmentFile",
"collections.defaultdict",
"collections.Counter"
] |
[((9586, 9603), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (9597, 9603), False, 'from collections import Counter, defaultdict\n'), ((10417, 10426), 'collections.Counter', 'Counter', ([], {}), '()\n', (10424, 10426), False, 'from collections import Counter, defaultdict\n'), ((10439, 10466), 'pysam.FastaFile', 'pysam.FastaFile', (['new_genome'], {}), '(new_genome)\n', (10454, 10466), False, 'import pysam\n'), ((12840, 12931), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': 'description', 'formatter_class': 'RawTextHelpFormatter'}), '(description=description, formatter_class=\n RawTextHelpFormatter)\n', (12863, 12931), False, 'import argparse\n'), ((5279, 5319), 'pysam.AlignmentFile', 'pysam.AlignmentFile', (['bam_file_path', '"""rb"""'], {}), "(bam_file_path, 'rb')\n", (5298, 5319), False, 'import pysam\n'), ((308, 334), 'Bio.Seq.Seq', 'Seq', (['sequence', 'generic_dna'], {}), '(sequence, generic_dna)\n', (311, 334), False, 'from Bio.Seq import Seq\n')]
|
from detectron2.config import CfgNode as CN
def add_yolo_config(cfg):
cfg.MODEL.YAML = "yolov5m.yaml"
cfg.MODEL.YOLO = CN()
cfg.MODEL.YOLO.NORM = "BN"
cfg.MODEL.YOLO.ACTIVATION = "nn.LeakyReLU"
cfg.MODEL.YOLO.FOCAL_LOSS_GAMMA = 0.0
cfg.MODEL.YOLO.BOX_LOSS_GAIN = 0.05
cfg.MODEL.YOLO.CLS_LOSS_GAIN = 0.3
cfg.MODEL.YOLO.CLS_POSITIVE_WEIGHT = 1.0
cfg.MODEL.YOLO.OBJ_LOSS_GAIN = 0.7
cfg.MODEL.YOLO.OBJ_POSITIVE_WEIGHT = 1.0
cfg.MODEL.YOLO.LABEL_SMOOTHING = 0.0
cfg.MODEL.YOLO.ANCHOR_T = 4.0
cfg.MODEL.YOLO.CONF_THRESH = 0.001
cfg.MODEL.YOLO.IOU_THRES = 0.65
    cfg.MODEL.PIXEL_MEAN = [0.0, 0.0, 0.0]
    cfg.MODEL.PIXEL_STD = [255.0, 255.0, 255.0]
cfg.SOLVER.BASE_LR = 0.001
cfg.SOLVER.MOMENTUM = 0.937
cfg.SOLVER.NESTEROV = True
cfg.SOLVER.WEIGHT_DECAY = 0.0005
cfg.SOLVER.WEIGHT_DECAY_NORM = 0.0
cfg.SOLVER.WEIGHT_DECAY_BIAS = 0.0005
cfg.SOLVER.LR_SCHEDULER_NAME = "WarmupCosineLR"
cfg.SOLVER.WARMUP_ITERS = 1000
cfg.SOLVER.IMS_PER_BATCH = 16
cfg.INPUT.SIZE = 416
cfg.INPUT.HSV_H = 0.015
cfg.INPUT.HSV_S = 0.7
cfg.INPUT.HSV_V = 0.4
cfg.INPUT.DEGREES = 0.0
cfg.INPUT.TRANSLATE = 0.1
cfg.INPUT.SCALE = 0.5
cfg.INPUT.SHEAR = 0.0
cfg.INPUT.PERSPECTIVE = 0.0
cfg.INPUT.FLIPUD = 0.0
cfg.INPUT.FLIPLR = 0.5
cfg.INPUT.MOSAIC = 1.0 # IGNORED
cfg.INPUT.MIXUP = 0.0
cfg.INPUT.FORMAT = "BGR"
cfg.TEST.AUG.SIZE = 416
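# Usage sketch (illustration only, assumes detectron2 is installed): this helper is
# applied on top of detectron2's default config, following the usual
# "add_*_config" project pattern.
if __name__ == "__main__":
    from detectron2.config import get_cfg

    cfg = get_cfg()
    add_yolo_config(cfg)
    print(cfg.MODEL.YOLO.CONF_THRESH)  # expected: 0.001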
|
[
"detectron2.config.CfgNode"
] |
[((129, 133), 'detectron2.config.CfgNode', 'CN', ([], {}), '()\n', (131, 133), True, 'from detectron2.config import CfgNode as CN\n')]
|
import paho.mqtt.client as paho
from sensorMetaData import sensorMetaData
from sht85 import SHT85
from mlx90614 import MLX90614
import os
import time
import threading
from threading import Lock
from datetime import datetime
import subprocess
# lock for thread print
thread_print_lock = Lock()
# publish or debug
mode="publish"
#mode="debug"
# setup watchdog
fd = open("/dev/watchdog", "w")
print(fd)
# setup mqtt client and mqtt function
def on_publish(client,userdata,result):
pass
broker="xxx.xxx.xxx.xxx"
port=1883
raspberrypi_id = 1
client = paho.Client("control" + str(raspberrypi_id))
client.on_publish = on_publish
client.connect(broker,port)
client.loop_start()
#global var
stationTXByte_old = 0
stationRXByte_old = 0
time_old = 0
# thread safe print
def thread_print(a, *b):
global mode
# if we are sending the data to the server, we mute the output
if mode == "publish":
return
with thread_print_lock:
# print format: time + data
now = datetime.now()
current_time = now.strftime("%H:%M:%S")
print("%s: " % current_time, end='')
print (a % b)
# sensor list
currentSensors = {}
ignore = 'w1_bus_master'
# Sensor Reading Class
# Each physical sensor will have a sensorReading obj
# There is no way to stop a thread in Python's threading package, so we use a stop flag to stop threads
class SensorReading(threading.Thread):
global client
global fd
def __init__(self, metaData):
threading.Thread.__init__(self)
self._stopper = threading.Event()
self.metaData = metaData
def stop(self):
self._stopper.set()
def stopped(self):
        return self._stopper.is_set()
def getInterval(self):
return 1.0/self.metaData["frequency"]
def getCalibration(self):
return float(self.metaData["calibration"])
def getID(self):
return self.metaData["id"]
def kickDog(self, ret):
if ret.rc == 0:
nb = fd.write("u")
fd.flush()
if nb > 0:
pass
else:
thread_print("WATCHDOG ERROR")
else:
thread_print("Didn't kick the dog. ret value = %s" % str(ret.rc))
def sht85_read(self):
# get frequency
interval = self.getInterval()
calibration = self.getCalibration()
# get id
id = self.getID()
while True:
if self.stopped():
return
t,h,c = self.metaData["i2cDevice"].single_shot("HIGH")
h = h + calibration
if c == 5:
                # read failed (rare); don't publish
thread_print("sensor: %s. Read fail." % (id))
else:
# read success
# send r, h, id to the server
thread_print("sensor: %s. Temp: %s. Hum: %s. Attempts: %s" % (id,str(t), str(h), str(c)))
# publish
try:
ret = client.publish("senselet/" + id, str(time.time()) + '_' + str(t) + '_' + str(h))
self.kickDog(ret)
except Exception as e:
thread_print(str(e))
# adapt sleep time
# after using ds2482, actually c will always be zero
time.sleep(max(0.1, interval - (0.3*c + 0.2)))
def mlx90614_read(self):
# get frequency
interval = self.getInterval()
# get id
id = self.getID()
while True:
if self.stopped():
return
t,c = self.metaData["i2cDevice"].get_obj_temp()
if c == 5:
#read fail
thread_print("sensor: %s. Read fail." % (id))
else:
# read success
# send r, h, id to the server
thread_print("sensor: %s. Temp: %s.Attempts: %s" % (id,str(t), str(c)))
try:
ret = client.publish("senselet/" + id, str(time.time()) + '_' + str(t))
self.kickDog(ret)
except Exception as e:
thread_print(str(e))
# adapt sleep time
# after using ds2482, actually c will always be zero
time.sleep(max(0.1, interval - 0.1*c))
def waterLeakageRope_read(self):
# get frequency
interval = self.getInterval()
# get id
id = self.getID()
while True:
if self.stopped():
return
# read adc
path = "/sys/bus/w1/devices/" + id + "/vad"
c = 0
for i in range(5):
try:
with open(path, "r") as f:
status = f.read().replace("\n","")
break
except IOError:
c = c + 1
if c == 5 :
#read fail
thread_print("sensor: %s. Read fail." % (id))
else:
# read success
# convert adc reading to discrete status
if int(status) < 300 and int(status) > 15:
status = 1
else:
status = 0
# send status to the server
thread_print("sensor: %s. Status: %s. Attempts: %s" % (id,status, str(c)))
try:
ret = client.publish("senselet/" + id, str(time.time()) + '_' + str(status))
self.kickDog(ret)
except Exception as e:
thread_print(str(e))
time.sleep(interval)
def waterLeakagePoint_read(self):
# get frequency
interval = self.getInterval()
# get id
id = self.getID()
while True:
if self.stopped():
return
path = "/sys/bus/w1/devices/" + id + "/state"
c = 0
for i in range(5):
try:
with open(path, "r") as f:
status = f.read(1)
break
except IOError:
c = c + 1
if c == 5 :
#read fail
thread_print("sensor: %s. Read fail." % (id))
else:
# read success
# send status to the server
s = '{0:08b}'.format(ord(status))[1]
thread_print("sensor: %s. Status: %s. Attempts: %s" % (id, s, str(c)))
try:
ret = client.publish("senselet/" + id, str(time.time()) + '_' + s)
self.kickDog(ret)
except Exception as e:
thread_print(str(e))
time.sleep(interval)
def doorSensor_read(self):
# get frequency
interval = self.getInterval()
# get id
id = self.getID()
while True:
if self.stopped():
return
path = "/sys/bus/w1/devices/" + id + "/state"
c = 0
for i in range(5):
try:
with open(path, "r") as f:
status = f.read(1)
break
except IOError:
c = c + 1
if c == 5 :
#read fail
thread_print("sensor: %s. Read fail." % (id))
else:
# read success
# send status to the server
s = '{0:08b}'.format(ord(status))[1]
# convert: 1 is open 0 is close
if s == "1":
s = "0"
else:
s = "1"
thread_print("sensor: %s. Status: %s. Attempts: %s" % (id, s, str(c)))
try:
ret = client.publish("senselet/" + id, str(time.time()) + '_' + s)
self.kickDog(ret)
except Exception as e:
thread_print(str(e))
time.sleep(interval)
def oilLeakagePoint_read(self):
# get frequency
interval = self.getInterval()
# get id
id = self.getID()
while True:
if self.stopped():
return
path = "/sys/bus/w1/devices/" + id + "/state"
c = 0
for i in range(5):
try:
with open(path, "r") as f:
status = f.read(1)
break
except IOError:
c = c + 1
if c == 5 :
#read fail
thread_print("sensor: %s. Read fail." % (id))
else:
# read success
# send status to the server
s = '{0:08b}'.format(ord(status))[1]
thread_print("sensor: %s. Status: %s. Attempts: %s" % (id, s, str(c)))
try:
ret = client.publish("senselet/" + id, str(time.time()) + '_' + s)
self.kickDog(ret)
except Exception as e:
thread_print(str(e))
time.sleep(interval)
def airFlow_read(self):
# get frequency
interval = self.getInterval()
# get id
id = self.getID()
while True:
if self.stopped():
return
# read adc
path = "/sys/bus/w1/devices/" + id + "/vad"
c = 0
for i in range(5):
try:
with open(path, "r") as f:
status = f.read().replace("\n","")
break
except IOError:
c = c + 1
if c == 5 :
#read fail
thread_print("sensor: %s. Read fail." % (id))
else:
# read success
# send status to the server
thread_print("sensor: %s. Speed: %s. Attempts: %s" % (id,status, str(c)))
try:
ret = client.publish("senselet/" + id, str(time.time()) + '_' + str(status))
self.kickDog(ret)
except Exception as e:
thread_print(str(e))
time.sleep(interval)
def run(self):
if self.metaData["name"] == "sht85":
self.sht85_read()
elif self.metaData["name"] == "mlx90614":
self.mlx90614_read()
elif self.metaData["name"] == "waterLeakageRope":
self.waterLeakageRope_read()
elif self.metaData["name"] == "waterLeakagePoint":
self.waterLeakagePoint_read()
elif self.metaData["name"] == "doorSensorSmall":
self.doorSensor_read()
elif self.metaData["name"] == "doorSensorLarge":
self.doorSensor_read()
elif self.metaData["name"] == "oilLeakagePoint":
self.oilLeakagePoint_read()
elif self.metaData["name"] == "airFlow":
self.airFlow_read()
# Sensor Reading Class End
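# Minimal sketch of the stop-flag pattern used by SensorReading above
# (illustration only; it is not wired into the controller): the thread polls an
# Event and exits cooperatively once stop() is called from another thread.
class StopFlagExample(threading.Thread):
    def __init__(self):
        threading.Thread.__init__(self)
        self._stopper = threading.Event()
    def stop(self):
        self._stopper.set()
    def run(self):
        # do periodic work until stop() is requested
        while not self._stopper.is_set():
            time.sleep(0.1)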
# get available devices in the folder
def getDevices():
path = "/sys/bus/w1/devices/"
try:
files = os.listdir(path)
except:
return -1
return files
# get the I2C BUS of a given sensor id
def getI2CBUS(sensor):
path = "/sys/bus/w1/devices/" + sensor + "/"
try:
files = os.listdir(path)
except:
# oops fail to list dirs -> we unplug it
return -1
sub = "i2c"
busName = [s for s in files if sub in s]
if len(busName) == 0:
#we have the file but no i2c -> we unplug it
return -1
return int(busName[0].split('-')[1])
def kickDog():
thread_print("no sensor connect to the device %s." %(raspberrypi_id))
def checkAndUpdateSensors():
start = time.time()
global currentSensors
if len(currentSensors) == 0:
kickDog()
# temp sensor list
newSensors = {}
# create a temp sensor list
sensors = getDevices()
if sensors == -1:
return -1
    # if there is no sensor connected to the edge, we still pat the dog
# sensors are discovered by the driver
for sensor in sensors:
if ignore in sensor:
continue
if sensor not in sensorMetaData:
# if the sensor is not registered, we print&log this error
thread_print("sensor - %s not registered" % sensor)
continue
else:
metaData = sensorMetaData[sensor]
# if the sensor is registered, check if it is an i2c device(i2c device is a little bit complex)
if metaData["protocol"] == "I2C":
newI2cBus = getI2CBUS(sensor)
if newI2cBus == -1 :
continue
else:
if metaData["name"] == "sht85":
newSensors[sensor] = {
"protocol": "I2C",
"i2cBus": newI2cBus,
"name": metaData["name"],
"frequency": metaData["frequency"],
"calibration": metaData["calibration"]
}
else:
newSensors[sensor] = {
"protocol": "I2C",
"i2cBus": newI2cBus,
"name": metaData["name"],
"frequency": metaData["frequency"]
}
else:
newSensors[sensor] = {
"protocol": metaData["protocol"],
"name": metaData["name"],
"frequency": metaData["frequency"]
}
# loop through current sensor list, add new sensor, remove unplugged sensor
# for i2c sensor, we need to check (1) if it exists in the currentSensors (2) if the i2c bus is the same
for sensor in list(currentSensors):
metaData = currentSensors[sensor]
# unplug
if sensor not in newSensors:
# thread_print&log
thread_print("sensor - %s - %s unplugged" % (sensor, metaData["name"]))
# if this is I2C device, we need to close the I2C file
if metaData["protocol"] == "I2C":
metaData["i2cDevice"].bus.close()
# stop the thread
metaData["threading"].stop()
metaData["threading"].join()
# delete this sensor from the connected sensor list
del currentSensors[sensor]
else:
# we need to do extra checks for I2C, we don't need to check sensors with other types
if metaData["protocol"] == "I2C":
# if the bus number does not change, do nothing. else, we close the old device and add the new device
oldI2cBus = metaData["i2cBus"]
newI2cBus = newSensors[sensor]["i2cBus"]
if oldI2cBus != newI2cBus:
thread_print("i2c sensor change i2c bus from %s -> %s" % (str(oldI2cBus), str(newI2cBus)))
metaData["i2cDevice"].bus.close()
# start new bus
if metaData["name"] == "sht85":
metaData["i2cDevice"] = SHT85(newI2cBus)
elif metaData["name"] == "mlx90614":
metaData["i2cDevice"] = MLX90614(newI2cBus)
# delete this sensor from new Sensors list
del newSensors[sensor]
# for threading we dont need to do anything
# remained sensors in newSensors are new plugged sensors
for sensor in newSensors:
metaData = newSensors[sensor]
thread_print("sensor - %s - %s plugged in to the system" % (sensor, metaData["name"]))
if metaData["protocol"]== "I2C":
newI2cBus = newSensors[sensor]["i2cBus"]
if metaData["name"]== "sht85":
i2cDevice = SHT85(newI2cBus)
elif metaData["name"]== "mlx90614":
i2cDevice = MLX90614(0x5a,newI2cBus)
if metaData["name"] == "sht85":
currentSensors[sensor] = {
"id": sensor,
"protocol": "I2C",
"i2cBus": newI2cBus,
"i2cDevice": i2cDevice,
"name": metaData["name"],
"frequency": metaData["frequency"],
"calibration": metaData["calibration"]
}
else:
currentSensors[sensor] = {
"id": sensor,
"protocol": "I2C",
"i2cBus": newI2cBus,
"i2cDevice": i2cDevice,
"name": metaData["name"],
"frequency": metaData["frequency"]
}
else:
currentSensors[sensor] = {
"id": sensor,
"protocol": metaData["protocol"],
"name": metaData["name"],
"frequency": metaData["frequency"]
}
# start sensor reading thread
t = SensorReading(currentSensors[sensor])
currentSensors[sensor]["threading"] = t
t.start()
end = time.time()
# publish network stats
def publishLink():
global raspberrypi_id
id = 'network' + str(raspberrypi_id)
process = subprocess.run('cat /proc/net/wireless', shell=True, check=True, stdout=subprocess.PIPE, universal_newlines=True)
output = process.stdout
outputList = output.split("\n")
level = outputList[2].split()[2].replace('.','')
thread_print("controller: %s. Link: %s. " % (id, level))
try:
ret = client.publish("senselet/" + id, str(time.time()) + '_' + level)
except Exception as e:
# we just print out the error
thread_print(str(e))
def publishWIFIStats():
global raspberrypi_id, stationTXByte_old, stationRXByte_old, time_old
id = 'control' + str(raspberrypi_id)
process = subprocess.run('iw dev wlan0 station dump', shell=True, check=True, stdout=subprocess.PIPE, universal_newlines=True)
output = process.stdout
outputList = output.split('\n\t')
stationMAC = str(outputList[0].split()[1])
stationSignal = str(outputList[7].split('[')[1].split(']')[0])
stationTXRate = str(outputList[8].split('\t')[1].split()[0])
stationRXRate = str(outputList[9].split('\t')[1].split()[0])
if stationTXByte_old == 0:
time_old = time.time()
stationTXByte_old = int(outputList[4].split('\t')[1])
stationRXByte_old = int(outputList[2].split('\t')[1])
stationTXByteRate = 0
stationRXByteRate = 0
else:
time_new = time.time()
time_diff = time_new - time_old
stationTXByte_new = int(outputList[4].split('\t')[1])
stationRXByte_new = int(outputList[2].split('\t')[1])
stationTXByteRate = str(round((stationTXByte_new - stationTXByte_old)/time_diff ,3))
stationRXByteRate = str(round((stationRXByte_new - stationRXByte_old)/time_diff,3))
stationTXByte_old = stationTXByte_new
stationRXByte_old = stationRXByte_new
time_old = time_new
thread_print("controller: %s. APMAC: %s. APTXByteRate: %s. APRXByteRate: %s. APSignal: %s. APTXRate: %s. APRXRate: %s. " % (id,stationMAC, stationTXByteRate, stationRXByteRate, stationSignal, stationTXRate, stationRXRate))
try:
ret = client.publish("senselet/" + id, str(time.time()) + '_' + stationMAC + '_' + stationTXByteRate + '_' + stationRXByteRate + '_' + stationSignal + '_' + stationTXRate + '_' + stationRXRate)
except Exception as e:
# we just print out the error
thread_print(str(e))
def main():
while True:
try:
# 04/16/2021 add wifi stats
publishWIFIStats()
ret = checkAndUpdateSensors()
if ret == -1:
thread_print("something wrong happened, lets try it agagin")
time.sleep(0.5)
continue
# read link every 1 s
publishLink()
time.sleep(1)
publishLink()
time.sleep(1)
publishLink()
time.sleep(1)
except KeyboardInterrupt:
# Ctrl-C handling and send kill to threads
thread_print ("Sending kill to threads...")
for sensor in currentSensors:
currentSensors[sensor]["threading"].stop()
for sensor in currentSensors:
currentSensors[sensor]["threading"].join()
break
thread_print ("Exited")
if __name__ == '__main__':
main()
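    # writing "V" below is the Linux watchdog "magic close": it tells the driver
    # to disarm the watchdog before the file is closed, so the board does not reboot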
fd.write("V")
fd.close()
print("watch dog stop")
|
[
"subprocess.run",
"threading.Thread.__init__",
"time.time",
"threading.Lock",
"time.sleep",
"sht85.SHT85",
"mlx90614.MLX90614",
"threading.Event",
"datetime.datetime.now",
"os.listdir"
] |
[((288, 294), 'threading.Lock', 'Lock', ([], {}), '()\n', (292, 294), False, 'from threading import Lock\n'), ((11824, 11835), 'time.time', 'time.time', ([], {}), '()\n', (11833, 11835), False, 'import time\n'), ((17587, 17598), 'time.time', 'time.time', ([], {}), '()\n', (17596, 17598), False, 'import time\n'), ((17724, 17842), 'subprocess.run', 'subprocess.run', (['"""cat /proc/net/wireless"""'], {'shell': '(True)', 'check': '(True)', 'stdout': 'subprocess.PIPE', 'universal_newlines': '(True)'}), "('cat /proc/net/wireless', shell=True, check=True, stdout=\n subprocess.PIPE, universal_newlines=True)\n", (17738, 17842), False, 'import subprocess\n'), ((18355, 18476), 'subprocess.run', 'subprocess.run', (['"""iw dev wlan0 station dump"""'], {'shell': '(True)', 'check': '(True)', 'stdout': 'subprocess.PIPE', 'universal_newlines': '(True)'}), "('iw dev wlan0 station dump', shell=True, check=True, stdout=\n subprocess.PIPE, universal_newlines=True)\n", (18369, 18476), False, 'import subprocess\n'), ((1000, 1014), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1012, 1014), False, 'from datetime import datetime\n'), ((1476, 1507), 'threading.Thread.__init__', 'threading.Thread.__init__', (['self'], {}), '(self)\n', (1501, 1507), False, 'import threading\n'), ((1533, 1550), 'threading.Event', 'threading.Event', ([], {}), '()\n', (1548, 1550), False, 'import threading\n'), ((11190, 11206), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (11200, 11206), False, 'import os\n'), ((11396, 11412), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (11406, 11412), False, 'import os\n'), ((18834, 18845), 'time.time', 'time.time', ([], {}), '()\n', (18843, 18845), False, 'import time\n'), ((19059, 19070), 'time.time', 'time.time', ([], {}), '()\n', (19068, 19070), False, 'import time\n'), ((5597, 5617), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (5607, 5617), False, 'import time\n'), ((6733, 6753), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (6743, 6753), False, 'import time\n'), ((8017, 8037), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (8027, 8037), False, 'import time\n'), ((9151, 9171), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (9161, 9171), False, 'import time\n'), ((10275, 10295), 'time.sleep', 'time.sleep', (['interval'], {}), '(interval)\n', (10285, 10295), False, 'import time\n'), ((20498, 20511), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (20508, 20511), False, 'import time\n'), ((20550, 20563), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (20560, 20563), False, 'import time\n'), ((20602, 20615), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (20612, 20615), False, 'import time\n'), ((16071, 16087), 'sht85.SHT85', 'SHT85', (['newI2cBus'], {}), '(newI2cBus)\n', (16076, 16087), False, 'from sht85 import SHT85\n'), ((20381, 20396), 'time.sleep', 'time.sleep', (['(0.5)'], {}), '(0.5)\n', (20391, 20396), False, 'import time\n'), ((16164, 16187), 'mlx90614.MLX90614', 'MLX90614', (['(90)', 'newI2cBus'], {}), '(90, newI2cBus)\n', (16172, 16187), False, 'from mlx90614 import MLX90614\n'), ((15388, 15404), 'sht85.SHT85', 'SHT85', (['newI2cBus'], {}), '(newI2cBus)\n', (15393, 15404), False, 'from sht85 import SHT85\n'), ((18076, 18087), 'time.time', 'time.time', ([], {}), '()\n', (18085, 18087), False, 'import time\n'), ((15510, 15529), 'mlx90614.MLX90614', 'MLX90614', (['newI2cBus'], {}), '(newI2cBus)\n', (15518, 15529), False, 'from mlx90614 import MLX90614\n'), ((3990, 4001), 
'time.time', 'time.time', ([], {}), '()\n', (3999, 4001), False, 'import time\n'), ((5433, 5444), 'time.time', 'time.time', ([], {}), '()\n', (5442, 5444), False, 'import time\n'), ((6578, 6589), 'time.time', 'time.time', ([], {}), '()\n', (6587, 6589), False, 'import time\n'), ((7862, 7873), 'time.time', 'time.time', ([], {}), '()\n', (7871, 7873), False, 'import time\n'), ((8996, 9007), 'time.time', 'time.time', ([], {}), '()\n', (9005, 9007), False, 'import time\n'), ((10111, 10122), 'time.time', 'time.time', ([], {}), '()\n', (10120, 10122), False, 'import time\n'), ((3020, 3031), 'time.time', 'time.time', ([], {}), '()\n', (3029, 3031), False, 'import time\n'), ((19849, 19860), 'time.time', 'time.time', ([], {}), '()\n', (19858, 19860), False, 'import time\n')]
|
# Copyright 2020 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""test tfrecord to mindrecord tool"""
import collections
from importlib import import_module
import os
import numpy as np
import pytest
from mindspore import log as logger
from mindspore.mindrecord import FileReader
from mindspore.mindrecord import TFRecordToMR
SupportedTensorFlowVersion = '2.1.0'
try:
tf = import_module("tensorflow") # just used to convert tfrecord to mindrecord
except ModuleNotFoundError:
logger.warning("tensorflow module not found.")
tf = None
TFRECORD_DATA_DIR = "../data/mindrecord/testTFRecordData"
TFRECORD_FILE_NAME = "test.tfrecord"
MINDRECORD_FILE_NAME = "test.mindrecord"
PARTITION_NUM = 1
def verify_data(transformer, reader):
"""Verify the data by read from mindrecord"""
tf_iter = transformer.tfrecord_iterator()
mr_iter = reader.get_next()
count = 0
for tf_item, mr_item in zip(tf_iter, mr_iter):
count = count + 1
assert len(tf_item) == 6
assert len(mr_item) == 6
for key, value in tf_item.items():
logger.info("key: {}, tfrecord: value: {}, mindrecord: value: {}".format(key, value, mr_item[key]))
if isinstance(value, np.ndarray):
assert (value == mr_item[key]).all()
else:
assert value == mr_item[key]
assert count == 10
def generate_tfrecord():
def create_int_feature(values):
if isinstance(values, list):
feature = tf.train.Feature(int64_list=tf.train.Int64List(value=list(values))) # values: [int, int, int]
else:
feature = tf.train.Feature(int64_list=tf.train.Int64List(value=[values])) # values: int
return feature
def create_float_feature(values):
if isinstance(values, list):
feature = tf.train.Feature(float_list=tf.train.FloatList(value=list(values))) # values: [float, float]
else:
feature = tf.train.Feature(float_list=tf.train.FloatList(value=[values])) # values: float
return feature
def create_bytes_feature(values):
if isinstance(values, bytes):
feature = tf.train.Feature(bytes_list=tf.train.BytesList(value=[values])) # values: bytes
else:
# values: string
feature = tf.train.Feature(bytes_list=tf.train.BytesList(value=[bytes(values, encoding='utf-8')]))
return feature
writer = tf.io.TFRecordWriter(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
example_count = 0
for i in range(10):
file_name = "000" + str(i) + ".jpg"
image_bytes = bytes(str("aaaabbbbcccc" + str(i)), encoding="utf-8")
int64_scalar = i
float_scalar = float(i)
int64_list = [i, i+1, i+2, i+3, i+4, i+1234567890]
float_list = [float(i), float(i+1), float(i+2.8), float(i+3.2),
float(i+4.4), float(i+123456.9), float(i+98765432.1)]
features = collections.OrderedDict()
features["file_name"] = create_bytes_feature(file_name)
features["image_bytes"] = create_bytes_feature(image_bytes)
features["int64_scalar"] = create_int_feature(int64_scalar)
features["float_scalar"] = create_float_feature(float_scalar)
features["int64_list"] = create_int_feature(int64_list)
features["float_list"] = create_float_feature(float_list)
tf_example = tf.train.Example(features=tf.train.Features(feature=features))
writer.write(tf_example.SerializeToString())
example_count += 1
writer.close()
logger.info("Write {} rows in tfrecord.".format(example_count))
def test_tfrecord_to_mindrecord():
"""test transform tfrecord to mindrecord."""
if not tf or tf.__version__ < SupportedTensorFlowVersion:
# skip the test
logger.warning("Module tensorflow is not found or version wrong, \
please use pip install it / reinstall version >= {}.".format(SupportedTensorFlowVersion))
return
generate_tfrecord()
assert os.path.exists(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
feature_dict = {"file_name": tf.io.FixedLenFeature([], tf.string),
"image_bytes": tf.io.FixedLenFeature([], tf.string),
"int64_scalar": tf.io.FixedLenFeature([], tf.int64),
"float_scalar": tf.io.FixedLenFeature([], tf.float32),
"int64_list": tf.io.FixedLenFeature([6], tf.int64),
"float_list": tf.io.FixedLenFeature([7], tf.float32),
}
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
tfrecord_transformer = TFRecordToMR(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME),
MINDRECORD_FILE_NAME, feature_dict, ["image_bytes"])
tfrecord_transformer.transform()
assert os.path.exists(MINDRECORD_FILE_NAME)
assert os.path.exists(MINDRECORD_FILE_NAME + ".db")
fr_mindrecord = FileReader(MINDRECORD_FILE_NAME)
verify_data(tfrecord_transformer, fr_mindrecord)
os.remove(MINDRECORD_FILE_NAME)
os.remove(MINDRECORD_FILE_NAME + ".db")
os.remove(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
def test_tfrecord_to_mindrecord_scalar_with_1():
"""test transform tfrecord to mindrecord."""
if not tf or tf.__version__ < SupportedTensorFlowVersion:
# skip the test
logger.warning("Module tensorflow is not found or version wrong, \
please use pip install it / reinstall version >= {}.".format(SupportedTensorFlowVersion))
return
generate_tfrecord()
assert os.path.exists(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
feature_dict = {"file_name": tf.io.FixedLenFeature([], tf.string),
"image_bytes": tf.io.FixedLenFeature([], tf.string),
"int64_scalar": tf.io.FixedLenFeature([1], tf.int64),
"float_scalar": tf.io.FixedLenFeature([1], tf.float32),
"int64_list": tf.io.FixedLenFeature([6], tf.int64),
"float_list": tf.io.FixedLenFeature([7], tf.float32),
}
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
tfrecord_transformer = TFRecordToMR(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME),
MINDRECORD_FILE_NAME, feature_dict, ["image_bytes"])
tfrecord_transformer.transform()
assert os.path.exists(MINDRECORD_FILE_NAME)
assert os.path.exists(MINDRECORD_FILE_NAME + ".db")
fr_mindrecord = FileReader(MINDRECORD_FILE_NAME)
verify_data(tfrecord_transformer, fr_mindrecord)
os.remove(MINDRECORD_FILE_NAME)
os.remove(MINDRECORD_FILE_NAME + ".db")
os.remove(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
def test_tfrecord_to_mindrecord_scalar_with_1_list_small_len_exception():
"""test transform tfrecord to mindrecord."""
if not tf or tf.__version__ < SupportedTensorFlowVersion:
# skip the test
logger.warning("Module tensorflow is not found or version wrong, \
please use pip install it / reinstall version >= {}.".format(SupportedTensorFlowVersion))
return
generate_tfrecord()
assert os.path.exists(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
feature_dict = {"file_name": tf.io.FixedLenFeature([], tf.string),
"image_bytes": tf.io.FixedLenFeature([], tf.string),
"int64_scalar": tf.io.FixedLenFeature([1], tf.int64),
"float_scalar": tf.io.FixedLenFeature([1], tf.float32),
"int64_list": tf.io.FixedLenFeature([6], tf.int64),
"float_list": tf.io.FixedLenFeature([2], tf.float32),
}
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
with pytest.raises(ValueError):
tfrecord_transformer = TFRecordToMR(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME),
MINDRECORD_FILE_NAME, feature_dict, ["image_bytes"])
tfrecord_transformer.transform()
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
os.remove(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
def test_tfrecord_to_mindrecord_list_with_diff_type_exception():
"""test transform tfrecord to mindrecord."""
if not tf or tf.__version__ < SupportedTensorFlowVersion:
# skip the test
logger.warning("Module tensorflow is not found or version wrong, \
please use pip install it / reinstall version >= {}.".format(SupportedTensorFlowVersion))
return
generate_tfrecord()
assert os.path.exists(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
feature_dict = {"file_name": tf.io.FixedLenFeature([], tf.string),
"image_bytes": tf.io.FixedLenFeature([], tf.string),
"int64_scalar": tf.io.FixedLenFeature([1], tf.int64),
"float_scalar": tf.io.FixedLenFeature([1], tf.float32),
"int64_list": tf.io.FixedLenFeature([6], tf.float32),
"float_list": tf.io.FixedLenFeature([7], tf.float32),
}
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
with pytest.raises(ValueError):
tfrecord_transformer = TFRecordToMR(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME),
MINDRECORD_FILE_NAME, feature_dict, ["image_bytes"])
tfrecord_transformer.transform()
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
os.remove(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
def test_tfrecord_to_mindrecord_list_without_bytes_type():
"""test transform tfrecord to mindrecord."""
if not tf or tf.__version__ < SupportedTensorFlowVersion:
# skip the test
logger.warning("Module tensorflow is not found or version wrong, \
please use pip install it / reinstall version >= {}.".format(SupportedTensorFlowVersion))
return
generate_tfrecord()
assert os.path.exists(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
feature_dict = {"file_name": tf.io.FixedLenFeature([], tf.string),
"image_bytes": tf.io.FixedLenFeature([], tf.string),
"int64_scalar": tf.io.FixedLenFeature([1], tf.int64),
"float_scalar": tf.io.FixedLenFeature([1], tf.float32),
"int64_list": tf.io.FixedLenFeature([6], tf.int64),
"float_list": tf.io.FixedLenFeature([7], tf.float32),
}
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
tfrecord_transformer = TFRecordToMR(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME),
MINDRECORD_FILE_NAME, feature_dict)
tfrecord_transformer.transform()
assert os.path.exists(MINDRECORD_FILE_NAME)
assert os.path.exists(MINDRECORD_FILE_NAME + ".db")
fr_mindrecord = FileReader(MINDRECORD_FILE_NAME)
verify_data(tfrecord_transformer, fr_mindrecord)
os.remove(MINDRECORD_FILE_NAME)
os.remove(MINDRECORD_FILE_NAME + ".db")
os.remove(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
def test_tfrecord_to_mindrecord_scalar_with_2_exception():
"""test transform tfrecord to mindrecord."""
if not tf or tf.__version__ < SupportedTensorFlowVersion:
# skip the test
logger.warning("Module tensorflow is not found or version wrong, \
please use pip install it / reinstall version >= {}.".format(SupportedTensorFlowVersion))
return
generate_tfrecord()
assert os.path.exists(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
feature_dict = {"file_name": tf.io.FixedLenFeature([], tf.string),
"image_bytes": tf.io.FixedLenFeature([], tf.string),
"int64_scalar": tf.io.FixedLenFeature([2], tf.int64),
"float_scalar": tf.io.FixedLenFeature([1], tf.float32),
"int64_list": tf.io.FixedLenFeature([6], tf.int64),
"float_list": tf.io.FixedLenFeature([7], tf.float32),
}
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
tfrecord_transformer = TFRecordToMR(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME),
MINDRECORD_FILE_NAME, feature_dict, ["image_bytes"])
with pytest.raises(ValueError):
tfrecord_transformer.transform()
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
os.remove(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
def test_tfrecord_to_mindrecord_scalar_string_with_1_exception():
"""test transform tfrecord to mindrecord."""
if not tf or tf.__version__ < SupportedTensorFlowVersion:
# skip the test
logger.warning("Module tensorflow is not found or version wrong, \
please use pip install it / reinstall version >= {}.".format(SupportedTensorFlowVersion))
return
generate_tfrecord()
assert os.path.exists(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
feature_dict = {"file_name": tf.io.FixedLenFeature([1], tf.string),
"image_bytes": tf.io.FixedLenFeature([], tf.string),
"int64_scalar": tf.io.FixedLenFeature([1], tf.int64),
"float_scalar": tf.io.FixedLenFeature([1], tf.float32),
"int64_list": tf.io.FixedLenFeature([6], tf.int64),
"float_list": tf.io.FixedLenFeature([7], tf.float32),
}
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
with pytest.raises(ValueError):
tfrecord_transformer = TFRecordToMR(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME),
MINDRECORD_FILE_NAME, feature_dict, ["image_bytes"])
tfrecord_transformer.transform()
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
os.remove(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
def test_tfrecord_to_mindrecord_scalar_bytes_with_10_exception():
"""test transform tfrecord to mindrecord."""
if not tf or tf.__version__ < SupportedTensorFlowVersion:
# skip the test
logger.warning("Module tensorflow is not found or version wrong, \
please use pip install it / reinstall version >= {}.".format(SupportedTensorFlowVersion))
return
generate_tfrecord()
assert os.path.exists(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
feature_dict = {"file_name": tf.io.FixedLenFeature([], tf.string),
"image_bytes": tf.io.FixedLenFeature([10], tf.string),
"int64_scalar": tf.io.FixedLenFeature([1], tf.int64),
"float_scalar": tf.io.FixedLenFeature([1], tf.float32),
"int64_list": tf.io.FixedLenFeature([6], tf.int64),
"float_list": tf.io.FixedLenFeature([7], tf.float32),
}
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
with pytest.raises(ValueError):
tfrecord_transformer = TFRecordToMR(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME),
MINDRECORD_FILE_NAME, feature_dict, ["image_bytes"])
tfrecord_transformer.transform()
if os.path.exists(MINDRECORD_FILE_NAME):
os.remove(MINDRECORD_FILE_NAME)
if os.path.exists(MINDRECORD_FILE_NAME + ".db"):
os.remove(MINDRECORD_FILE_NAME + ".db")
os.remove(os.path.join(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME))
|
[
"os.remove",
"mindspore.log.warning",
"importlib.import_module",
"os.path.exists",
"pytest.raises",
"mindspore.mindrecord.FileReader",
"collections.OrderedDict",
"os.path.join"
] |
[((910, 937), 'importlib.import_module', 'import_module', (['"""tensorflow"""'], {}), "('tensorflow')\n", (923, 937), False, 'from importlib import import_module\n'), ((5108, 5144), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (5122, 5144), False, 'import os\n'), ((5193, 5237), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (5207, 5237), False, 'import os\n'), ((5523, 5559), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (5537, 5559), False, 'import os\n'), ((5571, 5615), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (5585, 5615), False, 'import os\n'), ((5637, 5669), 'mindspore.mindrecord.FileReader', 'FileReader', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (5647, 5669), False, 'from mindspore.mindrecord import FileReader\n'), ((5728, 5759), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (5737, 5759), False, 'import os\n'), ((5764, 5803), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (5773, 5803), False, 'import os\n'), ((6824, 6860), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (6838, 6860), False, 'import os\n'), ((6909, 6953), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (6923, 6953), False, 'import os\n'), ((7239, 7275), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (7253, 7275), False, 'import os\n'), ((7287, 7331), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (7301, 7331), False, 'import os\n'), ((7353, 7385), 'mindspore.mindrecord.FileReader', 'FileReader', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (7363, 7385), False, 'from mindspore.mindrecord import FileReader\n'), ((7444, 7475), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (7453, 7475), False, 'import os\n'), ((7480, 7519), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (7489, 7519), False, 'import os\n'), ((8565, 8601), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (8579, 8601), False, 'import os\n'), ((8650, 8694), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (8664, 8694), False, 'import os\n'), ((9024, 9060), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (9038, 9060), False, 'import os\n'), ((9109, 9153), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (9123, 9153), False, 'import os\n'), ((10241, 10277), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (10255, 10277), False, 'import os\n'), ((10326, 10370), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (10340, 10370), False, 'import os\n'), ((10700, 10736), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (10714, 10736), False, 'import os\n'), ((10785, 10829), 'os.path.exists', 
'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (10799, 10829), False, 'import os\n'), ((11909, 11945), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (11923, 11945), False, 'import os\n'), ((11994, 12038), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (12008, 12038), False, 'import os\n'), ((12307, 12343), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (12321, 12343), False, 'import os\n'), ((12355, 12399), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (12369, 12399), False, 'import os\n'), ((12421, 12453), 'mindspore.mindrecord.FileReader', 'FileReader', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (12431, 12453), False, 'from mindspore.mindrecord import FileReader\n'), ((12512, 12543), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (12521, 12543), False, 'import os\n'), ((12548, 12587), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (12557, 12587), False, 'import os\n'), ((13618, 13654), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (13632, 13654), False, 'import os\n'), ((13703, 13747), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (13717, 13747), False, 'import os\n'), ((14069, 14105), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (14083, 14105), False, 'import os\n'), ((14154, 14198), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (14168, 14198), False, 'import os\n'), ((15286, 15322), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (15300, 15322), False, 'import os\n'), ((15371, 15415), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (15385, 15415), False, 'import os\n'), ((15745, 15781), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (15759, 15781), False, 'import os\n'), ((15830, 15874), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (15844, 15874), False, 'import os\n'), ((16963, 16999), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (16977, 16999), False, 'import os\n'), ((17048, 17092), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (17062, 17092), False, 'import os\n'), ((17422, 17458), 'os.path.exists', 'os.path.exists', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (17436, 17458), False, 'import os\n'), ((17507, 17551), 'os.path.exists', 'os.path.exists', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (17521, 17551), False, 'import os\n'), ((1019, 1065), 'mindspore.log.warning', 'logger.warning', (['"""tensorflow module not found."""'], {}), "('tensorflow module not found.')\n", (1033, 1065), True, 'from mindspore import log as logger\n'), ((2990, 3041), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, 
TFRECORD_FILE_NAME)\n', (3002, 3041), False, 'import os\n'), ((3494, 3519), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (3517, 3519), False, 'import collections\n'), ((4586, 4637), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (4598, 4637), False, 'import os\n'), ((5154, 5185), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (5163, 5185), False, 'import os\n'), ((5247, 5286), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (5256, 5286), False, 'import os\n'), ((5328, 5379), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (5340, 5379), False, 'import os\n'), ((5819, 5870), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (5831, 5870), False, 'import os\n'), ((6300, 6351), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (6312, 6351), False, 'import os\n'), ((6870, 6901), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (6879, 6901), False, 'import os\n'), ((6963, 7002), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (6972, 7002), False, 'import os\n'), ((7044, 7095), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (7056, 7095), False, 'import os\n'), ((7535, 7586), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (7547, 7586), False, 'import os\n'), ((8041, 8092), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (8053, 8092), False, 'import os\n'), ((8611, 8642), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (8620, 8642), False, 'import os\n'), ((8704, 8743), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (8713, 8743), False, 'import os\n'), ((8754, 8779), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (8767, 8779), False, 'import pytest\n'), ((9070, 9101), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (9079, 9101), False, 'import os\n'), ((9163, 9202), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (9172, 9202), False, 'import os\n'), ((9218, 9269), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (9230, 9269), False, 'import os\n'), ((9715, 9766), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (9727, 9766), False, 'import os\n'), ((10287, 10318), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (10296, 10318), False, 'import os\n'), ((10380, 10419), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (10389, 10419), False, 'import os\n'), ((10430, 10455), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (10443, 10455), False, 'import 
pytest\n'), ((10746, 10777), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (10755, 10777), False, 'import os\n'), ((10839, 10878), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (10848, 10878), False, 'import os\n'), ((10894, 10945), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (10906, 10945), False, 'import os\n'), ((11385, 11436), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (11397, 11436), False, 'import os\n'), ((11955, 11986), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (11964, 11986), False, 'import os\n'), ((12048, 12087), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (12057, 12087), False, 'import os\n'), ((12129, 12180), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (12141, 12180), False, 'import os\n'), ((12603, 12654), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (12615, 12654), False, 'import os\n'), ((13094, 13145), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (13106, 13145), False, 'import os\n'), ((13664, 13695), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (13673, 13695), False, 'import os\n'), ((13757, 13796), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (13766, 13796), False, 'import os\n'), ((13838, 13889), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (13850, 13889), False, 'import os\n'), ((13993, 14018), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (14006, 14018), False, 'import pytest\n'), ((14115, 14146), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (14124, 14146), False, 'import os\n'), ((14208, 14247), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (14217, 14247), False, 'import os\n'), ((14263, 14314), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (14275, 14314), False, 'import os\n'), ((14761, 14812), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (14773, 14812), False, 'import os\n'), ((15332, 15363), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (15341, 15363), False, 'import os\n'), ((15425, 15464), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (15434, 15464), False, 'import os\n'), ((15475, 15500), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (15488, 15500), False, 'import pytest\n'), ((15791, 15822), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (15800, 15822), False, 'import os\n'), ((15884, 15923), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (15893, 15923), False, 
'import os\n'), ((15939, 15990), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (15951, 15990), False, 'import os\n'), ((16437, 16488), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (16449, 16488), False, 'import os\n'), ((17009, 17040), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (17018, 17040), False, 'import os\n'), ((17102, 17141), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (17111, 17141), False, 'import os\n'), ((17152, 17177), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (17165, 17177), False, 'import pytest\n'), ((17468, 17499), 'os.remove', 'os.remove', (['MINDRECORD_FILE_NAME'], {}), '(MINDRECORD_FILE_NAME)\n', (17477, 17499), False, 'import os\n'), ((17561, 17600), 'os.remove', 'os.remove', (["(MINDRECORD_FILE_NAME + '.db')"], {}), "(MINDRECORD_FILE_NAME + '.db')\n", (17570, 17600), False, 'import os\n'), ((17616, 17667), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (17628, 17667), False, 'import os\n'), ((8825, 8876), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (8837, 8876), False, 'import os\n'), ((10501, 10552), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (10513, 10552), False, 'import os\n'), ((15546, 15597), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (15558, 15597), False, 'import os\n'), ((17223, 17274), 'os.path.join', 'os.path.join', (['TFRECORD_DATA_DIR', 'TFRECORD_FILE_NAME'], {}), '(TFRECORD_DATA_DIR, TFRECORD_FILE_NAME)\n', (17235, 17274), False, 'import os\n')]
|
# -------------------------------------------------------------------------
#
# Part of the CodeChecker project, under the Apache License v2.0 with
# LLVM Exceptions. See LICENSE for license information.
# SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
#
# -------------------------------------------------------------------------
"""
This module tests the correctness of the OutputParser and PListConverter,
which, used in sequence, transform a Clang Tidy output file to a plist file.
"""
import os
import plistlib
import shutil
import tempfile
import unittest
from codechecker_report_converter.analyzers.clang_tidy import analyzer_result
from codechecker_report_converter.report.parser import plist
OLD_PWD = None
def setup_module():
"""Setup the test tidy reprs for the test classes in the module."""
global OLD_PWD
OLD_PWD = os.getcwd()
os.chdir(os.path.join(os.path.dirname(__file__), 'tidy_output_test_files'))
def teardown_module():
"""Restore environment after tests have ran."""
global OLD_PWD
os.chdir(OLD_PWD)
class ClangTidyAnalyzerResultTestCase(unittest.TestCase):
""" Test the output of the ClangTidyAnalyzerResult. """
def setUp(self):
""" Setup the test. """
self.analyzer_result = analyzer_result.AnalyzerResult()
self.cc_result_dir = tempfile.mkdtemp()
def tearDown(self):
""" Clean temporary directory. """
shutil.rmtree(self.cc_result_dir)
def __check_analyzer_result(self, analyzer_result, analyzer_result_plist,
source_files, expected_plist):
""" Check the result of the analyzer transformation. """
self.analyzer_result.transform(
analyzer_result, self.cc_result_dir, plist.EXTENSION)
plist_file = os.path.join(self.cc_result_dir, analyzer_result_plist)
with open(plist_file, mode='rb') as pfile:
res = plistlib.load(pfile)
# Use relative path for this test.
res['files'] = source_files
with open(expected_plist, mode='rb') as pfile:
exp = plistlib.load(pfile)
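        # The generator version differs between releases, so assert it is
        # present and then normalize it before comparing the full plist.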
self.assertTrue(res['metadata']['generated_by']['version'])
res['metadata']['generated_by']['version'] = "x.y.z"
self.assertEqual(res, exp)
def test_empty1(self):
""" Test for empty Messages. """
ret = self.analyzer_result.transform(
'empty1.out', self.cc_result_dir, plist.EXTENSION)
self.assertFalse(ret)
def test_empty2(self):
""" Test for empty Messages with multiple line. """
ret = self.analyzer_result.transform(
'empty2.out', self.cc_result_dir, plist.EXTENSION)
self.assertFalse(ret)
def test_tidy1(self):
""" Test for the tidy1.plist file. """
self.__check_analyzer_result('tidy1.out', 'test.cpp_clang-tidy.plist',
['files/test.cpp'], 'tidy1.plist')
def test_tidy2(self):
""" Test for the tidy2.plist file. """
self.__check_analyzer_result('tidy2.out', 'test2.cpp_clang-tidy.plist',
['files/test2.cpp'], 'tidy2.plist')
def test_tidy3(self):
""" Test for the tidy3.plist file. """
self.__check_analyzer_result('tidy3.out', 'test3.cpp_clang-tidy.plist',
['files/test3.cpp'],
'tidy3_cpp.plist')
self.__check_analyzer_result('tidy3.out', 'test3.hh_clang-tidy.plist',
['files/test3.cpp', 'files/test3.hh'],
'tidy3_hh.plist')
|
[
"plistlib.load",
"os.getcwd",
"os.path.dirname",
"tempfile.mkdtemp",
"shutil.rmtree",
"os.path.join",
"os.chdir",
"codechecker_report_converter.analyzers.clang_tidy.analyzer_result.AnalyzerResult"
] |
[((859, 870), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (868, 870), False, 'import os\n'), ((1051, 1068), 'os.chdir', 'os.chdir', (['OLD_PWD'], {}), '(OLD_PWD)\n', (1059, 1068), False, 'import os\n'), ((1274, 1306), 'codechecker_report_converter.analyzers.clang_tidy.analyzer_result.AnalyzerResult', 'analyzer_result.AnalyzerResult', ([], {}), '()\n', (1304, 1306), False, 'from codechecker_report_converter.analyzers.clang_tidy import analyzer_result\n'), ((1336, 1354), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1352, 1354), False, 'import tempfile\n'), ((1431, 1464), 'shutil.rmtree', 'shutil.rmtree', (['self.cc_result_dir'], {}), '(self.cc_result_dir)\n', (1444, 1464), False, 'import shutil\n'), ((1800, 1855), 'os.path.join', 'os.path.join', (['self.cc_result_dir', 'analyzer_result_plist'], {}), '(self.cc_result_dir, analyzer_result_plist)\n', (1812, 1855), False, 'import os\n'), ((897, 922), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (912, 922), False, 'import os\n'), ((1925, 1945), 'plistlib.load', 'plistlib.load', (['pfile'], {}), '(pfile)\n', (1938, 1945), False, 'import plistlib\n'), ((2108, 2128), 'plistlib.load', 'plistlib.load', (['pfile'], {}), '(pfile)\n', (2121, 2128), False, 'import plistlib\n')]
|
import os
import unittest
import numpy
import moments
import time
class ResultsTestCase(unittest.TestCase):
def setUp(self):
self.startTime = time.time()
def tearDown(self):
t = time.time() - self.startTime
print("%s: %.3f seconds" % (self.id(), t))
def test_1d_ic(self):
        # This is just the standard neutral model
n = 10
fs = moments.Spectrum(numpy.zeros(n+1))
fs.integrate([1], tf=10, dt_fac=0.01)
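        # The expected equilibrium neutral frequency spectrum is proportional
        # to 1/i for each allele count i.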
answer = moments.Spectrum(1./numpy.arange(n+1))
        self.assertTrue(numpy.ma.allclose(fs, answer, atol=5e-5))
def test_1pop(self):
n = 15
f = lambda x: [1+0.0001*x]
sfs = moments.Spectrum(numpy.zeros([n+1]))
sfs.integrate(f, 5, 0.01, theta=1.0, h=0.1, gamma=-1)
sfs_ref = moments.Spectrum.from_file('test_files/1_pop.fs')
self.assertTrue(numpy.allclose(sfs, sfs_ref))
def test_2pops_neutral(self):
n = 20
mig = numpy.ones([2, 2])
f = lambda x: [1, 1+0.0001*x]
sfs = moments.Spectrum(numpy.zeros([n+1, n+1]))
sfs.integrate(f, 10, 0.005, theta=1.0, h=[0.5, 0.5], gamma=[0, 0], m=mig)
sfs_ref = moments.Spectrum.from_file('test_files/2_pops_neutral.fs')
self.assertTrue(numpy.allclose(sfs, sfs_ref))
def test_2pops(self):
n1, n2 = 15, 20
mig = numpy.ones([2, 2])
f = lambda x: [1, 1+0.0001*x]
sfs = moments.Spectrum(numpy.zeros([n1+1, n2+1]))
sfs.integrate(f, 10, 0.005, theta=1.0, h=[0.6, 0.6], gamma=[2, 2], m=mig)
sfs_ref = moments.Spectrum.from_file('test_files/2_pops.fs')
self.assertTrue(numpy.allclose(sfs, sfs_ref))
def test_3pops_slow(self):
n1, n2, n3 = 15, 20, 18
gamma = [0, 0.5, -2]
h = [0.5, 0.1, 0.9]
mig = numpy.array([[0, 5, 2],[1, 0, 1],[10, 0, 1]])
f = lambda x: [1, 1, 1+0.0001*x]
sfs = moments.Spectrum(numpy.zeros([n1+1, n2+1, n3+1]))
sfs.integrate(f, 10, 0.01, theta=1.0, h=h, gamma=gamma, m=mig)
sfs_ref = moments.Spectrum.from_file('test_files/3_pops.fs')
self.assertTrue(numpy.allclose(sfs, sfs_ref))
def test_IM(self):
params = (0.8, 2.0, 0.6, 0.45, 5.0, 0.3)
ns = (20,13)
theta = 1000.
fs = theta*moments.Demographics2D.IM(params, ns)
dadi_fs = moments.Spectrum.from_file('test_files/IM.fs')
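        # Anscombe residuals are approximately normally distributed for
        # Poisson-sampled data, so small residuals indicate close agreement.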
resid = moments.Inference.Anscombe_Poisson_residual(fs,dadi_fs)
        self.assertTrue(abs(resid).max() < 0.25)
suite = unittest.TestLoader().loadTestsFromTestCase(ResultsTestCase)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"numpy.ma.allclose",
"moments.Spectrum.from_file",
"moments.Demographics2D.IM",
"moments.Inference.Anscombe_Poisson_residual",
"numpy.allclose",
"numpy.zeros",
"numpy.ones",
"time.time",
"numpy.array",
"unittest.TestLoader",
"numpy.arange"
] |
[((2626, 2641), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2639, 2641), False, 'import unittest\n'), ((156, 167), 'time.time', 'time.time', ([], {}), '()\n', (165, 167), False, 'import time\n'), ((794, 843), 'moments.Spectrum.from_file', 'moments.Spectrum.from_file', (['"""test_files/1_pop.fs"""'], {}), "('test_files/1_pop.fs')\n", (820, 843), False, 'import moments\n'), ((966, 984), 'numpy.ones', 'numpy.ones', (['[2, 2]'], {}), '([2, 2])\n', (976, 984), False, 'import numpy\n'), ((1179, 1237), 'moments.Spectrum.from_file', 'moments.Spectrum.from_file', (['"""test_files/2_pops_neutral.fs"""'], {}), "('test_files/2_pops_neutral.fs')\n", (1205, 1237), False, 'import moments\n'), ((1361, 1379), 'numpy.ones', 'numpy.ones', (['[2, 2]'], {}), '([2, 2])\n', (1371, 1379), False, 'import numpy\n'), ((1576, 1626), 'moments.Spectrum.from_file', 'moments.Spectrum.from_file', (['"""test_files/2_pops.fs"""'], {}), "('test_files/2_pops.fs')\n", (1602, 1626), False, 'import moments\n'), ((1820, 1867), 'numpy.array', 'numpy.array', (['[[0, 5, 2], [1, 0, 1], [10, 0, 1]]'], {}), '([[0, 5, 2], [1, 0, 1], [10, 0, 1]])\n', (1831, 1867), False, 'import numpy\n'), ((2060, 2110), 'moments.Spectrum.from_file', 'moments.Spectrum.from_file', (['"""test_files/3_pops.fs"""'], {}), "('test_files/3_pops.fs')\n", (2086, 2110), False, 'import moments\n'), ((2357, 2403), 'moments.Spectrum.from_file', 'moments.Spectrum.from_file', (['"""test_files/IM.fs"""'], {}), "('test_files/IM.fs')\n", (2383, 2403), False, 'import moments\n'), ((2421, 2477), 'moments.Inference.Anscombe_Poisson_residual', 'moments.Inference.Anscombe_Poisson_residual', (['fs', 'dadi_fs'], {}), '(fs, dadi_fs)\n', (2464, 2477), False, 'import moments\n'), ((2533, 2554), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (2552, 2554), False, 'import unittest\n'), ((205, 216), 'time.time', 'time.time', ([], {}), '()\n', (214, 216), False, 'import time\n'), ((404, 422), 'numpy.zeros', 'numpy.zeros', (['(n + 1)'], {}), '(n + 1)\n', (415, 422), False, 'import numpy\n'), ((545, 586), 'numpy.ma.allclose', 'numpy.ma.allclose', (['fs', 'answer'], {'atol': '(5e-05)'}), '(fs, answer, atol=5e-05)\n', (562, 586), False, 'import numpy\n'), ((694, 714), 'numpy.zeros', 'numpy.zeros', (['[n + 1]'], {}), '([n + 1])\n', (705, 714), False, 'import numpy\n'), ((868, 896), 'numpy.allclose', 'numpy.allclose', (['sfs', 'sfs_ref'], {}), '(sfs, sfs_ref)\n', (882, 896), False, 'import numpy\n'), ((1054, 1081), 'numpy.zeros', 'numpy.zeros', (['[n + 1, n + 1]'], {}), '([n + 1, n + 1])\n', (1065, 1081), False, 'import numpy\n'), ((1262, 1290), 'numpy.allclose', 'numpy.allclose', (['sfs', 'sfs_ref'], {}), '(sfs, sfs_ref)\n', (1276, 1290), False, 'import numpy\n'), ((1449, 1478), 'numpy.zeros', 'numpy.zeros', (['[n1 + 1, n2 + 1]'], {}), '([n1 + 1, n2 + 1])\n', (1460, 1478), False, 'import numpy\n'), ((1651, 1679), 'numpy.allclose', 'numpy.allclose', (['sfs', 'sfs_ref'], {}), '(sfs, sfs_ref)\n', (1665, 1679), False, 'import numpy\n'), ((1938, 1975), 'numpy.zeros', 'numpy.zeros', (['[n1 + 1, n2 + 1, n3 + 1]'], {}), '([n1 + 1, n2 + 1, n3 + 1])\n', (1949, 1975), False, 'import numpy\n'), ((2135, 2163), 'numpy.allclose', 'numpy.allclose', (['sfs', 'sfs_ref'], {}), '(sfs, sfs_ref)\n', (2149, 2163), False, 'import numpy\n'), ((2300, 2337), 'moments.Demographics2D.IM', 'moments.Demographics2D.IM', (['params', 'ns'], {}), '(params, ns)\n', (2325, 2337), False, 'import moments\n'), ((505, 524), 'numpy.arange', 'numpy.arange', (['(n + 1)'], {}), '(n + 1)\n', (517, 524), False, 'import numpy\n')]
|
#!/usr/bin/env python3
# thmigctrl
# v0.1.0 for Python 3.5
# Runs modules based on a matching hashtag
# Set up the hashtag, retrieval count, and channel parameters:
tag = 'tmctrl'
retrievecount = 20
channelid = 962
# Import @33MHz and @thrrgilag's library for interacting with pnut.io:
import pnutpy
# Import time, used to delay posting to avoid rate limits:
import time
# Set up pnut.io authorisation:
tokenfile = open("pnut_app_token.txt", "r")
token = tokenfile.read()
token = token.strip()
pnutpy.api.add_authorization_token(token)
# Get hashtag content from pnut.io:
d = pnutpy.api.posts_with_hashtag(tag, count = retrievecount)
# Extract posts, check each one for a matching command, and construct a reply:
# Open the previous post numbers file:
f=open('pollctrl.txt','r')
y = f.readlines()
f.close()
f=open('pollctrl.txt','w')
posttext = ''
number = retrievecount
# hashtag = ''
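# Walk the retrieved posts from the highest index down; assuming the API
# returns newest posts first, this answers older commands first: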
while number >= 0:
try:
        if 'is_deleted' not in d[0][number]:
user = str(d[0][number]["user"]["username"])
querypost = d[0][number]["content"]["text"]
postnum = str(d[0][number]["id"])
# If postnum does not appear in the file it's not been seen, so process it to see if a command was made:
success = False
            if (postnum + '\n') not in y:
if 'help' in querypost:
posttext = '''
*Checks only every 15 minutes.
*Precede all commands with a hash
tmctrl help:
this!
tmask #hashtag:
Suggest a hashtag
tmpoll #hashtag:
Vote for a hashtag
'''
success = True
elif ('ask' in querypost):
posttext = ' ask'
success = True
elif ('poll' in querypost):
posttext = ' poll'
success = True
                else:
posttext = ' Oops, I don\'t understand; please try again. Try #help for more.'
posttext = '@' + user + posttext + ' (' + postnum + ')'
if posttext:
pnutpy.api.create_post(data={'reply_to': postnum, 'text': posttext})
# Delay to avoid rate limits:
time.sleep(3.2)
f.write(str(postnum) + '\n')
posttext = ''
except IndexError:
pass
number -= 1
f.close()
|
[
"pnutpy.api.add_authorization_token",
"pnutpy.api.create_post",
"pnutpy.api.posts_with_hashtag",
"time.sleep"
] |
[((506, 547), 'pnutpy.api.add_authorization_token', 'pnutpy.api.add_authorization_token', (['token'], {}), '(token)\n', (540, 547), False, 'import pnutpy\n'), ((589, 644), 'pnutpy.api.posts_with_hashtag', 'pnutpy.api.posts_with_hashtag', (['tag'], {'count': 'retrievecount'}), '(tag, count=retrievecount)\n', (618, 644), False, 'import pnutpy\n'), ((1865, 1933), 'pnutpy.api.create_post', 'pnutpy.api.create_post', ([], {'data': "{'reply_to': postnum, 'text': posttext}"}), "(data={'reply_to': postnum, 'text': posttext})\n", (1887, 1933), False, 'import pnutpy\n'), ((1974, 1989), 'time.sleep', 'time.sleep', (['(3.2)'], {}), '(3.2)\n', (1984, 1989), False, 'import time\n')]
|
from util import entropy, information_gain, partition_classes
import numpy as np
import ast
import heapq
import copy
class DecisionTree(object):
def __init__(self):
# Initializing the tree as an empty dictionary or list, as preferred
#self.tree = []
self.tree = {}
self.maxDepth = 30
def learn(self, X, y):
        # TODO: Train the decision tree (self.tree) using the sample X and labels y
# You will have to make use of the functions in utils.py to train the tree
# One possible way of implementing the tree:
# Each node in self.tree could be in the form of a dictionary:
# https://docs.python.org/2/library/stdtypes.html#mapping-types-dict
# For example, a non-leaf node with two children can have a 'left' key and a
# 'right' key. You can add more keys which might help in classification
# (eg. split attribute and split value)
def decideLabel(labels):
# Return the majority label (0 or 1) in labels list.
ones = sum(labels)
if len(labels)-ones >= ones:
return 0
return 1
def decideSplitAttrVal(X, y, attrs):
            # Iterate over every attribute in X and record the best split value for each
            for i in range(len(X[0])):  # iterate over all attributes
print(" Comparing No."+str(i)+" attr \n")
maxInfoGain = splitVal = splitAttr = -1
values_of_per_Attr = [[X[k][i]] for k in range(len(X))]
for split_val_try in values_of_per_Attr:
Xleft, Xright, yleft, yright = partition_classes(values_of_per_Attr, y, 0, split_val_try[0])
curInfoGain = information_gain(y, [yleft,yright])
# Update maxInfoGain
if curInfoGain > maxInfoGain:
splitAttr = i
splitVal = split_val_try[0]
maxInfoGain = curInfoGain
attrs.append((1-maxInfoGain, splitAttr, splitVal))
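            # Store 1 - gain so that the smallest heap entry corresponds to
            # the attribute with the largest information gain.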
heapq.heapify(attrs)
return
def buildTree(X, y, dep, attrs):
            # If the depth limit is exceeded or at most one candidate split remains, return a label (0 or 1) directly.
if dep >= self.maxDepth or len(attrs) <= 1:
return decideLabel(y)
            # If all labels in y are the same, there is no need to branch further
if sum(y) == len(y) or sum(y) == 0:
return y[0]
print("buildTree Depth is "+str(dep)+"\n" )
#splitAttr, splitVal = decideSplitAttrVal(X, y)
#splitAttr, splitVal = 0, decideSplitAttrVal2(X,y)
grades, splitAttr, splitVal = heapq.heappop(attrs)
Xleft, Xright, yleft, yright = partition_classes(X, y, splitAttr, splitVal)
print("partition finished \n")
# Get off the splitAttr of each instance in Xleft and Xright
print(len(Xleft))
print(len(Xright))
# for i in range(len(Xleft)):
# Xleft[i] = Xleft[i][:splitAttr]+Xleft[i][splitAttr+1:]
# for j in range(len(Xright)):
# Xright[j] = Xright[j][:splitAttr]+Xright[j][splitAttr+1:]
            # Recursion stops when the splitting is not applicable
if len(Xleft) == 0 or len(Xright) == 0:
return decideLabel(y)
else:
tree = {}
#tree[splitAttr] = [splitVal, buildTree(Xleft, yleft, dep+1), buildTree(Xright, yright, dep+1)]
tree[splitAttr] = [splitVal, buildTree(Xleft, yleft, dep+1, copy.deepcopy(attrs)),
buildTree(Xright, yright, dep+1, copy.deepcopy(attrs))]
return tree
attrs = []
decideSplitAttrVal(X, y, attrs)
self.tree = buildTree(X, y, 1, attrs)
#self.tree = buildTree(X, y, 1)
def classify(self, record):
# TODO: classify the record using self.tree and return the predicted label
cur = self.tree
        # remember: the keys of self.tree are split attribute indices
tmp = record[:]
while isinstance(cur, dict):
feature = list(cur.keys())[0]
if isinstance(tmp[feature], int) or isinstance(tmp[feature], float):
if tmp[feature] <= cur[feature][0]:
cur = cur[feature][1]
else:
cur = cur[feature][2]
else:
if tmp[feature] == cur[feature][0]:
cur = cur[feature][1]
else:
cur = cur[feature][2]
#tmp = tmp[:feature]+tmp[feature+1:]
return cur
|
[
"copy.deepcopy",
"heapq.heapify",
"util.partition_classes",
"heapq.heappop",
"util.information_gain"
] |
[((2142, 2162), 'heapq.heapify', 'heapq.heapify', (['attrs'], {}), '(attrs)\n', (2155, 2162), False, 'import heapq\n'), ((2807, 2827), 'heapq.heappop', 'heapq.heappop', (['attrs'], {}), '(attrs)\n', (2820, 2827), False, 'import heapq\n'), ((2884, 2928), 'util.partition_classes', 'partition_classes', (['X', 'y', 'splitAttr', 'splitVal'], {}), '(X, y, splitAttr, splitVal)\n', (2901, 2928), False, 'from util import entropy, information_gain, partition_classes\n'), ((1700, 1761), 'util.partition_classes', 'partition_classes', (['values_of_per_Attr', 'y', '(0)', 'split_val_try[0]'], {}), '(values_of_per_Attr, y, 0, split_val_try[0])\n', (1717, 1761), False, 'from util import entropy, information_gain, partition_classes\n'), ((1796, 1832), 'util.information_gain', 'information_gain', (['y', '[yleft, yright]'], {}), '(y, [yleft, yright])\n', (1812, 1832), False, 'from util import entropy, information_gain, partition_classes\n'), ((3730, 3750), 'copy.deepcopy', 'copy.deepcopy', (['attrs'], {}), '(attrs)\n', (3743, 3750), False, 'import copy\n'), ((3802, 3822), 'copy.deepcopy', 'copy.deepcopy', (['attrs'], {}), '(attrs)\n', (3815, 3822), False, 'import copy\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from itertools import groupby
from notifications.management.commands.base import EmailCommand
from reviews.models import Review
class Command(EmailCommand):
help = 'Send an email reminder to all users with pending reviews'
text_template = 'reviews/pending_reviews_reminder_email.txt'
html_template = 'reviews/pending_reviews_reminder_email.html'
def handle(self, *args, **options):
pending_reviews = Review.objects \
.filter(status='progress') \
.select_related('document', 'reviewer') \
.order_by('reviewer', 'role')
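        # groupby() only groups consecutive items, so the queryset must be
        # ordered by reviewer before grouping.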
users = groupby(pending_reviews, lambda rev: rev.reviewer)
for user, reviews in users:
if not user.send_pending_reviews_mails:
continue
self.send_notification(user=user, reviews=list(reviews))
def get_subject(self, **kwargs):
return 'Phase - Pending reviews'
def get_recipient_list(self, **kwargs):
return [kwargs['user'].email]
|
[
"itertools.groupby",
"reviews.models.Review.objects.filter"
] |
[((641, 691), 'itertools.groupby', 'groupby', (['pending_reviews', '(lambda rev: rev.reviewer)'], {}), '(pending_reviews, lambda rev: rev.reviewer)\n', (648, 691), False, 'from itertools import groupby\n'), ((471, 511), 'reviews.models.Review.objects.filter', 'Review.objects.filter', ([], {'status': '"""progress"""'}), "(status='progress')\n", (492, 511), False, 'from reviews.models import Review\n')]
|
import os
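# Helper classes that emit build statements for the swepc/swemc/swepdf rules;
# generator.w is presumably a ninja_syntax.Writer-style object (assumption).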
class SWEPC:
def __init__(self, name, output, testCase, solver, degree, elements,
endTime, dt, topographyMean=0.6):
self.name = name
self.output = os.path.join('$builddir', output)
self.testCase = testCase
self.solver = solver
self.degree = degree
self.elements = elements
self.endTime = endTime
self.dt = dt
self.topographyMean = topographyMean
def write(self, generator):
generator.w.build(
os.path.join(self.output, 'coefficients.dat'),
'swepc',
implicit_outputs=[
os.path.join(self.output, 'statistics.dat'),
os.path.join(self.output, 'derived-statistics.dat')
],
variables={
'outputDir': self.output,
'testCase': self.testCase,
'solver': self.solver,
'degree': self.degree,
'elements': self.elements,
'endTime': self.endTime,
'dt': self.dt,
'topographyMean' : self.topographyMean})
def outputs(self):
return [os.path.join(self.output, file)
for file in ['statistics.dat', 'derived-statistics.dat',
'coefficients.dat']]
def __str__(self):
return self.name
class SWEMonteCarlo:
def __init__(self, name, output, testCase, solver, iterations, sampleIndex,
elements, endTime, dt):
self.name = name
self.output = os.path.join('$builddir', output)
self.testCase = testCase
self.solver = solver
self.iterations = iterations
self.sampleIndex = sampleIndex
self.elements = elements
self.endTime = endTime
self.dt = dt
def write(self, generator):
generator.w.build(
os.path.join(self.output, 'statistics.dat'),
'swemc',
implicit_outputs=[
os.path.join(self.output, 'derived-statistics.dat'),
os.path.join(self.output, 'convergence.dat'),
os.path.join(self.output, 'sample'+str(self.sampleIndex)+'.dat')],
variables={
'outputDir': self.output,
'testCase': self.testCase,
'solver': self.solver,
'iterations': self.iterations,
'sampleIndex': self.sampleIndex,
'elements': self.elements,
'endTime': self.endTime,
'dt': self.dt})
def outputs(self):
return [os.path.join(self.output, file)
for file in ['statistics.dat', 'derived-statistics.dat',
'convergence.dat',
'sample'+str(self.sampleIndex)+'.dat']]
def __str__(self):
return self.name
class SWEPDF:
def __init__(self, name, output, coefficientsFile, variable, sampleIndex,
min, max, samples):
self.name = name
self.output = os.path.join('$builddir', output + '.dat')
self.coefficientsFile = os.path.join('$builddir', coefficientsFile)
self.variable = variable
self.sampleIndex = sampleIndex
self.min = min
self.max = max
self.samples = samples
def write(self, generator):
generator.w.build(
self.output,
'swepdf',
inputs=[self.coefficientsFile],
variables={
'variable': self.variable,
'min': self.min,
'max': self.max,
'samples': self.samples,
'line': self.sampleIndex+2})
def outputs(self):
return [self.output]
def __str__(self):
return self.name
|
[
"os.path.join"
] |
[((190, 223), 'os.path.join', 'os.path.join', (['"""$builddir"""', 'output'], {}), "('$builddir', output)\n", (202, 223), False, 'import os\n'), ((1599, 1632), 'os.path.join', 'os.path.join', (['"""$builddir"""', 'output'], {}), "('$builddir', output)\n", (1611, 1632), False, 'import os\n'), ((3064, 3106), 'os.path.join', 'os.path.join', (['"""$builddir"""', "(output + '.dat')"], {}), "('$builddir', output + '.dat')\n", (3076, 3106), False, 'import os\n'), ((3139, 3182), 'os.path.join', 'os.path.join', (['"""$builddir"""', 'coefficientsFile'], {}), "('$builddir', coefficientsFile)\n", (3151, 3182), False, 'import os\n'), ((521, 566), 'os.path.join', 'os.path.join', (['self.output', '"""coefficients.dat"""'], {}), "(self.output, 'coefficients.dat')\n", (533, 566), False, 'import os\n'), ((1219, 1250), 'os.path.join', 'os.path.join', (['self.output', 'file'], {}), '(self.output, file)\n', (1231, 1250), False, 'import os\n'), ((1928, 1971), 'os.path.join', 'os.path.join', (['self.output', '"""statistics.dat"""'], {}), "(self.output, 'statistics.dat')\n", (1940, 1971), False, 'import os\n'), ((2639, 2670), 'os.path.join', 'os.path.join', (['self.output', 'file'], {}), '(self.output, file)\n', (2651, 2670), False, 'import os\n'), ((648, 691), 'os.path.join', 'os.path.join', (['self.output', '"""statistics.dat"""'], {}), "(self.output, 'statistics.dat')\n", (660, 691), False, 'import os\n'), ((713, 764), 'os.path.join', 'os.path.join', (['self.output', '"""derived-statistics.dat"""'], {}), "(self.output, 'derived-statistics.dat')\n", (725, 764), False, 'import os\n'), ((2041, 2092), 'os.path.join', 'os.path.join', (['self.output', '"""derived-statistics.dat"""'], {}), "(self.output, 'derived-statistics.dat')\n", (2053, 2092), False, 'import os\n'), ((2110, 2154), 'os.path.join', 'os.path.join', (['self.output', '"""convergence.dat"""'], {}), "(self.output, 'convergence.dat')\n", (2122, 2154), False, 'import os\n')]
|
"""
Python methods for importing and exporting '.proto' files from the BBP type
definition format.
"""
# TODO get custom exceptions for these methods
import io
import re
import logging
from blackboxprotobuf.lib.exceptions import TypedefException
import blackboxprotobuf.lib.api
PROTO_FILE_TYPE_MAP = {
"uint": "uint64",
"int": "int64",
"sint": "sint64",
"fixed32": "fixed32",
"sfixed32": "sfixed32",
"float": "float",
"fixed64": "fixed64",
"sfixed64": "sfixed64",
"double": "double",
"bytes": "bytes",
"bytes_hex": "bytes",
"string": "string",
}
PACKABLE_TYPES = [
"uint",
"int",
"sint",
"fixed32",
"sfixed32",
"float",
"fixed64",
"sfixed64",
"double",
]
# Inverse of the above, but we have to include more types
PROTO_FILE_TYPE_TO_BBP = {
"double": "double",
"float": "float",
"int32": "int",
"int64": "int",
"uint32": "uint",
"uint64": "uint",
"sint32": "sint",
"sint64": "sint",
"fixed32": "fixed32",
"fixed64": "fixed64",
"sfixed32": "sfixed32",
"sfixed64": "sfixed64",
"bool": "uint",
"string": "string",
# should be default_binary_type, but can't handle that well here
"bytes": "bytes",
}
NAME_REGEX = re.compile(r"\A[a-zA-Z_][a-zA-Z0-9_]*\Z")
# add packed types to the list
for packable_type in PACKABLE_TYPES:
packed_type = "packed_" + packable_type
PROTO_FILE_TYPE_MAP[packed_type] = PROTO_FILE_TYPE_MAP[packable_type]
def _print_message(message_name, typedef, output_file, depth=0):
indent = u" " * depth
if not NAME_REGEX.match(message_name):
raise TypedefException("Message name: %s is not valid" % message_name)
# sort typedef for better looking output
typedef = blackboxprotobuf.lib.api.sort_typedef(typedef)
message_name = message_name.strip()
output_file.write(u"\n")
output_file.write(indent)
output_file.write(u"message %s {\n" % message_name)
for field_number, field_typedef in typedef.items():
# TODO Default to all fields as repeated? or optional
proto_type = None
field_name = None
field_options = ""
        # A non-repeated field is indistinguishable from a repeated field
        # with a single element, so we only mark a field as repeated when we
        # have proof that it repeats, but this might be wrong sometimes.
        # Maybe some sort of protobuf discovery tool can detect this.
is_repeated = field_typedef.get("seen_repeated", False)
if "name" in field_typedef and field_typedef["name"] != "":
field_name = field_typedef["name"]
field_name = field_name.strip()
if not NAME_REGEX.match(field_name):
field_name = None
if field_name is None:
field_name = u"field%s" % field_number
if field_typedef["type"] == "message":
            # If we have multiple typedefs, this means it is something like
            # the Any message, and has to be manually reparsed to each type
if "alt_typedefs" in field_typedef:
proto_type = "bytes"
else:
proto_type = field_name + "_type"
_print_message(
proto_type, field_typedef["message_typedef"], output_file, depth + 1
)
else:
if field_typedef["type"] not in PROTO_FILE_TYPE_MAP:
raise TypedefException(
"Type %s does not have a mapping to protobuf types."
% field_typedef["type"]
)
proto_type = PROTO_FILE_TYPE_MAP[field_typedef["type"]]
# we're using proto3 syntax. Repeated numeric fields are packed by default
# if it's repeated and not packed, then make sure we specify it's not packed
if is_repeated and field_typedef["type"] in PACKABLE_TYPES:
field_options = u" [packed=false]"
        # if it's a packed type, we'll explicitly set that too, can't hurt
elif field_typedef["type"].startswith("packed_"):
field_options = u" [packed=true]"
is_repeated = True
output_file.write(indent)
output_file.write(
u" %s%s %s = %s%s;\n"
% (
"repeated " if is_repeated else "",
proto_type,
field_name,
field_number,
field_options,
)
)
output_file.write(indent)
output_file.write(u"}\n\n")
def export_proto(typedef_map, output_filename=None, output_file=None, package=None):
"""Export the given type definitons as a '.proto' file. Typedefs are
expected as a dictionary of {'message_name': typedef }
Write to output_file or output_filename if provided, otherwise return a string
output_filename will be overwritten if it exists
"""
return_string = False
if output_filename is not None:
output_file = io.open(output_filename, "w+")
if output_file is None:
return_string = True
output_file = io.StringIO()
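        # With no filename or file object given, build the .proto text in
        # memory and return it as a string at the end.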
# preamble
output_file.write(u'syntax = "proto3";\n\n')
if package:
output_file.write(u"package %s;\n\n" % package)
for typedef_name, typedef in typedef_map.items():
_print_message(typedef_name, typedef, output_file)
if return_string:
return output_file.getvalue()
# close the file if we opened it
elif output_filename is not None:
output_file.close()
return None
MESSAGE_START_REGEX = re.compile(r"^message +([a-zA-Z_0-9]+) *{.*")
FIELD_REGEX = re.compile(
r"^ *(repeated|optional|required)? *([a-zA-Z0-9_]+) +([a-zA-Z0-9_]+) += +([0-9]+) *(\[[a-z]+=[a-z]*\])?.*;.*$"
)
SYNTAX_REGEX = re.compile(r'^ *syntax += +"(proto\d)" *;.*')
ENUM_REGEX = re.compile(r"^ *enum +([a-zA-Z0-9_]+) *{.*")
PACKAGE_REGEX = re.compile(r"^ *package +([a-zA-Z0-9_.]+) *;.*")
def import_proto(config, input_string=None, input_filename=None, input_file=None):
typedef_map = {}
if input_string is not None:
input_file = io.StringIO(input_string)
if input_file is None and input_filename is not None:
input_file = io.open(input_filename, "r")
if input_file is None:
raise ValueError("No file provided to import_proto")
syntax_version = "proto2"
package_prefix = ""
enum_names = []
message_trees = []
message_names = []
line = input_file.readline()
while line:
line = line.strip()
if line.startswith("syntax") and SYNTAX_REGEX.match(line):
syntax_version = SYNTAX_REGEX.match(line).group(1)
elif line.startswith("package") and PACKAGE_REGEX.match(line):
package_prefix = PACKAGE_REGEX.match(line).group(1) + "."
elif line.startswith("import"):
logging.warn(
"Proto file has import which is not supported "
"by the parser. Ensure the imported files are "
"processed first: %s",
line,
)
elif line.startswith("enum") and ENUM_REGEX.match(line):
enum_name = _parse_enum(line, input_file)
enum_names.append(enum_name)
elif line.startswith("message") and MESSAGE_START_REGEX.match(line):
message_tree = _preparse_message(line, input_file)
message_trees.append(message_tree)
line = input_file.readline()
# TODO parse the message data
for tree in message_trees:
new_message_names, new_enum_names = _collect_names(package_prefix, tree)
enum_names += new_enum_names
message_names += new_message_names
logging.debug("Got the following enum_names: %s", enum_names)
logging.debug("Got the following message_names: %s", message_names)
for tree in message_trees:
_parse_message(
tree,
typedef_map,
message_names,
enum_names,
package_prefix,
syntax_version == "proto3",
config,
)
return typedef_map
def _parse_enum(line, input_file):
"""Parse an enum out of the file. Goes from enum declaration to next }
Returns the enum's name
"""
enum_name = ENUM_REGEX.match(line).group(1)
# parse until the next '}'
while "}" not in line:
line = input_file.readline()
if not line:
raise ValueError("Did not find close of enum")
return enum_name
def _preparse_message(line, input_file):
"""Parse out a message name and the lines that make it up"""
message_name = MESSAGE_START_REGEX.match(line).group(1)
message_lines = []
inner_enums = []
inner_messages = []
while "}" not in line:
line = input_file.readline()
if not line:
raise ValueError("Did not find close of message")
line = line.strip()
if line.startswith("enum") and ENUM_REGEX.match(line):
enum_name = _parse_enum(line, input_file)
inner_enums.append(enum_name)
elif line.startswith("message") and MESSAGE_START_REGEX.match(line):
message_tree = _preparse_message(line, input_file)
inner_messages.append(message_tree)
# not an inner enum or message
else:
message_lines.append(line)
return {
"name": message_name,
"data": message_lines,
"enums": inner_enums,
"inner_messages": inner_messages,
}
def _collect_names(prefix, message_tree):
message_names = []
enum_names = []
name = prefix + message_tree["name"]
message_names.append(name)
for enum_name in message_tree["enums"]:
enum_names.append(prefix + enum_name)
for inner_message in message_tree["inner_messages"]:
new_message_names, new_enum_names = _collect_names(name + ".", inner_message)
message_names += new_message_names
enum_names += new_enum_names
return message_names, enum_names
def _check_message_name(current_path, name, known_message_names, config):
# Verify message name against preparsed message names and global
# known_messages
# For example, if we have:
    # Message.InnerMessage
# referenced from:
# PackageA.Message2
# we would look up:
# PackageA.Message2.Message.InnerMessage
# PackageA.Message.InnerMessage
# should also work for enums
if name in config.known_types:
return True
# search for anything under a common prefix in known_message_names
logging.debug("Testing message name: %s", name)
prefix_options = [""]
for part in current_path.split("."):
if part:
prefix_options = [prefix_options[0] + part + "."] + prefix_options
logging.debug("prefix_options: %s", prefix_options)
for prefix in prefix_options:
logging.debug("Testing message name: %s", prefix + name)
if prefix + name in known_message_names:
return prefix + name
# remove the last bit of the prefix
if "." not in prefix:
break
prefix = ".".join(prefix.split(".")[:-1])
logging.debug(
"Message %s not found from %s Known names are: %s",
name,
current_path,
known_message_names,
)
return None
def _parse_message(
message_tree, typdef_map, known_message_names, enum_names, prefix, is_proto3, config
):
message_typedef = {}
message_name = prefix + message_tree["name"]
prefix = message_name + "."
# parse the actual message fields
for line in message_tree["data"]:
# lines should already be stripped and should not have messages or enums
# logging.debug("Line before assert: %s", line)
assert all([not line.strip().startswith(x) for x in ["message ", "enum "]])
# Check if the line matches the field regex
match = FIELD_REGEX.match(line)
if match:
field_number, field_typedef = _parse_field(
match, known_message_names, enum_names, prefix, is_proto3, config
)
message_typedef[field_number] = field_typedef
    # add the message to the returned typedefs
logging.debug("Adding message %s to typedef maps", message_name)
typdef_map[message_name] = message_typedef
for inner_message in message_tree["inner_messages"]:
# TODO prefix should be added to?
_parse_message(
inner_message,
typdef_map,
known_message_names,
enum_names,
prefix,
is_proto3,
config,
)
# parse a field into a dictionary for the typedef
def _parse_field(match, known_message_names, enum_names, prefix, is_proto3, config):
typedef = {}
field_name = match.group(3)
if not field_name:
raise ValueError("Could not parse field name from line: %s" % match)
typedef["name"] = field_name
field_number = match.group(4)
if not field_number:
raise ValueError("Could not parse field number from line: %s" % match)
# figure out repeated
field_rule = match.group(1)
is_repeated = False
if field_rule and "repeated" in field_rule:
is_repeated = True
typedef["seen_repeated"] = True
field_type = match.group(2)
if not field_type:
raise ValueError("Could not parse field type from line: %s" % match)
# check normal types
bbp_type = PROTO_FILE_TYPE_TO_BBP.get(field_type, None)
if not bbp_type:
logging.debug("Got non-basic type: %s, checking enums", field_type)
# check enum names
if _check_message_name(prefix, field_type, enum_names, config):
# enum = uint
bbp_type = "uint"
if not bbp_type:
# Not enum or normal type, check messages
message_name = _check_message_name(
prefix, field_type, known_message_names, config
)
if message_name:
bbp_type = "message"
typedef["message_type_name"] = message_name
if not bbp_type:
# If we don't have a type now, then fail
raise ValueError(
"Could not get a type for field %s: %s" % (field_name, field_type)
)
# figure out packed
# default based on repeated + proto3, fallback to options
field_options = match.group(5)
is_packed = is_repeated and is_proto3 and (field_type in PACKABLE_TYPES)
if is_packed and field_options and "packed=false" in field_options:
is_packed = False
elif is_repeated and field_options and "packed=true" in field_options:
is_packed = True
# make sure the type lines up with packable
if is_packed and bbp_type not in PACKABLE_TYPES:
raise ValueError(
"Field %s set as packable, but not a packable type: %s"
% (field_name, bbp_type)
)
if is_packed:
bbp_type = "packed_" + bbp_type
typedef["type"] = bbp_type
logging.debug("Parsed field number %s: %s", field_number, typedef)
return field_number, typedef
|
[
"io.StringIO",
"logging.debug",
"logging.warn",
"blackboxprotobuf.lib.exceptions.TypedefException",
"io.open",
"re.compile"
] |
[((1262, 1304), 're.compile', 're.compile', (['"""\\\\A[a-zA-Z_][a-zA-Z0-9_]*\\\\Z"""'], {}), "('\\\\A[a-zA-Z_][a-zA-Z0-9_]*\\\\Z')\n", (1272, 1304), False, 'import re\n'), ((5534, 5578), 're.compile', 're.compile', (['"""^message +([a-zA-Z_0-9]+) *{.*"""'], {}), "('^message +([a-zA-Z_0-9]+) *{.*')\n", (5544, 5578), False, 'import re\n'), ((5594, 5727), 're.compile', 're.compile', (['"""^ *(repeated|optional|required)? *([a-zA-Z0-9_]+) +([a-zA-Z0-9_]+) += +([0-9]+) *(\\\\[[a-z]+=[a-z]*\\\\])?.*;.*$"""'], {}), "(\n '^ *(repeated|optional|required)? *([a-zA-Z0-9_]+) +([a-zA-Z0-9_]+) += +([0-9]+) *(\\\\[[a-z]+=[a-z]*\\\\])?.*;.*$'\n )\n", (5604, 5727), False, 'import re\n'), ((5738, 5783), 're.compile', 're.compile', (['"""^ *syntax += +"(proto\\\\d)" *;.*"""'], {}), '(\'^ *syntax += +"(proto\\\\d)" *;.*\')\n', (5748, 5783), False, 'import re\n'), ((5797, 5840), 're.compile', 're.compile', (['"""^ *enum +([a-zA-Z0-9_]+) *{.*"""'], {}), "('^ *enum +([a-zA-Z0-9_]+) *{.*')\n", (5807, 5840), False, 'import re\n'), ((5858, 5905), 're.compile', 're.compile', (['"""^ *package +([a-zA-Z0-9_.]+) *;.*"""'], {}), "('^ *package +([a-zA-Z0-9_.]+) *;.*')\n", (5868, 5905), False, 'import re\n'), ((7649, 7710), 'logging.debug', 'logging.debug', (['"""Got the following enum_names: %s"""', 'enum_names'], {}), "('Got the following enum_names: %s', enum_names)\n", (7662, 7710), False, 'import logging\n'), ((7715, 7782), 'logging.debug', 'logging.debug', (['"""Got the following message_names: %s"""', 'message_names'], {}), "('Got the following message_names: %s', message_names)\n", (7728, 7782), False, 'import logging\n'), ((10506, 10553), 'logging.debug', 'logging.debug', (['"""Testing message name: %s"""', 'name'], {}), "('Testing message name: %s', name)\n", (10519, 10553), False, 'import logging\n'), ((10723, 10774), 'logging.debug', 'logging.debug', (['"""prefix_options: %s"""', 'prefix_options'], {}), "('prefix_options: %s', prefix_options)\n", (10736, 10774), False, 'import logging\n'), ((11102, 11212), 'logging.debug', 'logging.debug', (['"""Message %s not found from %s Known names are: %s"""', 'name', 'current_path', 'known_message_names'], {}), "('Message %s not found from %s Known names are: %s', name,\n current_path, known_message_names)\n", (11115, 11212), False, 'import logging\n'), ((12155, 12219), 'logging.debug', 'logging.debug', (['"""Adding message %s to typedef maps"""', 'message_name'], {}), "('Adding message %s to typedef maps', message_name)\n", (12168, 12219), False, 'import logging\n'), ((14918, 14984), 'logging.debug', 'logging.debug', (['"""Parsed field number %s: %s"""', 'field_number', 'typedef'], {}), "('Parsed field number %s: %s', field_number, typedef)\n", (14931, 14984), False, 'import logging\n'), ((1642, 1706), 'blackboxprotobuf.lib.exceptions.TypedefException', 'TypedefException', (["('Message name: %s is not valid' % message_name)"], {}), "('Message name: %s is not valid' % message_name)\n", (1658, 1706), False, 'from blackboxprotobuf.lib.exceptions import TypedefException\n'), ((4954, 4984), 'io.open', 'io.open', (['output_filename', '"""w+"""'], {}), "(output_filename, 'w+')\n", (4961, 4984), False, 'import io\n'), ((5065, 5078), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (5076, 5078), False, 'import io\n'), ((6067, 6092), 'io.StringIO', 'io.StringIO', (['input_string'], {}), '(input_string)\n', (6078, 6092), False, 'import io\n'), ((6172, 6200), 'io.open', 'io.open', (['input_filename', '"""r"""'], {}), "(input_filename, 'r')\n", (6179, 6200), False, 'import io\n'), ((10817, 10873), 'logging.debug', 'logging.debug', (['"""Testing message name: %s"""', '(prefix + name)'], {}), "('Testing message name: %s', prefix + name)\n", (10830, 10873), False, 'import logging\n'), ((13475, 13542), 'logging.debug', 'logging.debug', (['"""Got non-basic type: %s, checking enums"""', 'field_type'], {}), "('Got non-basic type: %s, checking enums', field_type)\n", (13488, 13542), False, 'import logging\n'), ((3421, 3519), 'blackboxprotobuf.lib.exceptions.TypedefException', 'TypedefException', (["('Type %s does not have a mapping to protobuf types.' % field_typedef['type'])"], {}), "('Type %s does not have a mapping to protobuf types.' %\n field_typedef['type'])\n", (3437, 3519), False, 'from blackboxprotobuf.lib.exceptions import TypedefException\n'), ((6814, 6955), 'logging.warn', 'logging.warn', (['"""Proto file has import which is not supported by the parser. Ensure the imported files are processed first: %s"""', 'line'], {}), "(\n 'Proto file has import which is not supported by the parser. Ensure the imported files are processed first: %s'\n , line)\n", (6826, 6955), False, 'import logging\n')]
|
import json
from datetime import datetime
from io import StringIO
from django.test import TestCase, override_settings
from wagtail.core.models import Site
import dateutil.relativedelta
from dateutil.relativedelta import relativedelta
from pytz import timezone
from search.elasticsearch_helpers import ElasticsearchTestsMixin
from v1.documents import (
EnforcementActionFilterablePagesDocumentSearch,
EventFilterablePagesDocumentSearch, FilterablePagesDocument,
FilterablePagesDocumentSearch
)
from v1.models.base import CFGOVPageCategory
from v1.models.blog_page import BlogPage
from v1.models.enforcement_action_page import (
EnforcementActionPage, EnforcementActionProduct, EnforcementActionStatus
)
from v1.models.learn_page import AbstractFilterPage, EventPage
from v1.tests.wagtail_pages.helpers import publish_page
class FilterablePagesDocumentTest(TestCase):
def test_model_class_added(self):
self.assertEqual(FilterablePagesDocument.django.model, AbstractFilterPage)
def test_ignore_signal_default(self):
self.assertFalse(FilterablePagesDocument.django.ignore_signals)
def test_auto_refresh_default(self):
self.assertFalse(FilterablePagesDocument.django.auto_refresh)
def test_fields_populated(self):
mapping = FilterablePagesDocument._doc_type.mapping
self.assertCountEqual(
mapping.properties.properties.to_dict().keys(),
[
'tags', 'categories', 'language', 'title', 'url',
'is_archived', 'date_published', 'start_dt', 'end_dt',
'statuses', 'products', 'initial_filing_date', 'model_class',
'content', 'preview_description'
]
)
def test_get_queryset(self):
test_event = EventPage(
title="Testing",
start_dt=datetime.now(timezone('UTC'))
)
qs = FilterablePagesDocument().get_queryset()
self.assertFalse(qs.filter(title=test_event.title).exists())
def test_prepare_statuses(self):
enforcement = EnforcementActionPage(
title="Great Test Page",
preview_description='This is a great test page.',
initial_filing_date=datetime.now(timezone('UTC'))
)
status = EnforcementActionStatus(status='expired-terminated-dismissed')
enforcement.statuses.add(status)
doc = FilterablePagesDocument()
prepared_data = doc.prepare(enforcement)
self.assertEqual(prepared_data['statuses'], ['expired-terminated-dismissed'])
def test_prepare_content_no_content_defined(self):
event = EventPage(
title='Event Test',
start_dt=datetime.now(timezone('UTC'))
)
doc = FilterablePagesDocument()
prepared_data = doc.prepare(event)
self.assertIsNone(prepared_data['content'])
def test_prepare_content_exists(self):
blog = BlogPage(
title='Test Blog',
content=json.dumps([
{
'type': 'full_width_text',
'value': [
{
'type':'content',
'value': 'Blog Text'
}]
}
])
)
doc = FilterablePagesDocument()
prepared_data = doc.prepare(blog)
self.assertEqual(prepared_data['content'], 'Blog Text')
def test_prepare_content_empty(self):
blog = BlogPage(
title='Test Blog',
content=json.dumps([])
)
doc = FilterablePagesDocument()
prepared_data = doc.prepare(blog)
self.assertIsNone(prepared_data['content'])
def test_prepare_products(self):
enforcement = EnforcementActionPage(
title="Great Test Page",
preview_description='This is a great test page.',
initial_filing_date=datetime.now(timezone('UTC'))
)
product = EnforcementActionProduct(product='Fair Lending')
enforcement.products.add(product)
doc = FilterablePagesDocument()
prepared_data = doc.prepare(enforcement)
self.assertEqual(prepared_data['products'], ['Fair Lending'])
class FilterablePagesDocumentSearchTest(ElasticsearchTestsMixin, TestCase):
@classmethod
def setUpTestData(cls):
cls.site = Site.objects.get(is_default_site=True)
content = json.dumps([
{
'type': 'full_width_text',
'value': [
{
'type':'content',
'value': 'Foo Test Content'
}]
}
])
event = EventPage(
title='Event Test',
start_dt=datetime(2021, 2, 16, tzinfo=timezone('UTC')),
end_dt=datetime(2021, 2, 16, tzinfo=timezone('UTC'))
)
event.tags.add('test-topic')
event.categories.add(CFGOVPageCategory(name='test-category'))
event.language = 'es'
publish_page(event)
enforcement = EnforcementActionPage(
title="Great Test Page",
preview_description='This is a great test page.',
initial_filing_date=datetime.now(timezone('UTC'))
)
status = EnforcementActionStatus(status='expired-terminated-dismissed')
enforcement.statuses.add(status)
product = EnforcementActionProduct(product='Debt Collection')
enforcement.products.add(product)
publish_page(enforcement)
blog = BlogPage(
title="Blog Page"
)
publish_page(blog)
blog_title_match = BlogPage(
title="Foo Bar"
)
publish_page(blog_title_match)
blog_preview_match = BlogPage(
title="Random Title",
preview_description="Foo blog"
)
publish_page(blog_preview_match)
blog_content_match = BlogPage(
title="Some Title",
content=content
)
publish_page(blog_content_match)
blog_topic_match = BlogPage(
title="Another Blog Post"
)
blog_topic_match.tags.add("Foo Tag")
publish_page(blog_topic_match)
cls.event = event
cls.enforcement = enforcement
cls.blog = blog
cls.blog_title_match = blog_title_match
cls.blog_preview_match = blog_preview_match
cls.blog_content_match = blog_content_match
cls.blog_topic_match = blog_topic_match
cls.rebuild_elasticsearch_index('v1', stdout=StringIO())
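        # Rebuild the search index so the pages published above are visible
        # to the document searches under test.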
def test_search_event_all_fields(self):
to_date_dt = datetime(2021, 3, 16)
to_date = datetime.date(to_date_dt)
from_date_dt = datetime(2021, 1, 16)
from_date = datetime.date(from_date_dt)
search = EventFilterablePagesDocumentSearch(prefix='/')
search.filter(
topics=['test-topic'],
categories=['test-category'],
language=['es'],
to_date=to_date,
from_date=from_date,
archived=['no']
)
results = search.search(title='Event Test')
self.assertTrue(results.filter(title=self.event.title).exists())
def test_search_blog_dates(self):
to_date_dt = datetime.today() + relativedelta(months=1)
to_date = datetime.date(to_date_dt)
from_date_dt = datetime.today() - relativedelta(months=1)
from_date = datetime.date(from_date_dt)
search = FilterablePagesDocumentSearch(prefix='/')
search.filter(
topics=[],
categories=[],
language=[],
to_date=to_date,
from_date=from_date,
archived=None,
)
results = search.search(title=None)
self.assertTrue(results.filter(title=self.blog.title).exists())
def test_search_enforcement_actions(self):
to_date_dt = datetime.today() + relativedelta(months=1)
to_date = datetime.date(to_date_dt)
from_date_dt = datetime.today() - relativedelta(months=1)
from_date = datetime.date(from_date_dt)
search = EnforcementActionFilterablePagesDocumentSearch(prefix='/')
search.filter(
topics=[],
categories=[],
language=[],
to_date=to_date,
from_date=from_date,
statuses=['expired-terminated-dismissed'],
products=['Debt Collection'],
archived=None
)
results = search.search(title=None)
self.assertTrue(results.filter(title=self.enforcement.title).exists())
def test_search_enforcement_actions_no_statuses(self):
to_date_dt = datetime.today() + relativedelta(months=1)
to_date = datetime.date(to_date_dt)
from_date_dt = datetime.today() - relativedelta(months=1)
from_date = datetime.date(from_date_dt)
search = EnforcementActionFilterablePagesDocumentSearch(prefix='/')
search.filter(
topics=[],
categories=[],
language=[],
to_date=to_date,
from_date=from_date,
statuses=[],
products=[],
archived=None
)
results = search.search(title=None)
self.assertTrue(results.filter(title=self.enforcement.title).exists())
def test_search_title_uses_multimatch(self):
search = FilterablePagesDocumentSearch(prefix='/')
search.filter(
topics=[],
categories=[],
language=[],
to_date=None,
from_date=None,
archived=None
)
results = search.search(title="Foo")
self.assertTrue(results.filter(title=self.blog_title_match).exists())
self.assertTrue(results.filter(title=self.blog_content_match.title).exists())
self.assertTrue(results.filter(title=self.blog_preview_match.title).exists())
self.assertTrue(results.filter(title=self.blog_topic_match.title).exists())
|
[
"v1.documents.FilterablePagesDocumentSearch",
"v1.models.enforcement_action_page.EnforcementActionStatus",
"io.StringIO",
"datetime.datetime.today",
"wagtail.core.models.Site.objects.get",
"v1.models.blog_page.BlogPage",
"v1.models.base.CFGOVPageCategory",
"v1.documents.FilterablePagesDocument",
"dateutil.relativedelta.relativedelta",
"json.dumps",
"datetime.datetime",
"datetime.datetime.date",
"v1.models.enforcement_action_page.EnforcementActionProduct",
"pytz.timezone",
"v1.tests.wagtail_pages.helpers.publish_page",
"v1.documents.EnforcementActionFilterablePagesDocumentSearch",
"v1.documents.EventFilterablePagesDocumentSearch"
] |
[((2278, 2340), 'v1.models.enforcement_action_page.EnforcementActionStatus', 'EnforcementActionStatus', ([], {'status': '"""expired-terminated-dismissed"""'}), "(status='expired-terminated-dismissed')\n", (2301, 2340), False, 'from v1.models.enforcement_action_page import EnforcementActionPage, EnforcementActionProduct, EnforcementActionStatus\n'), ((2396, 2421), 'v1.documents.FilterablePagesDocument', 'FilterablePagesDocument', ([], {}), '()\n', (2419, 2421), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((2747, 2772), 'v1.documents.FilterablePagesDocument', 'FilterablePagesDocument', ([], {}), '()\n', (2770, 2772), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((3298, 3323), 'v1.documents.FilterablePagesDocument', 'FilterablePagesDocument', ([], {}), '()\n', (3321, 3323), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((3588, 3613), 'v1.documents.FilterablePagesDocument', 'FilterablePagesDocument', ([], {}), '()\n', (3611, 3613), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((3980, 4028), 'v1.models.enforcement_action_page.EnforcementActionProduct', 'EnforcementActionProduct', ([], {'product': '"""Fair Lending"""'}), "(product='Fair Lending')\n", (4004, 4028), False, 'from v1.models.enforcement_action_page import EnforcementActionPage, EnforcementActionProduct, EnforcementActionStatus\n'), ((4085, 4110), 'v1.documents.FilterablePagesDocument', 'FilterablePagesDocument', ([], {}), '()\n', (4108, 4110), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((4373, 4411), 'wagtail.core.models.Site.objects.get', 'Site.objects.get', ([], {'is_default_site': '(True)'}), '(is_default_site=True)\n', (4389, 4411), False, 'from wagtail.core.models import Site\n'), ((4431, 4537), 'json.dumps', 'json.dumps', (["[{'type': 'full_width_text', 'value': [{'type': 'content', 'value':\n 'Foo Test Content'}]}]"], {}), "([{'type': 'full_width_text', 'value': [{'type': 'content',\n 'value': 'Foo Test Content'}]}])\n", (4441, 4537), False, 'import json\n'), ((5072, 5091), 'v1.tests.wagtail_pages.helpers.publish_page', 'publish_page', (['event'], {}), '(event)\n', (5084, 5091), False, 'from v1.tests.wagtail_pages.helpers import publish_page\n'), ((5325, 5387), 'v1.models.enforcement_action_page.EnforcementActionStatus', 'EnforcementActionStatus', ([], {'status': '"""expired-terminated-dismissed"""'}), "(status='expired-terminated-dismissed')\n", (5348, 5387), False, 'from v1.models.enforcement_action_page import EnforcementActionPage, EnforcementActionProduct, EnforcementActionStatus\n'), ((5447, 5498), 'v1.models.enforcement_action_page.EnforcementActionProduct', 'EnforcementActionProduct', ([], {'product': '"""Debt Collection"""'}), "(product='Debt Collection')\n", (5471, 5498), False, 'from v1.models.enforcement_action_page import EnforcementActionPage, EnforcementActionProduct, EnforcementActionStatus\n'), ((5549, 5574), 'v1.tests.wagtail_pages.helpers.publish_page', 'publish_page', (['enforcement'], {}), '(enforcement)\n', (5561, 5574), False, 'from v1.tests.wagtail_pages.helpers import publish_page\n'), ((5590, 5617), 'v1.models.blog_page.BlogPage', 'BlogPage', ([], {'title': '"""Blog Page"""'}), "(title='Blog Page')\n", (5598, 5617), False, 'from v1.models.blog_page import BlogPage\n'), ((5648, 5666), 'v1.tests.wagtail_pages.helpers.publish_page', 'publish_page', (['blog'], {}), '(blog)\n', (5660, 5666), False, 'from v1.tests.wagtail_pages.helpers import publish_page\n'), ((5695, 5720), 'v1.models.blog_page.BlogPage', 'BlogPage', ([], {'title': '"""Foo Bar"""'}), "(title='Foo Bar')\n", (5703, 5720), False, 'from v1.models.blog_page import BlogPage\n'), ((5751, 5781), 'v1.tests.wagtail_pages.helpers.publish_page', 'publish_page', (['blog_title_match'], {}), '(blog_title_match)\n', (5763, 5781), False, 'from v1.tests.wagtail_pages.helpers import publish_page\n'), ((5812, 5874), 'v1.models.blog_page.BlogPage', 'BlogPage', ([], {'title': '"""Random Title"""', 'preview_description': '"""Foo blog"""'}), "(title='Random Title', preview_description='Foo blog')\n", (5820, 5874), False, 'from v1.models.blog_page import BlogPage\n'), ((5917, 5949), 'v1.tests.wagtail_pages.helpers.publish_page', 'publish_page', (['blog_preview_match'], {}), '(blog_preview_match)\n', (5929, 5949), False, 'from v1.tests.wagtail_pages.helpers import publish_page\n'), ((5980, 6025), 'v1.models.blog_page.BlogPage', 'BlogPage', ([], {'title': '"""Some Title"""', 'content': 'content'}), "(title='Some Title', content=content)\n", (5988, 6025), False, 'from v1.models.blog_page import BlogPage\n'), ((6068, 6100), 'v1.tests.wagtail_pages.helpers.publish_page', 'publish_page', (['blog_content_match'], {}), '(blog_content_match)\n', (6080, 6100), False, 'from v1.tests.wagtail_pages.helpers import publish_page\n'), ((6129, 6164), 'v1.models.blog_page.BlogPage', 'BlogPage', ([], {'title': '"""Another Blog Post"""'}), "(title='Another Blog Post')\n", (6137, 6164), False, 'from v1.models.blog_page import BlogPage\n'), ((6240, 6270), 'v1.tests.wagtail_pages.helpers.publish_page', 'publish_page', (['blog_topic_match'], {}), '(blog_topic_match)\n', (6252, 6270), False, 'from v1.tests.wagtail_pages.helpers import publish_page\n'), ((6692, 6713), 'datetime.datetime', 'datetime', (['(2021)', '(3)', '(16)'], {}), '(2021, 3, 16)\n', (6700, 6713), False, 'from datetime import datetime\n'), ((6732, 6757), 'datetime.datetime.date', 'datetime.date', (['to_date_dt'], {}), '(to_date_dt)\n', (6745, 6757), False, 'from datetime import datetime\n'), ((6781, 6802), 'datetime.datetime', 'datetime', (['(2021)', '(1)', '(16)'], {}), '(2021, 1, 16)\n', (6789, 6802), False, 'from datetime import datetime\n'), ((6823, 6850), 'datetime.datetime.date', 'datetime.date', (['from_date_dt'], {}), '(from_date_dt)\n', (6836, 6850), False, 'from datetime import datetime\n'), ((6869, 6915), 'v1.documents.EventFilterablePagesDocumentSearch', 'EventFilterablePagesDocumentSearch', ([], {'prefix': '"""/"""'}), "(prefix='/')\n", (6903, 6915), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((7391, 7416), 'datetime.datetime.date', 'datetime.date', (['to_date_dt'], {}), '(to_date_dt)\n', (7404, 7416), False, 'from datetime import datetime\n'), ((7503, 7530), 'datetime.datetime.date', 'datetime.date', (['from_date_dt'], {}), '(from_date_dt)\n', (7516, 7530), False, 'from datetime import datetime\n'), ((7549, 7590), 'v1.documents.FilterablePagesDocumentSearch', 'FilterablePagesDocumentSearch', ([], {'prefix': '"""/"""'}), "(prefix='/')\n", (7578, 7590), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((8034, 8059), 'datetime.datetime.date', 'datetime.date', (['to_date_dt'], {}), '(to_date_dt)\n', (8047, 8059), False, 'from datetime import datetime\n'), ((8146, 8173), 'datetime.datetime.date', 'datetime.date', (['from_date_dt'], {}), '(from_date_dt)\n', (8159, 8173), False, 'from datetime import datetime\n'), ((8192, 8250), 'v1.documents.EnforcementActionFilterablePagesDocumentSearch', 'EnforcementActionFilterablePagesDocumentSearch', ([], {'prefix': '"""/"""'}), "(prefix='/')\n", (8238, 8250), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((8809, 8834), 'datetime.datetime.date', 'datetime.date', (['to_date_dt'], {}), '(to_date_dt)\n', (8822, 8834), False, 'from datetime import datetime\n'), ((8921, 8948), 'datetime.datetime.date', 'datetime.date', (['from_date_dt'], {}), '(from_date_dt)\n', (8934, 8948), False, 'from datetime import datetime\n'), ((8967, 9025), 'v1.documents.EnforcementActionFilterablePagesDocumentSearch', 'EnforcementActionFilterablePagesDocumentSearch', ([], {'prefix': '"""/"""'}), "(prefix='/')\n", (9013, 9025), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((9462, 9503), 'v1.documents.FilterablePagesDocumentSearch', 'FilterablePagesDocumentSearch', ([], {'prefix': '"""/"""'}), "(prefix='/')\n", (9491, 9503), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((4993, 5032), 'v1.models.base.CFGOVPageCategory', 'CFGOVPageCategory', ([], {'name': '"""test-category"""'}), "(name='test-category')\n", (5010, 5032), False, 'from v1.models.base import CFGOVPageCategory\n'), ((7330, 7346), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7344, 7346), False, 'from datetime import datetime\n'), ((7349, 7372), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (7362, 7372), False, 'from dateutil.relativedelta import relativedelta\n'), ((7440, 7456), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7454, 7456), False, 'from datetime import datetime\n'), ((7459, 7482), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (7472, 7482), False, 'from dateutil.relativedelta import relativedelta\n'), ((7973, 7989), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (7987, 7989), False, 'from datetime import datetime\n'), ((7992, 8015), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (8005, 8015), False, 'from dateutil.relativedelta import relativedelta\n'), ((8083, 8099), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (8097, 8099), False, 'from datetime import datetime\n'), ((8102, 8125), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (8115, 8125), False, 'from dateutil.relativedelta import relativedelta\n'), ((8748, 8764), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (8762, 8764), False, 'from datetime import datetime\n'), ((8767, 8790), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (8780, 8790), False, 'from dateutil.relativedelta import relativedelta\n'), ((8858, 8874), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (8872, 8874), False, 'from datetime import datetime\n'), ((8877, 8900), 'dateutil.relativedelta.relativedelta', 'relativedelta', ([], {'months': '(1)'}), '(months=1)\n', (8890, 8900), False, 'from dateutil.relativedelta import relativedelta\n'), ((1897, 1922), 'v1.documents.FilterablePagesDocument', 'FilterablePagesDocument', ([], {}), '()\n', (1920, 1922), False, 'from v1.documents import EnforcementActionFilterablePagesDocumentSearch, EventFilterablePagesDocumentSearch, FilterablePagesDocument, FilterablePagesDocumentSearch\n'), ((2988, 3087), 'json.dumps', 'json.dumps', (["[{'type': 'full_width_text', 'value': [{'type': 'content', 'value':\n 'Blog Text'}]}]"], {}), "([{'type': 'full_width_text', 'value': [{'type': 'content',\n 'value': 'Blog Text'}]}])\n", (2998, 3087), False, 'import json\n'), ((3549, 3563), 'json.dumps', 'json.dumps', (['[]'], {}), '([])\n', (3559, 3563), False, 'import json\n'), ((6614, 6624), 'io.StringIO', 'StringIO', ([], {}), '()\n', (6622, 6624), False, 'from io import StringIO\n'), ((1857, 1872), 'pytz.timezone', 'timezone', (['"""UTC"""'], {}), "('UTC')\n", (1865, 1872), False, 'from pytz import timezone\n'), ((2234, 2249), 'pytz.timezone', 'timezone', (['"""UTC"""'], {}), "('UTC')\n", (2242, 2249), False, 'from pytz import timezone\n'), ((2706, 2721), 'pytz.timezone', 'timezone', (['"""UTC"""'], {}), "('UTC')\n", (2714, 2721), False, 'from pytz import timezone\n'), ((3935, 3950), 'pytz.timezone', 'timezone', (['"""UTC"""'], {}), "('UTC')\n", (3943, 3950), False, 'from pytz import timezone\n'), ((5281, 5296), 'pytz.timezone', 'timezone', (['"""UTC"""'], {}), "('UTC')\n", (5289, 5296), False, 'from pytz import timezone\n'), ((4834, 4849), 'pytz.timezone', 'timezone', (['"""UTC"""'], {}), "('UTC')\n", (4842, 4849), False, 'from pytz import timezone\n'), ((4900, 4915), 'pytz.timezone', 'timezone', (['"""UTC"""'], {}), "('UTC')\n", (4908, 4915), False, 'from pytz import timezone\n')]
|
from immutablecollections import immutableset
ENGLISH_DETERMINERS = immutableset(["the", "a"])
DETERMINERS = immutableset(
[
"the",
"a",
"yi1_ge4",
"yi1_jang1",
"yi1_ben3",
"yi1_jyan1",
"yi1_lyang4",
"yi1_bei1",
"yi1_ba3",
"yi1_jr1",
"yi1_shan4",
"yi1_ding3",
"yi1_kwai4",
"yi1_tiao2",
"yi1_zhi1",
]
)
"""
These are determiners we automatically add to the beginning of non-proper English noun phrases.
This is a language-specific hack since learning determiners is out of our scope:
https://github.com/isi-vista/adam/issues/498
"""
ENGLISH_BLOCK_DETERMINERS = immutableset(["you", "me", "your", "my"]).union(
ENGLISH_DETERMINERS
)
"""
These words block the addition of the determiners above to English noun phrases.
"""
|
[
"immutablecollections.immutableset"
] |
[((69, 95), 'immutablecollections.immutableset', 'immutableset', (["['the', 'a']"], {}), "(['the', 'a'])\n", (81, 95), False, 'from immutablecollections import immutableset\n'), ((110, 305), 'immutablecollections.immutableset', 'immutableset', (["['the', 'a', 'yi1_ge4', 'yi1_jang1', 'yi1_ben3', 'yi1_jyan1', 'yi1_lyang4',\n 'yi1_bei1', 'yi1_ba3', 'yi1_jr1', 'yi1_shan4', 'yi1_ding3', 'yi1_kwai4',\n 'yi1_tiao2', 'yi1_zhi1']"], {}), "(['the', 'a', 'yi1_ge4', 'yi1_jang1', 'yi1_ben3', 'yi1_jyan1',\n 'yi1_lyang4', 'yi1_bei1', 'yi1_ba3', 'yi1_jr1', 'yi1_shan4',\n 'yi1_ding3', 'yi1_kwai4', 'yi1_tiao2', 'yi1_zhi1'])\n", (122, 305), False, 'from immutablecollections import immutableset\n'), ((689, 730), 'immutablecollections.immutableset', 'immutableset', (["['you', 'me', 'your', 'my']"], {}), "(['you', 'me', 'your', 'my'])\n", (701, 730), False, 'from immutablecollections import immutableset\n')]
|
#!/usr/bin/python3
import argparse
import logging
import sys
import time
from configparser import ConfigParser
from pathlib import Path
from random import randint
from typing import Optional, Callable
from dns import resolver, rdtypes
from tldextract import tldextract
from .wapi import Wapi
# Constants that might change at some point but that probably don't need to be configurable:
# how long to wait (in seconds) between DNS queries for validating that the record change has propagated
PROPAGATION_CHECK_DELAY: float = 10
# how many retries will be done before giving up the record validation (enough for about one hour of total waiting)
PROPAGATION_MAX_RETRIES: int = round(3600 / PROPAGATION_CHECK_DELAY)
# name servers used to validate DNS record addition
# those should be public servers far away from you, not your local resolver
NAMESERVERS = ['1.1.1.1', '8.8.8.8']
DOC_LINK = 'https://github.com/hlandau/acmetool/blob/master/_doc/SCHEMA.md#challenge-dns-start-challenge-dns-stop'
OPT_TEST = 'test'
OPT_DNS_CHALLENGE_STOP = 'challenge-dns-stop'
OPT_DNS_CHALLENGE_START = 'challenge-dns-start'
OPT_HTTP_CHALLENGE_START = 'challenge-http-start'
OPT_HTTP_CHALLENGE_STOP = 'challenge-http-stop'
OPT_LIVE_UPDATED = 'live-updated'
wapi: Wapi
def test(domain: str, name: str):
logging.info('Pinging API to make sure basic functionality works')
wapi.ping()
name = ('_test-challenge.' + name).rstrip('.')
data_prefix = '_TEST-CHALLENGE.'
data = data_prefix + str(randint(0, 10000000))
logging.info('Creating record')
wapi.dns_row_add(domain, name, data, 'Wedos Hook Test Record')
wapi.dns_domain_commit(domain)
result = wait_for_record_propagation(domain, name, data)
if not result:
logging.critical('Record propagation failed! Attempts timed out after all retries.')
ids_to_delete = find_row_ids_for_delete(domain, name, lambda record_data: record_data.startswith(data_prefix))
result = do_delete(domain, ids_to_delete)
if not result:
sys.exit(5)
wapi.dns_domain_commit(domain)
logging.info('Test success')
sys.exit(0)
def challenge_start(domain: str, name: str, data: str):
name = ('_acme-challenge.' + name).rstrip('.')
logging.info('Creating record')
wapi.dns_row_add(domain, name, data, 'AcmeTool Wedos Hook')
wapi.dns_domain_commit(domain)
result = wait_for_record_propagation(domain, name, data)
if result:
logging.info('Record created and propagated')
else:
logging.critical('Record propagation failed')
sys.exit(0 if result else 42)
def challenge_stop(domain: str, name: str, data: str):
name = ('_acme-challenge.' + name).rstrip('.')
ids_to_delete = find_row_ids_for_delete(domain, name, lambda record_data: record_data == data)
result = do_delete(domain, ids_to_delete)
wapi.dns_domain_commit(domain)
if result:
logging.info('Record removed successfully')
else:
logging.critical('Record removal failure')
sys.exit(0 if result else 42)
def find_row_ids_for_delete(domain: str, name: str, data_matches: Callable[[str], bool]):
logging.info('Looking up records for deletion')
rows = wapi.dns_rows_list(domain)['response']['data']['row']
ids_to_delete = []
for row in rows:
# if row['ttl'] != str(wapi.default_dns_record_ttl):
# continue
if row['rdtype'] != wapi.default_dns_record_type:
continue
if row['name'] != name:
continue
if not data_matches(str(row['rdata'])):
continue
ids_to_delete.append(row['ID'])
return ids_to_delete
def do_delete(domain, ids_to_delete) -> bool:
# Check that we actually found our record, otherwise something is quite wrong
if len(ids_to_delete) == 0:
logging.error('Found 0 rows to delete')
return False
logging.info(f'Deleting row IDs: {", ".join(ids_to_delete)}')
for rid in ids_to_delete:
wapi.dns_row_delete(domain, int(rid))
return True
def wait_for_record_propagation(domain: str, name: str, data: str) -> bool:
"""Waits for DNS record propagation, aborting after a set amount of delayed retries
:param domain: the domain to verify
:param name: the name (subdomain), if any
:param data: record data (used to verify the exact data in case there are multiple records for the same domain)
:return: whether propagation succeeded (`True`), `False` otherwise
"""
full_name = f'{name}.{domain}'.lstrip('.')
my_resolver = resolver.Resolver()
my_resolver.nameservers = NAMESERVERS
has_propagated: Optional[bool] = None
tries = 1
logging.info(
f'Checking for DNS record propagation for a maximum of {PROPAGATION_MAX_RETRIES} tries with {PROPAGATION_CHECK_DELAY}s delays (for a total of {PROPAGATION_MAX_RETRIES * PROPAGATION_CHECK_DELAY} seconds)')
while tries <= PROPAGATION_MAX_RETRIES and (has_propagated is None or not has_propagated):
logging.debug(f'Checking whether record propagated (try {tries} of {PROPAGATION_MAX_RETRIES})')
try:
answer = my_resolver.query(full_name, wapi.default_dns_record_type)
has_propagated = record_has_propagated(answer, data)
except (resolver.NoAnswer, resolver.NXDOMAIN):
has_propagated = False
if not has_propagated:
tries += 1
logging.debug(f'Sleeping for {PROPAGATION_CHECK_DELAY}s')
time.sleep(PROPAGATION_CHECK_DELAY)
else:
logging.info(f'Match found after {tries} tries ({(tries - 1) * PROPAGATION_CHECK_DELAY} seconds)')
return has_propagated
def record_has_propagated(answer: resolver.Answer, data: str) -> bool:
record: rdtypes.ANY.TXT.TXT
for record in answer:
record_data = record.to_text().strip('"')
logging.debug(f'Reading TXT record data: {record_data}')
if record_data == data:
return True
logging.debug('No match')
return False
def read_config() -> dict:
base = Path(__file__).resolve().parents[1]
dist_config_path = base.joinpath('./config.ini.dist')
config_path = base.joinpath('./config.ini')
if not dist_config_path.exists():
logging.error(f'Distributable config file not found at path "{dist_config_path}".')
if not config_path.exists():
raise FileNotFoundError(f'Config file not found. Create file "{config_path}" (ideally by copying "{dist_config_path}") and edit it.')
parser = ConfigParser()
parser.read([dist_config_path, config_path])
return {
'wapi': {
'username': parser.get('wapi', 'username'),
'password_sha1': parser.get('wapi', '<PASSWORD>'),
},
'hook': {
'verbosity': max(0, min(10, parser.getint('hook', 'override_verbosity', fallback=0))),
},
}
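
# For reference, a config.ini matching the keys read above might look roughly
# like this (hypothetical values; the WAPI password option name is redacted in
# this source, so it is omitted here):
#
#   [wapi]
#   username = someone@example.com
#
#   [hook]
#   override_verbosity = 0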
def get_arg_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(
description='AcmeTool DNS-01 validation hook for Wedos API (WAPI)',
        epilog=f'Read {DOC_LINK} for more information about AcmeTool Hooks\n',
)
parser.add_argument('--verbose', '-v', action='count', default=0, help='log verbosity; use multiple times for higher')
subparsers = parser.add_subparsers(title='Actions', dest='action', description='Hook actions; pick one and call it with --help for more help')
test_parser = subparsers.add_parser(OPT_TEST, help='test the integration on a selected domain')
test_parser.add_argument('--verbose', '-v', action='count', default=0, help='log verbosity; use multiple times for higher')
test_parser.add_argument('domain', type=str)
start_parser = subparsers.add_parser(OPT_DNS_CHALLENGE_START, help='hook action used before the challenge')
start_parser.add_argument('--verbose', '-v', action='count', default=0, help='log verbosity; use multiple times for higher')
start_parser.add_argument('domain', type=str)
start_parser.add_argument('file', help='not used, passed here by AcmeTool')
start_parser.add_argument('record', type=str, help='the TXT record')
stop_parser = subparsers.add_parser(OPT_DNS_CHALLENGE_STOP, help='hook action used after the challenge')
stop_parser.add_argument('--verbose', '-v', action='count', default=0, help='log verbosity; use multiple times for higher')
stop_parser.add_argument('domain', type=str)
stop_parser.add_argument('file', help='not used, passed here by AcmeTool')
stop_parser.add_argument('record', type=str, help='the TXT record')
# additional parsers that run a dummy function so that there are no errors in AcmeTool output
subparsers.add_parser(OPT_HTTP_CHALLENGE_START, help='dummy hook action').add_argument('dummy', nargs=3)
subparsers.add_parser(OPT_HTTP_CHALLENGE_STOP, help='dummy hook action').add_argument('dummy', nargs=3)
subparsers.add_parser(OPT_LIVE_UPDATED, help='dummy hook action')
return parser
def main():
global wapi
# Read config
config = read_config()
# Parse args
arg_parser = get_arg_parser()
args = arg_parser.parse_args()
verbosity = max(args.verbose, config['hook']['verbosity'])
if verbosity >= 2:
loglevel = logging.DEBUG
elif verbosity >= 1:
loglevel = logging.INFO
else:
loglevel = logging.WARNING
logging.basicConfig(level=loglevel)
logging.debug(args)
# In case no arguments / action is specified, exit
if 'action' not in args or args.action is None:
arg_parser.print_help()
sys.exit(3)
# Extract domain/subdomain
if 'domain' in args:
extract_result = tldextract.extract(args.domain)
info_prefix = f'Domain "{args.domain}" extracted as {extract_result.registered_domain} (TLD {extract_result.suffix}, '
if extract_result.subdomain == '':
logging.info(info_prefix + 'NO SUBDOMAIN)')
else:
logging.info(info_prefix + f'SUBDOMAIN {extract_result.subdomain})')
# Initialize Wapi
logging.info(f'Using account "{config["wapi"]["username"]}"')
wapi = Wapi(config['wapi']['username'], config['wapi']['password_sha1'])
# Finally decide what to do and run the given function
{
OPT_TEST: lambda: test(extract_result.registered_domain, extract_result.subdomain),
OPT_DNS_CHALLENGE_START: lambda: challenge_start(extract_result.registered_domain, extract_result.subdomain, args.record),
OPT_DNS_CHALLENGE_STOP: lambda: challenge_stop(extract_result.registered_domain, extract_result.subdomain, args.record),
OPT_HTTP_CHALLENGE_START: lambda: exit_not_implemented(),
OPT_HTTP_CHALLENGE_STOP: lambda: exit_not_implemented(),
OPT_LIVE_UPDATED: lambda: exit_not_implemented(),
}[args.action]()
# Commands should exit by themselves - if they don't, we return with error here
logging.error('Subcommand did not exit on its own.')
sys.exit(255)
def exit_not_implemented():
logging.debug('Not implemented.')
sys.exit(4)
if __name__ == '__main__':
main()
|
[
"logging.error",
"logging.debug",
"argparse.ArgumentParser",
"logging.basicConfig",
"random.randint",
"dns.resolver.Resolver",
"time.sleep",
"logging.info",
"pathlib.Path",
"logging.critical",
"configparser.ConfigParser",
"tldextract.tldextract.extract",
"sys.exit"
] |
[((1251, 1317), 'logging.info', 'logging.info', (['"""Pinging API to make sure basic functionality works"""'], {}), "('Pinging API to make sure basic functionality works')\n", (1263, 1317), False, 'import logging\n'), ((1479, 1510), 'logging.info', 'logging.info', (['"""Creating record"""'], {}), "('Creating record')\n", (1491, 1510), False, 'import logging\n'), ((2031, 2059), 'logging.info', 'logging.info', (['"""Test success"""'], {}), "('Test success')\n", (2043, 2059), False, 'import logging\n'), ((2064, 2075), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (2072, 2075), False, 'import sys\n'), ((2190, 2221), 'logging.info', 'logging.info', (['"""Creating record"""'], {}), "('Creating record')\n", (2202, 2221), False, 'import logging\n'), ((2522, 2551), 'sys.exit', 'sys.exit', (['(0 if result else 42)'], {}), '(0 if result else 42)\n', (2530, 2551), False, 'import sys\n'), ((2976, 3005), 'sys.exit', 'sys.exit', (['(0 if result else 42)'], {}), '(0 if result else 42)\n', (2984, 3005), False, 'import sys\n'), ((3102, 3149), 'logging.info', 'logging.info', (['"""Looking up records for deletion"""'], {}), "('Looking up records for deletion')\n", (3114, 3149), False, 'import logging\n'), ((4514, 4533), 'dns.resolver.Resolver', 'resolver.Resolver', ([], {}), '()\n', (4531, 4533), False, 'from dns import resolver, rdtypes\n'), ((4637, 4864), 'logging.info', 'logging.info', (['f"""Checking for DNS record propagation for a maximum of {PROPAGATION_MAX_RETRIES} tries with {PROPAGATION_CHECK_DELAY}s delays (for a total of {PROPAGATION_MAX_RETRIES * PROPAGATION_CHECK_DELAY} seconds)"""'], {}), "(\n f'Checking for DNS record propagation for a maximum of {PROPAGATION_MAX_RETRIES} tries with {PROPAGATION_CHECK_DELAY}s delays (for a total of {PROPAGATION_MAX_RETRIES * PROPAGATION_CHECK_DELAY} seconds)'\n )\n", (4649, 4864), False, 'import logging\n'), ((5944, 5969), 'logging.debug', 'logging.debug', (['"""No match"""'], {}), "('No match')\n", (5957, 5969), False, 'import logging\n'), ((6500, 6514), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (6512, 6514), False, 'from configparser import ConfigParser\n'), ((6924, 7098), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""AcmeTool DNS-01 validation hook for Wedos API (WAPI)"""', 'epilog': 'f"""Read {DOC_LINK}for more information about AcmeTool Hooks\n"""'}), '(description=\n \'AcmeTool DNS-01 validation hook for Wedos API (WAPI)\', epilog=\n f"""Read {DOC_LINK}for more information about AcmeTool Hooks\n""")\n', (6947, 7098), False, 'import argparse\n'), ((9333, 9368), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'loglevel'}), '(level=loglevel)\n', (9352, 9368), False, 'import logging\n'), ((9374, 9393), 'logging.debug', 'logging.debug', (['args'], {}), '(args)\n', (9387, 9393), False, 'import logging\n'), ((10016, 10081), 'logging.info', 'logging.info', (['f"""Using account "{config[\'wapi\'][\'username\']}\\""""'], {}), '(f\'Using account "{config[\\\'wapi\\\'][\\\'username\\\']}"\')\n', (10028, 10081), False, 'import logging\n'), ((10872, 10924), 'logging.error', 'logging.error', (['"""Subcommand did not exit on its own."""'], {}), "('Subcommand did not exit on its own.')\n", (10885, 10924), False, 'import logging\n'), ((10929, 10942), 'sys.exit', 'sys.exit', (['(255)'], {}), '(255)\n', (10937, 10942), False, 'import sys\n'), ((10977, 11010), 'logging.debug', 'logging.debug', (['"""Not implemented."""'], {}), "('Not implemented.')\n", (10990, 11010), False, 'import logging\n'), ((11015, 
11026), 'sys.exit', 'sys.exit', (['(4)'], {}), '(4)\n', (11023, 11026), False, 'import sys\n'), ((1703, 1792), 'logging.critical', 'logging.critical', (['"""Record propagation failed! Attempts timed out after all retries."""'], {}), "(\n 'Record propagation failed! Attempts timed out after all retries.')\n", (1719, 1792), False, 'import logging\n'), ((1978, 1989), 'sys.exit', 'sys.exit', (['(5)'], {}), '(5)\n', (1986, 1989), False, 'import sys\n'), ((2407, 2452), 'logging.info', 'logging.info', (['"""Record created and propagated"""'], {}), "('Record created and propagated')\n", (2419, 2452), False, 'import logging\n'), ((2471, 2516), 'logging.critical', 'logging.critical', (['"""Record propagation failed"""'], {}), "('Record propagation failed')\n", (2487, 2516), False, 'import logging\n'), ((2866, 2909), 'logging.info', 'logging.info', (['"""Record removed successfully"""'], {}), "('Record removed successfully')\n", (2878, 2909), False, 'import logging\n'), ((2928, 2970), 'logging.critical', 'logging.critical', (['"""Record removal failure"""'], {}), "('Record removal failure')\n", (2944, 2970), False, 'import logging\n'), ((3779, 3818), 'logging.error', 'logging.error', (['"""Found 0 rows to delete"""'], {}), "('Found 0 rows to delete')\n", (3792, 3818), False, 'import logging\n'), ((4967, 5072), 'logging.debug', 'logging.debug', (['f"""Checking whether record propagated (try {tries} of {PROPAGATION_MAX_RETRIES})"""'], {}), "(\n f'Checking whether record propagated (try {tries} of {PROPAGATION_MAX_RETRIES})'\n )\n", (4980, 5072), False, 'import logging\n'), ((5826, 5882), 'logging.debug', 'logging.debug', (['f"""Reading TXT record data: {record_data}"""'], {}), "(f'Reading TXT record data: {record_data}')\n", (5839, 5882), False, 'import logging\n'), ((6226, 6314), 'logging.error', 'logging.error', (['f"""Distributable config file not found at path "{dist_config_path}"."""'], {}), '(\n f\'Distributable config file not found at path "{dist_config_path}".\')\n', (6239, 6314), False, 'import logging\n'), ((9542, 9553), 'sys.exit', 'sys.exit', (['(3)'], {}), '(3)\n', (9550, 9553), False, 'import sys\n'), ((9636, 9667), 'tldextract.tldextract.extract', 'tldextract.extract', (['args.domain'], {}), '(args.domain)\n', (9654, 9667), False, 'from tldextract import tldextract\n'), ((1452, 1472), 'random.randint', 'randint', (['(0)', '(10000000)'], {}), '(0, 10000000)\n', (1459, 1472), False, 'from random import randint\n'), ((5379, 5436), 'logging.debug', 'logging.debug', (['f"""Sleeping for {PROPAGATION_CHECK_DELAY}s"""'], {}), "(f'Sleeping for {PROPAGATION_CHECK_DELAY}s')\n", (5392, 5436), False, 'import logging\n'), ((5449, 5484), 'time.sleep', 'time.sleep', (['PROPAGATION_CHECK_DELAY'], {}), '(PROPAGATION_CHECK_DELAY)\n', (5459, 5484), False, 'import time\n'), ((5511, 5619), 'logging.info', 'logging.info', (['f"""Match found after {tries} tries ({(tries - 1) * PROPAGATION_CHECK_DELAY} seconds)"""'], {}), "(\n f'Match found after {tries} tries ({(tries - 1) * PROPAGATION_CHECK_DELAY} seconds)'\n )\n", (5523, 5619), False, 'import logging\n'), ((9850, 9893), 'logging.info', 'logging.info', (["(info_prefix + 'NO SUBDOMAIN)')"], {}), "(info_prefix + 'NO SUBDOMAIN)')\n", (9862, 9893), False, 'import logging\n'), ((9920, 9988), 'logging.info', 'logging.info', (["(info_prefix + f'SUBDOMAIN {extract_result.subdomain})')"], {}), "(info_prefix + f'SUBDOMAIN {extract_result.subdomain})')\n", (9932, 9988), False, 'import logging\n'), ((6037, 6051), 'pathlib.Path', 'Path', (['__file__'], {}), 
'(__file__)\n', (6041, 6051), False, 'from pathlib import Path\n')]
|
import torch
import math
import numpy as np
def convert_locations_to_boxes(locations, priors, center_variance,
size_variance):
"""Convert regressional location results of SSD into boxes in the form of (center_x, center_y, h, w).
The conversion:
    $$predicted\_center * center\_variance = \frac {real\_center - prior\_center} {prior\_hw}$$
    $$exp(predicted\_hw * size\_variance) = \frac {real\_hw} {prior\_hw}$$
    We do it in the inverse direction here.
    Args:
        locations (batch_size, num_priors, 4): the regression output of SSD.
priors (num_priors, 4) or (batch_size/1, num_priors, 4): prior boxes.
center_variance: a float used to change the scale of center.
size_variance: a float used to change of scale of size.
Returns:
        boxes: [[center_x, center_y, h, w]]. All the values
are relative to the image size.
"""
# priors can have one dimension less.
# if priors.dim() + 1 == locations.dim():
# priors = priors.unsqueeze(0)
# return torch.cat([
# locations[..., :2] * center_variance * priors[..., 2:] + priors[..., :2],
# torch.exp(locations[..., 2:] * size_variance) * priors[..., 2:]
# ], dim=locations.dim() - 1)
#print('locations:',locations)
# print('priors.size():',priors.size)
return locations*center_variance+torch.from_numpy(priors).cuda()
def convert_boxes_to_locations(quad_form_boxes, quad_form_priors, center_variance, size_variance):
# priors can have one dimension less
# if center_form_priors.dim() + 1 == center_form_boxes.dim():
# center_form_priors = center_form_priors.unsqueeze(0)
# return torch.cat([
# (center_form_boxes[..., :2] - center_form_priors[..., :2]) / center_form_priors[..., 2:] / center_variance,
# torch.log(center_form_boxes[..., 2:] / center_form_priors[..., 2:]) / size_variance
# ], dim=center_form_boxes.dim() - 1)
return (quad_form_boxes-quad_form_priors) / center_variance
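
# Note that, under this simplified parameterization, the two functions above
# are exact inverses of each other (size_variance is accepted but unused):
#   decode(loc) = loc * center_variance + prior
#   encode(box) = (box - prior) / center_variance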
def area_of(left_top, right_bottom) -> torch.Tensor:
"""Compute the areas of rectangles given two corners.
Args:
left_top (N, 2): left top corner.
right_bottom (N, 2): right bottom corner.
Returns:
area (N): return the area.
"""
hw = torch.clamp(right_bottom - left_top, min=0.0)
return hw[..., 0] * hw[..., 1]
import shapely
from shapely.geometry import Polygon,MultiPoint  # polygon geometry helpers
from itertools import product
import time
# Sutherland-Hodgman polygon clipping algorithm
def clip(subjectPolygon, clipPolygon):
def inside(p):
return(cp2[0]-cp1[0])*(p[1]-cp1[1]) > (cp2[1]-cp1[1])*(p[0]-cp1[0])
def computeIntersection():
dc = [ cp1[0] - cp2[0], cp1[1] - cp2[1] ]
dp = [ s[0] - e[0], s[1] - e[1] ]
n1 = cp1[0] * cp2[1] - cp1[1] * cp2[0]
n2 = s[0] * e[1] - s[1] * e[0]
n3 = 1.0/(dc[0] * dp[1] - dc[1] * dp[0])
return [(n1*dp[0] - n2*dc[0]) * n3, (n1*dp[1] - n2*dc[1]) * n3]
outputList = subjectPolygon
cp1 = clipPolygon[-1]
for clipVertex in clipPolygon:
cp2 = clipVertex
inputList = outputList
outputList = []
if inputList==[]:
return [[0,0]]*4
s = inputList[-1]
for subjectVertex in inputList:
e = subjectVertex
if inside(e):
if not inside(s):
outputList.append(computeIntersection())
outputList.append(e)
elif inside(s):
outputList.append(computeIntersection())
s = e
cp1 = cp2
return(outputList)
def PolygonArea(corners):
n = len(corners) # of corners
area = 0.0
for i in range(n):
j = (i + 1) % n
area += corners[i][0] * corners[j][1]
area -= corners[j][0] * corners[i][1]
area = abs(area)/2.0
return area
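
# A quick sanity check for the two helpers above, assuming convex quads given
# in counter-clockwise order (the orientation that inside() relies on):
#
#   square_a = [[0, 0], [2, 0], [2, 2], [0, 2]]
#   square_b = [[1, 1], [3, 1], [3, 3], [1, 3]]
#   overlap = clip(square_a, square_b)   # the unit square [1,1]-[2,2]
#   PolygonArea(overlap)                 # -> 1.0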
def calc_iou_Hodgman(quad1,quad2):
intersection = clip(quad1, quad2)
if intersection == 0:
return 0
intersection_area = PolygonArea(intersection)
print('intersection_area:',intersection_area)
print('PolygonArea(quad1):',PolygonArea(quad1))
print('PolygonArea(quad2):',PolygonArea(quad2))
print('PolygonArea(quad1) + PolygonArea(quad2):',PolygonArea(quad1) + PolygonArea(quad2))
union_area=(PolygonArea(quad1) + PolygonArea(quad2) - intersection_area)
print('union_area:',union_area)
iou = intersection_area / union_area
return iou
def iou_of(boxes0, boxes1, eps=1e-5):
"""Return intersection-over-union (Jaccard index) of boxes.
Args:
boxes0 (1,N,8): ground truth boxes.
boxes1 (N,1,8): predicted boxes.
eps: a small number to avoid 0 as denominator.
Returns:
        iou (num_boxes1, num_boxes0): pairwise IoU values.
"""
start = time.time()
# print('boxes0.shape:',np.shape(boxes0))
# print('boxes1.shape:',np.shape(boxes1))
boxes0=np.reshape(boxes0,(-1,4,2))
boxes1=np.reshape(boxes1,(-1,4,2))
iou_result=np.zeros(shape=(np.shape(boxes1)[0],np.shape(boxes0)[0]),dtype=np.float32)
for i, j in product(range(np.shape(boxes1)[0]),range(np.shape(boxes0)[0])):
quad1=boxes0[j]
quad2=boxes1[i]
quad1=np.reshape(np.array(quad1),(4,2))
quad2=np.reshape(np.array(quad2),(4,2))
# iou=calc_iou_Hodgman(quad1,quad2)
# if iou > 1 or iou < 0:
# print('iou:',iou)
# assert iou <= 1 and iou >=0
# iou_result[i][j] = iou
poly1 = Polygon(quad1.reshape(4,2)).convex_hull
poly2 = Polygon(quad2.reshape(4,2)).convex_hull
        union_poly = np.concatenate((quad1.reshape(4,2),quad2.reshape(4,2)))  # stack the two boxes' corners into an 8x2 array
        if not poly1.intersects(poly2):  # the two quadrilaterals do not intersect
            iou = 0
        else:
            try:
                inter_area = poly1.intersection(poly2).area  # intersection area
                #print(inter_area)
union_area = MultiPoint(union_poly).convex_hull.area
if union_area == 0:
iou = 0
else:
iou = float(inter_area) / union_area
iou_result[i][j] = iou
except shapely.geos.TopologicalError:
                print('shapely.geos.TopologicalError occurred, iou set to 0')
iou = 0
assert iou <= 1 and iou >= 0
end = time.time()
#print('time consuming:',end-start)
return iou_result
def distance_sum(quad_gt,quad_from_priors):
ret = []
# print('quad_gt.size:', np.shape(quad_gt))
quad_gt=np.reshape(np.array(quad_gt),(-1,4,2))
quad_from_priors=np.reshape(np.array(quad_from_priors),(-1,4,2))
for i in range(np.shape(quad_gt)[0]):
# ret_temp=b-a[i,:].sum(axis=1,keepdims=True)
ret_temp = np.sum(np.sqrt(np.sum(np.power(quad_from_priors - quad_gt[i, ...],2), axis=2, keepdims=False)),axis=1,keepdims=True)
#print('ret_temp.shape:',np.shape(ret_temp))
ret.append(ret_temp)
# print('ret.size:',len(ret))
ret = np.concatenate(ret, axis=1)
#print('ret.shape:', np.shape(ret))
# print('quad_gt.shape:',np.shape(quad_gt))
# print('quad_from_priors.shape:',np.shape(quad_from_priors))
# print('ret.shape:',np.shape(ret))
return ret
# overlap_left_top = torch.max(boxes0[..., :2], boxes1[..., :2])
# overlap_right_bottom = torch.min(boxes0[..., 2:], boxes1[..., 2:])
#
# overlap_area = area_of(overlap_left_top, overlap_right_bottom)
# area0 = area_of(boxes0[..., :2], boxes0[..., 2:])
# area1 = area_of(boxes1[..., :2], boxes1[..., 2:])
# return overlap_area / (area0 + area1 - overlap_area + eps)
def get_pos_distance_array(pos_distance_threshold):
    # adapt the default-box-to-ground-truth distance threshold to each default-box scale
# print('distance_threshold:',distance_threshold)
# scale = [0.039,0.098,0.156,0.215,0.273,0.332,0.391]
# diff_from_ratio = [1.656,1.588,1.491,1.403,1.323,1.261,1.203,1.068]#this if for different aspect ratio settings
# diff_from_ratio = [1.656,1.656,1.656,1.656,1.656,1.656,1.656,1.656]
# pos_distance_array = []
# pos_distance_array += 64 * 64 * list(np.array([18 * [scale[0] * item] for item in diff_from_ratio]).reshape(-1))
# pos_distance_array += 32 * 32 * list(np.array([18 * [scale[1] * item] for item in diff_from_ratio]).reshape(-1))
# pos_distance_array += 16 * 16 * list(np.array([18 * [scale[2] * item] for item in diff_from_ratio]).reshape(-1))
# pos_distance_array += 8 * 8 * list(np.array([18 * [scale[3] * item] for item in diff_from_ratio]).reshape(-1))
# pos_distance_array += 4 * 4 * list(np.array([18 * [scale[4] * item] for item in diff_from_ratio]).reshape(-1))
# pos_distance_array += 2 * 2 * list(np.array([18 * [scale[5] * item] for item in diff_from_ratio]).reshape(-1))
# pos_distance_array += 1 * 1 * list(np.array([18 * [scale[5] * item] for item in diff_from_ratio]).reshape(-1))
# print('len(pos_distance_array):',len(pos_distance_array))
# print('pos_distance_threshold:',pos_distance_threshold)
n = 144
pos_distance_array = []
pos_distance_array+=64*64*n*[pos_distance_threshold[0]]#0~32768
pos_distance_array+=32*32*n*[pos_distance_threshold[1]]#32768~40960
pos_distance_array+=16*16*n*[pos_distance_threshold[2]]#40960~43008
pos_distance_array+=8*8*n*[pos_distance_threshold[3]]#43008~43520
pos_distance_array+=4*4*n*[pos_distance_threshold[4]]#43520~43648
pos_distance_array+=2*2*n*[pos_distance_threshold[5]]#43648~43680
pos_distance_array+=1*1*n*[pos_distance_threshold[6]]#43680~43688
# print('distance_array.size:',np.shape(distance_array))
# print('len:distance_array:',len(pos_distance_array))
return np.array(pos_distance_array)
def get_ignore_distance_array(ignore_distance_threshold):
    # adapt the default-box-to-ground-truth distance threshold to each default-box scale
# print('distance_threshold:',distance_threshold)
ignore_distance_array = []
n = 126
ignore_distance_array+=64*64*n*[ignore_distance_threshold[0]]#0~32768
ignore_distance_array+=32*32*n*[ignore_distance_threshold[1]]#32768~40960
ignore_distance_array+=16*16*n*[ignore_distance_threshold[2]]#40960~43008
ignore_distance_array+=8*8*n*[ignore_distance_threshold[3]]#43008~43520
ignore_distance_array+=4*4*n*[ignore_distance_threshold[4]]#43520~43648
ignore_distance_array+=2*2*n*[ignore_distance_threshold[5]]#43648~43680
ignore_distance_array+=1*1*n*[ignore_distance_threshold[6]]#43680~43688
# print('distance_array.size:',np.shape(distance_array))
return np.array(ignore_distance_array)
def assign_priors(quad_gt, quad_form_priors,iou_threshold,pos_distance_threshold):
"""Assign ground truth boxes and targets to priors.
    Args:
        quad_gt (num_targets, 8): ground truth quads.
        quad_form_priors (num_priors, 8): quad-form priors.
        iou_threshold: unused here; matching is based on summed corner distance.
        pos_distance_threshold: per-scale distance thresholds for positive matches.
    Returns:
        quad (num_priors, 8): ground-truth quads assigned to each prior.
        labels (num_priors): labels for priors (0 = background).
    """
# size: num_priors x num_targets
#ious = iou_of(quad_gt, quad_form_priors)
#ious = iou_of(quad_gt, quad_form_priors)
distance = distance_sum(quad_gt,quad_form_priors)
# size: num_priors
    # distance from each prior to its closest target
best_target_per_prior=np.min(distance,axis=1)
    # index of the closest target for each prior
best_target_per_prior_index=np.argmin(distance,axis=1)
#print(np.shape(best_target_per_prior))
#print(np.shape(best_target_per_prior_index))
# size: num_targets
    # distance from each target to its closest prior
best_prior_per_target=np.min(distance,axis=0)
    # index of the closest prior for each target
best_prior_per_target_index=np.argmin(distance,axis=0)
    # force each target's best-matching prior to be assigned to that target
for target_index, prior_index in enumerate(best_prior_per_target_index):
best_target_per_prior_index[prior_index] = target_index
# 2.0 is used to make sure every target has a prior assigned
best_target_per_prior[best_prior_per_target_index]=2
# size: num_priors
gt_labels=np.ones(shape=np.shape(quad_gt)[0])
labels = gt_labels[best_target_per_prior_index]
# print('distance_threshold:',distance_threshold)
pos_distance_array=get_pos_distance_array(pos_distance_threshold)
    ignore_distance_array=pos_distance_array * 1.995  # 1.995 is a multiplier derived from IoU = 0.3 under the Manhattan distance metric
labels[best_target_per_prior > pos_distance_array] = 0 # the backgournd id
# print('shape:',np.shape(best_target_per_prior > pos_distance_array))
#ignore_mask = np.multiply(best_target_per_prior > pos_distance_array ,best_target_per_prior < ignore_distance_array)
# print('ignore_mask.size1:',ignore_mask.sum())
#labels[ignore_mask] = -1
quad = quad_gt[best_target_per_prior_index]
# np.savetxt("/home/binchengxiong/boxes.txt", quad)
# np.savetxt("/home/binchengxiong/labels.txt", labels)
return quad,labels
def hard_negative_mining(loss, labels, neg_pos_ratio):
"""
It used to suppress the presence of a large number of negative prediction.
It works on image level not batch level.
For any example/image, it keeps all the positive predictions and
cut the number of negative predictions to make sure the ratio
    between the negative examples and positive examples is no more than
    the given ratio for an image.
Args:
loss (N, num_priors): the loss for each example.
labels (N, num_priors): the labels.
neg_pos_ratio: the ratio between the negative examples and positive examples.
"""
pos_mask = labels == 1
#ignore_mask = labels == -1
# print('ignore_mask.size',ignore_mask.size())
# print('ignore_mask2.size:',ignore_mask.sum())
num_pos = pos_mask.long().sum(dim=1, keepdim=True)
# print('num_pos:',num_pos)
num_neg = num_pos * neg_pos_ratio
# print('pos_mask.size()[1]:',pos_mask.size()[1])
# print('total train sample num:',num_pos * (neg_pos_ratio + 1))
    # set positive-sample losses to -inf so they fall to the end when sorting in descending order
# print('loss.size',loss.size())
loss[pos_mask] = -math.inf
#loss[ignore_mask] = -math.inf
try:
ordered_loss, indexes = loss.sort(dim=1, descending=True)
# print('ordered_loss:',ordered_loss)
# print('loss.size:',loss.size())
except RuntimeError:
print('loss.size()',loss.size())
print('loss:',loss)
_, orders = indexes.sort(dim=1)
neg_mask = orders < num_neg
return pos_mask | neg_mask
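
# Toy illustration of the mining rule above (hypothetical values): with two
# positives and neg_pos_ratio=3, the mask keeps 2 + 2*3 = 8 entries.
#
#   labels = torch.tensor([[1, 1, 0, 0, 0, 0, 0, 0, 0, 0]])
#   loss = torch.rand(1, 10)
#   mask = hard_negative_mining(loss.clone(), labels, neg_pos_ratio=3)
#   mask.long().sum()   # -> 8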
# corner-form (vertex) representation of default boxes
def center_form_to_corner_form(locations):
return torch.cat([locations[..., :2] - locations[..., 2:] / 2,
locations[..., :2] + locations[..., 2:] / 2], locations.dim() - 1)
def corner_form_to_center_form(boxes):
return torch.cat([
(boxes[..., :2] + boxes[..., 2:]) / 2,
boxes[..., 2:] - boxes[..., :2]
], boxes.dim() - 1)
|
[
"shapely.geometry.MultiPoint",
"numpy.power",
"numpy.argmin",
"time.time",
"numpy.shape",
"numpy.min",
"torch.clamp",
"numpy.array",
"numpy.reshape",
"numpy.concatenate",
"torch.from_numpy"
] |
[((2362, 2407), 'torch.clamp', 'torch.clamp', (['(right_bottom - left_top)'], {'min': '(0.0)'}), '(right_bottom - left_top, min=0.0)\n', (2373, 2407), False, 'import torch\n'), ((4727, 4738), 'time.time', 'time.time', ([], {}), '()\n', (4736, 4738), False, 'import time\n'), ((4842, 4872), 'numpy.reshape', 'np.reshape', (['boxes0', '(-1, 4, 2)'], {}), '(boxes0, (-1, 4, 2))\n', (4852, 4872), True, 'import numpy as np\n'), ((4881, 4911), 'numpy.reshape', 'np.reshape', (['boxes1', '(-1, 4, 2)'], {}), '(boxes1, (-1, 4, 2))\n', (4891, 4911), True, 'import numpy as np\n'), ((6272, 6283), 'time.time', 'time.time', ([], {}), '()\n', (6281, 6283), False, 'import time\n'), ((6931, 6958), 'numpy.concatenate', 'np.concatenate', (['ret'], {'axis': '(1)'}), '(ret, axis=1)\n', (6945, 6958), True, 'import numpy as np\n'), ((9611, 9639), 'numpy.array', 'np.array', (['pos_distance_array'], {}), '(pos_distance_array)\n', (9619, 9639), True, 'import numpy as np\n'), ((10449, 10480), 'numpy.array', 'np.array', (['ignore_distance_array'], {}), '(ignore_distance_array)\n', (10457, 10480), True, 'import numpy as np\n'), ((11194, 11218), 'numpy.min', 'np.min', (['distance'], {'axis': '(1)'}), '(distance, axis=1)\n', (11200, 11218), True, 'import numpy as np\n'), ((11300, 11327), 'numpy.argmin', 'np.argmin', (['distance'], {'axis': '(1)'}), '(distance, axis=1)\n', (11309, 11327), True, 'import numpy as np\n'), ((11517, 11541), 'numpy.min', 'np.min', (['distance'], {'axis': '(0)'}), '(distance, axis=0)\n', (11523, 11541), True, 'import numpy as np\n'), ((11615, 11642), 'numpy.argmin', 'np.argmin', (['distance'], {'axis': '(0)'}), '(distance, axis=0)\n', (11624, 11642), True, 'import numpy as np\n'), ((6476, 6493), 'numpy.array', 'np.array', (['quad_gt'], {}), '(quad_gt)\n', (6484, 6493), True, 'import numpy as np\n'), ((6536, 6562), 'numpy.array', 'np.array', (['quad_from_priors'], {}), '(quad_from_priors)\n', (6544, 6562), True, 'import numpy as np\n'), ((5152, 5167), 'numpy.array', 'np.array', (['quad1'], {}), '(quad1)\n', (5160, 5167), True, 'import numpy as np\n'), ((5200, 5215), 'numpy.array', 'np.array', (['quad2'], {}), '(quad2)\n', (5208, 5215), True, 'import numpy as np\n'), ((6592, 6609), 'numpy.shape', 'np.shape', (['quad_gt'], {}), '(quad_gt)\n', (6600, 6609), True, 'import numpy as np\n'), ((1434, 1458), 'torch.from_numpy', 'torch.from_numpy', (['priors'], {}), '(priors)\n', (1450, 1458), False, 'import torch\n'), ((5029, 5045), 'numpy.shape', 'np.shape', (['boxes1'], {}), '(boxes1)\n', (5037, 5045), True, 'import numpy as np\n'), ((5056, 5072), 'numpy.shape', 'np.shape', (['boxes0'], {}), '(boxes0)\n', (5064, 5072), True, 'import numpy as np\n'), ((12005, 12022), 'numpy.shape', 'np.shape', (['quad_gt'], {}), '(quad_gt)\n', (12013, 12022), True, 'import numpy as np\n'), ((4940, 4956), 'numpy.shape', 'np.shape', (['boxes1'], {}), '(boxes1)\n', (4948, 4956), True, 'import numpy as np\n'), ((4960, 4976), 'numpy.shape', 'np.shape', (['boxes0'], {}), '(boxes0)\n', (4968, 4976), True, 'import numpy as np\n'), ((6710, 6757), 'numpy.power', 'np.power', (['(quad_from_priors - quad_gt[i, ...])', '(2)'], {}), '(quad_from_priors - quad_gt[i, ...], 2)\n', (6718, 6757), True, 'import numpy as np\n'), ((5848, 5870), 'shapely.geometry.MultiPoint', 'MultiPoint', (['union_poly'], {}), '(union_poly)\n', (5858, 5870), False, 'from shapely.geometry import Polygon, MultiPoint\n')]
|
import os, sys, json
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(BASE_DIR)
secret_file = os.path.join(BASE_DIR, 'secrets.json')
with open(secret_file) as f:
secrets = json.loads(f.read())
BROKER_URL = secrets['RABBITMQ_CONNECTION']
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
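
# A minimal secrets.json this module expects might look like the following
# (hypothetical broker URL):
#
#   { "RABBITMQ_CONNECTION": "amqp://user:pass@localhost:5672//" }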
|
[
"sys.path.append",
"os.path.abspath",
"os.path.join"
] |
[((110, 135), 'sys.path.append', 'sys.path.append', (['BASE_DIR'], {}), '(BASE_DIR)\n', (125, 135), False, 'import os, sys, json\n'), ((151, 189), 'os.path.join', 'os.path.join', (['BASE_DIR', '"""secrets.json"""'], {}), "(BASE_DIR, 'secrets.json')\n", (163, 189), False, 'import os, sys, json\n'), ((81, 106), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (96, 106), False, 'import os, sys, json\n')]
|
# -*- coding: utf-8 -*-
__author__ = "<EMAIL>"
from pygeotoolbox.sharedtools.fonts.svg.svgfontreader import SVGFontReader
import pygeotoolbox.sharedtools.log as log
from pygeotoolbox.sharedtools import makeDirForFile
__readers = {}
def extractSVGIcon(svgFontFileName, glyphName, iconFileName=None):
global __readers
if not svgFontFileName in __readers:
__readers[svgFontFileName] = SVGFontReader(svgFontFileName)
reader = __readers[svgFontFileName]
for glyphUnicode, glyph in reader.glyphs.iteritems():
if glyphName == glyph.name:
result = '<?xml version="1.0"?>\n<svg>\n\t%s\n</svg>' % glyph.glyphXMLContent.replace("glyph", "path")
if iconFileName:
makeDirForFile(iconFileName)
open(iconFileName, "w").write(result)
log.debug("Saving icon '%s' --> '%s'." % (glyphName, iconFileName))
return result
log.warning("extractSVGIcon('%s', '%s', '%s') - glyph [%s] not found." % (svgFontFileName, glyphName, str(iconFileName), glyphName))
return ""
if __name__ == "__main__":
extractSVGIcon("C:/ms4w/Apache/htdocs/Generalizace/MapGen/projects/zm/zm10/zm10fonts/zm10x1.svg", "105_kostel", "C:/ms4w/Apache/htdocs/Generalizace/MapGen/ms4w/Apache/htdocs/mgFiddle/Maps/zm10/105_kostel.svg")
|
[
"pygeotoolbox.sharedtools.fonts.svg.svgfontreader.SVGFontReader",
"pygeotoolbox.sharedtools.log.debug",
"pygeotoolbox.sharedtools.makeDirForFile"
] |
[((404, 434), 'pygeotoolbox.sharedtools.fonts.svg.svgfontreader.SVGFontReader', 'SVGFontReader', (['svgFontFileName'], {}), '(svgFontFileName)\n', (417, 434), False, 'from pygeotoolbox.sharedtools.fonts.svg.svgfontreader import SVGFontReader\n'), ((730, 758), 'pygeotoolbox.sharedtools.makeDirForFile', 'makeDirForFile', (['iconFileName'], {}), '(iconFileName)\n', (744, 758), False, 'from pygeotoolbox.sharedtools import makeDirForFile\n'), ((829, 896), 'pygeotoolbox.sharedtools.log.debug', 'log.debug', (['("Saving icon \'%s\' --> \'%s\'." % (glyphName, iconFileName))'], {}), '("Saving icon \'%s\' --> \'%s\'." % (glyphName, iconFileName))\n', (838, 896), True, 'import pygeotoolbox.sharedtools.log as log\n')]
|
#!/usr/bin/env python3
from subprocess import run
from os import path, mkdir
import sys
from datetime import datetime
curr_script_dir = path.abspath(path.dirname(__file__))
publish_dir = datetime.now().strftime("publish-%Y-%m-%d-%H-%M-%S.%f")
mkdir(publish_dir)
run(
[sys.executable, path.join(curr_script_dir, "build.py")],
check=True,
cwd=publish_dir,
)
run(["twine", "check", "dist/*"], check=True, cwd=publish_dir)
run(["twine", "upload", "dist/*"], check=True, cwd=publish_dir)
|
[
"os.mkdir",
"subprocess.run",
"os.path.join",
"os.path.dirname",
"datetime.datetime.now"
] |
[((246, 264), 'os.mkdir', 'mkdir', (['publish_dir'], {}), '(publish_dir)\n', (251, 264), False, 'from os import path, mkdir\n'), ((372, 434), 'subprocess.run', 'run', (["['twine', 'check', 'dist/*']"], {'check': '(True)', 'cwd': 'publish_dir'}), "(['twine', 'check', 'dist/*'], check=True, cwd=publish_dir)\n", (375, 434), False, 'from subprocess import run\n'), ((435, 498), 'subprocess.run', 'run', (["['twine', 'upload', 'dist/*']"], {'check': '(True)', 'cwd': 'publish_dir'}), "(['twine', 'upload', 'dist/*'], check=True, cwd=publish_dir)\n", (438, 498), False, 'from subprocess import run\n'), ((151, 173), 'os.path.dirname', 'path.dirname', (['__file__'], {}), '(__file__)\n', (163, 173), False, 'from os import path, mkdir\n'), ((190, 204), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (202, 204), False, 'from datetime import datetime\n'), ((292, 330), 'os.path.join', 'path.join', (['curr_script_dir', '"""build.py"""'], {}), "(curr_script_dir, 'build.py')\n", (301, 330), False, 'from os import path, mkdir\n')]
|
# -*- coding: utf-8 -*-
from itertools import product
import sympy as sp
import networkx
import copy
# This should be made into a method.
def get_order(G, values = [], keep = False):
H = G.graph["dependency_graph"]
order = copy.deepcopy(H.graph["sort"])
if not values: return order
if keep == False:
for value in values:
order.remove(value)
else:
for value in set(order).difference(values):
order.remove(value)
return order
def variable_indices(G, values, restrictions = [], sort = False):
if(not restrictions):
restrictions = get_order(G)
order = get_order(G, restrictions, keep = True)
indices = [order.index(value) for value in values]
if sort: indices.sort()
return indices
# This plays the role of the __init__ method.
def to_dagauss(G):
""" Calculate the unconditional mean vector of a multivariate normal DAG.
This function calculates the mean vector of a multivariate normal DAG. It
only calculates the vector and stores the values in the dag. Use the
function 'mean_vector' to get to the vector.
Args:
G (DagNormal): A DagNormal object representing a multivariate normal.
Returns:
None: The function modifies G in place.
Examples:
Examples should be written in doctest format, and should illustrate how
to use the function.
>>> a = [1,2,3]
>>> print [x + 3 for x in a]
[4, 5, 6]
"""
V = list(networkx.topological_sort(G))
""" Populates a directed graph G with attributes. """
for node in G.nodes:
G.nodes[node]["beta"] = sp.Symbol("beta_" + node, real = True )
G.nodes[node]["sigma"] = sp.Symbol("sigma_" + node, positive = True)
for edge in G.edges:
G.edges[edge]["beta"] = sp.Symbol("beta_" + edge[0] + edge[1], real = True)
H = G.to_undirected()
# This loop takes care of the means of the unconditional model.
for v in V:
edges = list(G.in_edges(v))
vertices = [x for (x, _) in edges]
self_contribution = H.nodes[v]["beta"]
parent_contribution = sum([H.nodes[vertex]["mu"]*H.edges[edge]["beta"]
for vertex, edge
in zip(vertices, edges)])
H.nodes[v]["mu"] = self_contribution + parent_contribution
# This loop takes care of sigmas of the unconditional model.
for i, v in enumerate(V):
edges = list(G.in_edges(v))
vertices = [x for (x, _) in edges]
product_edges = product(vertices, vertices)
parent_contribution = sum([H.edges[(x, y)]["psi"]*H.edges[(x, v)]["beta"]*H.edges[(y, v)]["beta"]
for (x, y)
in product_edges])
self_contribution = H.nodes[v]["sigma"]**2
H.add_edge(v, v, psi = self_contribution + parent_contribution)
predecessors = V[:i]
for w in predecessors:
contribution = sum([H.edges[edge]["beta"]*H.edges[(edge[0], w)]["psi"] for edge in edges])
H.add_edge(v, w, psi = contribution)
H.graph["sort"] = list(set(V))
H.graph["sort"].sort()
G.graph["dependency_graph"] = H
# This is the parameters() method.
def parameters(G, variables = [], conditionants = []):
""" Calculate the conditional mean vector and covariance matrix
Args:
G: A DaGauss object representing a multivariate normal.
variables: The variables in the regression. Can be more than one.
conditionants: The variables to condition on.
Returns:
A dictionary containing the theoretical conditional mean vector and
the theoretical conditional covariance matrix.
"""
if(not variables):
variables = get_order(G, conditionants)
H = G.graph["dependency_graph"]
order = get_order(G)
mean_ = sp.Matrix([H.nodes[value]["mu"] for value in order])
cov = sp.zeros(len(order), len(order))
for (i, j) in product(range(len(order)), range(len(order))):
cov[i, j] = H.edges[(order[i], order[j])]["psi"]
if(not conditionants):
return {"mean": mean_[variable_indices(G, variables, sort = True), 0],
"cov": cov[variable_indices(G, variables, sort = True),
variable_indices(G, variables, sort = True)]}
V = get_order(G)
variables_indices = variable_indices(G, variables, sort = True)
conditionants_indices = variable_indices(G, conditionants, sort = True)
cov_AA = cov[variables_indices, variables_indices]
cov_AB = cov[variables_indices, conditionants_indices]
cov_BA = cov[conditionants_indices, variables_indices]
cov_BB_inv = sp.Inverse(cov[conditionants_indices, conditionants_indices])
mean_A = sp.Matrix([mean_[index] for index in variables_indices])
mean_B = sp.Matrix([mean_[index] for index in conditionants_indices])
new_mean = mean_A + cov_AB*cov_BB_inv*(sp.Matrix(conditionants) - mean_B)
new_cov = cov_AA - cov_AB*cov_BB_inv*cov_BA
return {"mean": sp.simplify(new_mean),
"cov": sp.simplify(new_cov)}
# This is the mean() method.
def mean(G, variables = [], conditionants = []):
""" Calculate the conditional mean vector
Args:
G: A DaGauss object representing a multivariate normal.
variables: The variables in the regression. Can be more than one.
conditionants: The variables to condition on.
Returns:
The theoretical conditional mean vector
"""
return parameters(G, variables = variables,
conditionants = conditionants)["mean"]
# This is the covariance() method.
def covariance(G, variables = [], conditionants = []):
""" Calculate the conditional covariance matrix
Args:
G: A DaGauss object representing a multivariate normal.
variables: The variables in the regression. Can be more than one.
conditionants: The variables to condition on.
Returns:
The theoretical conditional covariance matrix
"""
return parameters(G, variables = variables,
conditionants = conditionants )["cov"]
# The variance method picks the only item from the covariance matrix.
def variance(G, variables = [], conditionants = []):
""" Calculate the conditional covariance matrix
Args:
G: A DaGauss object representing a multivariate normal.
variables: The variables in the regression. Can be more than one.
conditionants: The variables to condition on.
Returns:
The theoretical regression coefficient.
"""
cov = covariance(G, variables = variables,
conditionants = conditionants)
if len(variables) == 1:
return cov[0]
else:
return cov
def beta(G, responses = [], covariates = [], conditionants = []):
""" Calculate the theoretical beta coefficient of a regression
Args:
G: A DaGauss object representing a multivariate normal.
responses: The responses in the regression. Can be more than one.
covariates: The covariates of the regression.
conditionants: The variables the regression is conditioned on.
Returns:
The theoretical regression coefficient.
"""
variables = covariates + conditionants
means = mean(G, responses, variables)
def collect(index):
return sp.collect(expr = sp.expand(means[index]),
syms = variables)
collections = [collect(index) for index in range(len(means))]
betas = sp.Matrix([collection.coeff(variables)
for collection, variables
in product(collections, variables)])
    betas = betas.reshape(len(collections), len(variables)).T  # reshape returns a new matrix; the result must be assigned
indices = variable_indices(G, values = covariates,
restrictions = variables,
sort = True)
return betas[indices, :]
def rsquared(G, responses, covariates, conditionants = [], norm = "trace"):
""" Calculates the theoretical R squared.
This function calculates R squared, also known as the coefficient of
determination.
Args:
G: A DaGauss object representing a multivariate normal.
responses: The responses in the regression. Can be more than one.
covariates: The covariates of the regression.
conditionants: The variables the regression is conditioned on.
norm: Optional covariance matrix norm. Defaults to "trace", which is
recommended.
Returns:
The caclulated R squared. A scalar sympy object.
"""
betas = beta(G, responses = responses,
covariates = covariates,
conditionants = conditionants)
cov_covariates = variance(G, variables = covariates,
conditionants = conditionants)
cov_conditional = betas.T*cov_covariates*betas
cov_unconditional = covariance(G, variables = responses,
conditionants = conditionants)
if(norm == "trace"):
return sp.trace(cov_conditional)/sp.trace(cov_unconditional)
else:
return cov_conditional.norm(norm)/cov_unconditional.norm(norm)
def correlation(G, variables = [], conditionants = []):
""" Calculates the conditional correlation.
Args:
G: A DaGauss object representing a multivariate normal.
variables: The variables you wish to find the correlation matrix for.
conditionants: The variables the correlation matrix is conditioned on.
Returns:
A correlation matrix.
"""
cov = covariance(G, variables = variables,
conditionants = conditionants)
k = cov.shape[0]
sds = sp.Matrix([1/sp.sqrt(cov[i, i]) for i
in range(0, k)]*k).reshape(k, k)
cor = cov.multiply_elementwise(sds).multiply_elementwise(sds.T)
return cor.applyfunc(sp.simplify)
|
[
"sympy.Symbol",
"copy.deepcopy",
"sympy.Inverse",
"networkx.topological_sort",
"sympy.Matrix",
"sympy.simplify",
"sympy.trace",
"sympy.expand",
"sympy.sqrt",
"itertools.product"
] |
[((245, 275), 'copy.deepcopy', 'copy.deepcopy', (["H.graph['sort']"], {}), "(H.graph['sort'])\n", (258, 275), False, 'import copy\n'), ((3992, 4044), 'sympy.Matrix', 'sp.Matrix', (["[H.nodes[value]['mu'] for value in order]"], {}), "([H.nodes[value]['mu'] for value in order])\n", (4001, 4044), True, 'import sympy as sp\n'), ((4834, 4895), 'sympy.Inverse', 'sp.Inverse', (['cov[conditionants_indices, conditionants_indices]'], {}), '(cov[conditionants_indices, conditionants_indices])\n', (4844, 4895), True, 'import sympy as sp\n'), ((4909, 4965), 'sympy.Matrix', 'sp.Matrix', (['[mean_[index] for index in variables_indices]'], {}), '([mean_[index] for index in variables_indices])\n', (4918, 4965), True, 'import sympy as sp\n'), ((4979, 5039), 'sympy.Matrix', 'sp.Matrix', (['[mean_[index] for index in conditionants_indices]'], {}), '([mean_[index] for index in conditionants_indices])\n', (4988, 5039), True, 'import sympy as sp\n'), ((1547, 1575), 'networkx.topological_sort', 'networkx.topological_sort', (['G'], {}), '(G)\n', (1572, 1575), False, 'import networkx\n'), ((1693, 1729), 'sympy.Symbol', 'sp.Symbol', (["('beta_' + node)"], {'real': '(True)'}), "('beta_' + node, real=True)\n", (1702, 1729), True, 'import sympy as sp\n'), ((1766, 1807), 'sympy.Symbol', 'sp.Symbol', (["('sigma_' + node)"], {'positive': '(True)'}), "('sigma_' + node, positive=True)\n", (1775, 1807), True, 'import sympy as sp\n'), ((1867, 1916), 'sympy.Symbol', 'sp.Symbol', (["('beta_' + edge[0] + edge[1])"], {'real': '(True)'}), "('beta_' + edge[0] + edge[1], real=True)\n", (1876, 1916), True, 'import sympy as sp\n'), ((2621, 2648), 'itertools.product', 'product', (['vertices', 'vertices'], {}), '(vertices, vertices)\n', (2628, 2648), False, 'from itertools import product\n'), ((5188, 5209), 'sympy.simplify', 'sp.simplify', (['new_mean'], {}), '(new_mean)\n', (5199, 5209), True, 'import sympy as sp\n'), ((5231, 5251), 'sympy.simplify', 'sp.simplify', (['new_cov'], {}), '(new_cov)\n', (5242, 5251), True, 'import sympy as sp\n'), ((9394, 9419), 'sympy.trace', 'sp.trace', (['cov_conditional'], {}), '(cov_conditional)\n', (9402, 9419), True, 'import sympy as sp\n'), ((9420, 9447), 'sympy.trace', 'sp.trace', (['cov_unconditional'], {}), '(cov_unconditional)\n', (9428, 9447), True, 'import sympy as sp\n'), ((5084, 5108), 'sympy.Matrix', 'sp.Matrix', (['conditionants'], {}), '(conditionants)\n', (5093, 5108), True, 'import sympy as sp\n'), ((7631, 7654), 'sympy.expand', 'sp.expand', (['means[index]'], {}), '(means[index])\n', (7640, 7654), True, 'import sympy as sp\n'), ((7910, 7941), 'itertools.product', 'product', (['collections', 'variables'], {}), '(collections, variables)\n', (7917, 7941), False, 'from itertools import product\n'), ((10086, 10104), 'sympy.sqrt', 'sp.sqrt', (['cov[i, i]'], {}), '(cov[i, i])\n', (10093, 10104), True, 'import sympy as sp\n')]
|
import PyPDF2
PDF_odd = 'odd.pdf'  # PDF containing the odd-numbered pages
PDF_even = 'even.pdf'  # PDF containing the even-numbered pages
OutputName = 'output.pdf'  # output PDF
angle_odd = 0  # clockwise rotation angle for the odd pages, in degrees
angle_even = 0
File_odd = open(PDF_odd, 'rb')
File_even = open(PDF_even, 'rb')
Reader_odd = PyPDF2.PdfFileReader(File_odd)
Reader_even = PyPDF2.PdfFileReader(File_even)
Writer = PyPDF2.PdfFileWriter()
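# Interleave the two files: odd pages in order, even pages in reverse
# (typical when a double-sided stack is scanned front side first, then flipped).
# Assumes both PDFs have the same page count (Reader_odd.numPages).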
for page in range(Reader_odd.numPages):
obj = Reader_odd.getPage(page)
obj.rotateClockwise(angle_odd)
Writer.addPage(obj)
obj = Reader_even.getPage(Reader_odd.numPages - page - 1)
obj.rotateClockwise(angle_even)
Writer.addPage(obj)
Output = open(OutputName, 'wb')
Writer.write(Output)
Output.close()
File_odd.close()
File_even.close()
|
[
"PyPDF2.PdfFileReader",
"PyPDF2.PdfFileWriter"
] |
[((237, 267), 'PyPDF2.PdfFileReader', 'PyPDF2.PdfFileReader', (['File_odd'], {}), '(File_odd)\n', (257, 267), False, 'import PyPDF2\n'), ((282, 313), 'PyPDF2.PdfFileReader', 'PyPDF2.PdfFileReader', (['File_even'], {}), '(File_even)\n', (302, 313), False, 'import PyPDF2\n'), ((323, 345), 'PyPDF2.PdfFileWriter', 'PyPDF2.PdfFileWriter', ([], {}), '()\n', (343, 345), False, 'import PyPDF2\n')]
|
# import Libraries of other lib packages
import numpy
import bob.core
# import our own Library
import bob.extension
bob.extension.load_bob_library('bob.io.base', __file__)
from ._library import File as _File_C, HDF5File as _HDF5File_C, extensions
from . import version
from .version import module as __version__
from .version import api as __api_version__
import os
class File(_File_C):
__doc__ = _File_C.__doc__
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
class HDF5File(_HDF5File_C):
__doc__ = _HDF5File_C.__doc__
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
return self.close()
def __contains__(self, x):
__doc__ = self.has_key.__doc__
return self.has_key(x)
def __iter__(self):
__doc__ = self.keys.__doc__
return iter(self.keys())
def __getitem__(self, name):
__doc__ = self.get.__doc__
return self.get(name)
def __setitem__(self, name, value):
__doc__ = self.set.__doc__
return self.set(name, value)
def values(self):
'''Yields the datasets contained in the current directory.
Yields
-------
object
The datasets that are being read.
'''
return (self[key] for key in self)
def items(self):
'''Yields the keys and the datasets contained in the current directory.
Yields
-------
tuple
The key and the datasets that are being read in a tuple.
'''
return ((key, self[key]) for key in self)
def _is_string(s):
"""Returns ``True`` if the given object is a string
  This method can be used with Python-2.x or 3.x and checks against the
  string types appropriate for each environment.
"""
from sys import version_info
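  # Python 2 accepts str/unicode, Python 3 accepts bytes/str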
return (version_info[0] < 3 and isinstance(s, (str, unicode))) or \
isinstance(s, (bytes, str))
@numpy.deprecate(new_name="os.makedirs(directory, exist_ok=True)")
def create_directories_safe(directory, dryrun=False):
"""Creates a directory if it does not exists, with concurrent access support.
This function will also create any parent directories that might be required.
If the dryrun option is selected, it does not actually create the directory,
but just writes the (Linux) command that would have been executed.
**Parameters:**
``directory`` : str
The directory that you want to create.
``dryrun`` : bool
Only ``print`` the command to console, but do not execute it.
"""
if dryrun:
print("[dry-run] mkdir -p '%s'" % directory)
else:
os.makedirs(directory, exist_ok=True)
def load(inputs):
"""load(inputs) -> data
Loads the contents of a file, an iterable of files, or an iterable of
:py:class:`bob.io.base.File`'s into a :py:class:`numpy.ndarray`.
**Parameters:**
``inputs`` : various types
This might represent several different entities:
1. The name of a file (full path) from where to load the data. In this
case, this assumes that the file contains an array and returns a loaded
numpy ndarray.
2. An iterable of filenames to be loaded in memory. In this case, this
would assume that each file contains a single 1D sample or a set of 1D
samples, load them in memory and concatenate them into a single and
returned 2D :py:class:`numpy.ndarray`.
3. An iterable of :py:class:`File`. In this case, this would assume
that each :py:class:`File` contains a single 1D sample or a set
of 1D samples, load them in memory if required and concatenate them into
a single and returned 2D :py:class:`numpy.ndarray`.
4. An iterable with mixed filenames and :py:class:`File`. In this
case, this would returned a 2D :py:class:`numpy.ndarray`, as described
by points 2 and 3 above.
**Returns:**
``data`` : :py:class:`numpy.ndarray`
The data loaded from the given ``inputs``.
"""
from collections import Iterable
import numpy
if _is_string(inputs):
if not os.path.exists(inputs):
raise RuntimeError(f"`{inputs}' does not exist!")
return File(inputs, 'r').read()
elif isinstance(inputs, Iterable):
retval = []
for obj in inputs:
if _is_string(obj):
retval.append(load(obj))
elif isinstance(obj, File):
retval.append(obj.read())
else:
raise TypeError(
"Iterable contains an object which is not a filename nor a "
"bob.io.base.File.")
return numpy.vstack(retval)
else:
raise TypeError(
"Unexpected input object. This function is expecting a filename, "
"or an iterable of filenames and/or bob.io.base.File's")
def merge(filenames):
"""merge(filenames) -> files
Converts an iterable of filenames into an iterable over read-only
:py:class:`bob.io.base.File`'s.
**Parameters:**
``filenames`` : str or [str]
A list of file names.
This might represent:
1. A single filename. In this case, an iterable with a single
:py:class:`File` is returned.
2. An iterable of filenames to be converted into an iterable of
:py:class:`File`'s.
**Returns:**
``files`` : [:py:class:`File`]
The list of files.
"""
from collections import Iterable
from .utils import is_string
if is_string(filenames):
return [File(filenames, 'r')]
elif isinstance(filenames, Iterable):
return [File(k, 'r') for k in filenames]
else:
raise TypeError(
"Unexpected input object. This function is expecting an "
"iterable of filenames.")
def save(array, filename, create_directories=False):
"""Saves the contents of an array-like object to file.
Effectively, this is the same as creating a :py:class:`File` object
with the mode flag set to ``'w'`` (write with truncation) and calling
:py:meth:`File.write` passing ``array`` as parameter.
Parameters:
``array`` : array_like
The array-like object to be saved on the file
``filename`` : str
The name of the file where you need the contents saved to
``create_directories`` : bool
Automatically generate the directories if required (defaults to ``False``
because of compatibility reasons; might change in future to default to
``True``)
"""
# create directory if not existent yet
if create_directories:
create_directories_safe(os.path.dirname(filename))
# requires data is c-contiguous and aligned, will create a copy otherwise
array = numpy.require(array, requirements=('C_CONTIGUOUS', 'ALIGNED'))
return File(filename, 'w').write(array)
# Just to make it homogenous with the C++ API
write = save
read = load
def append(array, filename):
"""append(array, filename) -> position
Appends the contents of an array-like object to file.
Effectively, this is the same as creating a :py:class:`File` object
with the mode flag set to ``'a'`` (append) and calling
:py:meth:`File.append` passing ``array`` as parameter.
**Parameters:**
``array`` : array_like
The array-like object to be saved on the file
``filename`` : str
The name of the file where you need the contents saved to
**Returns:**
``position`` : int
See :py:meth:`File.append`
"""
# requires data is c-contiguous and aligned, will create a copy otherwise
array = numpy.require(array, requirements=('C_CONTIGUOUS', 'ALIGNED'))
return File(filename, 'a').append(array)
def peek(filename):
"""peek(filename) -> dtype, shape, stride
Returns the type of array (frame or sample) saved in the given file.
Effectively, this is the same as creating a :py:class:`File` object
with the mode flag set to `r` (read-only) and calling
:py:meth:`File.describe`.
**Parameters**:
``filename`` : str
The name of the file to peek information from
**Returns:**
``dtype, shape, stride`` : see :py:meth:`File.describe`
"""
return File(filename, 'r').describe()
def peek_all(filename):
"""peek_all(filename) -> dtype, shape, stride
Returns the type of array (for full readouts) saved in the given file.
Effectively, this is the same as creating a :py:class:`File` object
with the mode flag set to ``'r'`` (read-only) and returning
``File.describe`` with its parameter ``all`` set to ``True``.
**Parameters:**
``filename`` : str
The name of the file to peek information from
**Returns:**
``dtype, shape, stride`` : see :py:meth:`File.describe`
"""
return File(filename, 'r').describe(all=True)
# Keeps compatibility with the previously existing API
open = File
def get_config():
"""Returns a string containing the configuration information.
"""
return bob.extension.get_config(__name__, version.externals, version.api)
def get_include_directories():
"""get_include_directories() -> includes
Returns a list of include directories for dependent libraries, such as HDF5.
This function is automatically used by
:py:func:`bob.extension.get_bob_libraries` to retrieve the non-standard
include directories that are required to use the C bindings of this library
in dependent classes. You shouldn't normally need to call this function by
hand.
**Returns:**
``includes`` : [str]
The list of non-standard include directories required to use the C bindings
of this class. For now, only the directory for the HDF5 headers are
returned.
"""
# try to use pkg_config first
try:
from bob.extension.utils import find_header
# locate pkg-config on our own
header = 'hdf5.h'
candidates = find_header(header)
if not candidates:
raise RuntimeError(
"could not find %s's `%s' - have you installed %s on this "
"machine?" % ('hdf5', header, 'hdf5'))
return [os.path.dirname(candidates[0])]
except RuntimeError:
from bob.extension import pkgconfig
pkg = pkgconfig('hdf5')
return pkg.include_directories()
def get_macros():
"""get_macros() -> macros
Returns a list of preprocessor macros, such as ``(HAVE_HDF5, 1)``. This
function is automatically used by :py:func:`bob.extension.get_bob_libraries`
  to retrieve the preprocessor definitions that are required to use the C
bindings of this library in dependent classes. You shouldn't normally need to
call this function by hand.
**Returns:**
``macros`` : [(str,str)]
The list of preprocessor macros required to use the C bindings of this
class. For now, only ``('HAVE_HDF5', '1')`` is returned, when applicable.
"""
# get include directories
if get_include_directories():
return [('HAVE_HDF5', '1')]
def _generate_features(reader, paths, same_size=False):
"""Load and stack features in a memory efficient way. This function is
meant to be used inside :py:func:`vstack_features`.
Parameters
----------
reader : ``collections.Callable``
See the documentation of :py:func:`vstack_features`.
paths : ``collections.Iterable``
See the documentation of :py:func:`vstack_features`.
same_size : :obj:`bool`, optional
See the documentation of :py:func:`vstack_features`.
Yields
------
object
The first object returned is a tuple of :py:class:`numpy.dtype` of
features and the shape of the first feature. The rest of objects are
the actual values in features. The features are returned in C order.
"""
shape_determined = False
for i, path in enumerate(paths):
feature = numpy.atleast_2d(reader(path))
feature = numpy.ascontiguousarray(feature)
if not shape_determined:
shape_determined = True
dtype = feature.dtype
shape = list(feature.shape)
yield (dtype, shape)
else:
# make sure all features have the same shape and dtype
if same_size:
assert shape == list(feature.shape)
else:
assert shape[1:] == list(feature.shape[1:])
assert dtype == feature.dtype
if same_size:
yield (feature.ravel(),)
else:
for feat in feature:
yield (feat.ravel(),)
def vstack_features(reader, paths, same_size=False, dtype=None):
"""Stacks all features in a memory efficient way.
Parameters
----------
reader : ``collections.Callable``
The function to load the features. The function should only take one
argument ``path`` and return loaded features. Use :any:`functools.partial`
    to adapt your reader to this signature.
The features returned by ``reader`` are expected to have the same
:py:class:`numpy.dtype` and the same shape except for their first
dimension. First dimension should correspond to the number of samples.
paths : ``collections.Iterable``
An iterable of paths to iterate on. Whatever is inside path is given to
    ``reader``, so they do not necessarily need to be paths to actual files.
If ``same_size`` is ``True``, ``len(paths)`` must be valid.
same_size : :obj:`bool`, optional
If ``True``, it assumes that arrays inside all the paths are the same
shape. If you know the features are the same size in all paths, set this
to ``True`` to improve the performance.
dtype : :py:class:`numpy.dtype`, optional
If provided, the data will be casted to this format.
Returns
-------
numpy.ndarray
The read features with the shape ``(n_samples, *features_shape[1:])``.
Examples
--------
This function in a simple way is equivalent to calling
``numpy.vstack([reader(p) for p in paths])``.
>>> import numpy
>>> from bob.io.base import vstack_features
>>> def reader(path):
... # in each file, there are 5 samples and features are 2 dimensional.
... return numpy.arange(10).reshape(5,2)
>>> paths = ['path1', 'path2']
>>> all_features = vstack_features(reader, paths)
>>> numpy.allclose(all_features, numpy.array(
... [[0, 1],
... [2, 3],
... [4, 5],
... [6, 7],
... [8, 9],
... [0, 1],
... [2, 3],
... [4, 5],
... [6, 7],
... [8, 9]]))
True
>>> all_features_with_more_memory = numpy.vstack([reader(p) for p in paths])
>>> numpy.allclose(all_features, all_features_with_more_memory)
True
You can allocate the array at once to improve the performance if you know
that all features in paths have the same shape and you know the total number
of the paths:
>>> all_features = vstack_features(reader, paths, same_size=True)
>>> numpy.allclose(all_features, numpy.array(
... [[0, 1],
... [2, 3],
... [4, 5],
... [6, 7],
... [8, 9],
... [0, 1],
... [2, 3],
... [4, 5],
... [6, 7],
... [8, 9]]))
True
"""
iterable = _generate_features(reader, paths, same_size)
data_dtype, shape = next(iterable)
if dtype is None:
dtype = data_dtype
if same_size:
# numpy black magic: https://stackoverflow.com/a/12473478/1286165
field_dtype = [("", (dtype, (numpy.prod(shape),)))]
total_size = len(paths)
all_features = numpy.fromiter(iterable, field_dtype, total_size)
else:
field_dtype = [("", (dtype, (numpy.prod(shape[1:]),)))]
all_features = numpy.fromiter(iterable, field_dtype)
# go from a field array to a normal array
all_features = all_features.view(dtype)
# the shape is assumed to be (n_samples, ...) it can be (5, 2) or (5, 3, 4).
shape = list(shape)
shape[0] = -1
return numpy.reshape(all_features, shape, order="C")
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
|
[
"os.makedirs",
"os.path.dirname",
"os.path.exists",
"numpy.require",
"numpy.prod",
"bob.extension.utils.find_header",
"bob.extension.pkgconfig",
"numpy.reshape",
"numpy.fromiter",
"numpy.deprecate",
"numpy.ascontiguousarray",
"numpy.vstack"
] |
[((1860, 1925), 'numpy.deprecate', 'numpy.deprecate', ([], {'new_name': '"""os.makedirs(directory, exist_ok=True)"""'}), "(new_name='os.makedirs(directory, exist_ok=True)')\n", (1875, 1925), False, 'import numpy\n'), ((6421, 6483), 'numpy.require', 'numpy.require', (['array'], {'requirements': "('C_CONTIGUOUS', 'ALIGNED')"}), "(array, requirements=('C_CONTIGUOUS', 'ALIGNED'))\n", (6434, 6483), False, 'import numpy\n'), ((7254, 7316), 'numpy.require', 'numpy.require', (['array'], {'requirements': "('C_CONTIGUOUS', 'ALIGNED')"}), "(array, requirements=('C_CONTIGUOUS', 'ALIGNED'))\n", (7267, 7316), False, 'import numpy\n'), ((15239, 15284), 'numpy.reshape', 'numpy.reshape', (['all_features', 'shape'], {'order': '"""C"""'}), "(all_features, shape, order='C')\n", (15252, 15284), False, 'import numpy\n'), ((2540, 2577), 'os.makedirs', 'os.makedirs', (['directory'], {'exist_ok': '(True)'}), '(directory, exist_ok=True)\n', (2551, 2577), False, 'import os\n'), ((9473, 9492), 'bob.extension.utils.find_header', 'find_header', (['header'], {}), '(header)\n', (9484, 9492), False, 'from bob.extension.utils import find_header\n'), ((11379, 11411), 'numpy.ascontiguousarray', 'numpy.ascontiguousarray', (['feature'], {}), '(feature)\n', (11402, 11411), False, 'import numpy\n'), ((14851, 14900), 'numpy.fromiter', 'numpy.fromiter', (['iterable', 'field_dtype', 'total_size'], {}), '(iterable, field_dtype, total_size)\n', (14865, 14900), False, 'import numpy\n'), ((14988, 15025), 'numpy.fromiter', 'numpy.fromiter', (['iterable', 'field_dtype'], {}), '(iterable, field_dtype)\n', (15002, 15025), False, 'import numpy\n'), ((3976, 3998), 'os.path.exists', 'os.path.exists', (['inputs'], {}), '(inputs)\n', (3990, 3998), False, 'import os\n'), ((4449, 4469), 'numpy.vstack', 'numpy.vstack', (['retval'], {}), '(retval)\n', (4461, 4469), False, 'import numpy\n'), ((6307, 6332), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (6322, 6332), False, 'import os\n'), ((9674, 9704), 'os.path.dirname', 'os.path.dirname', (['candidates[0]'], {}), '(candidates[0])\n', (9689, 9704), False, 'import os\n'), ((9779, 9796), 'bob.extension.pkgconfig', 'pkgconfig', (['"""hdf5"""'], {}), "('hdf5')\n", (9788, 9796), False, 'from bob.extension import pkgconfig\n'), ((14781, 14798), 'numpy.prod', 'numpy.prod', (['shape'], {}), '(shape)\n', (14791, 14798), False, 'import numpy\n'), ((14942, 14963), 'numpy.prod', 'numpy.prod', (['shape[1:]'], {}), '(shape[1:])\n', (14952, 14963), False, 'import numpy\n')]
|
#!/usr/bin/env python3
import serial
import datetime
import paho.mqtt.publish as pub
import redis
import psycopg2 as pg
ser = serial.Serial('/dev/ttyACM0', 9600)
doors = [
{'door':'front','open':None},
{'door':'french','open':None},
{'door':'kitchen','open':None},
{'door':'music','open':None},
{'door':'prayer','open':None}]
def set_door_state(bt,state):
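    # bt is a bitmask of door states; bit i corresponds to doors[i] being open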
changes = []
for i,d in enumerate(state):
thisDoor = bool(bt & (1 << i))
if (state[i]['open'] != thisDoor):
state[i]['open'] = thisDoor
changes.append(state[i])
return changes
r = redis.StrictRedis(host='mylocalipaddr', port=6379, db=0)
conn = pg.connect(host='mylocalipaddr',port=5433,dbname='flintstone',user='fred',password='<PASSWORD>')
cur = conn.cursor()
old = None
while 1:
raw = ser.readline()
this = int(raw.strip()[0])
if (this != old):
changes = set_door_state(this,doors)
print(datetime.datetime.now(), ' ', changes)
pub.single('doors',this)
old = this
for change in changes:
r.set('doors.'+change['door'],change['open'])
cur.execute("insert into discretehistory (point,eventtime,value) values(%s,%s,%s)",
('doors.'+change['door'],datetime.datetime.now(),change['open']))
conn.commit()
|
[
"serial.Serial",
"paho.mqtt.publish.single",
"redis.StrictRedis",
"datetime.datetime.now",
"psycopg2.connect"
] |
[((128, 163), 'serial.Serial', 'serial.Serial', (['"""/dev/ttyACM0"""', '(9600)'], {}), "('/dev/ttyACM0', 9600)\n", (141, 163), False, 'import serial\n'), ((612, 668), 'redis.StrictRedis', 'redis.StrictRedis', ([], {'host': '"""mylocalipaddr"""', 'port': '(6379)', 'db': '(0)'}), "(host='mylocalipaddr', port=6379, db=0)\n", (629, 668), False, 'import redis\n'), ((676, 781), 'psycopg2.connect', 'pg.connect', ([], {'host': '"""mylocalipaddr"""', 'port': '(5433)', 'dbname': '"""flintstone"""', 'user': '"""fred"""', 'password': '"""<PASSWORD>"""'}), "(host='mylocalipaddr', port=5433, dbname='flintstone', user=\n 'fred', password='<PASSWORD>')\n", (686, 781), True, 'import psycopg2 as pg\n'), ((1018, 1043), 'paho.mqtt.publish.single', 'pub.single', (['"""doors"""', 'this'], {}), "('doors', this)\n", (1028, 1043), True, 'import paho.mqtt.publish as pub\n'), ((964, 987), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (985, 987), False, 'import datetime\n'), ((1326, 1349), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1347, 1349), False, 'import datetime\n')]
|
import sys
import nuvolasdk
sys.exit(nuvolasdk.run(".", sys.argv))
|
[
"nuvolasdk.run"
] |
[((39, 67), 'nuvolasdk.run', 'nuvolasdk.run', (['"""."""', 'sys.argv'], {}), "('.', sys.argv)\n", (52, 67), False, 'import nuvolasdk\n')]
|
# -*- coding: utf-8 -*-
#
# Dell EMC OpenManage Ansible Modules
# Version 3.0.0
# Copyright (C) 2018-2021 Dell Inc. or its subsidiaries. All Rights Reserved.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import pytest
import json
from ansible_collections.dellemc.openmanage.plugins.modules import idrac_network
from ansible_collections.dellemc.openmanage.tests.unit.plugins.modules.common import FakeAnsibleModule, Constants
from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock
from io import StringIO
from ansible.module_utils._text import to_text
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import ConnectionError, SSLValidationError
from pytest import importorskip
importorskip("omsdk.sdkfile")
importorskip("omsdk.sdkcreds")
MODULE_PATH = 'ansible_collections.dellemc.openmanage.plugins.modules.'
class TestConfigNetwork(FakeAnsibleModule):
module = idrac_network
@pytest.fixture
def idrac_configure_network_mock(self):
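        # mock the omsdk connection: file_share_manager and config_mgr both point to the returned MagicMock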
omsdk_mock = MagicMock()
idrac_obj = MagicMock()
omsdk_mock.file_share_manager = idrac_obj
omsdk_mock.config_mgr = idrac_obj
type(idrac_obj).create_share_obj = Mock(return_value="networkstatus")
type(idrac_obj).set_liason_share = Mock(return_value="networkstatus")
return idrac_obj
@pytest.fixture
def idrac_file_manager_config_networking_mock(self, mocker):
try:
file_manager_obj = mocker.patch(
MODULE_PATH + 'idrac_network.file_share_manager')
except AttributeError:
file_manager_obj = MagicMock()
obj = MagicMock()
file_manager_obj.create_share_obj.return_value = obj
return file_manager_obj
@pytest.fixture
def idrac_connection_configure_network_mock(self, mocker, idrac_configure_network_mock):
idrac_conn_class_mock = mocker.patch(MODULE_PATH +
'idrac_network.iDRACConnection',
return_value=idrac_configure_network_mock)
idrac_conn_class_mock.return_value.__enter__.return_value = idrac_configure_network_mock
return idrac_configure_network_mock
def test_main_idrac_configure_network_success_case(self, idrac_connection_configure_network_mock, mocker,
idrac_default_args, idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename"})
message = {'changed': False, 'msg': {'Status': "Success", "message": "No changes found to commit!"}}
mocker.patch(MODULE_PATH +
'idrac_network.run_idrac_network_config', return_value=message)
result = self._run_module(idrac_default_args)
assert result == {'msg': 'Successfully configured the idrac network settings.',
'network_status': {
'changed': False,
'msg': {'Status': 'Success', 'message': 'No changes found to commit!'}},
'changed': False, 'failed': False}
def test_run_idrac_network_config_success_case01(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "<PASSWORD>", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "172.16.17.32", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": True, "message": "changes are applicable"}
idrac_connection_configure_network_mock.config_mgr.is_change_applicable.return_value = message
f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'changes_applicable': True, 'message': 'changes are applicable'}
def test_run_idrac_network_config_success_case02(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "<PASSWORD>", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "172.16.17.32", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": True, "message": "changes found to commit!", "changed": True,
"Status": "Success"}
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Status': 'Success',
'changed': True,
'changes_applicable': True,
'message': 'changes found to commit!'}
def test_run_idrac_network_config_success_case03(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "<PASSWORD>", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "172.16.17.32", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": False, "Message": "No changes found to commit!", "changed": False,
"Status": "Success"}
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Message': 'No changes found to commit!',
'Status': 'Success',
'changed': False,
'changes_applicable': False}
def test_run_idrac_network_config_success_case04(self, idrac_connection_configure_network_mock,
idrac_default_args, idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "<PASSWORD>", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "172.16.17.32", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
"Status": "Success"}
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Message': 'No changes were applied',
'Status': 'Success',
'changed': False,
'changes_applicable': False}
def test_run_idrac_network_config_success_case05(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "<PASSWORD>", "register_idrac_on_dns": None,
"dns_idrac_name": None, "auto_config": None, "static_dns": None,
"setup_idrac_nic_vlan": None, "vlan_id": None, "vlan_priority": None,
"enable_nic": None, "nic_selection": None,
"failover_network": None, "auto_detect": None, "auto_negotiation": None,
"network_speed": None, "duplex_mode": None, "nic_mtu": None,
"enable_dhcp": None, "ip_address": None, "enable_ipv4": None,
"dns_from_dhcp": None, "static_dns_1": None, "static_dns_2": None,
"static_gateway": None, "static_net_mask": None})
message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
"Status": "Success"}
idrac_connection_configure_network_mock.config_mgr.configure_dns.return_value = message
idrac_connection_configure_network_mock.config_mgr.configure_nic_vlan.return_value = message
idrac_connection_configure_network_mock.config_mgr.configure_network_settings.return_value = message
idrac_connection_configure_network_mock.config_mgr.configure_ipv4.return_value = message
idrac_connection_configure_network_mock.config_mgr.configure_static_ipv4.return_value = message
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Message': 'No changes were applied',
'Status': 'Success',
'changed': False,
'changes_applicable': False}
def test_run_idrac_network_config_failed_case01(self, idrac_connection_configure_network_mock, idrac_default_args,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "<PASSWORD>", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "172.16.17.32", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {'Status': 'Failed', "Data": {'Message': 'status failed in checking Data'}}
idrac_connection_configure_network_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
idrac_connection_configure_network_mock.config_mgr.set_liason_share.return_value = message
f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
result = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert result == idrac_connection_configure_network_mock.config_mgr.is_change_applicable()
def test_run_idrac_network_config_failed_case02(self, idrac_connection_configure_network_mock,
idrac_default_args, idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "<PASSWORD>", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "172.16.17.32", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {"changes_applicable": False, "Message": "No changes were applied", "changed": False,
"Status": "failed"}
idrac_connection_configure_network_mock.config_mgr.apply_changes.return_value = message
f_module = self.get_module_mock(params=idrac_default_args)
f_module.check_mode = False
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == {'Message': 'No changes were applied',
'Status': 'failed',
'changed': False,
'changes_applicable': False}
def test_run_idrac_network_config_failed_case03(self, idrac_connection_configure_network_mock,
idrac_default_args, idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename", "share_mnt": "mountname", "share_user": "shareuser",
"share_password": "<PASSWORD>", "register_idrac_on_dns": "Enabled",
"dns_idrac_name": "testname", "auto_config": "Disabled", "static_dns": "staticdns",
"setup_idrac_nic_vlan": "Enabled", "vlan_id": 4, "vlan_priority": "Enabled",
"enable_nic": "Enabled", "nic_selection": "Dedicated",
"failover_network": "ALL", "auto_detect": "Enabled", "auto_negotiation": "Enabled",
"network_speed": "T_10", "duplex_mode": "Full", "nic_mtu": "nicmtu",
"enable_dhcp": "Enabled", "ip_address": "172.16.17.32", "enable_ipv4": "Enabled",
"dns_from_dhcp": "Enabled", "static_dns_1": "staticdns1",
"static_dns_2": "staticdns2", "static_gateway": "staticgateway",
"static_net_mask": "staticnetmask"})
message = {'Status': 'Failed', "Data": {'Message': "Failed to found changes"}}
idrac_connection_configure_network_mock.file_share_manager.create_share_obj.return_value = "mnt/iso"
idrac_connection_configure_network_mock.config_mgr.set_liason_share.return_value = message
f_module = self.get_module_mock(params=idrac_default_args, check_mode=True)
msg = self.module.run_idrac_network_config(idrac_connection_configure_network_mock, f_module)
assert msg == idrac_connection_configure_network_mock.config_mgr.is_change_applicable()
@pytest.mark.parametrize("exc_type", [RuntimeError, SSLValidationError, ConnectionError, KeyError,
ImportError, ValueError, TypeError, HTTPError, URLError])
def test_main_idrac_configure_network_exception_handling_case(self, exc_type, mocker, idrac_default_args,
idrac_connection_configure_network_mock,
idrac_file_manager_config_networking_mock):
idrac_default_args.update({"share_name": "sharename"})
json_str = to_text(json.dumps({"data": "out"}))
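        # HTTPError/SSLValidationError are raised with full URL/status/header arguments; the rest just wrap a plain message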
if exc_type not in [HTTPError, SSLValidationError]:
mocker.patch(
MODULE_PATH + 'idrac_network.run_idrac_network_config',
side_effect=exc_type('test'))
else:
mocker.patch(
MODULE_PATH + 'idrac_network.run_idrac_network_config',
side_effect=exc_type('http://testhost.com', 400, 'http error message',
{"accept-type": "application/json"}, StringIO(json_str)))
if not exc_type == URLError:
result = self._run_module_with_fail_json(idrac_default_args)
assert result['failed'] is True
else:
result = self._run_module(idrac_default_args)
assert 'msg' in result
|
[
"pytest.importorskip",
"io.StringIO",
"ansible_collections.dellemc.openmanage.tests.unit.compat.mock.Mock",
"json.dumps",
"ansible_collections.dellemc.openmanage.tests.unit.compat.mock.MagicMock",
"pytest.mark.parametrize"
] |
[((918, 947), 'pytest.importorskip', 'importorskip', (['"""omsdk.sdkfile"""'], {}), "('omsdk.sdkfile')\n", (930, 947), False, 'from pytest import importorskip\n'), ((948, 978), 'pytest.importorskip', 'importorskip', (['"""omsdk.sdkcreds"""'], {}), "('omsdk.sdkcreds')\n", (960, 978), False, 'from pytest import importorskip\n'), ((19423, 19586), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""exc_type"""', '[RuntimeError, SSLValidationError, ConnectionError, KeyError, ImportError,\n ValueError, TypeError, HTTPError, URLError]'], {}), "('exc_type', [RuntimeError, SSLValidationError,\n ConnectionError, KeyError, ImportError, ValueError, TypeError,\n HTTPError, URLError])\n", (19446, 19586), False, 'import pytest\n'), ((1211, 1222), 'ansible_collections.dellemc.openmanage.tests.unit.compat.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1220, 1222), False, 'from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock\n'), ((1243, 1254), 'ansible_collections.dellemc.openmanage.tests.unit.compat.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1252, 1254), False, 'from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock\n'), ((1390, 1424), 'ansible_collections.dellemc.openmanage.tests.unit.compat.mock.Mock', 'Mock', ([], {'return_value': '"""networkstatus"""'}), "(return_value='networkstatus')\n", (1394, 1424), False, 'from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock\n'), ((1468, 1502), 'ansible_collections.dellemc.openmanage.tests.unit.compat.mock.Mock', 'Mock', ([], {'return_value': '"""networkstatus"""'}), "(return_value='networkstatus')\n", (1472, 1502), False, 'from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock\n'), ((1826, 1837), 'ansible_collections.dellemc.openmanage.tests.unit.compat.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1835, 1837), False, 'from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock\n'), ((20038, 20065), 'json.dumps', 'json.dumps', (["{'data': 'out'}"], {}), "({'data': 'out'})\n", (20048, 20065), False, 'import json\n'), ((1800, 1811), 'ansible_collections.dellemc.openmanage.tests.unit.compat.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1809, 1811), False, 'from ansible_collections.dellemc.openmanage.tests.unit.compat.mock import MagicMock, patch, Mock\n'), ((20544, 20562), 'io.StringIO', 'StringIO', (['json_str'], {}), '(json_str)\n', (20552, 20562), False, 'from io import StringIO\n')]
|
#!/usr/bin/env python
#
# Public Domain 2014-present MongoDB, Inc.
# Public Domain 2008-2014 WiredTiger, Inc.
#
# This is free and unencumbered software released into the public domain.
#
# Anyone is free to copy, modify, publish, use, compile, sell, or
# distribute this software, either in source code form or as a compiled
# binary, for any purpose, commercial or non-commercial, and by any
# means.
#
# In jurisdictions that recognize copyright laws, the author or authors
# of this software dedicate any and all copyright interest in the
# software to the public domain. We make this dedication for the benefit
# of the public at large and to the detriment of our heirs and
# successors. We intend this dedication to be an overt act of
# relinquishment in perpetuity of all present and future rights to this
# software under copyright law.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from helper import copy_wiredtiger_home
from suite_subprocess import suite_subprocess
import os
import wiredtiger, wttest
# test_bug018.py
# JIRA WT-3590: if writing table data fails during close then tables
# that were updated within the same transaction could get out of sync with
# each other.
class test_bug018(wttest.WiredTigerTestCase, suite_subprocess):
'''Test closing/reopening/recovering tables when writes fail'''
conn_config = 'log=(enabled)'
basename = 'bug018.'
baseuri = 'file:' + basename
flist = []
uri1 = baseuri + '01.wt'
uri2 = baseuri + '02.wt'
def setUp(self):
# This test uses Linux-specific code so skip on any other system.
if os.name != 'posix' or os.uname()[0] != 'Linux':
self.skipTest('Linux-specific test skipped on ' + os.name)
super(test_bug018, self).setUp()
def close_files(self):
for f in self.flist:
f.close()
def open_files(self):
numfiles = 6
dir = self.conn.get_home()
for i in range(1, numfiles):
fname = dir + '/file.' + str(i)
self.flist.append(open(fname, 'w'))
def create_table(self, uri):
self.session.create(uri, 'key_format=S,value_format=S')
return self.session.open_cursor(uri)
def subprocess_bug018(self):
'''Test closing multiple tables'''
# The first thing we do is open several files. We will close them later. The reason is
# that sometimes, without that, this test would fail to report an error as expected. We
# hypothesize, but could not prove (nor reproduce under strace), that after closing the
        # file descriptor, an internal thread would open a file, perhaps a pre-allocated log
# file, and then would open the file descriptor we just closed. So on close, instead of
# getting an error, we would actually write to the wrong file.
#
# So we'll open some files now, and then close them before closing the one of interest to
# the test so that any stray internal file opens will use the file descriptor of one of
# the earlier files we just closed.
self.open_files()
c1 = self.create_table(self.uri1)
c2 = self.create_table(self.uri2)
self.session.begin_transaction()
c1['key'] = 'value'
c2['key'] = 'value'
self.session.commit_transaction()
self.close_files()
# Simulate a write failure by closing the file descriptor for the second
# table out from underneath WiredTiger. We do this right before
# closing the connection so that the write error happens during close
# when writing out the final data. Allow table 1 to succeed and force
# an error writing out table 2.
#
# This is Linux-specific code to figure out the file descriptor.
for f in os.listdir('/proc/self/fd'):
try:
if os.readlink('/proc/self/fd/' + f).endswith(self.basename + '02.wt'):
os.close(int(f))
except OSError:
pass
# Expect an error and messages, so turn off stderr checking.
with self.expectedStderrPattern(''):
try:
self.close_conn()
except wiredtiger.WiredTigerError:
self.conn = None
def test_bug018(self):
'''Test closing multiple tables'''
self.close_conn()
subdir = 'SUBPROCESS'
[ignore_result, new_home_dir] = self.run_subprocess_function(subdir,
'test_bug018.test_bug018.subprocess_bug018')
# Make a backup for forensics in case something goes wrong.
backup_dir = 'BACKUP'
copy_wiredtiger_home(self, new_home_dir, backup_dir, True)
# After reopening and running recovery both tables should be in
# sync even though table 1 was successfully written and table 2
# had an error on close.
self.open_conn(new_home_dir)
results1 = list(self.session.open_cursor(self.uri1))
# It's possible the second table can't even be opened.
# That can happen only if the root page was not pushed out.
# We can't depend on the text of a particular error message to be
# emitted, so we'll just ignore the error.
self.captureerr.check(self) # check there is no error output so far
try:
results2 = list(self.session.open_cursor(self.uri2))
except:
# Make sure there's some error, but we don't care what.
self.captureerr.checkAdditionalPattern(self, '.')
results2 = []
self.assertEqual(results1, results2)
if __name__ == '__main__':
wttest.run()
|
[
"os.readlink",
"os.uname",
"wttest.run",
"helper.copy_wiredtiger_home",
"os.listdir"
] |
[((6041, 6053), 'wttest.run', 'wttest.run', ([], {}), '()\n', (6051, 6053), False, 'import wiredtiger, wttest\n'), ((4206, 4233), 'os.listdir', 'os.listdir', (['"""/proc/self/fd"""'], {}), "('/proc/self/fd')\n", (4216, 4233), False, 'import os\n'), ((5041, 5099), 'helper.copy_wiredtiger_home', 'copy_wiredtiger_home', (['self', 'new_home_dir', 'backup_dir', '(True)'], {}), '(self, new_home_dir, backup_dir, True)\n', (5061, 5099), False, 'from helper import copy_wiredtiger_home\n'), ((2029, 2039), 'os.uname', 'os.uname', ([], {}), '()\n', (2037, 2039), False, 'import os\n'), ((4271, 4304), 'os.readlink', 'os.readlink', (["('/proc/self/fd/' + f)"], {}), "('/proc/self/fd/' + f)\n", (4282, 4304), False, 'import os\n')]
|
import bpy
from .road import register_road, unregister_road
from .building import register_building, unregister_building
bl_info = {
"name": "Building Tools",
"author": "<NAME> (ranjian0), <NAME> (luckykadam), Marcus (MCrafterzz)",
"version": (1, 0, 6),
"blender": (2, 80, 0),
"location": "View3D > Toolshelf > Building Tools",
"description": "Building Creation Tools",
"warning": "",
"wiki_url": "",
"tracker_url": "",
"category": "Mesh",
}
class BTOOLS_PT_road_tools(bpy.types.Panel):
bl_label = "Road Tools"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
bl_category = "Building Tools"
def draw(self, context):
layout = self.layout
# Draw Operators
# ``````````````
col = layout.column(align=True)
col.operator("btools.add_road")
col.operator("btools.finalize_road")
col = layout.column(align=True)
col.operator("btools.add_array")
col.operator("btools.finalize_array")
class BTOOLS_PT_building_tools(bpy.types.Panel):
bl_label = "Building Tools"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
bl_category = "Building Tools"
def draw(self, context):
layout = self.layout
# Draw Operators
# ``````````````
col = layout.column(align=True)
col.operator("btools.add_floorplan")
row = col.row(align=True)
row.operator("btools.add_floors")
row.operator("btools.add_roof")
col = layout.column(align=True)
col.operator("btools.add_balcony")
col.operator("btools.add_stairs")
col = layout.column(align=True)
row = col.row(align=True)
row.operator("btools.add_window")
row.operator("btools.add_door")
col.operator("btools.add_multigroup")
col.operator("btools.add_fill")
col = layout.column(align=True)
col.operator("btools.add_custom")
col.prop(context.scene, "btools_custom_object", text="")
class BTOOLS_PT_material_tools(bpy.types.Panel):
bl_label = "Material Tools"
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
bl_category = "Building Tools"
bl_options = {"DEFAULT_CLOSED"}
@classmethod
def poll(cls, context):
obj = context.object
return obj and obj.type == "MESH"
def draw(self, context):
layout = self.layout
ob = context.object
facemap = ob.face_maps.active
rows = 2
if facemap:
rows = 4
if not len(ob.face_maps):
return
layout.label(text="Face Maps")
row = layout.row()
args = ob, "face_maps", ob.face_maps, "active_index"
row.template_list("BTOOLS_UL_fmaps", "", *args, rows=rows)
col = row.column(align=True)
col.operator("object.face_map_add", icon="ADD", text="")
col.operator("object.face_map_remove", icon="REMOVE", text="")
col.separator()
col.operator("btools.face_map_clear", icon="TRASH", text="")
if ob.face_maps and (ob.mode == "EDIT" and ob.type == "MESH"):
row = layout.row()
sub = row.row(align=True)
sub.operator("object.face_map_assign", text="Assign")
sub.operator("object.face_map_remove_from", text="Remove")
sub = row.row(align=True)
sub.operator("object.face_map_select", text="Select")
sub.operator("object.face_map_deselect", text="Deselect")
if ob.face_maps:
face_map_index = ob.face_maps.active_index
face_map_material = ob.facemap_materials[face_map_index]
layout.label(text="UV Mapping")
col = layout.column()
row = col.row(align=True)
row.alignment = "LEFT"
row.prop(face_map_material, "auto_map", text="Auto")
row.prop(face_map_material, "uv_mapping_method", text="")
layout.label(text="Material")
layout.operator("btools.create_facemap_material")
layout.template_ID_preview(face_map_material, "material", hide_buttons=True)
classes = (BTOOLS_PT_road_tools, BTOOLS_PT_building_tools, BTOOLS_PT_material_tools)
def register():
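    # register road/building operators first, then the UI panels defined above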
register_road()
register_building()
for cls in classes:
bpy.utils.register_class(cls)
def unregister():
unregister_road()
unregister_building()
for cls in classes:
bpy.utils.unregister_class(cls)
if __name__ == "__main__":
import os
os.system("clear")
# -- custom unregister for script watcher
for tp in dir(bpy.types):
if "BTOOLS_" in tp:
bpy.utils.unregister_class(getattr(bpy.types, tp))
register()
|
[
"bpy.utils.unregister_class",
"os.system",
"bpy.utils.register_class"
] |
[((4524, 4542), 'os.system', 'os.system', (['"""clear"""'], {}), "('clear')\n", (4533, 4542), False, 'import os\n'), ((4314, 4343), 'bpy.utils.register_class', 'bpy.utils.register_class', (['cls'], {}), '(cls)\n', (4338, 4343), False, 'import bpy\n'), ((4444, 4475), 'bpy.utils.unregister_class', 'bpy.utils.unregister_class', (['cls'], {}), '(cls)\n', (4470, 4475), False, 'import bpy\n')]
|
# Generated by Django 2.0.4 on 2018-04-18 13:58
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='activityjournal',
name='time_lapse',
field=models.IntegerField(blank=True, null=True),
),
migrations.AlterField(
model_name='activityjournal',
name='end',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AlterField(
model_name='activityjournal',
name='start',
field=models.DateTimeField(default=datetime.datetime(2018, 4, 18, 15, 58, 34, 603734)),
),
migrations.AlterField(
model_name='registry',
name='end',
field=models.DateTimeField(blank=True, null=True),
),
migrations.AlterField(
model_name='registry',
name='start',
field=models.DateTimeField(default=datetime.datetime(2018, 4, 18, 15, 58, 34, 605193)),
),
]
|
[
"django.db.models.DateTimeField",
"django.db.models.IntegerField",
"datetime.datetime"
] |
[((349, 391), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (368, 391), False, 'from django.db import migrations, models\n'), ((519, 562), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (539, 562), False, 'from django.db import migrations, models\n'), ((893, 936), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'blank': '(True)', 'null': '(True)'}), '(blank=True, null=True)\n', (913, 936), False, 'from django.db import migrations, models\n'), ((721, 771), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(4)', '(18)', '(15)', '(58)', '(34)', '(603734)'], {}), '(2018, 4, 18, 15, 58, 34, 603734)\n', (738, 771), False, 'import datetime\n'), ((1088, 1138), 'datetime.datetime', 'datetime.datetime', (['(2018)', '(4)', '(18)', '(15)', '(58)', '(34)', '(605193)'], {}), '(2018, 4, 18, 15, 58, 34, 605193)\n', (1105, 1138), False, 'import datetime\n')]
|
from importlib import import_module
from servicelayer.extensions import get_entry_point
from memorious.model import Crawl
class CrawlerStage(object):
"""A single step in a data processing crawler."""
def __init__(self, crawler, name, config):
self.crawler = crawler
self.name = name
self.config = config
self.method_name = config.get('method')
self.params = config.get('params') or {}
self.handlers = config.get('handle') or {}
@property
def method(self):
# method A: via a named Python entry point
func = get_entry_point('memorious.operations', self.method_name)
if func is not None:
return func
# method B: direct import from a module
if ':' not in self.method_name:
            raise ValueError("Unknown method: %s" % self.method_name)
package, method = self.method_name.rsplit(':', 1)
module = import_module(package)
return getattr(module, method)
@property
def op_count(self):
"""Total operations performed for this stage"""
return Crawl.op_count(self.crawler, self)
def __str__(self):
return self.name
def __repr__(self):
return '<CrawlerStage(%r, %s)>' % (self.crawler, self.name)
|
[
"memorious.model.Crawl.op_count",
"importlib.import_module",
"servicelayer.extensions.get_entry_point"
] |
[((591, 648), 'servicelayer.extensions.get_entry_point', 'get_entry_point', (['"""memorious.operations"""', 'self.method_name'], {}), "('memorious.operations', self.method_name)\n", (606, 648), False, 'from servicelayer.extensions import get_entry_point\n'), ((934, 956), 'importlib.import_module', 'import_module', (['package'], {}), '(package)\n', (947, 956), False, 'from importlib import import_module\n'), ((1106, 1140), 'memorious.model.Crawl.op_count', 'Crawl.op_count', (['self.crawler', 'self'], {}), '(self.crawler, self)\n', (1120, 1140), False, 'from memorious.model import Crawl\n')]
|
from __future__ import print_function
import os
import argparse
import numpy as np
from dcase_task2.lasagne_wrapper.network import Network
from utils.data_tut18_task2 import load_data as load_data_tut18_task2
from utils.data_tut18_task2 import ID_CLASS_MAPPING as id_class_mapping_tut18_task2
from config.settings import EXP_ROOT
# set random seed for reproducibility
np.random.seed(4711)
def select_model(model_path):
""" select model """
model_str = os.path.basename(model_path)
model_str = model_str.split('.py')[0]
import_root = ".".join((model_path.split(os.path.sep))[:-1])
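    # dynamically import <import_root>.<model_str> and bind it to the local name "model"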
exec("from %s import %s as model" % (import_root, model_str))
model.EXP_NAME = model_str
return model
def load_data(data_set, fold, args):
""" select data """
if "tut18T2ver" in data_set:
normalize = "norm" in data_set
spec_dir = data_set.split("-")[1]
data = load_data_tut18_task2(fold=fold, n_workers=1, spec_dir=spec_dir,
train_verified=True, train_unverified=False, normalize=normalize,
fix_lengths=args.no_len_fix, max_len=args.max_len, min_len=args.min_len,
train_file=args.train_file, train_on_all=args.train_on_all,
validate_verified=not args.validate_unverified)
id_class_mapping = id_class_mapping_tut18_task2
elif "tut18T2unver" in data_set:
normalize = "norm" in data_set
spec_dir = data_set.split("-")[1]
data = load_data_tut18_task2(fold=fold, n_workers=1, spec_dir=spec_dir,
train_verified=False, train_unverified=True, normalize=normalize,
fix_lengths=args.no_len_fix, max_len=args.max_len, min_len=args.min_len,
train_file=args.train_file, train_on_all=args.train_on_all,
validate_verified=not args.validate_unverified)
id_class_mapping = id_class_mapping_tut18_task2
elif "tut18T2" in data_set:
normalize = "norm" in data_set
spec_dir = data_set.split("-")[1]
data = load_data_tut18_task2(fold=fold, n_workers=1, spec_dir=spec_dir,
train_verified=True, train_unverified=True, normalize=normalize,
fix_lengths=args.no_len_fix, max_len=args.max_len, min_len=args.min_len,
train_file=args.train_file, train_on_all=args.train_on_all,
validate_verified=not args.validate_unverified)
id_class_mapping = id_class_mapping_tut18_task2
return data, id_class_mapping
def get_dump_file_paths(out_path, fold):
par = 'params.pkl' if fold is None else 'params_%d.pkl' % fold
log = 'results.pkl' if fold is None else 'results_%d.pkl' % fold
dump_file = os.path.join(out_path, par)
log_file = os.path.join(out_path, log)
return dump_file, log_file
if __name__ == '__main__':
""" main """
# add argument parser
parser = argparse.ArgumentParser(description='Train audio tagging network.')
parser.add_argument('--model', help='select model to train.')
parser.add_argument('--data', help='select model to train.')
parser.add_argument('--fold', help='train split.', type=int, default=None)
parser.add_argument('--ini_params', help='path to pretrained parameters.', type=str, default=None)
parser.add_argument('--tag', help='add tag to result files.', type=str, default=None)
parser.add_argument('--fine_tune', help='use fine-tune train configuration.', action='store_true')
# tut18 task2
parser.add_argument('--train_file', help='train data file.', type=str, default="train.csv")
parser.add_argument('--max_len', help='maximum spectrogram length.', type=int, default=None)
parser.add_argument('--min_len', help='minimum spectrogram length.', type=int, default=None)
parser.add_argument('--no_len_fix', help='do not fix lengths of spectrograms.', action='store_false')
parser.add_argument('--train_on_all', help='use all files for training.', action='store_true')
parser.add_argument('--validate_unverified', help='validate also on unverified samples.', action='store_true')
args = parser.parse_args()
# select model
model = select_model(args.model)
# load data
print("\nLoading data ...")
data, _ = load_data(args.data, args.fold, args)
# set model dump file
print("\nPreparing model ...")
out_path = os.path.join(os.path.join(EXP_ROOT), model.EXP_NAME)
dump_file, log_file = get_dump_file_paths(out_path, args.fold)
# change parameter dump files
if not args.fine_tune:
dump_file = dump_file.replace(".pkl", "_it0.pkl")
log_file = log_file.replace(".pkl", "_it0.pkl")
print("parameter file", dump_file)
print("log file", log_file)
# compile network
net = model.build_model()
# initialize neural network
my_net = Network(net)
# load initial parametrization
if args.ini_params:
ini_params = args.ini_params % args.fold
ini_params = dump_file.replace(os.path.basename(dump_file).split(".")[0], ini_params)
my_net.load(ini_params)
print("initial parameter file %s" % ini_params)
# add tag to results
if args.tag:
dump_file = dump_file.replace(".pkl", "_%s.pkl" % args.tag)
log_file = log_file.replace(".pkl", "_%s.pkl" % args.tag)
print("tagged parameter file %s" % dump_file)
# train network
train_strategy = model.compile_train_strategy(args.fine_tune)
my_net.fit(data, train_strategy, log_file=log_file, dump_file=dump_file)
|
[
"numpy.random.seed",
"argparse.ArgumentParser",
"os.path.basename",
"utils.data_tut18_task2.load_data",
"os.path.join",
"dcase_task2.lasagne_wrapper.network.Network"
] |
[((368, 388), 'numpy.random.seed', 'np.random.seed', (['(4711)'], {}), '(4711)\n', (382, 388), True, 'import numpy as np\n'), ((463, 491), 'os.path.basename', 'os.path.basename', (['model_path'], {}), '(model_path)\n', (479, 491), False, 'import os\n'), ((2947, 2974), 'os.path.join', 'os.path.join', (['out_path', 'par'], {}), '(out_path, par)\n', (2959, 2974), False, 'import os\n'), ((2990, 3017), 'os.path.join', 'os.path.join', (['out_path', 'log'], {}), '(out_path, log)\n', (3002, 3017), False, 'import os\n'), ((3135, 3202), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Train audio tagging network."""'}), "(description='Train audio tagging network.')\n", (3158, 3202), False, 'import argparse\n'), ((5079, 5091), 'dcase_task2.lasagne_wrapper.network.Network', 'Network', (['net'], {}), '(net)\n', (5086, 5091), False, 'from dcase_task2.lasagne_wrapper.network import Network\n'), ((907, 1234), 'utils.data_tut18_task2.load_data', 'load_data_tut18_task2', ([], {'fold': 'fold', 'n_workers': '(1)', 'spec_dir': 'spec_dir', 'train_verified': '(True)', 'train_unverified': '(False)', 'normalize': 'normalize', 'fix_lengths': 'args.no_len_fix', 'max_len': 'args.max_len', 'min_len': 'args.min_len', 'train_file': 'args.train_file', 'train_on_all': 'args.train_on_all', 'validate_verified': '(not args.validate_unverified)'}), '(fold=fold, n_workers=1, spec_dir=spec_dir,\n train_verified=True, train_unverified=False, normalize=normalize,\n fix_lengths=args.no_len_fix, max_len=args.max_len, min_len=args.min_len,\n train_file=args.train_file, train_on_all=args.train_on_all,\n validate_verified=not args.validate_unverified)\n', (928, 1234), True, 'from utils.data_tut18_task2 import load_data as load_data_tut18_task2\n'), ((4618, 4640), 'os.path.join', 'os.path.join', (['EXP_ROOT'], {}), '(EXP_ROOT)\n', (4630, 4640), False, 'import os\n'), ((1557, 1884), 'utils.data_tut18_task2.load_data', 'load_data_tut18_task2', ([], {'fold': 'fold', 'n_workers': '(1)', 'spec_dir': 'spec_dir', 'train_verified': '(False)', 'train_unverified': '(True)', 'normalize': 'normalize', 'fix_lengths': 'args.no_len_fix', 'max_len': 'args.max_len', 'min_len': 'args.min_len', 'train_file': 'args.train_file', 'train_on_all': 'args.train_on_all', 'validate_verified': '(not args.validate_unverified)'}), '(fold=fold, n_workers=1, spec_dir=spec_dir,\n train_verified=False, train_unverified=True, normalize=normalize,\n fix_lengths=args.no_len_fix, max_len=args.max_len, min_len=args.min_len,\n train_file=args.train_file, train_on_all=args.train_on_all,\n validate_verified=not args.validate_unverified)\n', (1578, 1884), True, 'from utils.data_tut18_task2 import load_data as load_data_tut18_task2\n'), ((2202, 2528), 'utils.data_tut18_task2.load_data', 'load_data_tut18_task2', ([], {'fold': 'fold', 'n_workers': '(1)', 'spec_dir': 'spec_dir', 'train_verified': '(True)', 'train_unverified': '(True)', 'normalize': 'normalize', 'fix_lengths': 'args.no_len_fix', 'max_len': 'args.max_len', 'min_len': 'args.min_len', 'train_file': 'args.train_file', 'train_on_all': 'args.train_on_all', 'validate_verified': '(not args.validate_unverified)'}), '(fold=fold, n_workers=1, spec_dir=spec_dir,\n train_verified=True, train_unverified=True, normalize=normalize,\n fix_lengths=args.no_len_fix, max_len=args.max_len, min_len=args.min_len,\n train_file=args.train_file, train_on_all=args.train_on_all,\n validate_verified=not args.validate_unverified)\n', (2223, 2528), True, 'from utils.data_tut18_task2 import load_data as load_data_tut18_task2\n'), ((5240, 5267), 'os.path.basename', 'os.path.basename', (['dump_file'], {}), '(dump_file)\n', (5256, 5267), False, 'import os\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
from PyQt5.QtWidgets import QApplication, QTableView
from PyQt5.QtSql import QSqlDatabase, QSqlQueryModel, QSqlQuery
db = QSqlDatabase.addDatabase('QSQLITE')
db.setDatabaseName('database.sqlite')
if not db.open():
raise Exception(db.lastError().text())
TABLE = 'word2emoji'
query = QSqlQuery()
query.exec(f'SELECT COUNT(*) FROM {TABLE}')
query.next()
TABLE_ROW_COUNT = query.value(0)
def update_window_title():
mw.setWindowTitle(f'{model.rowCount()} / {TABLE_ROW_COUNT}')
app = QApplication([])
model = QSqlQueryModel()
model.rowsInserted.connect(update_window_title)
model.setQuery(f"SELECT * FROM {TABLE}")
mw = QTableView()
mw.setEditTriggers(QTableView.NoEditTriggers)
mw.setModel(model)
mw.resize(600, 480)
mw.show()
update_window_title()
app.exec()
|
[
"PyQt5.QtSql.QSqlQuery",
"PyQt5.QtSql.QSqlDatabase.addDatabase",
"PyQt5.QtWidgets.QTableView",
"PyQt5.QtSql.QSqlQueryModel",
"PyQt5.QtWidgets.QApplication"
] |
[((198, 233), 'PyQt5.QtSql.QSqlDatabase.addDatabase', 'QSqlDatabase.addDatabase', (['"""QSQLITE"""'], {}), "('QSQLITE')\n", (222, 233), False, 'from PyQt5.QtSql import QSqlDatabase, QSqlQueryModel, QSqlQuery\n'), ((363, 374), 'PyQt5.QtSql.QSqlQuery', 'QSqlQuery', ([], {}), '()\n', (372, 374), False, 'from PyQt5.QtSql import QSqlDatabase, QSqlQueryModel, QSqlQuery\n'), ((567, 583), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['[]'], {}), '([])\n', (579, 583), False, 'from PyQt5.QtWidgets import QApplication, QTableView\n'), ((593, 609), 'PyQt5.QtSql.QSqlQueryModel', 'QSqlQueryModel', ([], {}), '()\n', (607, 609), False, 'from PyQt5.QtSql import QSqlDatabase, QSqlQueryModel, QSqlQuery\n'), ((705, 717), 'PyQt5.QtWidgets.QTableView', 'QTableView', ([], {}), '()\n', (715, 717), False, 'from PyQt5.QtWidgets import QApplication, QTableView\n')]
|
"""
DRF ViewSet filters.
"""
import django_filters
from django.contrib.auth import get_user_model
from .openedx_modules import CourseOverview
class UserFilter(django_filters.FilterSet):
email_exact = django_filters.CharFilter('email', lookup_expr='iexact')
group = django_filters.NumberFilter('membership__group_id')
no_group = django_filters.BooleanFilter('membership__id', lookup_expr='isnull')
class Meta:
model = get_user_model()
fields = ['email_exact', 'group', 'no_group']
class CourseOverviewFilter(django_filters.FilterSet):
group = django_filters.NumberFilter('group_courses__group_id')
no_group = django_filters.BooleanFilter('group_courses', lookup_expr='isnull')
is_public = django_filters.BooleanFilter('public_course', lookup_expr='isnull', exclude=True)
class Meta:
model = CourseOverview
fields = ['group', 'no_group', 'is_public']
|
[
"django_filters.NumberFilter",
"django_filters.CharFilter",
"django.contrib.auth.get_user_model",
"django_filters.BooleanFilter"
] |
[((208, 264), 'django_filters.CharFilter', 'django_filters.CharFilter', (['"""email"""'], {'lookup_expr': '"""iexact"""'}), "('email', lookup_expr='iexact')\n", (233, 264), False, 'import django_filters\n'), ((277, 328), 'django_filters.NumberFilter', 'django_filters.NumberFilter', (['"""membership__group_id"""'], {}), "('membership__group_id')\n", (304, 328), False, 'import django_filters\n'), ((344, 412), 'django_filters.BooleanFilter', 'django_filters.BooleanFilter', (['"""membership__id"""'], {'lookup_expr': '"""isnull"""'}), "('membership__id', lookup_expr='isnull')\n", (372, 412), False, 'import django_filters\n'), ((585, 639), 'django_filters.NumberFilter', 'django_filters.NumberFilter', (['"""group_courses__group_id"""'], {}), "('group_courses__group_id')\n", (612, 639), False, 'import django_filters\n'), ((655, 722), 'django_filters.BooleanFilter', 'django_filters.BooleanFilter', (['"""group_courses"""'], {'lookup_expr': '"""isnull"""'}), "('group_courses', lookup_expr='isnull')\n", (683, 722), False, 'import django_filters\n'), ((739, 825), 'django_filters.BooleanFilter', 'django_filters.BooleanFilter', (['"""public_course"""'], {'lookup_expr': '"""isnull"""', 'exclude': '(True)'}), "('public_course', lookup_expr='isnull', exclude\n =True)\n", (767, 825), False, 'import django_filters\n'), ((446, 462), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (460, 462), False, 'from django.contrib.auth import get_user_model\n')]
|