text
stringlengths 29
850k
|
|---|
#!/usr/bin/python3
import sys
import random
import pygame
import os.path
# Colour constants (RGB) and the frame-rate cap for the main loop.
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
RED = (255, 0, 0)
GREEN = (0, 255, 0)
FPS = 60
# Initialise pygame and create the 400x400 game window.
pygame.init()
displaysurface = pygame.display.set_mode((400, 400))
displaysurface.fill(WHITE)
pygame.display.set_caption('Ghatiya snake')
clock = pygame.time.Clock()
# Sound effects are loaded relative to this file's own directory so the
# script works regardless of the current working directory.
my_dir = os.path.dirname(os.path.realpath(__file__))
food_sound = pygame.mixer.Sound(my_dir + '/resources/ping.wav')
oooh_sound = pygame.mixer.Sound(my_dir + '/resources/oooh.wav')
class GameState:
    # Global game state, shared via class attributes by all handlers.
    score = 0             # number of food pellets eaten
    speed_multiplier = 1  # derived from score; shortens the per-tick wait
    paused = False        # toggled with the Return key
    game_over = False     # set on wall or self collision
    keylock = False       # blocks a second direction change within one tick
class Food:
    """A single food pellet on the 400x400 board."""

    def __init__(self):
        # Starting position, aligned with the snake's 10px grid (x % 10 == 5).
        self.foodx = 105
        self.foody = 105

    def new_food(self):
        """Respawn the pellet at a random grid-aligned cell inside the walls."""
        self.foodx, self.foody = (random.randrange(15, 386, 10) for _ in range(2))
class Snake:
    """The player's snake: an ordered list of [x, y] segments, head first."""

    # Direction pairs that may never follow one another.
    _OPPOSITE = {'RIGHT': 'LEFT', 'LEFT': 'RIGHT', 'UP': 'DOWN', 'DOWN': 'UP'}

    def __init__(self):
        self.segments = [[15, 5], [5, 5]]
        self.direction = 'RIGHT'

    def direction_update(self, direction_input):
        """Apply a direction key press.

        Ignored when it would reverse the snake onto itself, or when another
        direction change was already accepted this tick (GameState.keylock).
        """
        if direction_input == self._OPPOSITE[self.direction]:
            return
        if not GameState.keylock:
            self.direction = direction_input
            GameState.keylock = True

    def embiggen(self):
        """Grow by one segment, duplicated onto the current tail."""
        self.segments.append(self.segments[-1])
snek = Snake()  # the player's snake
chow = Food()   # the current food pellet
def draw_me_like_one_of_your_french_girls():
    """Redraw the whole frame: background, score, food pellet and snake."""
    displaysurface.fill(WHITE)
    # Score readout in the top-right corner; the x nudge by the digit count
    # keeps longer scores from running off the edge.
    score_text = str(GameState.score)
    font = pygame.font.SysFont('calibri', 20, bold=True)
    text = font.render(score_text, True, (0, 128, 0))
    text_rect = text.get_rect(center=(385 - len(score_text), 10))
    displaysurface.blit(text, text_rect)
    # Food pellet.
    pygame.draw.rect(displaysurface, GREEN, [chow.foodx, chow.foody, 10, 10])
    # Snake body, one 10x10 square per segment.
    for seg_x, seg_y in snek.segments:
        pygame.draw.rect(displaysurface, BLACK, [seg_x, seg_y, 10, 10])
def snake_update():
    """Advance the snake one cell, then handle collisions, food and drawing."""
    # Unit step for the current heading.
    dx, dy = {
        'UP': (0, -1),
        'DOWN': (0, 1),
        'LEFT': (-1, 0),
        'RIGHT': (1, 0),
    }[snek.direction]
    # New body: fresh head plus every old segment except the tail.
    new_snek = [[snek.segments[0][0] + 10 * dx, snek.segments[0][1] + 10 * dy]]
    GameState.keylock = False
    new_snek.extend(snek.segments[:-1])
    snek.segments = new_snek

    # Collisions with the body or the walls end the game.
    snake_head = snek.segments[0]
    hit_self = snake_head in snek.segments[1:]
    hit_wall = not (5 <= snake_head[0] <= 395) or not (5 <= snake_head[1] <= 395)
    if hit_self or hit_wall:
        oooh_sound.play()
        GameState.game_over = True

    def make_it_grow():
        # Eating: grow, respawn the food, bump the score and the speed.
        snek.embiggen()
        chow.new_food()
        GameState.score += 1
        GameState.speed_multiplier = int(GameState.score / 5)
        food_sound.play()

    if snake_head == [chow.foodx, chow.foody]:
        make_it_grow()
    draw_me_like_one_of_your_french_girls()
def main():
    """Run the game loop: input handling, simulation, pause and game-over UI."""
    while True:
        if GameState.game_over is False:
            for event in pygame.event.get():
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_UP:
                        snek.direction_update('UP')
                    elif event.key == pygame.K_DOWN:
                        snek.direction_update('DOWN')
                    elif event.key == pygame.K_LEFT:
                        snek.direction_update('LEFT')
                    elif event.key == pygame.K_RIGHT:
                        snek.direction_update('RIGHT')
                    elif event.key == pygame.K_RETURN:
                        # Toggle pause.
                        GameState.paused = not GameState.paused
                    elif event.key == pygame.K_ESCAPE or event.key == pygame.K_q:
                        pygame.quit()
                        sys.exit()
                elif event.type == pygame.QUIT:
                    pygame.quit()
                    sys.exit()
            if GameState.paused is False:
                snake_update()
                # Clamp the delay: the original expression goes negative once
                # score >= 55 (speed_multiplier >= 11).
                pygame.time.wait(max(0, 100 - 10 * GameState.speed_multiplier))
            else:
                displaysurface.fill(WHITE)
                font = pygame.font.SysFont('calibri', 60, bold=True)
                text = font.render('PAUSED', True, RED)
                text_rect = text.get_rect(center=(200, 200))
                displaysurface.blit(text, text_rect)
        else:
            # Draw the head in red on getting a game over.
            snake_head = snek.segments[0]
            pygame.draw.rect(displaysurface, RED, [snake_head[0], snake_head[1], 10, 10])
            # Game over message.
            font = pygame.font.SysFont('calibri', 60, bold=True)
            text = font.render('GAME OVER', True, RED)
            text_rect = text.get_rect(center=(200, 200))
            displaysurface.blit(text, text_rect)
            for event in pygame.event.get():
                if event.type == pygame.KEYDOWN:
                    if event.key == pygame.K_r:
                        # Reset all state and let the outer loop continue.
                        # (The original recursed into main() here, leaking one
                        # stack frame per restart.)
                        GameState.game_over = False
                        GameState.score = 0
                        GameState.speed_multiplier = 1
                        snek.__init__()
                        chow.__init__()
                    elif event.key == pygame.K_ESCAPE or event.key == pygame.K_q:
                        pygame.quit()
                        sys.exit()
                elif event.type == pygame.QUIT:
                    pygame.quit()
                    sys.exit()
        pygame.display.update()
        clock.tick(FPS)


if __name__ == '__main__':
    main()
|
Build and assemble your very own colorful, meadow like, floral arrangements using these delicate filler flowers and stylish vase designs! When paired with the coordinating cling stamp set, these gorgeous springtime accent pieces allow you to create graceful floral die cuts ideal for adding extra interest to all of your container arrangement embellishments and springtime bouquets! An outstanding array of die cut possibilities for use on handmade greeting cards, specialized tags, scrapbook layouts and heirloom albums!
|
# -*- coding: utf-8 -*-
import tensorflow as tf
from functools import reduce
from operator import mul
# Sentinel magnitudes: adding VERY_NEGATIVE_NUMBER to a logit before softmax
# drives the corresponding attention weight to ~0 (see exp_mask_for_high_rank).
VERY_BIG_NUMBER = 1e30
VERY_SMALL_NUMBER = 1e-30
VERY_POSITIVE_NUMBER = VERY_BIG_NUMBER
VERY_NEGATIVE_NUMBER = -VERY_BIG_NUMBER
# --------------- DiSAN Interface ----------------
def disan(rep_tensor, rep_mask, scope=None,
          keep_prob=1., is_train=None, wd=0., activation='elu',
          tensor_dict=None, name=''):
    """DiSAN sentence encoder.

    Runs forward- and backward-masked directional attention over the
    sequence, concatenates the two results along the feature axis, and pools
    them into a single sentence vector with multi-dimensional attention.

    Args:
        rep_tensor: sequence representations (assumed [batch, seq, dim] —
            consistent with the sub-layers; confirm against callers).
        rep_mask: boolean mask over valid tokens.
        scope: optional variable-scope name (default 'DiSAN').
        keep_prob, is_train, wd, activation: passed through to sub-layers.
        tensor_dict, name: optional dict (and key prefix) for exposing
            intermediate attention tensors.
    Returns:
        The pooled sentence representation from multi_dimensional_attention.
    """
    with tf.variable_scope(scope or 'DiSAN'):
        with tf.variable_scope('ct_attn'):
            fw_res = directional_attention_with_dense(
                rep_tensor, rep_mask, 'forward', 'dir_attn_fw',
                keep_prob, is_train, wd, activation,
                tensor_dict=tensor_dict, name=name+'_fw_attn')
            bw_res = directional_attention_with_dense(
                rep_tensor, rep_mask, 'backward', 'dir_attn_bw',
                keep_prob, is_train, wd, activation,
                tensor_dict=tensor_dict, name=name+'_bw_attn')
            seq_rep = tf.concat([fw_res, bw_res], -1)
        with tf.variable_scope('sent_enc_attn'):
            sent_rep = multi_dimensional_attention(
                seq_rep, rep_mask, 'multi_dimensional_attention',
                keep_prob, is_train, wd, activation,
                tensor_dict=tensor_dict, name=name+'_attn')
        return sent_rep
# --------------- supporting networks ----------------
def directional_attention_with_dense(rep_tensor, rep_mask, direction=None, scope=None,
                                     keep_prob=1., is_train=None, wd=0., activation='elu',
                                     tensor_dict=None, name=None):
    """Directional self-attention (DiSA) block.

    Token-to-token attention restricted by a directional mask (strictly lower
    or upper triangular), followed by a sigmoid fusion gate that blends the
    attended result with the projected input.

    Args:
        rep_tensor: float tensor [bs, sl, vec].
        rep_mask: bool tensor [bs, sl]; True marks valid tokens.
        direction: 'forward' | 'backward' | None (None -> mask out only the
            diagonal, i.e. no token attends to itself).
        scope, keep_prob, is_train, wd, activation: as in disan().
        tensor_dict, name: optional dict (and key prefix) for exposing
            intermediate tensors.
    Returns:
        float tensor [bs, sl, vec], masked to valid tokens.
    """
    def scaled_tanh(x, scale=5.):
        # tanh squashed to the open interval (-scale, scale)
        return scale * tf.nn.tanh(1./scale * x)

    bs, sl, vec = tf.shape(rep_tensor)[0], tf.shape(rep_tensor)[1], tf.shape(rep_tensor)[2]
    ivec = rep_tensor.get_shape()[2]
    # NOTE(review): operator precedence makes this
    # (scope or ('directional_attention_%s' % direction) or 'diag'); the
    # trailing "or 'diag'" is unreachable because the %-formatted string is
    # always truthy, so direction=None yields 'directional_attention_None'.
    # Probably intended: 'directional_attention_%s' % (direction or 'diag').
    # Left unchanged here because the scope name determines variable names
    # (checkpoint compatibility).
    with tf.variable_scope(scope or 'directional_attention_%s' % direction or 'diag'):
        # mask generation
        sl_indices = tf.range(sl, dtype=tf.int32)
        sl_col, sl_row = tf.meshgrid(sl_indices, sl_indices)
        if direction is None:
            # everything except the diagonal
            direct_mask = tf.cast(tf.diag(- tf.ones([sl], tf.int32)) + 1, tf.bool)
        else:
            if direction == 'forward':
                direct_mask = tf.greater(sl_row, sl_col)  # strictly lower triangular
            else:
                direct_mask = tf.greater(sl_col, sl_row)  # strictly upper triangular
        direct_mask_tile = tf.tile(tf.expand_dims(direct_mask, 0), [bs, 1, 1])  # bs,sl,sl
        rep_mask_tile = tf.tile(tf.expand_dims(rep_mask, 1), [1, sl, 1])  # bs,sl,sl
        attn_mask = tf.logical_and(direct_mask_tile, rep_mask_tile)  # bs,sl,sl

        # non-linear projection of the input sequence
        rep_map = bn_dense_layer(rep_tensor, ivec, True, 0., 'bn_dense_map', activation,
                                 False, wd, keep_prob, is_train)
        rep_map_tile = tf.tile(tf.expand_dims(rep_map, 1), [1, sl, 1, 1])  # bs,sl,sl,vec
        rep_map_dp = dropout(rep_map, keep_prob, is_train)

        # attention
        with tf.variable_scope('attention'):  # bs,sl,sl,vec
            f_bias = tf.get_variable('f_bias', [ivec], tf.float32, tf.constant_initializer(0.))
            dependent = linear(rep_map_dp, ivec, False, scope='linear_dependent')  # bs,sl,vec
            dependent_etd = tf.expand_dims(dependent, 1)  # bs,1,sl,vec
            head = linear(rep_map_dp, ivec, False, scope='linear_head')  # bs,sl,vec
            head_etd = tf.expand_dims(head, 2)  # bs,sl,1,vec

            # per-feature ("multi-dimensional") attention logits, bounded
            logits = scaled_tanh(dependent_etd + head_etd + f_bias, 5.0)  # bs,sl,sl,vec
            logits_masked = exp_mask_for_high_rank(logits, attn_mask)
            attn_score = tf.nn.softmax(logits_masked, 2)  # bs,sl,sl,vec
            attn_score = mask_for_high_rank(attn_score, attn_mask)
            attn_result = tf.reduce_sum(attn_score * rep_map_tile, 2)  # bs,sl,vec

        with tf.variable_scope('output'):
            o_bias = tf.get_variable('o_bias', [ivec], tf.float32, tf.constant_initializer(0.))
            # fusion (input) gate blending attended result with the projection
            fusion_gate = tf.nn.sigmoid(
                linear(rep_map, ivec, True, 0., 'linear_fusion_i', False, wd, keep_prob, is_train) +
                linear(attn_result, ivec, True, 0., 'linear_fusion_a', False, wd, keep_prob, is_train) +
                o_bias)
            output = fusion_gate * rep_map + (1-fusion_gate) * attn_result
            output = mask_for_high_rank(output, rep_mask)

        # save attn tensors for inspection
        if tensor_dict is not None and name is not None:
            tensor_dict[name + '_dependent'] = dependent
            tensor_dict[name + '_head'] = head
            tensor_dict[name] = attn_score
            tensor_dict[name + '_gate'] = fusion_gate
        return output
def multi_dimensional_attention(rep_tensor, rep_mask, scope=None,
                                keep_prob=1., is_train=None, wd=0., activation='elu',
                                tensor_dict=None, name=None):
    """Source2token attention: pool a sequence into one vector.

    A two-layer dense map produces per-token, per-feature logits; a masked
    softmax over the sequence axis then weights the original tokens.

    Args:
        rep_tensor: float tensor [bs, sl, vec].
        rep_mask: bool tensor [bs, sl].
        Remaining args as in disan().
    Returns:
        float tensor [bs, vec].
    """
    bs, sl, vec = tf.shape(rep_tensor)[0], tf.shape(rep_tensor)[1], tf.shape(rep_tensor)[2]
    ivec = rep_tensor.get_shape()[2]
    with tf.variable_scope(scope or 'multi_dimensional_attention'):
        map1 = bn_dense_layer(rep_tensor, ivec, True, 0., 'bn_dense_map1', activation,
                              False, wd, keep_prob, is_train)
        map2 = bn_dense_layer(map1, ivec, True, 0., 'bn_dense_map2', 'linear',
                              False, wd, keep_prob, is_train)
        # masked softmax over the sequence axis, independently per feature
        map2_masked = exp_mask_for_high_rank(map2, rep_mask)
        soft = tf.nn.softmax(map2_masked, 1)  # bs,sl,vec
        attn_output = tf.reduce_sum(soft * rep_tensor, 1)  # bs, vec
        # save attn weights for inspection
        if tensor_dict is not None and name is not None:
            tensor_dict[name] = soft
        return attn_output
def bn_dense_layer(input_tensor, hn, bias, bias_start=0.0, scope=None,
                   activation='relu', enable_bn=True,
                   wd=0., keep_prob=1.0, is_train=None):
    """Dense layer with an optional batch norm and a named activation.

    Raises:
        AttributeError: if `activation` is not a recognised name.
    """
    if is_train is None:
        is_train = False
    # Resolve the activation function from its name.
    activations = {
        'linear': tf.identity,
        'relu': tf.nn.relu,
        'elu': tf.nn.elu,
        'selu': selu,
    }
    if activation not in activations:
        raise AttributeError('no activation function named as %s' % activation)
    activation_func = activations[activation]
    with tf.variable_scope(scope or 'bn_dense_layer'):
        linear_map = linear(input_tensor, hn, bias, bias_start, 'linear_map',
                            False, wd, keep_prob, is_train)
        if enable_bn:
            linear_map = tf.contrib.layers.batch_norm(
                linear_map, center=True, scale=True, is_training=is_train, scope='bn')
        return activation_func(linear_map)
def dropout(x, keep_prob, is_train, noise_shape=None, seed=None, name=None):
    """Apply dropout when keep_prob < 1, gated at runtime on `is_train`."""
    with tf.name_scope(name or "dropout"):
        assert is_train is not None
        if keep_prob >= 1.0:
            # No-op: return the input unchanged.
            return x
        dropped = tf.nn.dropout(x, keep_prob, noise_shape=noise_shape, seed=seed)
        return tf.cond(is_train, lambda: dropped, lambda: x)
def linear(args, output_size, bias, bias_start=0.0, scope=None, squeeze=False, wd=0.0, input_keep_prob=1.0,
           is_train=None):
    """Dense transform over the last dimension of (a list of) tensors.

    Flattens each input to rank 2, optionally applies input dropout (gated on
    `is_train`), runs a shared _linear layer, and restores the leading dims.

    Raises:
        ValueError: if `args` is None or an empty list/tuple.
    """
    if args is None or (isinstance(args, (tuple, list)) and not args):
        raise ValueError("`args` must be specified")
    if not isinstance(args, (tuple, list)):
        args = [args]
    flat_args = [flatten(arg, 1) for arg in args] # for dense layer [(-1, d)]
    if input_keep_prob < 1.0:
        assert is_train is not None
        flat_args = [tf.cond(is_train, lambda: tf.nn.dropout(arg, input_keep_prob), lambda: arg)# for dense layer [(-1, d)]
                     for arg in flat_args]
    flat_out = _linear(flat_args, output_size, bias, bias_start=bias_start, scope=scope) # dense
    # Restore the original leading dimensions of the first input.
    out = reconstruct(flat_out, args[0], 1)
    if squeeze:
        out = tf.squeeze(out, [len(args[0].get_shape().as_list())-1])
    if wd:
        # Register non-bias variables of the current scope for weight decay.
        add_reg_without_bias()
    return out
def _linear(xs, output_size, bias, bias_start=0., scope=None):
    """Single dense transform applied to the concatenation of `xs`."""
    with tf.variable_scope(scope or 'linear_layer'):
        x = tf.concat(xs, -1)
        input_size = x.get_shape()[-1]
        W = tf.get_variable('W', shape=[input_size, output_size], dtype=tf.float32)
        if not bias:
            return tf.matmul(x, W)
        b = tf.get_variable('bias', shape=[output_size], dtype=tf.float32,
                            initializer=tf.constant_initializer(bias_start))
        return tf.matmul(x, W) + b
def flatten(tensor, keep):
    """Collapse all but the last `keep` dimensions of `tensor` into one."""
    fixed_shape = tensor.get_shape().as_list()
    start = len(fixed_shape) - keep
    # Product of the leading dims; falls back to the dynamic tf.shape entry
    # wherever the static size is None.
    left = reduce(mul, [fixed_shape[i] or tf.shape(tensor)[i] for i in range(start)])
    out_shape = [left] + [fixed_shape[i] or tf.shape(tensor)[i] for i in range(start, len(fixed_shape))]
    flat = tf.reshape(tensor, out_shape)
    return flat
def reconstruct(tensor, ref, keep, dim_reduced_keep=None):
    """Inverse of flatten(): re-expand `tensor`'s leading dim to match `ref`.

    Keeps the last `dim_reduced_keep` (default `keep`) dims of `tensor` and
    prepends the leading dims of `ref` up to its last `keep` dims.
    """
    dim_reduced_keep = dim_reduced_keep or keep
    ref_shape = ref.get_shape().as_list()  # original shape
    tensor_shape = tensor.get_shape().as_list()  # current shape
    ref_stop = len(ref_shape) - keep  # how many leading dims to restore
    tensor_start = len(tensor_shape) - dim_reduced_keep  # first dim to keep
    # Static sizes where known, dynamic tf.shape entries otherwise.
    pre_shape = [ref_shape[i] or tf.shape(ref)[i] for i in range(ref_stop)]
    keep_shape = [tensor_shape[i] or tf.shape(tensor)[i] for i in range(tensor_start, len(tensor_shape))]
    # pre_shape = [tf.shape(ref)[i] for i in range(len(ref.get_shape().as_list()[:-keep]))]
    # keep_shape = tensor.get_shape().as_list()[-keep:]
    target_shape = pre_shape + keep_shape
    out = tf.reshape(tensor, target_shape)
    return out
def mask_for_high_rank(val, val_mask, name=None):
    """Zero out masked positions; the mask broadcasts over the last dim."""
    expanded_mask = tf.cast(tf.expand_dims(val_mask, -1), tf.float32)
    return tf.multiply(val, expanded_mask, name=name or 'mask_for_high_rank')
def exp_mask_for_high_rank(val, val_mask, name=None):
    """Push masked positions toward -inf so softmax gives them ~0 weight."""
    inverted = 1 - tf.cast(tf.expand_dims(val_mask, -1), tf.float32)
    return tf.add(val, inverted * VERY_NEGATIVE_NUMBER,
                  name=name or 'exp_mask_for_high_rank')
def selu(x):
    """Scaled exponential linear unit (SELU).

    Klambauer et al., "Self-Normalizing Neural Networks" (2017); the alpha
    and scale constants come from that paper.
    """
    # Fix: the op scope was mislabelled 'elu', which made these ops hard to
    # find in the graph; it only affects op names, not numerics.
    with tf.name_scope('selu') as scope:
        alpha = 1.6732632423543772848170429916717
        scale = 1.0507009873554804934193349852946
        return scale * tf.where(x >= 0.0, x, alpha * tf.nn.elu(x))
def add_reg_without_bias(scope=None):
    """Add every trainable non-bias variable (rank > 1) in `scope` to the
    'reg_vars' collection and return how many were added."""
    scope = scope or tf.get_variable_scope().name
    counter = 0
    for var in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=scope):
        # Rank-0/1 variables (biases, scalars) are excluded from regularisation.
        if len(var.get_shape().as_list()) > 1:
            tf.add_to_collection('reg_vars', var)
            counter += 1
    return counter
|
Violin string set with wound E. "Dr. Thomastik - Dominant": elastic perlon core wound with metal. The softness of gut but with superior response, durability and tonal quality. Available: medium and strong.
|
"""Module for constructing RNN Cells."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import math
from tensorflow.contrib.compiler import jit
from tensorflow.contrib.layers.python.layers import layers
from tensorflow.contrib.rnn.python.ops import core_rnn_cell
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import rnn_cell_impl
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import nest
# pylint: disable=protected-access
_Linear = core_rnn_cell._Linear # pylint: disable=invalid-name
# pylint: enable=protected-access
class GLSTMCell(rnn_cell_impl.RNNCell):
  """Group LSTM cell (G-LSTM).

  The implementation is based on:
    https://arxiv.org/abs/1703.10722
    O. Kuchaiev and B. Ginsburg
    "Factorization Tricks for LSTM Networks", ICLR 2017 workshop.
  """

  def __init__(self, num_units, initializer=None, num_proj=None,
               number_of_groups=1, forget_bias=1.0, activation=math_ops.tanh,
               reuse=None):
    """Initialize the parameters of G-LSTM cell.

    Args:
      num_units: int, The number of units in the G-LSTM cell
      initializer: (optional) The initializer to use for the weight and
        projection matrices.
      num_proj: (optional) int, The output dimensionality for the projection
        matrices.  If None, no projection is performed.
      number_of_groups: (optional) int, number of groups to use.
        If `number_of_groups` is 1, then it should be equivalent to LSTM cell
      forget_bias: Biases of the forget gate are initialized by default to 1
        in order to reduce the scale of forgetting at the beginning of
        the training.
      activation: Activation function of the inner states.
      reuse: (optional) Python boolean describing whether to reuse variables
        in an existing scope.  If not `True`, and the existing scope already
        has the given variables, an error is raised.

    Raises:
      ValueError: If `num_units` or `num_proj` is not divisible by
        `number_of_groups`.
    """
    super(GLSTMCell, self).__init__(_reuse=reuse)
    self._num_units = num_units
    self._initializer = initializer
    self._num_proj = num_proj
    self._forget_bias = forget_bias
    self._activation = activation
    self._number_of_groups = number_of_groups

    if self._num_units % self._number_of_groups != 0:
      raise ValueError("num_units must be divisible by number_of_groups")
    if self._num_proj:
      if self._num_proj % self._number_of_groups != 0:
        raise ValueError("num_proj must be divisible by number_of_groups")
      # [output size per group, hidden size per group]
      self._group_shape = [int(self._num_proj / self._number_of_groups),
                           int(self._num_units / self._number_of_groups)]
    else:
      self._group_shape = [int(self._num_units / self._number_of_groups),
                           int(self._num_units / self._number_of_groups)]

    if num_proj:
      self._state_size = rnn_cell_impl.LSTMStateTuple(num_units, num_proj)
      self._output_size = num_proj
    else:
      self._state_size = rnn_cell_impl.LSTMStateTuple(num_units, num_units)
      self._output_size = num_units
    # Per-group input transforms and the optional output projection are
    # created lazily on the first call().
    self._linear1 = [None] * self._number_of_groups
    self._linear2 = None

  @property
  def state_size(self):
    return self._state_size

  @property
  def output_size(self):
    return self._output_size

  def _get_input_for_group(self, inputs, group_id, group_size):
    """Slices inputs into groups to prepare for processing by cell's groups

    Args:
      inputs: cell input or it's previous state,
        a Tensor, 2D, [batch x num_units]
      group_id: group id, a Scalar, for which to prepare input
      group_size: size of the group

    Returns:
      subset of inputs corresponding to group "group_id",
      a Tensor, 2D, [batch x num_units/number_of_groups]
    """
    # self._batch_size is set by call() before this method is used.
    return array_ops.slice(input_=inputs,
                           begin=[0, group_id * group_size],
                           size=[self._batch_size, group_size],
                           name=("GLSTM_group%d_input_generation" % group_id))

  def call(self, inputs, state):
    """Run one step of G-LSTM.

    Args:
      inputs: input Tensor, 2D, [batch x num_units].
      state: this must be a tuple of state Tensors, both `2-D`,
        with column sizes `c_state` and `m_state`.

    Returns:
      A tuple containing:
      - A `2-D, [batch x output_dim]`, Tensor representing the output of the
        G-LSTM after reading `inputs` when previous state was `state`.
        Here output_dim is:
          num_proj if num_proj was set,
          num_units otherwise.
      - LSTMStateTuple representing the new state of G-LSTM cell
        after reading `inputs` when the previous state was `state`.

    Raises:
      ValueError: If input size cannot be inferred from inputs via
        static shape inference.
    """
    (c_prev, m_prev) = state
    # Prefer static sizes; fall back to dynamic shape ops when unknown.
    self._batch_size = inputs.shape[0].value or array_ops.shape(inputs)[0]
    input_size = inputs.shape[-1].value or array_ops.shape(inputs)[-1]
    dtype = inputs.dtype
    scope = vs.get_variable_scope()
    with vs.variable_scope(scope, initializer=self._initializer):
      i_parts = []
      j_parts = []
      f_parts = []
      o_parts = []

      # Each group transforms its own slice of the input and of the previous
      # output, producing that group's share of the i/j/f/o pre-activations.
      for group_id in range(self._number_of_groups):
        with vs.variable_scope("group%d" % group_id):
          x_g_id = array_ops.concat(
              [self._get_input_for_group(inputs, group_id,
                                         int(input_size / self._number_of_groups)),
               #self._group_shape[0]), # this is only correct if inputs dim = num_units!!!
               self._get_input_for_group(m_prev, group_id,
                                         int(self._output_size / self._number_of_groups))], axis=1)
               #self._group_shape[0])], axis=1)
          if self._linear1[group_id] is None:
            self._linear1[group_id] = _Linear(x_g_id, 4 * self._group_shape[1], False)
          R_k = self._linear1[group_id](x_g_id)  # pylint: disable=invalid-name
          i_k, j_k, f_k, o_k = array_ops.split(R_k, 4, 1)

        i_parts.append(i_k)
        j_parts.append(j_k)
        f_parts.append(f_k)
        o_parts.append(o_k)

      # Gate biases are shared across groups (applied after concatenation).
      bi = vs.get_variable(name="bias_i",
                           shape=[self._num_units],
                           dtype=dtype,
                           initializer=
                           init_ops.constant_initializer(0.0, dtype=dtype))
      bj = vs.get_variable(name="bias_j",
                           shape=[self._num_units],
                           dtype=dtype,
                           initializer=
                           init_ops.constant_initializer(0.0, dtype=dtype))
      bf = vs.get_variable(name="bias_f",
                           shape=[self._num_units],
                           dtype=dtype,
                           initializer=
                           init_ops.constant_initializer(0.0, dtype=dtype))
      bo = vs.get_variable(name="bias_o",
                           shape=[self._num_units],
                           dtype=dtype,
                           initializer=
                           init_ops.constant_initializer(0.0, dtype=dtype))

      i = nn_ops.bias_add(array_ops.concat(i_parts, axis=1), bi)
      j = nn_ops.bias_add(array_ops.concat(j_parts, axis=1), bj)
      f = nn_ops.bias_add(array_ops.concat(f_parts, axis=1), bf)
      o = nn_ops.bias_add(array_ops.concat(o_parts, axis=1), bo)

    # Standard LSTM state update with the grouped pre-activations.
    c = (math_ops.sigmoid(f + self._forget_bias) * c_prev +
         math_ops.sigmoid(i) * math_ops.tanh(j))
    m = math_ops.sigmoid(o) * self._activation(c)

    if self._num_proj is not None:
      with vs.variable_scope("projection"):
        if self._linear2 is None:
          self._linear2 = _Linear(m, self._num_proj, False)
        m = self._linear2(m)

    new_state = rnn_cell_impl.LSTMStateTuple(c, m)
    return m, new_state
|
Sveti Ilija (961 m) is the highest mountain and the most popular mountaineering destination on Pelješac.
The cypress reserve "Pod Gospu"
A special reserve of forest vegetation is known as the Cypress reserve "Pod Gospu" (a hill in front of the Franciscan monastery to the Bellevue hotel, between Orebić and Perna).
The cave is not yet open to the public due to archaeological research being carried out. Above the village Nakovana, in a cave called "Spila", archeologists have discovered and are researching the Illyrian sanctuary from the Hellenic period (4th-1st century before Christ).
Located on the promenade by the sea, the Maritime Museum, which was established in 1957 includes exhibitions of diverse historical material significant for the maritime industry of Orebić.
Another Orebić attraction is the Franciscan monastery and Church of Our Lady of Angels. The wonderful view from 150 metres above sea level, where the loggia of the monastery is located, provides unforgettable images of the Pelješac canal, Korčula, Mljet and Lastovo.
The three nave Church of Our Lady of Loreto - Delorita is one of the largest churches outside Dubrovnik.
The Church of the Helper of Christians was built by the end of the 19th century after a medieval church was turned into an elementary school.
The Church of the Holy Trinity in Kučište was built by the end of 1752 and is one of the most luxurious baroque chapels in 18th-century Dalmatia, with a rich late-baroque facade, a uniquely rounded interior and a marble altar.
The Dingač wine variety is far from just an ordinary wine, it has become a status and religious symbol, a holy liquid, based on the tradition of wine making in this region.
Postup is a top-quality red wine with a geographical origin that is created from the grapes of the Plavac Mali variety, exclusively from the location of Postup, on the southern slopes of the Pelješac peninsula.
If our cultural heritage tells us a story, the story of cultural heritage of Pelješac is never-ending.
On your way from Ston to the westernmost part of Pelješac peninsula, you can choose from a number of places where you can enjoy typical Dalmatian food. Aromas of the Mediterranean are part of our tradition.
A true delight for gourmands are the shellfish from the clear sea of the Malostonski bay.
Enjoy the fresh, raw oysters, splashed with a few lemon drops.
And when you lose your breath both due to weariness and beauty, it is time for a break. You can spend it over homemade Dalmatian specialties. The Mediterranean delicacies specific to Orebić should by all means be accompanied by a glass of good wine. The town of Potomje is notable as the place where Dingač, the first Croatian wine protected on an international level — and the first Croatian red wine so protected — is produced. The Pelješac vineyards of "Plavac mali" grow along the protected steep zone right by the sea, and the Pelješac peninsula itself is the cradle of the first-class Croatian wines Dingač and Postup. You, therefore, simply must treat your palate to the pleasure of experiencing that unique taste.
|
import sys
from pycparser.c_ast import *
from pycparser.c_parser import CParser, Coord, ParseError
from pycparser.c_lexer import CLexer
def expand_decl(decl):
    """ Converts the declaration into a nested list.

    Recursively descends the pycparser AST, emitting a
    ['NodeKind', ...details..., nested] list for each declarator layer.
    NOTE(review): node types not handled below fall through and implicitly
    return None.
    """
    typ = type(decl)

    if typ == TypeDecl:
        return ['TypeDecl', expand_decl(decl.type)]
    elif typ == IdentifierType:
        return ['IdentifierType', decl.names]
    elif typ == ID:
        return ['ID', decl.name]
    elif typ in [Struct, Union]:
        # decl.decls is None for a forward declaration.
        decls = [expand_decl(d) for d in decl.decls or []]
        return [typ.__name__, decl.name, decls]
    else:
        nested = expand_decl(decl.type)

        if typ == Decl:
            if decl.quals:
                return ['Decl', decl.quals, decl.name, nested]
            else:
                return ['Decl', decl.name, nested]
        elif typ == Typename: # for function parameters
            if decl.quals:
                return ['Typename', decl.quals, nested]
            else:
                return ['Typename', nested]
        elif typ == ArrayDecl:
            # Empty string when the array has no explicit dimension.
            dimval = decl.dim.value if decl.dim else ''
            return ['ArrayDecl', dimval, nested]
        elif typ == PtrDecl:
            return ['PtrDecl', nested]
        elif typ == Typedef:
            return ['Typedef', decl.name, nested]
        elif typ == FuncDecl:
            if decl.args:
                params = [expand_decl(param) for param in decl.args.params]
            else:
                params = []
            return ['FuncDecl', params, nested]
#-----------------------------------------------------------------
class NodeVisitor(object):
    """Preorder AST walker that tracks the parent of the node being visited."""

    def __init__(self):
        self.current_parent = None

    def visit(self, node):
        """Dispatch to visit_<ClassName> if defined, else generic_visit."""
        handler = getattr(self, 'visit_' + type(node).__name__, self.generic_visit)
        return handler(node)

    def visit_FuncCall(self, node):
        # Debugging hook: dump the call node and its recorded parent.
        print("Visiting FuncCall")
        print(node.show())
        print('---- parent ----')
        print(self.current_parent.show())

    def generic_visit(self, node):
        """Fallback visitor: preorder-visit children with this node as parent."""
        saved_parent = self.current_parent
        self.current_parent = node
        for child in node.children():
            self.visit(child)
        self.current_parent = saved_parent
if __name__ == "__main__":
    # Parse a tiny snippet exercising adjacent wide-string literals, then
    # dump the full AST with coordinates, attribute names and node names.
    source_code = r'''void foo() {
    L"hi" L"there";
}
'''
    parser = CParser()
    ast = parser.parse(source_code, filename='zz')
    ast.show(showcoord=True, attrnames=True, nodenames=True)
|
Where: Westmont Ridley-Tree Museum of Art, Westmont College, 955 La Paz Rd.
Perched at the entryway to the Westmont Museum of Art are two suspended busts, one a sort of drunken, red-nosed circus clown archetype and the other the inherently fragile, heat-fearing persona of a carrot-nosed snowman. They stare us down as we enter the space, like funny-creepy sentries challenging us to look away, or come on in.
Welcome to the strange and wonderful – even smirking and enchanted – world of “tug,” a deceptively cool, lower-case three-letter-word for a show bubbling over with allusions to the grand human comedy, from the philosophically comic tag team of Dane Goodman and Keith Puccinelli. The cherished Westmont Museum, a remarkable art space and local cultural resource entering its sixth year, is starting off its new academic year on a strong note, and a definitively complex one.
Two of the more intriguing and individualistic artists to call Santa Barbara home, they have been partners in art crime before, in different ways. Mr. Puccinelli brought his circus to town a few years ago, with a memorable show at the Atkinson Gallery of Santa Barbara City College. It was the last year Mr. Goodman was the director there, and his own art shares with Mr. Puccinelli a certain puckish, serio-comic flair.
In “tug,” the artists fuse and melt away the traditional insistence on artists taking singular credit for their work. Who is responsible for which piece, which idea belongs to whom, and who do we blame or credit? The ambiguousness of accountability becomes part of the artistic gesture and aesthetic itself. We’re not always sure whether to laugh or wince at some of this art, and the gray area push-pull now extends to the touch of the perpetrators themselves, nudging the art-going experience into their own private theater of the absurd.
Texts burp into the crypto-cartoony pictures as well, such as the alliterative wordplay “hemoglobin, hell’s-a-poppin’, hobgoblin.” In another drawing, a pumpkin-head pugilist breathlessly asks the existential artistic question, “What should be done and what will it mean and what does it matter and why ask anyway and aren’t these questions old and maybe it’s time to move on . . .” Fair enough.
Inside the large main gallery, a collection of works serve the twin functions of being discrete entities while also conspiring toward a sense of an installation whole. Center stage in the dim-lit gallery is a large boat, with an exterior like illuminated mosaic or stained glass windows (an inherent wink at religious ritual). It’s like a vessel in search of self, in search of curious responses from beholders equipped with individual interpretations, and also in search of a muted laugh.
At the tail end of the boat is a perhaps Homeric kite-flying snowman in a spotlight (in a room where the frigid temperature is part of the art). Elsewhere in the gallery are touches of sobriety within the celebratory, from a hand-crafted power plant in one corner to a back window that has been blacked out except for festive bulb-shaped lights, and one menacing plane image threatening to spoil the pending party with impending doom.
On one wall hangs a raggedy menagerie of drawings, in multiple idioms, sizes, materials and degrees of dark and light, and characteristics both sinister and giddy. Quite by contrast, on the other side of the space-charging boat enterprise is a display of art-as-sleepwear pieces, a lark on first impression with a thickening plot upon closer scrutiny.
Innocence meets vulnerability and angst. We find hints of prison garb, harsh headlines on the newsprint PJs, and a black pair of pajamas reading “Joe W. 1950-2001,” serving as wake-up calls. Yet all is not dark and foreboding here, and the oddly charming PJ party wall also has a “sleep tight”-styled coziness about it and a certain Dr. Seuss-ish panache. (Then again, Dr. Seuss had his surrealist hobgoblins to impart, too).
Somehow, opposites attract and also refuse to identify themselves in the art of Dane and Keith. Lines of demarcation between good and evil blur, just as the artists’ own creative thumbprints and roles blur and bend. In that way, the “tug” in this art – and between the artists – is palpable, even as it covers its tracks.
|
from devp2p import peermanager
from devp2p import crypto
from devp2p.app import BaseApp
from rlp.utils import encode_hex
import devp2p.muxsession
import rlp
import devp2p.p2p_protocol
import time
import gevent
import copy
import socket
def try_tcp_connect(addr):
    """Open a TCP connection to `addr` and close it again.

    Raises the underlying OSError if nothing is listening there.
    """
    s = socket.socket()
    try:
        s.connect(addr)
    finally:
        # Fix: the original leaked the socket when connect() raised.
        s.close()
def test_app_restart():
    """Start/stop a devp2p BaseApp with a PeerManager repeatedly.

    The test passes when no exception (e.g. a port-bind error from the
    previous iteration's socket) is raised across the restart cycles.
    """
    host, port = '127.0.0.1', 3020
    a_config = dict(p2p=dict(listen_host=host, listen_port=port),
                    node=dict(privkey_hex=encode_hex(crypto.sha3(b'a'))))

    a_app = BaseApp(a_config)
    peermanager.PeerManager.register_with_app(a_app)

    # Restart app 10-times: there should be no exception
    for i in range(10):
        a_app.start()
        assert a_app.services.peermanager.server.started
        try_tcp_connect((host, port))
        assert a_app.services.peermanager.num_peers() == 0
        a_app.stop()
        assert a_app.services.peermanager.is_stopped

    # Start the app 10-times: there should be no exception like 'Bind error'
    for i in range(10):
        a_app.start()
        assert a_app.services.peermanager.server.started
        try_tcp_connect((host, port))
        a_app.stop()
        assert a_app.services.peermanager.is_stopped
if __name__ == '__main__':
    # Run stand-alone with debug logging enabled.
    # ethereum -loglevel 5 --bootnodes ''
    import ethereum.slogging
    ethereum.slogging.configure(config_string=':debug')
    test_app_restart()
|
“We would achieve the greatest environmental effect if politicians did not decide which technology the bus manufacturers use, but rather what environmental targets the technology should achieve.” This is what Edward Jobson, Volvo Buses Environmental Director writes on his new Internet blog.
Today, environmental issues top the agenda in most areas in society. This is particularly true of the automotive industry, which must intensify efforts to reduce emissions of greenhouse gases.
Buses play an important role in efforts to reduce emissions from vehicle traffic. With an expanded and efficiently operating bus traffic, more people opt to leave their car and take the bus instead.
Because interest in the environmental field is so great today, Edward Jobson has started an environmental blog on Volvo Buses’ website.
|
# cdn.jsdelivr.net
import logging
from time import time, sleep
from threading import Thread, RLock
from random import random, shuffle, choice
from six.moves.urllib.parse import urlparse
from mitmproxy.models import HTTPResponse
from netlib.http import Headers
from cachebrowser.pipes.base import FlowPipe
from cachebrowser.util import get_flow_size, pretty_bytes
logger = logging.getLogger(__name__)
# Default downstream byte-size estimate (also used by NetStatKeeper).
DOWNSTREAM_STD = 100000


def should_i(prob):
    """Return True with probability ``prob`` (a float in [0, 1])."""
    roll = random()
    return roll < prob
class ScramblerPipe(FlowPipe):
    """Flow pipe that blocks ad requests and pads real traffic with decoys.

    For every user request the pipe may (a) short-circuit it with a dummy
    response when the ad blocker matches, and (b) issue decoy requests until
    the observed downstream traffic reaches the requested traffic plus the
    configured overhead budget.
    """

    PROB_AD_BLOCK = 1.0
    PROB_AD_DECOY = 1.0
    PROB_DECOY = 0.2
    OVERHEAD = 0.1  # allowed decoy traffic as a fraction of real traffic

    BLOCK_ADS = True

    def __init__(self, *args, **kwargs):
        super(ScramblerPipe, self).__init__(*args, **kwargs)

        self.overhead = self.OVERHEAD
        self.drop_ads = True
        self.send_decoys = True

        self.org_names = self.read_org_names()
        self.adblocker = AdBlocker()
        self.netstats = NetStatKeeper(self.org_names)
        self.decoymaker = DecoyMaker(self.netstats, self.org_names)
        self.api = ScramblerAPI(self.context, self)

        self.block_count = 0
        self.notblock_count = 0

        # Byte counters: "overhead" is decoy traffic, "traffic" is real
        # user traffic.
        self.upstream_overhead = 0
        self.upstream_traffic = 0
        self.downstream_overhead = 0
        self.downstream_traffic = 0
        self.decoysent = 0
        self.decoyreceived = 0

        self.user_requests = 0
        self.blocked_requests = 0

    def start(self):
        """Load the ad-block lists and the decoy URL catalogue."""
        # super(Scrambler, self).start()
        self.adblocker.load_blacklist(self.context.settings.data_path('scrambler/ad-domains'),
                                      self.context.settings.data_path('scrambler/blacklist'))
        self.decoymaker.load_decoys(self.context.settings.data_path('scrambler/decoy.json'))

    def reset(self):
        """Zero all counters and sliding-window statistics."""
        self.block_count = 0
        self.notblock_count = 0

        self.netstats.reset()

        self.upstream_overhead = 0
        self.upstream_traffic = 0
        self.downstream_overhead = 0
        self.downstream_traffic = 0
        self.decoysent = 0
        self.decoyreceived = 0
        self.decoymaker.inflight = 0

        self.user_requests = 0
        self.blocked_requests = 0

    def get_stats(self):
        """Return a snapshot of the current counters for the UI/IPC layer."""
        return {
            # NOTE(review): always 0 while block_count is tracked
            # separately — confirm whether this should report block_count.
            'blocked': 0,
            'upstream_overhead': self.upstream_overhead,
            'upstream_normal': self.upstream_traffic,
            'downstream_overhead': self.downstream_overhead,
            'downstream_normal': self.downstream_traffic,
            'decoys': self.decoyreceived,
            'decoys_sent': self.decoysent,
            'max_overhead': self.overhead,
            'user_requests': self.user_requests,
            'blocked_requests': self.blocked_requests,
            'adblock_enabled': self.BLOCK_ADS
        }

    def serverconnect(self, server_conn):
        # No per-connection work needed; hook kept for the pipe interface.
        pass

    def print_stats(self):
        """Debug helper: dump in-flight estimate and traffic counters."""
        print(self.decoymaker.inflight)
        print("Sent: {} Received: {} Overhead: {} Traffic: {} Overhead: {} Traffic: {} ".format(self.decoysent, self.decoyreceived,
              pretty_bytes(self.downstream_overhead), pretty_bytes(self.downstream_traffic),
              pretty_bytes(self.upstream_overhead), pretty_bytes(self.upstream_traffic)))

    def request(self, flow):
        """Account for an outgoing request; block ads and emit decoys."""
        is_decoy = hasattr(flow, 'is_decoy') and flow.is_decoy

        if is_decoy:
            self.netstats.update_real_upstream(flow)
            self.upstream_overhead += get_flow_size(flow)[0]
            self.decoysent += 1
        else:
            self.netstats.update_real_upstream(flow)
            self.netstats.update_requested_upstream(flow)
            self.upstream_traffic += get_flow_size(flow)[0]
            self.user_requests += 1

            if self.BLOCK_ADS and self.adblocker.should_block(flow):
                self.blocked_requests += 1
                self.dummy_response(flow)
                # Replace the blocked ad with a decoy from a different org.
                self._send_decoy_request(skip_netname=_whois(flow, self.org_names))
            else:
                # Send up to 6 decoys until real+in-flight downstream
                # traffic covers requested traffic plus the overhead budget.
                for _ in range(6):
                    wanted = sum(self.netstats.requested_downstream_traffic.values())
                    actual = sum(self.netstats.real_downstream_traffic.values())
                    if actual + self.decoymaker.inflight < wanted + wanted * self.overhead:
                        self._send_decoy_request()

    def response(self, flow):
        """Account for an incoming response (real or decoy)."""
        is_decoy = hasattr(flow, 'is_decoy') and flow.is_decoy

        if is_decoy:
            self.netstats.update_real_downstream(flow)
            self.decoymaker.record_decoy_received(flow)
            self.decoyreceived += 1
            self.downstream_overhead += get_flow_size(flow)[1]
        else:
            self.netstats.update_real_downstream(flow)
            self.netstats.update_requested_downstream(flow)
            self.downstream_traffic += get_flow_size(flow)[1]

    def read_org_names(self):
        """Read the organization names from the decoy catalogue, plus 'OTHER'."""
        with open(self.context.settings.data_path('scrambler/decoy.json')) as f:
            import json
            netname_data = json.loads(f.read())
        # dict.keys() is a view on Python 3 and has no append(); materialize
        # it as a list before adding the catch-all entry.
        org_names = list(netname_data.keys())
        org_names.append('OTHER')
        return org_names

    def _send_decoy_request(self, skip_netname=None):
        """Pick a decoy URL (avoiding ``skip_netname``) and fire the request."""
        decoyurl = self.decoymaker.get_decoy_url(skip_netname)
        if decoyurl is not None:
            new_flow = self.create_request_from_url('GET', decoyurl)

            # Don't update stats on dummy request
            new_flow.outgoing_request = True
            new_flow.is_decoy = True

            self.send_request(new_flow, run_hooks=True)
            self.decoymaker.record_decoy_sent(new_flow, decoyurl)

    def handle_ads(self, flow):
        """Block an ad flow and optionally replace it with a decoy request.

        Returns True when the flow was blocked.
        """
        if self.adblocker.should_block(flow) and self.drop_ads and should_i(self.PROB_AD_BLOCK):
            self.dummy_response(flow)

            if self.send_decoys and should_i(self.PROB_AD_DECOY):
                # NOTE(review): passes the flow where get_decoy_url expects a
                # netname, so the skip filter never matches — confirm intent.
                decoy_url = self.decoymaker.get_decoy_url(flow)
                if decoy_url is not None:
                    new_flow = self.create_request_from_url('GET', decoy_url)

                    # Don't update stats on dummy request
                    new_flow.outgoing_request = True
                    new_flow.is_dummy = True

                    self.send_request(new_flow, run_hooks=True)
            return True
        return False

    def dummy_response(self, flow):
        """Reply to ``flow`` locally with a 444 'Blocked' response."""
        resp = HTTPResponse(
            "HTTP/1.1", 444, "Blocked",
            Headers(Content_Type="text/html"),
            "You got blocked by CDNReaper")
        flow.reply(resp)

    def error(self, flow):
        # Errors are ignored; counters are only updated on request/response.
        pass
class ScramblerAPI(object):
    """IPC endpoints for reading and changing scrambler settings."""

    def __init__(self, context, scrambler):
        self.scrambler = scrambler
        handlers = [
            ('/scrambler/get/settings', self.get_settings),
            ('/scrambler/set/settings', self.set_settings),
            ('/scrambler/enable', self.enable_scrambler),
            ('/scrambler/disable', self.disable_scrambler),
        ]
        for endpoint, handler in handlers:
            context.ipc.register_rpc(endpoint, handler)

    def get_settings(self, context, request):
        """Reply with the current scrambler settings."""
        scrambler = self.scrambler
        request.reply({
            'result': 'success',
            'settings': {
                'enabled': scrambler.enabled,
                'overhead': scrambler.overhead,
                'drops': scrambler.drop_ads,
                'decoys': scrambler.send_decoys
            }
        })

    def set_settings(self, context, request):
        """Update whichever settings are present in the request parameters."""
        params = request.params
        if 'enabled' in params:
            self.scrambler.enabled = bool(params['enabled'])
        if 'overhead' in params:
            self.scrambler.overhead = int(params['overhead'])
        if 'drops' in params:
            self.scrambler.drop_ads = bool(params['drops'])
        if 'decoys' in params:
            self.scrambler.send_decoys = bool(params['decoys'])
        request.reply({
            'result': 'success'
        })

    def enable_scrambler(self, context, request):
        """Turn the scrambler on and acknowledge."""
        self.scrambler.enable()
        request.reply({'result': 'success'})

    def disable_scrambler(self, context, request):
        """Turn the scrambler off and acknowledge."""
        self.scrambler.disable()
        request.reply({'result': 'success'})
class NetStatKeeper(object):
    """Sliding-window traffic statistics, per organization.

    Tracks four streams -- requested/real x upstream/downstream -- both as
    per-org deques of (timestamp, byte_count) samples and as per-org byte
    totals. A daemon thread expires samples older than ``S`` seconds and
    rebuilds the totals once per second, under ``self.lock``.
    """
    # Rough per-message size estimates in bytes (module-level default reused).
    UPSTREAM_STD = 200
    DOWNSTREAM_STD = DOWNSTREAM_STD
    # Window length in seconds.
    S = 10

    def __init__(self, org_names):
        from collections import deque
        self.org_names = org_names
        # Per-org deques of (timestamp, byte_count) samples.
        self.requested_upstream = {}
        self.requested_downstream = {}
        # Per-org running byte totals over the window.
        self.requested_upstream_traffic = {}
        self.requested_downstream_traffic = {}
        self.real_upstream = {}
        self.real_downstream = {}
        self.real_upstream_traffic = {}
        self.real_downstream_traffic = {}
        self.lock = RLock()
        # self.outgoing_lock = RLock()
        for org in org_names:
            self.requested_upstream[org] = deque()
            self.requested_downstream[org] = deque()
            self.real_downstream[org] = deque()
            self.real_upstream[org] = deque()
            self.real_downstream_traffic[org] = 0
            self.real_upstream_traffic[org] = 0
            self.requested_downstream_traffic[org] = 0
            self.requested_upstream_traffic[org] = 0

        def refresher():
            # Drop samples older than ``threshold`` from the left of each
            # deque; ``threshold`` is read from the enclosing loop below.
            def refresh(ds):
                for k in ds:
                    while len(ds[k]):
                        if ds[k][0][0] < threshold:
                            ds[k].popleft()
                        else:
                            break
            # Runs forever as a daemon thread: expire old samples, then
            # recompute every per-org total from the surviving samples.
            while True:
                sleep(1)
                now = time()
                threshold = now - self.S
                with self.lock:
                    refresh(self.requested_downstream)
                    refresh(self.requested_upstream)
                    refresh(self.real_downstream)
                    refresh(self.real_upstream)
                    for netname in org_names:
                        self.requested_upstream_traffic[netname] = 0
                        for item in self.requested_upstream[netname]:
                            self.requested_upstream_traffic[netname] += item[1]
                        self.requested_downstream_traffic[netname] = 0
                        for item in self.requested_downstream[netname]:
                            self.requested_downstream_traffic[netname] += item[1]
                        self.real_upstream_traffic[netname] = 0
                        for item in self.real_upstream[netname]:
                            self.real_upstream_traffic[netname] += item[1]
                        self.real_downstream_traffic[netname] = 0
                        for item in self.real_downstream[netname]:
                            self.real_downstream_traffic[netname] += item[1]
        refresh_thread = Thread(target=refresher)
        refresh_thread.daemon = True
        refresh_thread.start()

    def update_requested_downstream(self, flow):
        """Record the response size of ``flow`` as requested downstream traffic."""
        ip = _get_flow_ip(flow)
        if ip is None:
            return
        _, resp = get_flow_size(flow)
        netname = _whois(ip, self.org_names)
        with self.lock:
            self.requested_downstream_traffic[netname] += resp
            self.requested_downstream[netname].append((time(), resp))

    def update_requested_upstream(self, flow):
        """Record the request size of ``flow`` as requested upstream traffic."""
        ip = _get_flow_ip(flow)
        if ip is None:
            return
        req, _ = get_flow_size(flow)
        netname = _whois(ip, self.org_names)
        with self.lock:
            self.requested_upstream_traffic[netname] += req
            self.requested_upstream[netname].append((time(), req))

    def update_real_downstream(self, flow):
        """Record the response size of ``flow`` as real downstream traffic."""
        ip = _get_flow_ip(flow)
        if ip is None:
            return
        _, resp = get_flow_size(flow)
        netname = _whois(ip, self.org_names)
        with self.lock:
            self.real_downstream_traffic[netname] += resp
            self.real_downstream[netname].append((time(), resp))

    def update_real_upstream(self, flow):
        """Record the request size of ``flow`` as real upstream traffic."""
        ip = _get_flow_ip(flow)
        if ip is None:
            return
        req, _ = get_flow_size(flow)
        netname = _whois(ip, self.org_names)
        with self.lock:
            self.real_upstream_traffic[netname] += req
            self.real_upstream[netname].append((time(), req))

    def reset(self):
        """Clear all recorded samples (totals are rebuilt by the refresher)."""
        with self.lock:
            for key in self.requested_downstream:
                self.requested_downstream[key].clear()
                self.requested_upstream[key].clear()
                self.real_downstream[key].clear()
                self.real_upstream[key].clear()
class DecoyMaker(object):
    """Chooses decoy URLs and tracks the estimated bytes still in flight.

    Decoys are drawn preferentially from organizations with the least
    recent requested upstream traffic.
    """

    def __init__(self, netstats, org_names):
        self.netstats = netstats
        self.decoy_urls = {}   # netname -> list of candidate URLs
        self.decoy_sizes = {}  # url -> estimated response size in bytes
        self.netnames = []
        self.inflight = 0      # estimated bytes of outstanding decoy responses
        for org in org_names:
            self.netnames.append(org)

    def get_decoy_url(self, skip_netname=None):
        """Pick a decoy URL, avoiding ``skip_netname``; None when none fit."""
        flow_netname = skip_netname
        # Shuffle first so ties in the sort key are broken randomly.
        shuffle(self.netnames)

        def key(netname):
            if netname == flow_netname:
                return 100000
            if netname == 'OTHER':
                return 100000
            if netname not in self.decoy_urls or not len(self.decoy_urls[netname]):
                return 50000
            return self.netstats.requested_upstream_traffic[netname]
        self.netnames.sort(key=key)

        netname = self.netnames[0]
        if netname not in self.decoy_urls or not len(self.decoy_urls[netname]):
            return None
        return choice(self.decoy_urls[netname])

    def record_decoy_sent(self, flow, url):
        """Account for a decoy request that was just sent."""
        flow.estimated_size = self.decoy_sizes[url]
        self.inflight += flow.estimated_size

    def record_decoy_received(self, flow):
        """Account for a decoy response that has arrived."""
        self.inflight -= flow.estimated_size

    def load_decoys(self, decoys_path):
        """Load the ``netname -> {url: size}`` decoy catalogue from JSON."""
        import json
        # json loads strings as unicode, which causes problems with saving
        # flows -- hence the str() conversions below.
        with open(decoys_path) as f:
            decoy_urls = json.loads(f.read())
        for netname in decoy_urls:
            self.decoy_urls[netname] = [str(s) for s in decoy_urls[netname].keys()]
            for url in decoy_urls[netname]:
                self.decoy_sizes[str(url)] = decoy_urls[netname][url]
class AdBlocker(object):
    """Matches flows against an ad-domain set and a URL-pattern blacklist."""

    def __init__(self):
        self.single_dom = set()
        self.multi_dom = set()
        self.adset = set()
        self.blacklist = []

    def should_block(self, flow):
        """Return True when the flow's host or URL matches a block rule."""
        from fnmatch import fnmatch

        host = urlparse(flow.request.url).netloc
        # Check every multi-label suffix of the hostname against the
        # ad-domain set (e.g. a.b.ads.com -> ads.com, b.ads.com, ...).
        labels = host.split('.')
        suffix = labels.pop()
        while labels:
            suffix = '{}.{}'.format(labels.pop(), suffix)
            if suffix in self.adset:
                return True

        stripped = flow.request.url.replace('https://', '').replace('http://', '')
        return any(fnmatch(stripped, pattern) for pattern in self.blacklist)

    def load_blacklist(self, ad_domains_path, blacklist_path):
        """Populate the domain sets and URL patterns from the given files."""
        with open(ad_domains_path) as f:
            for line in f:
                domain = line.strip()
                if not domain:
                    continue
                if domain.count('.') == 1:
                    self.single_dom.add(domain)
                else:
                    self.multi_dom.add(domain)
                self.adset.add(domain)

        with open(blacklist_path) as f:
            for line in f:
                pattern = line.strip()
                if pattern:
                    self.blacklist.append(pattern)
def _get_flow_ip(flow):
    """Best-effort resolution of the server IP for a flow.

    Prefers the already-connected peer address; otherwise resolves the
    request's hostname via ``dig`` and returns the first address found,
    or None when resolution yields nothing.
    """
    conn = flow.server_conn
    if conn and conn.peer_address:
        return conn.peer_address.host

    hostname = urlparse(flow.request.url).netloc
    addresses, _ = _dig(hostname)
    return addresses[0] if addresses else None
# Cache of ip -> organization name so each address is WHOIS-queried at
# most once per process.
_whois_cache = {}


def _whois(ip, org_names):
    """Map an IP address (or a flow) to a known organization name.

    Results are cached in ``_whois_cache``; lookup failures map to
    'OTHER' instead of raising.
    """
    from ipwhois import IPWhois
    # Callers sometimes pass a flow instead of an address string.
    if not isinstance(ip, str):
        ip = _get_flow_ip(ip)
    if ip not in _whois_cache:
        whois = IPWhois(ip)
        try:
            name = whois.lookup_rdap()['network']['name']
            if not name:
                # Fall back to the legacy whois lookup.
                name = whois.lookup()['nets'][0]['name']
        except Exception:
            # Narrowed from a bare except: still treat any lookup failure
            # as an unknown org, but let SystemExit/KeyboardInterrupt through.
            print("WHOIS ERROR")
            name = 'OTHER'
        _whois_cache[ip] = _clean_netname(org_names, name, ip)
    return _whois_cache[ip]
def clean_netname(netname):
    """Convert a raw whois netname into a canonical organization name."""
    # Known organizations, identified by substrings of the lowercased name.
    ORGS = [
        ('GOOGLE', ['google']),
        ('AKAMAI', ['akamai', 'umass']),
        ('AMAZON', ['at-', 'amazo']),
        # ('CLOUDFRONT', []),
        ('FASTLY', ['fastly']),
        ('CLOUDFLARE', ['cloudflare']),
        ('EDGECAST', ['edgecast']),
        ('HIGHWINDS', ['highwind']),
        ('INCAPSULA', ['incapsula']),
        ('MAXCDN', ['netdna']),
        ('CDNET', ['cdnet']),
        ('TWITTER', ['twitter']),
        ('INAP', ['inap-']),
        ('LINODE', ['linode']),
        ('DIGITALOCEAN', ['digitalocean']),
        ('YAHOO', ['yahoo']),
        ('FACEBOOK', ['facebook', 'ord1', 'tfbnet']),
        ('OTHER', []),
    ]

    # Only the first whitespace-separated token is meaningful.
    if ' ' in netname:
        netname = netname.split()[0]
    lowered = netname.lower()

    for org_name, needles in ORGS:
        if any(needle in lowered for needle in needles):
            return org_name

    # Unknown organization: derive a name from the leading dash-separated
    # component(s), e.g. 'FOO-BAR-BAZ' -> 'FOO-BAR', 'FOO-12-BAZ' -> 'FOO'.
    parts = netname.split()[0].split('-')
    if len(parts) < 3:
        cleaned = parts[0]
    elif parts[1].isdigit():
        cleaned = parts[0]
    else:
        cleaned = '-'.join(parts[:2])

    if cleaned.startswith('WEBAIRINTERNET12'):
        cleaned = 'WEBAIRINTERNET12'
    return cleaned
def _clean_netname(org_names, name, ip):
    """Normalize a whois netname and map it onto a known organization.

    Returns the canonical organization name when it is one of
    ``org_names`` (the orgs we keep statistics for), else 'OTHER'.
    """
    org = clean_netname(name)
    # Bug fix: compare the *cleaned* organization name against the known
    # list. Previously the raw whois name was compared, so the cleaned
    # value was computed but never used and almost everything mapped to
    # 'OTHER'.
    if org in org_names:
        return org
    return 'OTHER'
def _parse_dig(raw_dig):
import re
if len(raw_dig.strip()) == 0:
return [], []
lines = raw_dig.strip().split('\n')
ip = []
domains = []
for line in lines:
line = line.strip()
if re.match('^\d+[.]\d+[.]\d+[.]\d+$', line):
ip.append(line)
else:
domains.append(line)
return ip, domains
def _dig(site, raw=False):
    """Resolve ``site`` with the ``dig`` command-line tool.

    Returns the raw output when ``raw`` is true, otherwise the
    (ips, domains) pair produced by ``_parse_dig``.

    NOTE(review): on Python 3 ``communicate()`` yields bytes while
    ``_parse_dig`` expects text — presumably this runs under Python 2;
    confirm before porting.
    """
    from subprocess import Popen, PIPE

    proc = Popen(["dig", "+short", site], stdout=PIPE)
    output, _ = proc.communicate()
    proc.wait()
    if raw:
        return output
    return _parse_dig(output)
|
There is concern for the safety of Mahfuz Elci, aged 62, and 14 other people detained on the night of 18 November in Altinova, province of Mus, during a renewed attack on the town by security forces. The reason for the attack, during which three houses were destroyed, is unknown. There is concern at reports that Mahfuz Elci and two others have been severely ill-treated. It is believed that the detainees have been taken to the Brigade Gendarmerie Headquarters in Mus for interrogation, where it is feared that they are being subjected to torture.
|
from __future__ import absolute_import
from __future__ import unicode_literals
from .basemodel import HueBaseObject
from .user import User
class BridgeConfig(HueBaseObject):
    """Read-only view over the Hue bridge 'config' JSON resource."""

    def __init__(self, bridge, json):
        super(BridgeConfig, self).__init__(bridge, 'config', json)

    # -- Versions ------------------------------------------------------

    @property
    def api_version(self):
        """The bridge REST API version string."""
        return self._json['apiversion']

    @property
    def version(self):
        """The bridge software (firmware) version."""
        return self._json['swversion']

    @property
    def bridge_id(self):
        """The unique identifier of the bridge."""
        return self._json['bridgeid']

    # -- Network stuff -------------------------------------------------

    @property
    def dhcp(self):
        """Whether the bridge address was obtained via DHCP."""
        return self._json['dhcp']

    @property
    def gateway(self):
        """The configured network gateway address."""
        return self._json['gateway']

    @property
    def mac(self):
        """The bridge MAC address."""
        return self._json['mac']

    @property
    def netmask(self):
        """The configured network mask."""
        return self._json['netmask']

    @property
    def zigbeechannel(self):
        """The ZigBee channel the bridge operates on."""
        return self._json['zigbeechannel']

    @property
    def factorynew(self):
        """Whether the bridge is factory-new."""
        return self._json['factorynew']

    @property
    def timezone(self):
        """The configured timezone name."""
        return self._json['timezone']

    @property
    def localtime(self):
        """The bridge's local time."""
        return self._json['localtime']

    @property
    def utc(self):
        """The bridge's time in UTC."""
        return self._json['UTC']

    @property
    def users(self):
        """All whitelisted users, as User objects."""
        return [User(username=name, json=data)
                for name, data in self._json['whitelist'].items()]

    def update_check(self):
        """Ask the bridge to check for a software update, then refresh."""
        res = self._request(data={"swupdate": {"checkforupdate": True}})
        self.update()
        return res
|
Replacing a two-story building on its existing 18-acre campus, this U.S. headquarters is Hyundai Motor America’s (HMA) largest project undertaking. The square "floating" building offers an open courtyard and is encased in a glass and metal curtain wall, materials reflective of the automotive industry. Aside from open offices, the HMA headquarters includes a showroom, automotive tech center and a parking structure. The HMA workplace strategy was based on a rigorous research process conducted by Gensler’s design team. It addresses the workplace as a strategic tool for creating a more effective and successful organization.
|
#!/usr/bin/python2.5
#
# Parses XRD documents.
#
# Copyright 2009 DeWitt Clinton
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import imports
import xrd_pb2
# As specified in:
# http://www.oasis-open.org/committees/download.php/33772/xrd-1.0-wd04.html
XRD_NAMESPACE = 'http://docs.oasis-open.org/ns/xri/xrd-1.0'

# As specified in http://www.w3.org/TR/xml-names/
XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'

# As specified in http://www.w3.org/TR/xmlschema-1/
XSI_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance'

# The etree syntax for qualified element names
XRD_QNAME = '{%s}%s' % (XRD_NAMESPACE, 'XRD')
EXPIRES_QNAME = '{%s}%s' % (XRD_NAMESPACE, 'Expires')
SUBJECT_QNAME = '{%s}%s' % (XRD_NAMESPACE, 'Subject')
PROPERTY_QNAME = '{%s}%s' % (XRD_NAMESPACE, 'Property')
ALIAS_QNAME = '{%s}%s' % (XRD_NAMESPACE, 'Alias')
LINK_QNAME = '{%s}%s' % (XRD_NAMESPACE, 'Link')
TITLE_QNAME = '{%s}%s' % (XRD_NAMESPACE, 'Title')

# The etree syntax for qualified attribute names
ID_ATTRIBUTE = '{%s}%s' % (XML_NAMESPACE, 'id')
LANG_ATTRIBUTE = '{%s}%s' % (XML_NAMESPACE, 'lang')
NIL_ATTRIBUTE = '{%s}%s' % (XSI_NAMESPACE, 'nil')
class ParseError(Exception):
    """Raised when a document cannot be parsed as XRD."""
class Parser(object):
    """Converts XML documents into xrd_pb2.Xrd instances.

    NOTE: this module targets Python 2.5 (see the shebang) and uses the
    Python 2 'except ExcType, name' syntax below; do not run under Python 3
    without porting.
    """

    def __init__(self, etree=None):
        """Constructs a new XRD parser.

        Args:
          etree: The etree module to use [optional]; defaults to the
              C-accelerated xml.etree.cElementTree.
        """
        if etree:
            self._etree = etree
        else:
            import xml.etree.cElementTree
            self._etree = xml.etree.cElementTree

    def parse(self, string):
        """Converts XML strings into an xrd_pb2.Xrd instances

        Args:
          string: A string containing an XML XRD document.
        Returns:
          A xrd_pb2.Xrd instance.
        Raises:
          ParseError if the element can not be parsed
        """
        if not string:
            raise ParseError('Empty input string.')
        try:
            document = self._etree.fromstring(string)
        # Python 2 syntax; cElementTree signals malformed XML with a
        # SyntaxError subclass.
        except SyntaxError, e:
            raise ParseError('Could not parse %s\nError: %s' % (string, e))
        if document.tag != XRD_QNAME:
            raise ParseError('Root is not an <XRD/> element: %s' % document)
        description = xrd_pb2.Xrd()
        # Populate each section of the proto from the document in turn.
        self._parse_id(document, description)
        self._parse_expires(document, description)
        self._parse_subject(document, description)
        self._parse_properties(document, description)
        self._parse_aliases(document, description)
        self._parse_links(document, description)
        return description

    def _parse_id(self, xrd_element, description):
        """Finds a xml:id attribute and adds it to the Xrd proto.

        Args:
          xrd_element: An XRD Element
          description: The xrd_pb2.Xrd instance to be added to
        """
        id_attribute = xrd_element.get(ID_ATTRIBUTE)
        if id_attribute is not None:
            description.id = id_attribute

    def _parse_expires(self, xrd_element, description):
        """Finds an Expires element and adds it to the Xrd proto.

        Args:
          xrd_element: An XRD Element
          description: The xrd_pb2.Xrd instance to be added to
        """
        expires_element = xrd_element.find(EXPIRES_QNAME)
        if expires_element is not None:
            description.expires = expires_element.text

    def _parse_subject(self, xrd_element, description):
        """Finds an Subject element and adds it to the Xrd proto.

        Args:
          xrd_element: An XRD Element
          description: The xrd_pb2.Xrd instance to be added to
        """
        subject_element = xrd_element.find(SUBJECT_QNAME)
        if subject_element is not None:
            description.subject = subject_element.text

    def _parse_properties(self, xrd_element, description):
        """Finds Property elements and adds them to the Xrd proto.

        Also used for the Property children of Link elements.

        Args:
          xrd_element: An XRD Element
          description: The xrd_pb2.Xrd instance to be added to
        """
        for property_element in xrd_element.findall(PROPERTY_QNAME):
            property_pb = description.properties.add()
            # xsi:nil="true" marks an explicitly null-valued property.
            property_pb.nil = (property_element.get(NIL_ATTRIBUTE) == 'true')
            property_type = property_element.get('type')
            if property_type != None:
                property_pb.type = property_type
            if property_element.text is not None:
                property_pb.value = property_element.text

    def _parse_aliases(self, xrd_element, description):
        """Finds Alias elements and adds them to the Xrd proto.

        Args:
          xrd_element: An XRD Element
          description: The xrd_pb2.Xrd instance added to
        """
        for alias_element in xrd_element.findall(ALIAS_QNAME):
            description.aliases.append(alias_element.text)

    def _parse_links(self, xrd_element, description):
        """Finds Link elements and adds them to the Xrd proto.

        Each link carries optional rel/type/href/template attributes plus
        nested Property and Title elements.

        Args:
          xrd_element: An XRD Element
          description: The xrd_pb2.Xrd instance to be added to
        """
        for link_element in xrd_element.findall(LINK_QNAME):
            link = description.links.add()
            rel = link_element.get('rel')
            if rel is not None:
                link.rel = rel
            type_attribute = link_element.get('type')
            if type_attribute is not None:
                link.type = type_attribute
            href = link_element.get('href')
            if href is not None:
                link.href = href
            template = link_element.get('template')
            if template is not None:
                link.template = template
            self._parse_properties(link_element, link)
            self._parse_titles(link_element, link)

    def _parse_titles(self, xrd_element, description):
        """Finds Title elements and adds them to the proto.

        Args:
          xrd_element: An XRD Element
          description: The xrd_pb2.Xrd instance to be added to
        """
        for title_element in xrd_element.findall(TITLE_QNAME):
            title = description.titles.add()
            lang = title_element.get(LANG_ATTRIBUTE)
            if lang is not None:
                title.lang = lang
            if title_element.text is not None:
                title.value = title_element.text
class JsonMarshaller(object):
    """Serializes xrd_pb2.Xrd messages (or lists of them) to JSON."""

    def __init__(self):
        # Prefer simplejson when installed; fall back to the stdlib module.
        try:
            import simplejson as json
        except ImportError:
            import json
        self._json = json

    def to_json(self, description_or_descriptions, pretty=False):
        """Serialize one description, or a list of them, to a JSON string."""
        if isinstance(description_or_descriptions, list):
            output = [self._to_object(d) for d in description_or_descriptions]
        else:
            output = self._to_object(description_or_descriptions)
        if pretty:
            return self._json.dumps(output, indent=2)
        return self._json.dumps(output)

    def _to_object(self, description):
        """Convert a single description into a plain JSON-ready dict.

        Empty/falsy fields are omitted entirely.
        """
        result = dict()
        if description.id:
            result['id'] = description.id
        if description.expires:
            result['expires'] = description.expires
        if description.subject:
            # jsmarr: note we're intentionally dropping any attributes on subject
            result['subject'] = description.subject
        if description.aliases:
            # jsmarr: note we're intentionally dropping any attributes on aliases
            result['aliases'] = [str(alias) for alias in description.aliases]
        if description.properties:
            props = list()
            for prop in description.properties:
                entry = dict()
                if prop.type:
                    entry['type'] = prop.type
                if prop.value:
                    entry['value'] = prop.value
                props.append(entry)
            result['properties'] = props
        if description.links:
            links = list()
            for link in description.links:
                link_obj = dict()
                for attr in ('rel', 'type', 'href', 'template'):
                    value = getattr(link, attr)
                    if value:
                        link_obj[attr] = value
                if link.titles:
                    # jsmarr: note we're assuming at most one title-per-language
                    titles = dict()
                    for title in link.titles:
                        if not title.value:
                            continue
                        title_lang = title.lang or ''
                        if title_lang not in titles:
                            titles[title_lang] = title.value
                    if titles:
                        link_obj['titles'] = titles
                links.append(link_obj)
            result['links'] = links
        # jsmarr: note we're not representing signature in json
        return result
|
I’ve been vegetarian since 2006, and vegan since 2013.
Even though my husband is vegan, he isn’t always on board with some of eco friendly ideas.
I love essential oils and herbs.
I have 6 tattoos and 8 piercings – though most people can’t tell.
I have my BA in psychology.
I have psoriasis – an immune disorder that causes my body to attack itself and make new skin cells too often.
Our house has a spa and I have never used it.
I used to own a natural mineral makeup company.
Although I don’t consider myself to be a minimalist, getting rid of things makes me happy.
Both of my kids cosleep and we have no plans to quit.
I believe that by making small, easy changes, we can all live a greener life, and I’m here to support moms in their journey of natural parenting and green living. Whether your goal is for a medication free childbirth, nontoxic products for your children, or discovering more natural remedies for every day life, I’m here to inspire and support you through your journey.
Welcome to Mama Hippie, bohemian living for the everyday parent.
Feel free to connect with me on social media as well. I’m on Facebook, Pinterest, Twitter, and Instagram. You can join my mailing list here.
I have around 400 posts to date, but I thought it would be fun to show you some of my favorites. In no particular order, here are my top 12 favorite posts.
Sign up for my email list and get my natural labor hospital bag checklist free!
|
import requests
import six
import six.moves.urllib.parse as urlparse
class OpenRefineClient(object):
    """Minimal HTTP client for the OpenRefine 'core' command API."""

    _COMMANDS = {
        'get_version': 'command/core/get-version',
        'get_all_project_metadata': 'command/core/get-all-project-metadata',
        'get_project_metadata': 'command/core/get-project-metadata',
        'create_project_from_upload': 'command/core/create-project-from-upload',
        'delete_project': 'command/core/delete-project',
        'export_rows': 'command/core/export-rows',
    }

    def __init__(self, server_url):
        if not isinstance(server_url, six.string_types):
            raise TypeError('"server_url" must be a string')
        self.server_url = server_url

    @property
    def version(self):
        """The OpenRefine server version string."""
        url = self._generate_url(self._COMMANDS['get_version'])
        res = requests.get(url)
        return res.json()['version']

    def create_project(self, name, filepath):
        """Create a project named ``name`` from the file at ``filepath``.

        Returns the new project's id, or None when the server response
        does not contain one.
        """
        url = self._generate_url(self._COMMANDS['create_project_from_upload'])
        with open(filepath, 'rb') as project_file:
            params = {
                'project-name': name,
            }
            files = {
                'file': project_file,
            }
            res = requests.post(url, allow_redirects=False,
                                data=params, files=files)
            if res.is_redirect and res.headers.get('location'):
                redirected_to = urlparse.urlparse(res.headers.get('location'))
                query_params = urlparse.parse_qs(redirected_to.query)
                # Bug fix: parse_qs omits missing keys entirely, so
                # ``.get('project')[0]`` raised TypeError when the redirect
                # carried no 'project' parameter. Guard before indexing.
                project_ids = query_params.get('project')
                if project_ids:
                    return project_ids[0]

    def get_projects(self):
        """Return the metadata dict of all projects (possibly empty)."""
        url = self._generate_url(self._COMMANDS['get_all_project_metadata'])
        res = requests.get(url)
        return res.json().get('projects', {})

    def get_project(self, project_id):
        """Return metadata for one project, or None on HTTP failure."""
        url = self._generate_url(self._COMMANDS['get_project_metadata'])
        res = requests.get(url, params={'project': project_id})
        if res.status_code == 200:
            return res.json()

    def delete_project(self, project_id):
        """Delete a project; True on success, None on HTTP failure."""
        url = self._generate_url(self._COMMANDS['delete_project'])
        res = requests.post(url, params={'project': project_id})
        if res.status_code == 200:
            return res.json().get('code') == 'ok'

    def export_project(self, project_id, file_format='csv'):
        """Export all rows of a project in ``file_format`` (default 'csv')."""
        url = self._generate_url(self._COMMANDS['export_rows'])
        res = requests.post(url, params={
            'project': project_id,
            'format': file_format,
        })
        return res.text

    def _generate_url(self, command):
        # All endpoints are resolved relative to the configured server URL.
        return urlparse.urljoin(self.server_url, command)
|
The Namibia National Student Organisation (NANSO) has said that it will continue campaigning and protesting against injustice against poverty stricken students following a 'successful' 2016 #VARSITYLOCKDOWN.
“As students in this capitalist state, we have a mission to fulfil, turning the tables in favour of poor and poverty stricken students, in a status quo where only the ruling black elites and whites are enjoying the fruits of independence,” NANSO said in a press release issued by President Wilhelem Wilhemen.
After staging a successful lockdown at the Namibia University of Science and Technology (NUST), NANSO staged another protest at the University of Namibia (UNAM). After no clear progress was made, UNAM took NANSO to court.
Following recent decision to allow students to register and for the absolution of debts, NANSO has declared the 2016 #VARSITYLOCKDOWN a success.
“It met our purpose and revealed our strengths and renewed hope in a brighter future. #VARSITYLOCKDOWN was not a want but a necessity,” NANSO said in the press release.
NANSO further encouraged students at all tertiary institutions across the country to remain united.
|
#!/bin/python3
"""
this file is a part of pymoldmaker
Copyright (C) 2015 Brandon J. Van Vaerenbergh
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from PIL import Image
from PIL import ImageDraw
class Canvas:
    """A drawable raster image that can be written out as an image file."""

    def __init__(self):
        """Create the backing PIL image and pair it with a draw object."""
        mode = 'RGB'
        self.dpi = 72  # 25.4*15
        size = (400, 300)
        color_background = 'white'
        self.image = Image.new(mode, size, color_background)
        # Prepare the image for drawing.
        color_lines = 'black'
        self.draw = ImageDraw.Draw(self.image)
        self.draw.setink(color_lines)

    def draw_line(self, poly_line_mm):
        """Draw a polygonal line given as a list of (x, y) tuples in mm."""
        self.draw.line(poly_line_mm)

    def save(self, destination):
        """Save the internal image to the given path or file object."""
        self.image.save(destination)

    def mm_to_px(self, mm):
        """Convert millimeters to whole pixels at the canvas dpi (truncating)."""
        mm_per_inch = 25.4
        px = mm * (self.dpi / mm_per_inch)
        if (px % 1) != 0:
            pass  # TODO: support mapping fractional mm to PS strokes/dots
            # raise ValueError("specified dpi has resulted in lost precision")
        return int(px)
|
With just a little more than a week before the New England Regional Chili Cookoff it is starting to look like the largest chili event in New England!
All the numbers aren’t in as of yet but we expect a large number of chili cooks and we’d love to add your name to our roster!
It’s a simple matter of going to our web site http://chilict.tripod.com/ (produced by Twisted Kat Pack Productions) and signing up for all three WCCC qualifying events.
Then you too can enjoy the camaraderie of chili cooks from 11 states, the chance to experience the pleasure brought by CT.Chair Massage,the opportunity to meet the Three Hot Tamales from NJ (Their motto is, “We make grown men cry”) and a beautiful New England country setting with a back drop of old tobacco barns!
With the live entertainment including 3 bands, Mexican folklore dancers and a world-renowned magician, the day will be excitement-packed for all!
We hope you can join us on May 5th in Somers,CT. as we endeavor to help the Enfield Food Shelf!
YOU guys are the BESTEST!!!!
We can’t wait to see you next Friday for the NERCC chili cooks welcome Parrrrrrrrrrrty! AND ….then the next day at the NERCC!
Looks like you guys are gonna have a blast next weekend! Sorry I’m unable to make it but the other two Tamales will cause just as much damage, uh, excitement without me. Best of luck!
Can’t wait Mike and Kat! Lisa and I are looking forward to it! See you guys on Friday night!
I am sorry about the “et al”…I’m bad with names….my name is……oh…I know.
But….if only you could join us….the day would be made brighter by your presence.
Instead you will force us to all be without you…..we’ll take tons of pics….but it won’t be the same…at all!
Can’t wait to see all the pics! Make me proud girls!
THREE Days and counting to the BIG Meltdown…(mine).
The cooks reception looks to be BIG Friday night!
The cookoff is going to be BIG!!!!!!
We can’t wait to meet y’all finally!
IT’S GOING TO BE HOTTTTTTTTTTTTTT!!!!!!!
|
#
# Copyright (C) 2012 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""Handles attaching and converting metadata in a netmap networkx toplogy
graph"""
from collections import defaultdict
import logging
from django.core.urlresolvers import reverse
import operator
from nav.netmap.config import NETMAP_CONFIG
from nav.errors import GeneralException
from nav.models.manage import GwPortPrefix, Interface
from nav.netmap import stubs
from nav.web.netmap.common import get_status_image_link
_LOGGER = logging.getLogger(__name__)
class NetmapException(GeneralException):
    """Generic Netmap Exception

    Base class for all exceptions raised by the netmap metadata layer.
    """
    pass
class GraphException(NetmapException):
    """Graph Exception

    This exception is normally thrown if it finds something odd in the graph
    from nav.topology or the metadata contains known errors.
    """
    pass
# Ignore too few methods in class
# pylint: disable=R0903
class Node(object):
    """Node object represent a node in the netmap_graph

    Makes it easier to validate data and convert node to valid json.
    """

    def __init__(self, node, nx_node_metadata=None):
        """
        :param node: a nav.models.manage.Netbox or nav.netmap.stubs.Netbox
        :param nx_node_metadata: networkx node-data dict; if present, only
            its 'metadata' entry is kept.
        """
        self.node = node
        if nx_node_metadata and 'metadata' in nx_node_metadata:
            self.metadata = nx_node_metadata['metadata']
        else:
            self.metadata = None

    def __repr__(self):
        return "netmap.Node(metadata={0!r})".format(self.metadata)

    def to_json(self):
        """json presentation of Node

        Returns a one-entry dict keyed by the node id (as unicode), so
        multiple nodes' json can be merged into a single object.
        """
        json = {}

        if self.metadata:
            # Saved map position, if the user has placed this node before
            if 'position' in self.metadata:
                json.update({
                    'position': {
                        'x': self.metadata['position'].x,
                        'y': self.metadata['position'].y
                    }})
            if 'vlans' in self.metadata:  # Layer2 metadata
                # metadata['vlans'] holds (nav_vlan_id, SwPortVlan) pairs
                json.update({
                    'vlans': [nav_vlan_id for nav_vlan_id, _ in
                              self.metadata['vlans']]
                })
                if NETMAP_CONFIG.getboolean('API_DEBUG'):
                    json.update({
                        'd_vlans': [vlan_to_json(swpv.vlan) for _, swpv in
                                    self.metadata['vlans']]
                    })

        if isinstance(self.node, stubs.Netbox):
            # Stub netboxes come from elink topology data and carry only a
            # few attributes; mark them so the client can render them apart.
            json.update({
                'id': str(self.node.id),
                'sysname': self.node.sysname,
                'category': str(self.node.category_id),
                'is_elink_node': True
            })
        else:
            # Full Netbox model: include location and status details
            json.update({
                'id': str(self.node.id),
                'sysname': str(self.node.sysname),
                'category': str(self.node.category_id),
                'ip': self.node.ip,
                'ipdevinfo_link': reverse('ipdevinfo-details-by-name',
                                          args=[self.node.sysname]),
                'up': str(self.node.up),
                'up_image': get_status_image_link(self.node.up),
                'roomid': self.node.room.id,
                'locationid': unicode(self.node.room.location.id),
                'location': unicode(self.node.room.location.description),
                'room': unicode(self.node.room),
                'is_elink_node': False,
            })
        return {unicode(self.node.id) : json}
# Ignore too few methods in class
# pylint: disable=R0903
class Group(object):
    """Grouping object for representing a Netbox and Interface in a Edge

    The optional attributes (gw_ip, virtual, vlans) are filled in by Edge:
    gw_ip/virtual from a GwPortPrefix (layer 3), vlans from SwPortVlan
    records (layer 2).
    """

    def __init__(self, netbox=None, interface=None):
        self.netbox = netbox
        self.interface = interface
        # Layer 3 only: copied from GwPortPrefix.gw_ip by Edge
        self.gw_ip = None
        # Layer 3 only: copied from GwPortPrefix.virtual by Edge
        self.virtual = None
        # Layer 2 only: set of SwPortVlan unique to this side of the edge
        self.vlans = None

    def __repr__(self):
        return ("netmap.Group(netbox={0!r}, interface={1!r}, gw_ip={2!r}"
                ", virtual={3!r}, vlans={4!r})").format(
            self.netbox, self.interface, self.gw_ip, self.virtual, self.vlans)

    def __hash__(self):
        # Consistent with __eq__, which compares (netbox, interface)
        return hash(self.netbox) + hash(self.interface)

    def __eq__(self, other):
        if not isinstance(other, type(self)):
            return False
        else:
            return (self.netbox == other.netbox and
                    self.interface == other.interface)

    def to_json(self):
        """json presentation of Group"""
        json = {
            'netbox': unicode(self.netbox.id),
        }

        if self.interface is not None:
            ipdevinfo_link = None
            # '?' is used as a placeholder ifname; no detail page exists then
            if self.interface.ifname and self.interface.ifname != '?':
                ipdevinfo_link = reverse(
                    'ipdevinfo-interface-details-by-name',
                    kwargs={'netbox_sysname': unicode(
                        self.netbox.sysname),
                        'port_name': unicode(
                            self.interface.ifname)})

            json.update({'interface': {
                'ifname': unicode(self.interface.ifname),
                'ipdevinfo_link': ipdevinfo_link
            }})

        if self.gw_ip is not None:
            json.update({'gw_ip': self.gw_ip})
        if self.virtual is not None:
            json.update({'virtual': self.virtual})
        if self.vlans is not None:
            json.update({'vlans': [swpv.vlan.id for swpv in self.vlans]})
        if NETMAP_CONFIG.getboolean('API_DEBUG'):
            json.update({'d_netbox_sysname': unicode(self.netbox.sysname)})
            json.update(
                {'d_vlans': [vlan_to_json(swpv.vlan) for swpv in self.vlans]})
        return json
# Ignore too few methods in class
# pylint: disable=R0903
class Edge(object):
    """Represent an edge pair in either a Layer2 or Layer3 topology graph.

    Wraps a networkx edge (a pair of Netbox-like nodes) together with the
    per-direction metadata (Group instances) and derived attributes such as
    layer, link speed, prefix and vlans.
    """

    def _valid_layer2(self, edge):
        """Return True if *edge* is a layer-2 endpoint (an Interface)."""
        return isinstance(edge, (Interface, stubs.Interface))

    def _valid_layer3(self, edge):
        """Return True if *edge* is a layer-3 endpoint (a GwPortPrefix)."""
        return isinstance(edge, (GwPortPrefix, stubs.GwPortPrefix))

    def _get_layer(self, source, target):
        """Determine the OSI layer (2 or 3) of this edge.

        A None endpoint is compatible with either layer; the other endpoint
        decides.

        NOTE: the parentheses are essential. ``and`` binds tighter than
        ``or``, so the previous unparenthesized version evaluated
        ``A or (B and C) or D`` and misclassified e.g. a layer-3 edge with a
        None target as layer 2.
        """
        if ((self._valid_layer2(source) or source is None)
                and (self._valid_layer2(target) or target is None)):
            return 2
        elif ((self._valid_layer3(source) or source is None)
                and (self._valid_layer3(target) or target is None)):
            return 3
        else:
            raise NetmapException("Could not determine layer for this edge."
                                  " This should _not_ happen")

    def _same_layer(self, source, target):
        """Return True if source and target belong to the same layer."""
        return (self._valid_layer2(source) and self._valid_layer2(target)
                or self._valid_layer3(source) and self._valid_layer3(target)
        )

    def __init__(self, nx_edge, source, target, traffic=None):
        """
        :param nx_edge: NetworkX edge representing (source,target) in a tuple
         .(they be nav.models.Netbox or nav.netmap.stubs.Netbox)
        :param source: source, where it is either of type Interface or type
         GwPortPrefix.
        :param target: target, where it is either of type Interface or type
         GwPortPrefix
        :param traffic: optional traffic statistics object for this edge
        :raises GraphException: if source and target are of different layers,
         or both are None.
        """
        if source is not None and target is not None:
            if not self._same_layer(source, target):
                raise GraphException(
                    "Source and target has to be of same type, typically "
                    "Interfaces in layer2 graph or"
                    "GwPortPrefixes in layer3 graph")
        elif source is None and target is None:
            raise GraphException("Source & target can't both be None! Bailing!")

        self.errors = []
        self.source = self.target = self.vlan = self.prefix = None
        # Robustness: guarantee the attribute exists even when neither
        # endpoint carries interface metadata (previously it could be unset,
        # making __repr__/to_json raise AttributeError).
        self.link_speed = None
        nx_source, nx_target = nx_edge

        # Wrap each endpoint in a Group; layer 3 endpoints also carry
        # gateway IP and virtual-gateway information.
        if self._valid_layer2(source):
            self.source = Group(source.netbox, source)
        elif self._valid_layer3(source):
            self.source = Group(source.interface.netbox, source.interface)
            self.source.gw_ip = source.gw_ip
            self.source.virtual = source.virtual
        if self._valid_layer2(target):
            self.target = Group(target.netbox, target)
        elif self._valid_layer3(target):
            self.target = Group(target.interface.netbox, target.interface)
            self.target.gw_ip = target.gw_ip
            self.target.virtual = target.virtual

        # Basic metadata validation, lets copy over Netbox data which is valid
        # as metadata if metadata building didn't manage to fetch it's data.
        # (this is due to Metadata in L2 is built on Interface<->Interface,
        # both sides is not necessary known in the topology graph when building
        # it)
        # This could also be the case for L3, but since the topology method
        # stubs.Netbox and stubs.Interface, we don't really have the same issue
        # in an L3 graph.
        if self.source is None:
            self.source = Group(nx_source)
        if self.target is None:
            self.target = Group(nx_target)

        # Swap directional metadata to follow nx graph edge.
        if (self.source.netbox.id != nx_source.id) and (
                self.source.netbox.id == nx_target.id):
            self.source, self.target = self.target, self.source

        self.layer = self._get_layer(source, target)

        if self.layer == 3:
            # Contract check kept as an assert for parity with the original;
            # both prefixes of an L3 edge must belong to the same vlan.
            assert source.prefix.vlan.id == target.prefix.vlan.id, (
                "Source and target GwPortPrefix must reside in same VLan for "
                "Prefix! Bailing")

            self.prefix = source.prefix
            self.vlan = source.prefix.vlan

        self.traffic = traffic

        # Link speed: agree when both sides agree; otherwise record the
        # mismatch and use the slower side.
        if (self.source and self.source.interface is not None) and (
                self.target and self.target.interface is not None):
            if self.source.interface.speed == self.target.interface.speed:
                self.link_speed = self.source.interface.speed
            else:
                self.errors.append("Mismatch between interface speed")
                if self.source.interface.speed < self.target.interface.speed:
                    self.link_speed = self.source.interface.speed
                else:
                    self.link_speed = self.target.interface.speed
        elif self.source and self.source.interface is not None:
            self.link_speed = self.source.interface.speed
        elif self.target and self.target.interface is not None:
            self.link_speed = self.target.interface.speed

        self.vlans = []

    def __hash__(self):
        return hash(self.source) + hash(self.target)

    def __eq__(self, other):
        if not isinstance(other, type(self)):
            return False
        else:
            return self.source == other.source and self.target == other.target

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return ("netmap.Edge(layer={0!r}, source={1!r}, target={2!r},"
                "link_speed={3!r}, vlans={4!r}, vlan={5!r},"
                "prefix={6!r})").format(self.layer, self.source, self.target,
                                        self.link_speed, self.vlans, self.vlan,
                                        self.prefix)

    def to_json(self):
        """json presentation of Edge"""
        json = {
            'source': self.source.to_json() or 'null',
            'target': self.target.to_json() or 'null',
        }
        if self.layer == 3:
            json.update({'prefix': {
                'net_address': unicode(self.prefix.net_address),
                'report_link': reverse('report-prefix-prefix',
                                       kwargs={'prefix_id': self.prefix.id})
            }})
            json.update({'vlan': self.prefix.vlan.id})
        elif self.layer == 2:
            json.update({'vlans': [swpv.vlan.id for swpv in self.vlans]})
        json.update({'link_speed': self.link_speed or 'N/A'})
        json.update(
            {'traffic': self.traffic and self.traffic.to_json() or None})
        return json
def vlan_to_json(vlan):
    """Return a plain-dict JSON representation of a Vlan model instance.

    Maps the model's ``vlan`` number, NAV-internal id, net ident and
    description into the keys the netmap client expects.
    """
    return {
        'vlan': vlan.vlan,
        'nav_vlan': vlan.id,
        'net_ident': vlan.net_ident,
        'description': vlan.description,
    }
def get_vlan_lookup_json(vlan_by_interface):
    """Build a lookup dict mapping NAV vlan id -> vlan JSON.

    :param vlan_by_interface: dict whose values are lists of SwPortVlan;
        duplicate vlan ids collapse to a single entry.
    """
    return {swpv.vlan.id: vlan_to_json(swpv.vlan)
            for swpv_list in vlan_by_interface.itervalues()
            for swpv in swpv_list}
def node_to_json_layer2(node, nx_metadata=None):
    """Convert a node to json, for use in a netmap layer2 graph

    :param node: a Netbox model
    :param nx_metadata: metadata dict from the networkx graph
    :return: json presentation of a node.
    """
    wrapped = Node(node, nx_metadata)
    return wrapped.to_json()
def node_to_json_layer3(node, nx_metadata=None):
    """Convert a node to json, for use in a netmap layer3 graph

    :param node: a Netbox model
    :param nx_metadata: metadata dict from the networkx graph
    :return: json presentation of a node.
    """
    wrapped = Node(node, nx_metadata)
    return wrapped.to_json()
def edge_to_json_layer2(nx_edge, metadata):
    """Convert a edge between A and B in a netmap layer2 graph to JSON

    :param nx_edge: (source, target) tuple of Netbox-like nodes
    :param metadata: networkx edge-data dict; metadata['metadata'] is a list
        of Edge instances between this node pair
    :return edge representation in JSON
    """
    source, target = nx_edge

    edges = metadata['metadata']
    metadata_for_edges = []
    all_vlans = set()
    # Union the vlan sets of every parallel Edge while collecting their
    # individual json forms
    for edge in edges:
        all_vlans = all_vlans | edge.vlans
        metadata_for_edges.append(edge.to_json())

    json = {
        'source': unicode(source.id),
        'target': unicode(target.id),
        'vlans' : [swpv.vlan.id for swpv in all_vlans],
        'edges': metadata_for_edges
    }

    if NETMAP_CONFIG.getboolean('API_DEBUG'):
        json.update({
            'd_source_sysname': unicode(source.sysname),
            'd_target_sysname': unicode(target.sysname),
            'd_vlans': [vlan_to_json(swpv.vlan) for swpv in all_vlans]
        })
    return json
def edge_to_json_layer3(nx_edge, nx_metadata):
    """Convert a edge between A and B in a netmap layer 3 graph to JSON

    :param nx_edge: (source, target) tuple of Netbox-like nodes
    :param nx_metadata: Metadata from netmap networkx graph; its 'metadata'
        entry maps vlan id -> list of Edge instances
    :type nx_metadata: dict
    :return edge representation in JSON
    """
    source, target = nx_edge

    metadata_collection = defaultdict(list)
    for vlan_id, edges in nx_metadata['metadata'].iteritems():
        for edge in edges:
            metadata_collection[vlan_id].append(edge.to_json())

    # Sort each vlan's edge list by prefix. The previous version rebound the
    # loop variable (``value = sorted(...)``) and discarded the result, so
    # the lists were never actually sorted.
    for key, value in metadata_collection.iteritems():
        metadata_collection[key] = sorted(value,
                                          key=operator.itemgetter('prefix'))

    json = {
        'source': unicode(source.id),
        'target': unicode(target.id),
        'edges': metadata_collection
    }

    if NETMAP_CONFIG.getboolean('API_DEBUG'):
        json.update({
            'd_source_sysname': unicode(source.sysname),
            'd_target_sysname': unicode(target.sysname),
        })
    return json
def edge_metadata_layer3(nx_edge, source, target, traffic):
    """Build layer-3 edge metadata for the netmap graph.

    :param nx_edge: tuple containing source and target
     (nav.models.manage.Netbox or nav.netmap.stubs.Netbox)
    :param source: nav.models.manage.GwPortPrefix
    :param target: nav.models.manage.GwPortPrefix
    :param traffic: traffic statistics for this edge
    :returns: Edge metadata to attach to the netmap graph
    """
    # Note about GwPortPrefix and L3 graph: interface.netbox is always
    # available under the L3 topology graph due to stubbing Netboxes etc
    # for elinks.
    return Edge(nx_edge, source, target, traffic)
def edge_metadata_layer2(nx_edge, source, target, vlans_by_interface, traffic):
    """Build layer-2 edge metadata for the netmap graph.

    :param nx_edge: tuple containing source and target
     (nav.models.manage.Netbox or nav.netmap.stubs.Netbox)
    :param source: nav.models.manage.Interface (from port_pairs nx metadata)
    :param target: nav.models.manage.Interface (from port_pairs nx metadata)
    :param vlans_by_interface: dict for fetching SwPortVlan lists per
        interface
    :param traffic: traffic statistics for this edge
    :returns: Edge metadata to attach to the netmap graph.
    """
    edge = Edge(nx_edge, source, target, traffic)

    src_swpvs = ()
    tgt_swpvs = ()
    if vlans_by_interface and source in vlans_by_interface:
        src_swpvs = tuple(vlans_by_interface.get(source))
    if vlans_by_interface and target in vlans_by_interface:
        tgt_swpvs = tuple(vlans_by_interface.get(target))

    src_set = set(src_swpvs)
    tgt_set = set(tgt_swpvs)

    # Vlans seen on only one side are kept per-side; the edge itself carries
    # the union of both sides.
    edge.source.vlans = src_set - tgt_set
    edge.target.vlans = tgt_set - src_set
    edge.vlans = src_set | tgt_set
    return edge
|
In London, people do drive to work, but you can easily get by without a car due to the fantastic public transport system. If you do choose to drive, you should be aware of parking restrictions, and the very high toll to drive into the centre of the city during the week! Most people who work centrally use public transport (bus or tube) to work and those who live centrally would not require a car at all to get around. The tube and train network covers London very well. Biking is also becoming increasingly popular in London, especially with the recent introduction of 'Boris bikes', a cheap, user-friendly bike hire scheme.
EU members and those from commonwealth countries can exchange licenses on arrival in the UK. Within a year of arriving in the UK, American citizens will need to take a driving test through the DVLA (Driver and Vehicle Licensing Agency) to obtain a license and insurance - which are legal requirements for driving.
It is very easy to get around London by bus, overland train and the underground subway system (the tube) which covers most of the city and surrounding areas.
Yes, there are frequent coaches/buses and trains to other locations in the UK and to the areas surrounding London.
No, although it is wise to be aware that pickpockets operate in public transport areas, and you should try to avoid displaying your wallet or phone to others. London is generally a very safe city to get around and at any time there will usually be hundreds of people making their way around alongside you. The streets are mostly brightly lit and well maintained, and vehicles are usually respectful of cyclists and pedestrians - especially in the city centre.
For both the overland and underground train, you can obtain tickets at the station. You may also buy train and bus tickets at local newsstands, online or by phone. It is recommended you buy an Oyster card when you first arrive as this will greatly reduce the cost of your journey. Most people buy a weekly, monthly or yearly ticket to save on costs.
|
#!/usr/bin/python -E
import sys,os
import MySQLdb
import ConfigParser
import base64
import time
import getpass
def readFromUser():
    """Prompt interactively for the MySQL connection settings.

    Fills in the module-level globals HOST, USER, PASSWD and DB.
    """
    global HOST,USER,PASSWD,DB
    HOST=raw_input("Database host:")
    USER=raw_input("Database user:")
    # getpass avoids echoing the password to the terminal
    PASSWD=getpass.getpass("Database pass:")
    DB=raw_input("Database name:")
def readConfig():
    """Read MySQL connection settings from altd_db.conf into the globals.

    The PASSWD value is stored base64-encoded in the file (obfuscation
    only, not encryption). If the file is missing the [MYSQL] section or
    any expected option, fall back to interactive input.
    """
    try:
        global HOST,USER,PASSWD,DB
        config=ConfigParser.ConfigParser()
        config.read("altd_db.conf")
        HOST=config.get("MYSQL","HOST")
        USER=config.get("MYSQL","USER")
        PASSWD=base64.b64decode(config.get("MYSQL","PASSWD"))
        DB=config.get("MYSQL","DB")
    # Also catch NoSectionError: a config file without the [MYSQL] section
    # previously crashed instead of falling back to user input.
    except (ConfigParser.NoSectionError, ConfigParser.NoOptionError) as err:
        sys.stderr.write("\nCannot parse the config file\n")
        sys.stderr.write("Switch to user input mode...\n\n")
        readFromUser()
def writeConfig():
    """Persist the connection settings to a timestamped config file.

    The password is base64-encoded (obfuscation only -- NOT encryption),
    the file is created mode 0640, and the 'altd_db.conf' symlink is
    re-pointed at the newest file.
    """
    config=ConfigParser.ConfigParser()
    config.add_section("MYSQL")
    config.set("MYSQL","HOST",HOST)
    config.set("MYSQL","USER",USER)
    config.set("MYSQL","PASSWD",base64.b64encode(PASSWD))
    config.set("MYSQL","DB",DB)
    # Timestamp suffix: month, day, hour, minute, year
    t=time.strftime("%m%d%H%M%Y")
    f=open('altd_db.'+t,'w')
    config.write(f)
    f.close()
    os.chmod('altd_db.'+t,0640)
    # Replace the symlink so altd_db.conf always points at the latest file
    if(os.path.exists('altd_db.conf')):
        os.remove('altd_db.conf')
    os.symlink('altd_db.'+t,"altd_db.conf")
# --- interactive setup: gather connection info -----------------------------
if(os.path.exists('altd_db.conf')):
    print "ALTD database configuration file exists!"
    q=raw_input("Do you want to use the file to fill database information?[y/n]")
    if(q.lower() == "y"):
        readConfig()
    else:
        readFromUser()
else:
    readFromUser()

# Machine name is embedded in the table names below
MACHINE=raw_input("Machine name:")

# connect to the MySQL server
try:
    conn = MySQLdb.connect (HOST,USER,PASSWD)
except MySQLdb.Error as e:
    print "Error %d: %s" % (e.args[0], e.args[1])
    sys.exit (1)

# create database and related tables
# NOTE(review): DB and MACHINE are interpolated straight into DDL strings;
# identifiers cannot be bound as placeholders, so this relies on the
# operator-supplied input being trusted.
try:
    cursor = conn.cursor()

    # If MySQL version < 4.1, comment out the line below
    cursor.execute("SET SQL_MODE=\"NO_AUTO_VALUE_ON_ZERO\"")

    # If the database does not exist, create it, otherwise, switch to the database.
    cursor.execute("CREATE DATABASE IF NOT EXISTS %s DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci" % DB)
    cursor.execute("USE "+DB)

    # Table structure for table `altd_<MACHINE>_jobs`
    cursor.execute("""
       CREATE TABLE `altd_%s_jobs` (
       `run_inc` int(11) NOT NULL auto_increment,
       `tag_id` int(11) NOT NULL,
       `executable` varchar(1024) NOT NULL,
       `username` varchar(64) NOT NULL,
       `run_date` date NOT NULL,
       `job_launch_id` int(11) NOT NULL,
       `build_machine` varchar(64) NOT NULL,
       PRIMARY KEY (`run_inc`)
       ) ENGINE=MyISAM DEFAULT CHARSET=utf8 AUTO_INCREMENT=1
       """ % MACHINE)

    # Table structure for table `altd_<MACHINE>_link_tags`
    cursor.execute("""
       CREATE TABLE `altd_%s_link_tags` (
       `tag_id` int(11) NOT NULL auto_increment,
       `linkline_id` int(11) NOT NULL,
       `username` varchar(64) NOT NULL,
       `exit_code` tinyint(4) NOT NULL,
       `link_date` date NOT NULL,
       PRIMARY KEY (`tag_id`)
       ) ENGINE=MyISAM DEFAULT CHARSET=utf8 AUTO_INCREMENT=1
       """ % MACHINE)

    # Table structure for table `altd_<MACHINE>_linkline`
    cursor.execute("""
       CREATE TABLE `altd_%s_linkline` (
       `linking_inc` int(11) NOT NULL auto_increment,
       `linkline` varchar(4096) NOT NULL,
       PRIMARY KEY (`linking_inc`)
       ) ENGINE=MyISAM DEFAULT CHARSET=utf8 AUTO_INCREMENT=1
       """ % MACHINE)

    cursor.close()
    # Only persist the settings once the schema was created successfully
    writeConfig()
except MySQLdb.Error as e:
    print "Error %d: %s" % (e.args[0], e.args[1])
    sys.exit (1)
|
What a worthwhile project! I'm sure all the participants were proud and happy to be included. The veterans deserve all the assistance they can get.
|
# imports
import numpy as np
import scipy
import scipy.sparse.linalg
from scipy.sparse.linalg import ArpackNoConvergence
from scipy.sparse.linalg import ArpackError
import time
from SamplingPattern import SamplingPattern
from defaults import BASE_N
from utils import ft, ift, ft2, ift2, sumsq
class PatternEvaluator(object):
    """
    PatternEvaluator

    Co-ordinates computation of max and min singular values associated with
    a given SamplingPattern of k-space sample loci.
    """

    def __init__(self, base_sz = BASE_N, sens=[], max_tries=2):
        # NOTE(review): mutable default ``sens=[]`` is shared between calls;
        # it is only read here, but callers should pass their own value.
        # When ``sens`` is empty, self.sens is NOT set -- callers must call
        # set_single_coil() or load_sens() before evaluating a pattern.
        super(PatternEvaluator, self).__init__()
        # Base size
        self.base_sz = base_sz
        # SamplingPattern instance we want to test.
        self.pattern = None
        # init kernel for (optional) regularization
        # regl'n not yet implemented
        self.init_kern18()
        # space for the vectors we need
        self.xnew = np.zeros((self.base_sz, self.base_sz), dtype='complex')
        self.xm = np.zeros((self.base_sz, self.base_sz), dtype='complex')
        # actual array of sampling loci.
        self.sampling = np.zeros((self.base_sz, self.base_sz), dtype='float')
        # max repeats in case of arpack numerical problems
        self.max_tries = max_tries
        if sens:
            self.sens = sens

    def init_kern18(self):
        """
        optimized sqrt(18) radius kernel for
        spatial regularization filter
        """
        # Coefficients are indexed by squared distance from the kernel
        # centre; entries for unrealizable squared distances stay zero.
        self.root18 = np.zeros(32)
        self.root18[1] = 0.04071725
        self.root18[2] = 0.03499660
        self.root18[4] = 0.02368359
        self.root18[5] = 0.02522255
        self.root18[8] = 0.02024067
        self.root18[9] = 0.01407202
        self.root18[10] = 0.01345276
        self.root18[13] = 0.00850939
        self.root18[16] = 0.00812839
        self.root18[17] = 0.00491274
        self.root18[18] = 0.00396661

    def set_single_coil(self):
        """
        set sensitivity to single uniform coil (e.g. test sampling
        w/o sense )
        """
        self.sens = np.ones([1, self.base_sz, self.base_sz], dtype='complex')
        self.n_coils = self.sens.shape[0]
        # NOTE(review): ``ss`` is unused here; kept for parity with
        # load_sens(), where the sum-of-squares drives the mask.
        ss = sumsq(self.sens)
        # Uniform coil: every pixel is inside the mask
        self.mask = np.ones([ self.base_sz, self.base_sz])>0
        self.mask_sz = np.sum(self.mask.ravel())

    def load_sens(self, fname, mask_eps=1e-6):
        """
        load coil sensitivity and masking info from file.

        Warning: assumes data size is (n_coils, nx, ny)
        Looking for numpy npz file with variable 'sens'

        Mask from sqrt-sum-of-squares of coil maps.
        """
        fdat = np.load(fname)
        #except error
        self.sens = fdat['sens'].copy()
        self.n_coils = self.sens.shape[0]
        ss = sumsq(self.sens)
        # Pixels with negligible combined sensitivity are excluded
        self.mask = ss > mask_eps
        self.mask_sz = np.sum(self.mask.ravel())
        #normalize coil maps
        self.sens[:,self.mask] /= ss[self.mask]

    def set_norm_fac(self, p):
        """
        Adjust normalization factor. Used for testing overall
        scaling behaviour of the system.

        Use n_coils.
        """
        # Prefer a pattern-supplied override; otherwise normalize by the
        # number of coils.
        if hasattr(p, 'norm_fac') and p.norm_fac > 0:
            print 'Using pattern normfac of {}'.format(p.norm_fac)
            self.norm_fac = p.norm_fac
        else:
            self.norm_fac = self.n_coils
            print 'Using normfac of {}'.format(self.norm_fac)

    def eval_pattern(self, pat):
        """
        Main driver routine.

        Stores ``pat``, computes the largest and smallest eigenvalues of
        the sampling system and marks the pattern as calculated.
        """
        self.pattern = pat
        self.sampling = pat.sampling.copy().astype('float')
        self.set_norm_fac(pat)
        self.solve_high()
        self.solve_low()
        self.pattern.calcd = True
        print pat.hi_eigs
        print pat.low_eigs

    def solve_high(self):
        """
        co-ordinate calling ARPACK with our linear operator and get largest eigs
        """
        t_start = time.time()
        sysA = scipy.sparse.linalg.LinearOperator(
            (self.mask_sz, self.mask_sz),
            matvec=self.calc_AtA,
            dtype='complex')
        # NOTE(review): ``solved`` is set but never checked here (unlike
        # solve_low); if every attempt fails, ``a1`` below is unbound and
        # the final assignment raises NameError -- TODO confirm intended.
        solved = False
        for j in range(self.max_tries):
            try:
                a1,v1 = scipy.sparse.linalg.eigsh(
                    sysA,
                    k=self.pattern.n_eigs,
                    which='LM',
                    maxiter=self.pattern.iter_max,
                    tol=self.pattern.hitol,
                    ncv=self.pattern.ncv,
                    return_eigenvectors=True)
                # sometimes it "solves" but with crazy errors ~1e+_300
                if np.any(np.abs(a1) > self.n_coils):
                    continue
                else:
                    solved = True
                    break
            except ArpackError as e:
                print e
                if e.info == -8:
                    print('error on try {}'.format(j))

        t_end = time.time()
        print "Elapased: {}s".format(t_end - t_start)
        self.pattern.hi_eigs = a1

    def solve_low(self):
        # Same as solve_high, but asks ARPACK for the smallest-magnitude
        # eigenvalues ('SM'); -1 is stored as a sentinel on failure.
        t_start = time.time()
        sysA = scipy.sparse.linalg.LinearOperator(
            (self.mask_sz, self.mask_sz),
            matvec=self.calc_AtA,
            dtype='complex')
        solved = False
        for j in range(self.max_tries):
            try:
                adyn,vdyn = scipy.sparse.linalg.eigsh(
                    sysA,
                    k=self.pattern.n_eigs,
                    which='SM',
                    maxiter=self.pattern.iter_max,
                    tol=self.pattern.tol,
                    ncv=self.pattern.ncv,
                    return_eigenvectors=True)
                # sometimes it "solves" but with awful numerical problems
                # this seems to be a function of a bad input vector, and typically
                # is resolved by just running again. if we re-implement arpack
                # we could probably find out why, but until then, we just check for
                # strange values and re-compute.
                if np.any(np.abs(adyn) > 1e3): # much bigger than nCoils ever will be
                    continue
                else:
                    solved = True
                    break
            except ArpackError as e:
                print('Arpack error in solve_low {}'.format(e))

        t_end = time.time()
        print "Elapased: {}s".format(t_end - t_start)
        self.pattern.low_eigs = adyn
        if not solved:
            self.pattern.low_eigs = -1

    def calc_AtA(self, x0):
        """
        calculate system matrix (normal equations)

        Called by ARPACK as the LinearOperator matvec; ``x0`` is the
        masked image as a flat vector.
        """
        nSamp = np.sum(self.sampling)
        maskSz = np.sum(self.mask)
        nCoils, nv, npts = self.sens.shape
        # NOTE: ``<>`` is Python-2-only syntax (same as ``!=``)
        if x0.dtype <> np.complex128:
            x0 = x0.astype('complex128')
        x_img = x0
        result = np.zeros(maskSz, dtype='complex')
        # Compute A
        A_back = sys_sense(x_img, self.sens, self.sampling>0, self.mask)
        result[:] = A_back[:] / self.norm_fac #copy / flatten
        return result
## --
# Rountines for the system matrix are below.
# To speed things up, we implement these python prototypes in C
#
# Note: fun testing w/ auto-jitting does little here.
#
# Interleaving of the FFT's and dot products are the main slowdown.
# Interestingly, python's default fftpack doesn't do a stellar job
# if we pass in a 3D array and ask for the 2D FT... We can look to move
# to a fftw wrapper in future.
#
# Instead, we overload PatternEvaluator.calc_AtA() to call some
# C functions via the CFFI that do fast dots and call FFTW.
# Its a bit messier for distribution since it requries compilation.
def sys_sense(im_mask, coils, pattern, mask):
    """
    linear system for sense imaging

    Applies the normal-equations operator A^H A for SENSE sampling:
      input 1d vector to iterate on (from arpack)
      - insert into 2d image mask
      - compute 2d FT's and dots with sens
      - sample k space
      - inverse transform
      - extract the masked pixels again

    :param im_mask: 1d complex vector of the masked image pixels
    :param coils: coil sensitivities, shape (nCoils, nv, npts)
    :param pattern: 2d sampling pattern; nonzero entries are sampled loci
    :param mask: 2d boolean support mask selecting valid image pixels
    :returns: 1d gradient vector, same length as im_mask

    (Cleanup: removed the unused locals nD/accum/tmpGrad/ft_scale that the
    previous version computed and never read.)
    """
    nCoils, nv, npts = coils.shape

    # Scatter the flat input vector back into its 2d image support
    image = np.zeros((nv, npts), dtype='complex128')
    image[mask] = im_mask

    # Locations NOT sampled in k-space get zeroed after the forward FT
    zeroPat = pattern<1
    gradient = np.zeros_like(im_mask)

    #compute one coil at a time to save working memory space
    for c in range(nCoils):
        coilPtr = coils[c,...]
        # todo: zeropad
        scratch = (coilPtr) * image
        scratch = ift2(scratch)
        # zero out non-sampled locations
        scratch[zeroPat]=0
        # ft back
        scratch = ft2(scratch)
        # todo: crop
        scratch = np.conj(coilPtr) * scratch
        # accumulate
        gradient = gradient + scratch[mask]

    # Ensure a flat 1d vector is handed back to ARPACK
    gradient.shape = (-1)
    return gradient
|
I'm writing an unpacker of MSZIP blocks that use deflate compression.
I faced a problem and created a question on stackoverflow (http://stackoverflow.com/questions/41653663/is-there-something-special-in-windows-mszip-format-or-cfdata-in-cab-files), where the maintainer of the zlib library advised using the `dictionary` option to pass the raw uncompressed data of the previous block to decompress the next block of data. But Mark Adler says that this option doesn't work in the current PHP implementation.
> Looks like a bug. I can't get the dictionary option to work at all, even with strings that don't have nulls.
but I still get a data error upon subsequent inflate_add() … so not sure whether the repro is broken or something else in the code.
Thanks for attention. Can you test other data set?
> The current PHP code appears to be calling inflateSetDictionary() only if Z_DICT is returned by deflate. The PHP code should also call inflateSetDictionary() after calling inflateInit2() if raw inflate is being requested.
Can you try suggestion that Mark made and update zlib module?
What about thinking about this problem?
So, if I could send PR to php source, I'd send a fix.
According to Mark's opinion, it's not very difficult to update source of zlib extension to fix decompressor. May I wait for updates on this issue?
Is nobody interested in fixing this bug at all?
> So, if I could send PR to php source, I'd send a fix.
|
# -*- coding: utf-8 -*-
import re
from pyfr.backends.base.generator import BaseKernelGenerator
from pyfr.util import ndrange
class OpenMPKernelGenerator(BaseKernelGenerator):
def __init__(self, *args, **kwargs):
    """Initialise the generator and record the kernel loop dimensions."""
    super().__init__(*args, **kwargs)

    # One loop variable for 1D kernels; row and column for 2D kernels
    if self.ndim == 1:
        self._dims = ['_nx']
    else:
        self._dims = ['_ny', '_nx']
def render(self):
    """Render the complete OpenMP C source for this kernel.

    1D kernels become a single parallel loop scheduled with loop_sched_1d;
    2D kernels are split into an outer parallel row loop plus a separate
    (noinline) inner function over the columns.
    """
    # Kernel spec
    spec = self._emit_spec()

    if self.ndim == 1:
        body = self._emit_body_1d()
        return '''
{spec}
{{
    #pragma omp parallel
    {{
        int align = PYFR_ALIGN_BYTES / sizeof(fpdtype_t);
        int cb, ce;
        loop_sched_1d(_nx, align, &cb, &ce);
        for (int _x = cb; _x < ce; _x++)
        {{
            {body}
        }}
    }}
}}'''.format(spec=spec, body=body)
    else:
        innerfn = self._emit_inner_func()
        innercall = self._emit_inner_call()
        return '''{innerfn}
{spec}
{{
    #pragma omp parallel
    {{
        int align = PYFR_ALIGN_BYTES / sizeof(fpdtype_t);
        int rb, re, cb, ce;
        loop_sched_2d(_ny, _nx, align, &rb, &re, &cb, &ce);
        for (int _y = rb; _y < re; _y++)
        {{
            {innercall}
        }}
    }}
}}'''.format(innerfn=innerfn, spec=spec,
             innercall=innercall)
def _emit_inner_func(self):
    """Emit the static inner C function looping over the columns of one
    row chunk of a 2D kernel."""
    # Get the specification and body
    spec = self._emit_inner_spec()
    body = self._emit_body_2d()

    # Combine
    return '''{spec}
{{
    for (int _x = 0; _x < _nx; _x++)
    {{
        {body}
    }}
}}'''.format(spec=spec, body=body)
def _emit_inner_call(self):
    """Emit the C statement that invokes the inner function for the
    current row chunk."""
    # First argument: number of columns in this chunk
    call_args = ['ce - cb']
    call_args += [sa.name for sa in self.scalargs]

    # Vector arguments are passed as per-row offset pointers
    for va in self.vectargs:
        call_args += self._offset_arg_array_2d(va)

    return '{0}_inner({1});'.format(self.name, ', '.join(call_args))
def _emit_inner_spec(self):
    """Emit the C prototype of the static inner function.

    The inner function takes the chunk length, the scalar arguments and
    one restrict-qualified pointer per (flattened) vector component.
    """
    # Inner dimension
    ikargs = ['int _nx']

    # Add any scalar arguments
    ikargs.extend('{0.dtype} {0.name}'.format(sa) for sa in self.scalargs)

    # Vector arguments (always arrays as we're 2D)
    for va in self.vectargs:
        const = 'const' if va.intent == 'in' else ''
        stmt = '{0} {1.dtype} *__restrict__ {1.name}_v'.format(const, va)
        stmt = stmt.strip()

        if va.ncdim == 0:
            ikargs.append(stmt)
        else:
            # One pointer per component, suffixed e.g. _v0, _v0v1
            for ij in ndrange(*va.cdims):
                ikargs.append(stmt + 'v'.join(str(n) for n in ij))

    return ('static PYFR_NOINLINE void {0}_inner({1})'
            .format(self.name, ', '.join(ikargs)))
def _emit_spec(self):
    """Emit the C prototype of the outer (public) kernel function."""
    # We first need the argument list; starting with the dimensions
    kargs = ['int ' + d for d in self._dims]

    # Now add any scalar arguments
    kargs.extend('{0.dtype} {0.name}'.format(sa) for sa in self.scalargs)

    # Finally, add the vector arguments
    for va in self.vectargs:
        # Views
        if va.isview:
            # Views carry the data pointer plus index/stride arrays
            kargs.append('{0.dtype}* __restrict__ {0.name}_v'.format(va))
            kargs.append('const int* __restrict__ {0.name}_vix'
                         .format(va))

            if va.ncdim >= 1:
                kargs.append('const int* __restrict__ {0.name}_vcstri'
                             .format(va))
            if va.ncdim == 2:
                kargs.append('const int* __restrict__ {0.name}_vrstri'
                             .format(va))
        # Arrays
        else:
            # Intent in arguments should be marked constant
            const = 'const' if va.intent == 'in' else ''

            kargs.append('{0} {1.dtype}* __restrict__ {1.name}_v'
                         .format(const, va).strip())

            # If we are a matrix (ndim = 2) or a non-MPI stacked
            # vector then a leading (sub) dimension is required
            if self.ndim == 2 or (va.ncdim > 0 and not va.ismpi):
                kargs.append('int lsd{0.name}'.format(va))

    return 'void {0}({1})'.format(self.name, ', '.join(kargs))
def _emit_body_1d(self):
body = self.body
ptns = [r'\b{0}\b', r'\b{0}\[(\d+)\]', r'\b{0}\[(\d+)\]\[(\d+)\]']
for va in self.vectargs:
# Dereference the argument
darg = self._deref_arg(va)
# Substitute
body = re.sub(ptns[va.ncdim].format(va.name), darg, body)
return body
def _emit_body_2d(self):
body = self.body
ptns = [r'\b{0}\b', r'\b{0}\[(\d+)\]', r'\b{0}\[(\d+)\]\[(\d+)\]']
subs = ['{0}_v[_x]', r'{0}_v\1[_x]', r'{0}_v\1v\2[_x]']
for va in self.vectargs:
body = re.sub(ptns[va.ncdim].format(va.name),
subs[va.ncdim].format(va.name), body)
return body
def _deref_arg(self, arg):
if arg.isview:
ptns = ['{0}_v[{0}_vix[_x]]',
r'{0}_v[{0}_vix[_x] + {0}_vcstri[_x]*\1]',
r'{0}_v[{0}_vix[_x] + {0}_vrstri[_x]*\1'
r' + {0}_vcstri[_x]*\2]']
return ptns[arg.ncdim].format(arg.name)
else:
# Leading (sub) dimension
lsdim = 'lsd' + arg.name if not arg.ismpi else '_nx'
# Vector name_v[_x]
if arg.ncdim == 0:
ix = '_x'
# Stacked vector; name_v[lsdim*\1 + _x]
elif arg.ncdim == 1:
ix = r'{0}*\1 + _x'.format(lsdim)
# Doubly stacked vector; name_v[lsdim*nv*\1 + lsdim*\2 + _x]
else:
ix = r'{0}*{1}*\1 + {0}*\2 + _x'.format(lsdim, arg.cdims[1])
return '{0}_v[{1}]'.format(arg.name, ix)
def _offset_arg_array_2d(self, arg):
stmts = []
# Matrix; name + _y*lsdim + cb
if arg.ncdim == 0:
stmts.append('{0}_v + _y*lsd{0} + cb'.format(arg.name))
# Stacked matrix; name + (_y*nv + <0>)*lsdim + cb
elif arg.ncdim == 1:
stmts.extend('{0}_v + (_y*{1} + {2})*lsd{0} + cb'
.format(arg.name, arg.cdims[0], i)
for i in range(arg.cdims[0]))
# Doubly stacked matrix; name + ((<0>*_ny + _y)*nv + <1>)*lsdim + cb
else:
stmts.extend('{0}_v + (({1}*_ny + _y)*{2} + {3})*lsd{0} + cb'
.format(arg.name, i, arg.cdims[1], j)
for i, j in ndrange(*arg.cdims))
return stmts
|
Denver’s new airport rail line has experienced severe glitches since opening, including malfunctioning crossing gates and a lightning strike that shut down the entire line for seven hours, among other problems. Transit officials assured the public they were just getting the bugs out of the system. But now, more than six months after it opened, the bugs are still thriving.
The crossing gate problem is so severe that the Federal Transit Administration has threatened to shut down the line until it is corrected. The contractor that built and operates the line tried to claim the lightning strike was an act of God, so the contractor shouldn’t be held responsible, but Regional Transit District (RTD) officials responded that they had pointed out the company’s design was vulnerable to lightning as early as 2013, yet the company did nothing to fix the flaw. Meanwhile, the system continues to perform unreliably.
Public-private partnerships work great if the private partner is funded out of the user fees collected for the project, such as a toll road or water system. But I resent the way the transit industry has co-opted the term, public-private partnership, because their kind of partnership works differently. Instead of being dependent on fares, the private partner gets a fat check from the agency each month–up to $3.5 million in this case–whether anyone rides the train or not. This means the private partner has little incentive to make sure the system is working. RTD has withheld a portion of the monthly payments until the problems are solved, but eventually the contractor will get all of the money.
The solution isn’t for the agencies to build the lines themselves. The solution is to completely avoid megaprojects that aren’t funded out of user fees. Without the discipline of user fees, everything that’s happening with the A-line should have been expected.
Randal O’Toole directs the Transportation Policy Center at the Independence Institute, a free market think tank in Denver. A version of this piece originally appeared in his blog, The Antiplanner.
|
import logging
import sys
# Log line formats: human-readable for the console, CSV-style for files
LOGGING_FORMAT = '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
FILE_LOGGING_FORMAT = '%(asctime)s,%(levelname)s,%(name)s,%(message)s'
# captureWarnings exists only since 2.7
_nh = None
if sys.hexversion > 0x2070000:
    logging.captureWarnings(True)
    _nh = logging.NullHandler()
else:
    # NullHandler doesn't exist in older versions, so provide a shim
    class NullHandler(logging.Handler):
        # Discard every record
        def emit(self, record):
            pass
    _nh = NullHandler()
# Rename the root logger so records are attributed to aeriscloud
logging.root.name = 'aeriscloud'
_logger = logging.root
# The null handler silences "no handlers could be found" warnings until
# set_log_level/set_log_file installs a real handler
_logger.addHandler(_nh)
# disable all logs (60 is above CRITICAL == 50)
_logger.setLevel(60)
# prevent root logger from outputting
_logger.propagate = False
def get_logger(name=None, parent=_logger):
    """Return the child of *parent* called *name*, or *parent* itself.

    Falls back to the logger manager on Python versions where
    ``Logger.getChild`` is unavailable.
    """
    if not name:
        return parent
    if hasattr(parent, 'getChild'):
        return parent.getChild(name)
    # Old logging modules lack getChild; build the dotted name manually
    return parent.manager.getLogger('.'.join([parent.name, name]))
def set_log_level(lvl):
    """Enable console logging at level *lvl*.

    On first use, swaps the module's null handler for a StreamHandler
    using LOGGING_FORMAT; subsequent calls only adjust the level.
    """
    # custom stream handler by default
    if _nh in _logger.handlers:
        _logger.removeHandler(_nh)
        _handler = logging.StreamHandler()
        _handler.setFormatter(logging.Formatter(LOGGING_FORMAT))
        _logger.addHandler(_handler)
    _logger.setLevel(lvl)
def set_log_file(filename):
    """Attach a CSV-formatted file handler writing to *filename*."""
    # The null handler is no longer needed once real output exists;
    # removeHandler is a no-op if it was already removed
    _logger.removeHandler(_nh)
    handler = logging.FileHandler(filename)
    formatter = logging.Formatter(FILE_LOGGING_FORMAT)
    handler.setFormatter(formatter)
    _logger.addHandler(handler)
|
Director of Krotoa, Eva van de Kaap Basil Appollis, Artscape CEO Marlene le Roux and writer of the play, Sylvia Vollenhoven.
A discussion around the story of Krotoa highlighted the untold stories of the Khoi people that were lost in history, and the lack of interest from the youth to find out more about where they come from.
The discussion was held at the Innovation Room at the Artscape on Saturday February 3, and was focused on the role of the arts as a way of fast-tracking the changes needed to redress historic injustice, and how the story of Krotoa helps people define the terms of restorative justice.
The event was also held to mark Black History Month, celebrated in February around the world.
the VOC merchant commander who established a refreshment station at the Cape in 1652. She plays Krotoa, the young Khoi girl taken into van Riebeeck’s household who went on to become a key negotiator and translator between the Dutch and the local people at a very young age. She was also the first Khoi woman to be baptised and the first to officially marry a European. The production puts Krotoa in the centre of her own story, and is a perspective-changing tribute to a neglected and contested aspect of shared history.
Marlene le Roux, the CEO of the Artscape, said putting together the production was a spiritual and emotional journey for the people involved.
High commissioner of the Gorinhaicona Traditional Council, Tauriq Jenkins, who is also part of a programme at the University of Cape Town (UCT), which deals with the restoring of justice, said part of the restoration process is a broader conversation about the trauma of the past.
He said that when people speak about Krotoa, they speak of a time humanity lost itself. “Krotoa is the centre of who we are as people. Part of the healing process is understanding all that she had been through. This story is one that all South Africans need to know. If we don’t get to know it, we are robbing ourselves of a large part of our history.
Doctor June Bam-Hutchison of the UCT Centre for African Studies, raised the question on how we decolonise people who’ve been erased from the stories of the past.
“We need to look at the fact that some of these people have died with their stories, and look at the kind of colonial life and the stories about our people – especially the women.
Also part of the discussion was Chief Krotoa Elenor Smith, a member of the First Indigenous Nations of South Africa. She said the untold rituals and history of Khoi people that were taught by grandmothers had been lost.
“Traditional medicines such as dassiepis and buchu are some of the examples of things that are on our doorsteps.
She said the arts were important to highlight parts of the healing process.
The director of Krotoa Eva van de Kaap, Basil Appollis, said it was important that people have the confidence to tell their stories.
little involvement from the youth at these gatherings. She said that the writers and researchers who have the correct networks should push such content and productions into schools and make it accessible for the youth so that they are aware of where they come from. The discussion was wrapped up with a song and dedication to Krotoa by Courtney’s mother, Amanda Lois Stone, a Khoisan singer and poet.
Krotoa, Eva van de Kaap, was written by writer, film-maker and journalist Sylvia Vollenhoven. The story sheds new light on an ancient narrative, and contends that the story has not ended. The music for the production is by South African Frazer Barry and Jef Hofmeister, from the Volksoperahuis in The Netherlands.
Krotoa, Eva van de Kaap is at the Artscape Arena from today, Thursday February 7 until Saturday February 16, from Tuesday to Friday at 7.30pm and on Saturdays at 3pm and 7.30pm. Tickets cost R100. Book through Computicket or 021 421 7695.
Frazer Barry gave the audience a taste of the music presented in Krotoa, Eva van de Kaap.
Courtney Lemmert highlights the lack of involvement of youth at historic discussions and events.
|
# -*- coding: utf-8 -*-
import pymysql.cursors
import uuid
from urllib.parse import urlparse
"""
Some explanation for this set of functions.
set: put a partical subset in a class var
insert: insert one row without checks and dependencies, optionally commit
store: insert multiples and calling separate insert functions, always commits and has optional safemode
check: returns True or False
get: get an individual value
todo, find and get
"""
class Database:
    """MySQL-backed store for an ontolex/lemon-style lexicon.

    Method naming convention (see module docstring): ``set*`` caches query
    results on instance attributes, ``insert*`` adds a single row with an
    optional commit, ``store*`` performs composite inserts and always
    commits, ``check*`` returns a boolean and ``get*`` returns one value.
    """
    def __init__(self,config):
        # Connection settings
        self.host = config["host"]
        self.user = config["user"]
        self.passwd = config["passwd"]
        self.name = config["name"]
        # Row caches filled by the set* methods
        self.lexicalEntries = []
        self.lexicalEntryRelations = []
        self.lexicalForms = []
        self.lexicalProperties = []
        self.lexicalSenses = []
        self.senseReferences = []
        self.lexicalEntryComponents = []
        self.components = []
        self.lexicalEntryLabels = {}
        self.lexicalSenseDefinitions = {}
        # Vocabulary lookup tables (value -> database id)
        self.posses = {}
        self.languages = {}
        self.properties = {}
        self.senserelations = []
        self.entryrelations = {}
    def connect(self):
        """Open the connection; rows come back as dicts via DictCursor."""
        self.DB = pymysql.connect(host=self.host,user=self.user, passwd=self.passwd,db=self.name,charset='utf8',use_unicode=1,cursorclass=pymysql.cursors.DictCursor)
    def setPosses(self):
        """Cache the part-of-speech vocabulary as value -> id."""
        for row in self.__getRows("SELECT * FROM partOfSpeechVocabulary"):
            self.posses[row["value"]] = row["id"]
    def setLanguages(self):
        """Cache the language vocabulary as ISO 639-1 code -> id."""
        for row in self.__getRows("SELECT * FROM languageVocabulary"):
            self.languages[row["iso_639_1"]] = row["id"]
    def setProperties(self):
        """Cache the property vocabulary keyed as <property>:<value>."""
        for row in self.__getRows("SELECT * FROM propertyVocabulary"):
            key = self.__getUrlPart(row["property"]) + ":" + self.__getUrlPart(row["value"])
            self.properties[key] = row["id"]
    def setEntryRelations(self):
        """Cache the entry-relation vocabulary keyed as lexinfo:<relation>."""
        for row in self.__getRows("SELECT * FROM relationVocabulary"):
            key = "lexinfo:" + self.__getUrlPart(row["relation"])
            self.entryrelations[key] = row["relationID"]
    def setSenseRelations(self):
        """ Should not be manual, but for now there is validation. """
        self.senserelations.extend( ["ontolex:reference"] )
        self.senserelations.extend( ["lexinfo:antonym", "lexinfo:synonym", "lexinfo:pertainsTo", "lexinfo:relatedTerm", "lexinfo:hypernym"] )
    def setLexicalEntries(self):
        """Cache all lexical entries joined with their part of speech."""
        query = "SELECT lexicalEntryID, class, pos.value AS pos_value, lex.identifier AS lex_identifier FROM lexicalEntry AS lex \
            LEFT JOIN partOfSpeechVocabulary AS pos ON lex.partOfSpeechID = pos.id"
        self.lexicalEntries = self.__getRows(query)
    def setLexicalEntry(self,lexicalEntryID):
        """Cache a single lexical entry (replaces the current cache)."""
        query = "SELECT lexicalEntryID, class, pos.value AS pos_value, identifier AS lex_identifier FROM lexicalEntry AS lex \
            LEFT JOIN partOfSpeechVocabulary AS pos ON lex.partOfSpeechID = pos.id \
            WHERE lexicalEntryID = %s"
        self.lexicalEntries = self.__getRows(query,(lexicalEntryID))
    def setLexicalEntryRelations(self):
        """Cache all entry-to-entry relations with their vocabulary label."""
        query = "SELECT lex.identifier AS lex_identifier, entryrel.reference, vocab.relation FROM lexicalEntryRelation AS entryrel \
            LEFT JOIN lexicalEntry AS lex ON entryrel.lexicalEntryID = lex.lexicalEntryID \
            LEFT JOIN relationVocabulary AS vocab ON entryrel.relationID = vocab.relationID"
        self.lexicalEntryRelations = self.__getRows(query)
    def setLexicalEntryRelationsByID(self,lexicalEntryID):
        """Cache the relations of one entry (replaces the current cache)."""
        query = "SELECT lex.identifier AS lex_identifier, entryrel.reference, vocab.relation FROM lexicalEntryRelation AS entryrel \
            LEFT JOIN lexicalEntry AS lex ON entryrel.lexicalEntryID = lex.lexicalEntryID \
            LEFT JOIN relationVocabulary AS vocab ON entryrel.relationID = vocab.relationID \
            WHERE entryrel.lexicalEntryID = %s"
        self.lexicalEntryRelations = self.__getRows(query,(lexicalEntryID))
    def setLexicalForms(self,lang_id):
        """Cache all forms with written representations in one language."""
        query = "SELECT form.lexicalEntryID, form.lexicalFormID, type, rep.value AS rep_value, lex.identifier AS lex_identifier, form.identifier AS form_identifier, rep.syllableCount AS syllableCount FROM lexicalForm AS form \
            LEFT JOIN lexicalEntry AS lex ON form.lexicalEntryID = lex.lexicalEntryID \
            LEFT JOIN writtenRep AS rep ON form.lexicalFormID = rep.lexicalFormID \
            WHERE rep.languageID = %s"
        self.lexicalForms = self.__getRows(query,(lang_id))
    def setLexicalForm(self,lexicalEntryID,lang_id):
        """Append the forms of one entry in one language to the cache."""
        query = "SELECT form.lexicalEntryID, form.lexicalFormID, type, rep.value AS rep_value, lex.identifier AS lex_identifier, form.identifier AS form_identifier, rep.syllableCount AS syllableCount FROM lexicalForm AS form \
            LEFT JOIN lexicalEntry AS lex ON form.lexicalEntryID = lex.lexicalEntryID \
            LEFT JOIN writtenRep AS rep ON form.lexicalFormID = rep.lexicalFormID \
            WHERE form.lexicalEntryID = %s \
            AND rep.languageID = %s"
        self.lexicalForms.extend(self.__getRows(query,(lexicalEntryID,lang_id)))
    def setLexicalFormsByEntries(self,lang_id):
        """Populate the form cache from the already-cached entries."""
        for entry in self.lexicalEntries:
            self.setLexicalForm(entry["lexicalEntryID"],lang_id)
    def setLexicalEntryLabels(self):
        """ Sets easy lookup labels for use in setLexicalSenses without needing big joins.
        Called separately because setLexicalFormsByEntries calls setLexicalForm. """
        for form in self.lexicalForms:
            if form["type"] == "canonicalForm":
                self.lexicalEntryLabels[form["lexicalEntryID"]] = form["rep_value"]
    def setLexicalFormProperties(self):
        """Cache all form properties (replaces the current cache)."""
        query = "SELECT form.identifier AS form_identifier, vocab.property, vocab.value FROM formProperties AS formprop \
            LEFT JOIN lexicalForm AS form ON formprop.lexicalFormID = form.lexicalFormID \
            LEFT JOIN propertyVocabulary AS vocab ON formprop.propertyID = vocab.id"
        self.lexicalProperties = self.__getRows(query)
    def setLexicalFormPropertiesByID(self):
        """Append the properties of every cached form to the cache."""
        for form in self.lexicalForms:
            query = "SELECT form.identifier AS form_identifier, vocab.property, vocab.value FROM formProperties AS formprop \
                LEFT JOIN lexicalForm AS form ON formprop.lexicalFormID = form.lexicalFormID \
                LEFT JOIN propertyVocabulary AS vocab ON formprop.propertyID = vocab.id \
                WHERE formprop.lexicalFormID = %s"
            self.lexicalProperties.extend(self.__getRows(query,(form["lexicalFormID"])))
    def setLexicalSenses(self):
        """Cache all senses joined with their entry identifier."""
        query = "SELECT sense.lexicalSenseID, sense.lexicalEntryID, lex.identifier AS lex_identifier, sense.identifier AS sense_identifier FROM lexicalSense AS sense \
            LEFT JOIN lexicalEntry AS lex ON sense.lexicalEntryID = lex.lexicalEntryID"
        self.lexicalSenses = self.__getRows(query)
    def setLexicalSensesByID(self,lexicalEntryID):
        """Append the senses of one entry to the cache."""
        query = "SELECT sense.lexicalSenseID, sense.lexicalEntryID, lex.identifier AS lex_identifier, sense.identifier AS sense_identifier FROM lexicalSense AS sense \
            LEFT JOIN lexicalEntry AS lex ON sense.lexicalEntryID = lex.lexicalEntryID \
            WHERE sense.lexicalEntryID = %s"
        self.lexicalSenses.extend(self.__getRows(query,(lexicalEntryID)))
    def setLexicalSensesByEntries(self):
        """Populate the sense cache from the already-cached entries."""
        for entry in self.lexicalEntries:
            self.setLexicalSensesByID(entry["lexicalEntryID"])
    def setSenseDefinitions(self,lang_id):
        """ Definition is optional."""
        for sense in self.lexicalSenses:
            query = "SELECT value FROM senseDefinition WHERE lexicalSenseID = %s AND languageID = %s"
            row = self.__getRow(query,(sense["lexicalSenseID"],lang_id))
            if row:
                self.lexicalSenseDefinitions[sense["sense_identifier"]] = row["value"]
    def setSenseReferences(self):
        """Cache all external sense references (replaces the cache)."""
        query = "SELECT sense.identifier AS sense_identifier, namespace, property, reference FROM senseReference \
            LEFT JOIN lexicalSense AS sense ON senseReference.lexicalSenseID = sense.lexicalSenseID"
        self.senseReferences = self.__getRows(query)
    def setSenseReferencesByID(self):
        """Append the references of every cached sense to the cache."""
        for sense in self.lexicalSenses:
            query = "SELECT sense.identifier AS sense_identifier, namespace, property, reference FROM senseReference \
                LEFT JOIN lexicalSense AS sense ON senseReference.lexicalSenseID = sense.lexicalSenseID \
                WHERE senseReference.lexicalSenseID = %s"
            self.senseReferences.extend(self.__getRows(query,(sense["lexicalSenseID"])))
    def setLexicalComponents(self):
        """Append all entry/component links with positions to the cache."""
        query = "SELECT lex.identifier AS lex_identifier, comp.identifier AS comp_identifier, position FROM lexicalEntryComponent AS lexcomp \
            LEFT JOIN lexicalEntry AS lex ON lexcomp.lexicalEntryID = lex.lexicalEntryID \
            LEFT JOIN component AS comp ON lexcomp.componentID = comp.componentID"
        self.lexicalEntryComponents.extend(self.__getRows(query))
    def setLexicalComponentsByID(self,lexicalEntryID):
        """Append the entry/component links of one entry to the cache."""
        query = "SELECT lex.identifier AS lex_identifier, comp.identifier AS comp_identifier, position FROM lexicalEntryComponent AS lexcomp \
            LEFT JOIN lexicalEntry AS lex ON lexcomp.lexicalEntryID = lex.lexicalEntryID \
            LEFT JOIN component AS comp ON lexcomp.componentID = comp.componentID \
            WHERE lexcomp.lexicalEntryID = %s"
        self.lexicalEntryComponents.extend(self.__getRows(query,(lexicalEntryID)))
    def setComponents(self):
        """Append every distinct component with its entry and form ids."""
        query = "SELECT DISTINCT comp.identifier AS comp_identifier, lex.identifier AS lex_identifier, form.identifier AS form_identifier FROM component AS comp \
            LEFT JOIN lexicalEntry AS lex ON comp.lexicalEntryID = lex.lexicalEntryID \
            LEFT JOIN lexicalForm AS form ON comp.lexicalFormID = form.lexicalFormID"
        self.components.extend(self.__getRows(query))
    def setComponentsByID(self,lexicalEntryID,lang_id):
        """Append the components of one entry (in one language) to the cache."""
        query = "SELECT DISTINCT comp.identifier AS comp_identifier, lex.identifier AS lex_identifier, form.identifier AS form_identifier, rep.value AS rep_value FROM component AS comp \
            LEFT JOIN lexicalEntryComponent AS lexcomp ON comp.componentID = lexcomp.componentID \
            LEFT JOIN lexicalEntry AS lex ON comp.lexicalEntryID = lex.lexicalEntryID \
            LEFT JOIN lexicalForm AS form ON comp.lexicalFormID = form.lexicalFormID \
            LEFT JOIN writtenRep AS rep ON form.lexicalFormID = rep.lexicalFormID \
            WHERE lexcomp.lexicalEntryID = %s \
            AND rep.languageID = %s"
        self.components.extend(self.__getRows(query,(lexicalEntryID,lang_id)))
        # and add single component to output (but not connected to actual component part of lexicalEntry
        # useful for management, and checking loose components
        if not self.components:
            query = "SELECT DISTINCT comp.identifier AS comp_identifier, lex.identifier AS lex_identifier, form.identifier AS form_identifier, rep.value AS rep_value FROM component AS comp \
                LEFT JOIN lexicalEntry AS lex ON comp.lexicalEntryID = lex.lexicalEntryID \
                LEFT JOIN lexicalForm AS form ON comp.lexicalFormID = form.lexicalFormID \
                LEFT JOIN writtenRep AS rep ON form.lexicalFormID = rep.lexicalFormID \
                WHERE lex.lexicalEntryID = %s \
                AND rep.languageID = %s"
            self.components.extend(self.__getRows(query,(lexicalEntryID,lang_id)))
    def saveVerbPastSingular(self,lexicalEntryID,value,lang_id):
        """Store a past-singular verb form and its tense/number properties."""
        lex_id = self.getID(lexicalEntryID,"lexicalEntry")
        form_id = self.storeOtherForm(lex_id,value,lang_id)
        self.insertFormProperty(form_id,self.properties["tense:past"],True)
        self.insertFormProperty(form_id,self.properties["number:singular"],True)
    def saveVerbPastParticiple(self,lexicalEntryID,value,lang_id):
        """Store a past-participle verb form and its tense/mood properties."""
        lex_id = self.getID(lexicalEntryID,"lexicalEntry")
        # store with safemode False
        form_id = self.storeOtherForm(lex_id,value,lang_id,False)
        self.insertFormProperty(form_id,self.properties["tense:past"],True)
        self.insertFormProperty(form_id,self.properties["verbFormMood:participle"],True)
    def getLexicalEntryID(self,value,partOfSpeechID):
        """Return the entry id for a value/POS pair (assumes it exists)."""
        query = "SELECT lexicalEntryID FROM lexicalEntry WHERE value = %s AND partOfSpeechID = %s"
        row = self.__getRow(query,(value,partOfSpeechID))
        return row["lexicalEntryID"]
    def getLexicalSenseID(self,lexicalEntryID):
        """Return the (first) sense id of an entry (assumes it exists)."""
        query = "SELECT lexicalSenseID FROM lexicalSense WHERE lexicalEntryID = %s"
        row = self.__getRow(query,(lexicalEntryID))
        return row["lexicalSenseID"]
    def getID(self,identifier,table):
        """ Return the real database ID from either entry, form or sense, based on identifier. """
        field = table + "ID"
        query = "SELECT " + field + " FROM " + table + " WHERE identifier = %s"
        row = self.__getRow(query,(identifier))
        return row[field]
    def getIdentifier(self,id,table):
        """ Return the identifier from either entry, form or sense, based on the real DB id. """
        field = table + "ID"
        query = "SELECT identifier FROM " + table + " WHERE " + field + " = %s"
        row = self.__getRow(query,(id))
        return row["identifier"]
    def getCountlexicalSenses(self,lexicalEntryID):
        """Return the number of senses attached to an entry."""
        query = "SELECT count(*) AS count FROM lexicalSense WHERE lexicalEntryID = %s"
        row = self.__getRow(query,(lexicalEntryID))
        return int(row["count"])
    def getWrittenRepsWithoutSyllableCount(self,lang_id):
        """Return distinct written reps still lacking a syllable count."""
        query = "SELECT DISTINCT value FROM writtenRep WHERE syllableCount IS NULL AND languageID = %s"
        return self.__getRows(query,(lang_id))
    def checkSenseReferenceExists(self,lexicalSenseID,relation,reference):
        """Return True if the sense already carries this reference."""
        # relation is in <namespace>:<property> form
        namespace = relation.split(":")[0]
        property = relation.split(":")[1]
        query = "SELECT * FROM senseReference WHERE lexicalSenseID = %s AND namespace = %s AND property = %s AND reference = %s"
        row = self.__getRow(query,(lexicalSenseID,namespace,property,reference))
        if row:
            return True
        else:
            return False
    def checkLexicalEntryRelationExists(self,lexicalEntryID,relation,reference):
        """Return True if the entry already carries this relation."""
        query = "SELECT * FROM lexicalEntryRelation WHERE lexicalEntryID = %s AND relationID = %s AND reference = %s"
        row = self.__getRow(query,(lexicalEntryID,relation,reference))
        if row:
            return True
        else:
            return False
    def checkLexicalFormPropertyExists(self,lexicalFormID,propertyID):
        """Return True if the form already carries this property."""
        query = "SELECT * FROM formProperties WHERE lexicalFormID = %s AND propertyID = %s"
        row = self.__getRow(query,(lexicalFormID,propertyID))
        if row:
            return True
        else:
            return False
    def storeCanonical(self,word,lang_id,pos_id,safemode=True):
        """ Stores new lexicalEntry and canonicalForm if entry does not exist."""
        if self.findLexicalEntry(word,pos_id) and safemode:
            print("found this entry already: " + word)
            return None
        lexicalEntryID = self.insertLexicalEntry(word,pos_id)
        lexicalFormID = self.insertLexicalForm(lexicalEntryID,"canonicalForm")
        self.insertWrittenRep(lexicalFormID,word,lang_id)
        # store infinitive form for verb
        if pos_id == self.posses["verb"]:
            self.insertFormProperty(lexicalFormID,self.properties["verbFormMood:infinitive"])
        self.DB.commit()
        return lexicalEntryID
    def storeOtherForm(self,lexicalEntryID,word,lang_id,safemode=True):
        """Store an otherForm plus written rep; returns the new form id."""
        if self.findlexicalForm(lexicalEntryID,word,lang_id) and safemode:
            print("found this form already: " + word)
            return None
        lexicalFormID = self.insertLexicalForm(lexicalEntryID,"otherForm")
        self.insertWrittenRep(lexicalFormID,word,lang_id)
        self.DB.commit()
        return lexicalFormID
    def storeFormProperties(self,lexicalFormID,properties,safemode=True):
        """Attach multiple vocabulary properties to a form, then commit."""
        # no safemode yet
        for property in properties:
            # p in form <property>:<value>
            self.insertFormProperty(lexicalFormID,self.properties[property])
        self.DB.commit()
    def storeLexicalSense(self,lexicalEntryID,relation,reference,safemode=True):
        """ Adds lexicalSense to lexicxalEntry, and adds a relation. """
        senseCount = self.getCountlexicalSenses(lexicalEntryID)
        if senseCount == 0:
            # no senses yet, we can safely add a sense and a relation
            lexicalSenseID = self.insertLexicalSense(lexicalEntryID)
            self.insertSenseReference(lexicalSenseID,relation,reference)
        elif senseCount == 1:
            # asume we're adding to this sense, retrieve the senseID and add reference if not exists
            lexicalSenseID = self.getLexicalSenseID(lexicalEntryID)
            if not self.checkSenseReferenceExists(lexicalSenseID,relation,reference):
                self.insertSenseReference(lexicalSenseID,relation,reference)
        else:
            # ambiguous: more than one sense, caller must disambiguate
            lexicalSenseID = None
        self.DB.commit()
        return lexicalSenseID
    def storeLexicalEntryRelation(self,lexicalEntryID,relation,reference):
        """ Check whether relation already exists, and if not, adds it. """
        if not self.checkLexicalEntryRelationExists(lexicalEntryID,relation,reference):
            self.insertLexicalEntryRelation(lexicalEntryID,relation,reference)
            self.DB.commit()
    def storeComponent(self,lexicalFormID):
        """ Stores component, based on lexicalFormID. """
        query = "SELECT lexicalEntryID FROM lexicalForm WHERE lexicalFormID = %s"
        row = self.__getRow(query,(lexicalFormID))
        if row:
            return self.insertComponent(row["lexicalEntryID"],lexicalFormID,True)
        else:
            return "failed"
    def findLexicalEntry(self,word,pos_id):
        """Return the entry id for a value/POS pair, or None."""
        query = "SELECT lexicalEntryID FROM lexicalEntry WHERE value = %s AND partOfSpeechID = %s"
        row = self.__getRow(query,(word,pos_id))
        if row:
            return row["lexicalEntryID"]
        else:
            return None
    def findlexicalForm(self,lexicalEntryID,word,lang_id):
        """Return the form id matching entry/word/language, or None."""
        query = "SELECT form.lexicalFormID FROM lexicalForm AS form \
            LEFT JOIN writtenRep AS rep ON form.lexicalFormID = rep.lexicalFormID \
            WHERE form.lexicalEntryID = %s \
            AND rep.value = %s \
            AND rep.languageID = %s"
        row = self.__getRow(query,(lexicalEntryID,word,lang_id))
        if row:
            return row["lexicalFormID"]
        else:
            return None
    def insertLexicalEntry(self,word,pos_id,commit=False):
        """Insert an entry (Word or MultiwordExpression); returns its id."""
        c = self.DB.cursor()
        # Entries containing spaces are multiword expressions
        entryclass = "Word"
        if word.count(" ") > 0:
            entryclass = "MultiwordExpression"
        identifier = "urn:uuid:" + str(uuid.uuid4())
        query = "INSERT INTO lexicalEntry (value,identifier,partOfSpeechID,class) VALUES (%s,%s,%s,%s)"
        c.execute(query, (word,identifier,pos_id,entryclass))
        lexicalEntryID = c.lastrowid
        c.close()
        if commit:
            self.DB.commit()
        return lexicalEntryID
    def insertLexicalEntryRelation(self,lexicalEntryID,relationID,reference,commit=False):
        """Insert one entry relation row, optionally committing."""
        c = self.DB.cursor()
        query = "INSERT INTO lexicalEntryRelation (lexicalEntryID,relationID,reference) VALUES (%s,%s,%s)"
        c.execute(query, (lexicalEntryID,relationID,reference))
        c.close()
        if commit:
            self.DB.commit()
    def insertLexicalForm(self,lexicalEntryID,type,commit=False):
        """Insert a form of the given type; returns the new form id."""
        c = self.DB.cursor()
        identifier = "urn:uuid:" + str(uuid.uuid4())
        query = "INSERT INTO lexicalForm (lexicalEntryID,identifier,type) VALUES (%s,%s,%s)"
        c.execute(query, (lexicalEntryID,identifier,type))
        lexicalFormID = c.lastrowid
        c.close()
        if commit:
            self.DB.commit()
        return lexicalFormID
    def insertWrittenRep(self,lexicalFormID,word,lang_id,commit=False):
        """Insert a written representation row for a form."""
        c = self.DB.cursor()
        query = "INSERT INTO writtenRep (lexicalFormID,languageID,value) VALUES (%s,%s,%s)"
        c.execute(query, (lexicalFormID,lang_id,word))
        c.close()
        if commit:
            self.DB.commit()
    def insertFormProperty(self,lexicalFormID,propertyID,commit=False):
        """Insert a form property unless it already exists."""
        if self.checkLexicalFormPropertyExists(lexicalFormID,propertyID):
            return
        c = self.DB.cursor()
        query = "INSERT INTO formProperties (lexicalFormID,propertyID) VALUES (%s,%s)"
        c.execute(query, (lexicalFormID,propertyID))
        c.close()
        if commit:
            self.DB.commit()
    def insertLexicalSense(self,lexicalEntryID,commit=False):
        """ Insert lexicalSense, and optionally commit."""
        c = self.DB.cursor()
        identifier = "urn:uuid:" + str(uuid.uuid4())
        query = "INSERT INTO lexicalSense (lexicalEntryID,identifier) VALUES (%s,%s)"
        c.execute(query, (lexicalEntryID,identifier))
        lexicalSenseID = c.lastrowid
        c.close()
        if commit:
            self.DB.commit()
        return lexicalSenseID
    def insertLexicalSenseDefinition(self,lexicalSenseID,languageID,definition,commit=False):
        """Insert a definition for a sense in one language."""
        c = self.DB.cursor()
        query = "INSERT INTO senseDefinition (lexicalSenseID,languageID,value) VALUES (%s,%s,%s)"
        c.execute(query, (lexicalSenseID,languageID,definition))
        c.close()
        if commit:
            self.DB.commit()
    def insertSenseReference(self,lexicalSenseID,relation,reference,commit=False):
        """Insert an external reference; relation is <namespace>:<property>."""
        c = self.DB.cursor()
        namespace = relation.split(":")[0]
        property = relation.split(":")[1]
        query = "INSERT INTO senseReference (lexicalSenseID,namespace,property,reference) VALUES (%s,%s,%s,%s)"
        c.execute(query, (lexicalSenseID,namespace,property,reference))
        c.close()
        if commit:
            self.DB.commit()
    def insertComponent(self,lexicalEntryID,lexicalFormID,commit=False):
        """Insert a component, or return the existing one's id."""
        c = self.DB.cursor()
        # we should have a checkExists for this
        query = "SELECT componentID FROM component WHERE lexicalEntryID = %s AND lexicalFormID = %s"
        c.execute(query,(lexicalEntryID,lexicalFormID))
        row = c.fetchone()
        if row:
            # NOTE(review): cursor c is left open on this early return
            return row["componentID"]
        else:
            identifier = "urn:uuid:" + str(uuid.uuid4())
            query = "INSERT INTO component (identifier,lexicalEntryID,lexicalFormID) VALUES (%s,%s,%s)"
            c.execute(query,(identifier,lexicalEntryID,lexicalFormID))
            componentID = c.lastrowid
            c.close()
            if commit:
                self.DB.commit()
            return componentID
    def insertLexicalEntryComponent(self,lexicalEntryID,componentID,position,commit=False):
        """Link a component to an entry at a position, if not linked yet."""
        c = self.DB.cursor()
        # more another checkExists, where nothing is returned
        query = "SELECT * FROM lexicalEntryComponent WHERE lexicalEntryID = %s AND componentID = %s AND position = %s"
        c.execute(query,(lexicalEntryID,componentID,position))
        row = c.fetchone()
        if not row:
            query = "INSERT INTO lexicalEntryComponent (lexicalEntryID,componentID,position) VALUES (%s,%s,%s)"
            c.execute(query,(lexicalEntryID,componentID,position))
        c.close()
        if commit:
            self.DB.commit()
    def updateLexicalEntryValue(self,lexicalEntryID,label,languageID):
        """Rename an entry and its canonical form's written rep; commits."""
        c = self.DB.cursor()
        # find canonicalForm
        query = "SELECT * FROM lexicalForm WHERE lexicalEntryID = %s AND type = 'canonicalForm'"
        c.execute(query, (lexicalEntryID))
        canonicalform = c.fetchone()
        # update entry and writtenrep
        query = "UPDATE lexicalEntry SET value = %s WHERE lexicalEntryID = %s"
        c.execute(query, (label,lexicalEntryID))
        query = "UPDATE writtenRep SET value = %s WHERE lexicalFormID = %s AND languageID = %s"
        c.execute(query,(label,canonicalform["lexicalFormID"],languageID))
        c.close()
        self.DB.commit()
    def updateLexicalEntryPOS(self,lexicalEntryID,partOfSpeechID):
        """Change an entry's part of speech; commits."""
        c = self.DB.cursor()
        query = "UPDATE lexicalEntry SET partOfSpeechID = %s WHERE lexicalEntryID = %s"
        c.execute(query, (partOfSpeechID,lexicalEntryID))
        c.close()
        self.DB.commit()
    def updateSyllableCount(self,value,syllableCount,languageID):
        """Set the syllable count for every rep with this value; commits."""
        c = self.DB.cursor()
        query = "UPDATE writtenRep SET syllableCount = %s WHERE value = %s AND languageID = %s"
        c.execute(query,(syllableCount,value,languageID))
        c.close()
        self.DB.commit()
    def __getRow(self,query,args=None):
        """Run a query and return the first row (dict) or None."""
        c = self.DB.cursor()
        c.execute(query,args)
        row = c.fetchone()
        c.close()
        return row
    def __getRows(self,query,args=None):
        """Run a query and return all rows as a list of dicts."""
        c = self.DB.cursor()
        c.execute(query,args)
        rows = c.fetchall()
        c.close()
        return rows
    def __getUrlPart(self,url):
        """ Helper function to get the last part of the property url."""
        parsed = urlparse(url)
        if parsed.fragment:
            return parsed.fragment
        else:
            return parsed.path.split('/')[-1]
|
We are constantly looking for new updates and tips that we can pass on over to you. Today's is about getting more interactive with Social Media. As Google earlier this week has announced that they will be integrating Twitter into their search on the main page.
Before this week, Twitter results were shared on Google under live updates; as of this week, Google is updating all its algorithms to fully integrate social media links and improve users' search experience. Every business now has Facebook and Twitter accounts already set up, but needs to get more active with them.
I would recommend keeping these accounts updated by regularly posting messages with links to websites that are relevant to your business. Social media is about sharing, so if you have come across a website, article, image, news item, etc. that you have found interesting, share it on Twitter and Facebook. Most websites and news-related articles have sharing options available, such as this article http://goo.gl/m9BTs; you can see on the right-hand side there are quick links to share this with others.
The information you share on networking sites will help in making your presence known on the world wide web. You can share anything you prefer, if you keep it related to your business, you will attract good followers and benefit more.
Read the latest update on the Google Blog. http://goo.gl/YhDut and view the video.
|
"""Extract graphs."""
from Jabber.Plugins import Plugin
from Jabber.ZenAdapter import ZenAdapter
from Jabber.Options import Options
from optparse import OptionError
class Graph(Plugin):
name = 'graph'
capabilities = ['graph', 'help']
def call(self, args, log, **kw):
#Dirty hack to make it work with multiword options (they must be in '' or "")
i=-1
appnd=False
args1=[]
for arg in args:
if appnd:
args1[i]+=' '+arg.replace("'",'').replace('"','')
else:
i+=1
args1.append(arg.replace("'",'').replace('"',''))
if arg[0] in ('"', "'"): appnd=True
if arg[-1] in ('"', "'"): appnd=False
args=args1
log.debug('Graph extraction plugin running with arguments %s' % args)
opts = self.options()
adapter = ZenAdapter()
try:
(options, arguments) = opts.parse_args(args)
log.debug('Done parsing arguments. Options are "%s", arguments expanded to %s' % (options, arguments))
except OptionError, message:
return str(message)
if options.deviceName is None or (not options.list and options.graphName is None):
return 'NO. You must specify both device and graph with -d and -g.'
devices = adapter.devices(options.deviceName)
if len(devices) == 0:
return 'Cannot find a device, ip or mac for "%s"' % options.deviceName
log.debug('Found %d devices matching %s' % (len(devices), devices))
if options.list:
message=''
if options.subComponent:
for device in devices:
componentList = adapter.components(device, options.subComponent)
if componentList:
for component in componentList:
for validGraph in component.getDefaultGraphDefs():
message += validGraph['title'] + ' (' + component.absolute_url_path().split(device.id)[1][1:] + ')\n'
else:
for device in devices:
for validGraph in device.getDefaultGraphDefs():
message += validGraph['title'] + '\n'
return 'Valid graphs:\n' + message
log.debug('Going to look for graph %s' % options.graphName)
# rrdtool cannot accept arguments in unicode, so convert graphName to ascii first
message = self.obtainValues(adapter, devices, options.graphName.encode('ascii', 'ignore'), options.subComponent, log)
return message
def obtainValues(self, adapter, devices, graph, component, log):
import time
message = ''
log.debug('Have %d devices to check for %s' % (len(devices), graph))
for device in devices:
log.debug('Checking %s. For the graph %s' % (device.id, graph))
# try to get it directly from the device first.
if self.hasGraph(device, graph):
log.debug('The device %s does have the graph %s' % (device.id, graph))
message += '%s %s: %s\n' % (device.name(), graph, self.shorten(self.upload(self.render(device.getGraphDefUrl(graph)), device.name() + '/' + graph.replace(' ', '_') + '_' + time.strftime('%Y%m%d_%H%M%S',time.localtime()) +'.png')))
elif component is not None:
compList = adapter.components(device, component)
if not compList:
return 'Sorry. Cannot find a component %s on %s' % (component, device)
if len(compList)>1:
return 'Multiple components found. Please, define more exaclty.'
comp=compList[0]
log.debug('Looking for graph %s in component %s' % (graph, comp.name()))
if self.hasGraph(comp, graph):
message += '%s %s %s: %s\n' % (device.name(), component, graph, self.shorten(self.upload(self.render(comp.getGraphDefUrl(graph)), device.name() + comp.absolute_url_path()[comp.absolute_url_path().find(device.id)+len(device.id):] + '/' + graph.replace(' ', '_') +'_' + time.strftime('%Y%m%d_%H%M%S',time.localtime()) +'.png')))
else:
message += '%s %s: Does not have a graph named %s. Remember, spelling and case matter. Try -l for a list of graphs' % (device.name(), component, graph)
else:
message += '%s: Unable to find the graph %s. Remember, spelling and case matter. Try -l for a list of graphs' % (device.name(), graph)
return message
def hasGraph(self, entity, graph):
hasGr = False
for gr in entity.getDefaultGraphDefs():
if gr['title'] == graph:
hasGr = True
break
return hasGr
def render(self, url):
from urlparse import urlparse, parse_qsl
import StringIO
png = StringIO.StringIO()
from Products.ZenRRD.zenrender import RenderServer
png.write(eval('RenderServer("").render('+','.join(['%s="%s"' % k for k in parse_qsl(urlparse(url)[4])])+')'))
png.seek(0)
return png
def upload(self, strObj, saveAs):
import ftplib
con = ftplib.FTP('ftp.nm.ru', 'zenbot', 'qwe123#')
#create path if it doesn't exists and cwd to it
for dir in saveAs.split('/')[:-1]:
try:
con.cwd(dir)
except ftplib.error_perm:
con.mkd(dir)
con.cwd(dir)
con.storbinary('STOR ' + saveAs.split('/')[-1], strObj)
con.quit()
return 'http://zenbot.nm.ru/' + saveAs
def shorten(self,url):
import urllib2
html=urllib2.urlopen("http://tinyurl.com/create.php?url=%s" % url).read()
return html[html.find("<b>http://tinyurl.com/")+3:html.find("</b>",html.find("<b>http://tinyurl.com/"))]
def private(self):
return False
# parse the options
def options(self):
parser = Options(description = 'Retrieve graph. Simple example:\n graph -d 10.1.1.1 -g IO', prog = 'graph')
parser.add_option('-d', '--device', dest='deviceName', help='Device name, IP or MAC.')
parser.add_option('-g', '--graph', dest='graphName', help='Name of graph.')
parser.add_option('-l', '--list', dest='list', action='store_true', help='Only list graphs for the device and/or component.')
parser.add_option('-s', '--subcomponent', dest='subComponent', help='Optional subcomponent name, if the graph does not reside directly on the device. You will probably have to specify this.')
return parser
def help(self):
opts = self.options()
return str(opts.help())
|
2-room apartment for 4 people, 50 m2, on the ground floor, simple furnishings. Living room with satellite TV. Smoking allowed. Internet (Wireless LAN [WLAN]). 1 sofa bed. Sleeping room with 1 double bed. Kitchenette with 2 hot plates, dining table, fridge and deep freezer. Bath-room and shower/WC. Terrace 6 m2, terrace furniture. Other equipment.
New, comfortable, simple detached house "EDI / EDI4", built in 2004. House Equipment: air conditioning. Access & Parking: parking by the house. All-season motor access to the house. Situation: in the district of Medulin, sunny position in a residential area. Surroundings of the house: grounds (private use, 450 m2, fenced), garden (shared use), swimming pool (shared use, length 8 m x width 3 m), outside shower (shared use). Facilities and distances: shop 500 m, grocers 500 m, supermarket 500 m, restaurant 300 m, shingle beach 1 km, sandy beach 1.2 km, pebble beach 1 km, rocky beach 1 km, tennis 3.2 km, tennis club 3.2 km, bus stop 250 m, subway/train 10 km. Please note, nearby is a motorway in 2.6 km, main road in 300 m, railway line in 10 km and an airport/airfield in 13 km.
|
"""
Django settings for website project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'y8dz)a%5b0+-bgb=2(1ry1pt41rbng1x41cruigaht9c-n(yn='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'query',
#'blog',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'website.urls'
WSGI_APPLICATION = 'website.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_ROOT = 'staticfiles'
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
|
Gurushakthi Dental Clinic is one of the well-known clinics in Nagarabhavi, Bangalore, offering the best dental treatment services. The clinic has experienced dentists and consultants. Book an online appointment with Dr Gurushakthi at Gurushakthi Dental Clinic today and get a healthy smile.
|
import os
import datetime
from django.utils.dateparse import parse_datetime
import collections, json
from django.http import HttpResponse, JsonResponse
from django.http import HttpResponseForbidden
from django.shortcuts import render
from django.template import loader
from django.contrib.auth import get_user_model
from rest_framework.authentication import TokenAuthentication
from django.utils.encoding import smart_str
from django.core.urlresolvers import reverse_lazy
from django.db.models.signals import post_save
from django.core.mail import send_mail
from django.conf import settings
from wsgiref.util import FileWrapper
from rest_framework.decorators import detail_route
from rest_framework import generics
from django.db import IntegrityError
# Custom models
from .models import Record, Answer, Entity, Question, Symptom, Notes, Patient, Log, Doctor
# Serializers import
from .serializers import (
UserCreateSerializer,
UserLoginSerializer,
UserProfileSerializer,
UserGetSerializer,
AnswerSerializer,
AnswerGetSerializer,
RecordSerializer,
# DoctorCreateSerializer,
DoctorSerializer,
EntityCreateSerializer,
QuestionGetSerializer,
SymptomSerializer,
SymptomGetSerializer,
QuestionSerializer,
NotesCreateSerializer,
NotesGetSerializer,
PatientActivateSerializer,
PatientGetSerializer,
PatientSectorSerializer,
PatientStatusGetSerializer,
PatientScoreGetSerializer,
PatientRecordGetSerializer)
# rest_framework imports
from rest_framework import status
from rest_framework import filters
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.generics import (
CreateAPIView,
ListAPIView,
UpdateAPIView,
ListCreateAPIView,
RetrieveUpdateDestroyAPIView,
get_object_or_404)
# Import permissions
from rest_framework.permissions import (
AllowAny,
IsAuthenticated,
IsAdminUser,
)
from .permissions import IsOwner
from .emails import send_user_registration_emails
User = get_user_model()
#####################################################################################
# Set up trigger for registration email
# Only wire the post_save signal when the deployment opts in via the
# DJANGO_SEND_EMAIL environment variable (any non-empty value enables it).
if os.environ.get('DJANGO_SEND_EMAIL'):
post_save.connect(send_user_registration_emails, sender=User)
######################################################################################
# Build paths
# BASE_DIR is two levels above this module; downloads/ holds the Android APK.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DOWNLOADS_DIR = BASE_DIR + '/downloads/'
RELEASE_APK = 'app-release.apk'
######################################################################################
######################################################################################
# Method based views
# endpoint for '/home'
def index(request):
    """Serve the landing page for '/home'."""
    context = {'images': settings.MEDIA_URL}
    page = loader.get_template('index.html')
    return HttpResponse(page.render(context))
# endpoint for '/dashboard'
def dashboard(request):
    """Serve the dashboard page for '/dashboard'."""
    page = loader.get_template('dashboard.html')
    return HttpResponse(page.render())
# Method based views
# endpoint for '/home'
def download_android(request):
    """Stream the release APK as an attachment download.

    Serves DOWNLOADS_DIR/app-release.apk with the Android package MIME
    type and an explicit Content-Length.
    """
    file_name = DOWNLOADS_DIR + RELEASE_APK
    file_size = os.path.getsize(file_name)
    # FIX: use open(..., 'rb') — the file() builtin is Python-2-only and
    # the APK is binary data, so it must not pass through text decoding.
    wrapper = FileWrapper(open(file_name, 'rb'))
    response = HttpResponse(wrapper, content_type='application/vnd.android.package-archive')
    response['Content-Disposition'] = 'attachment; filename=%s' % smart_str(RELEASE_APK)
    response['Content-Length'] = file_size
    return response
######################################################################################
# Class based user views
class UserCreateView(CreateAPIView):
    '''API to create a new user (open to anonymous callers for signup).'''
    serializer_class = UserCreateSerializer
    permission_classes = [AllowAny]
    queryset = User.objects.all()

    def post(self, request, *args, **kwargs):
        """Validate the payload, log the attempt, and create the user."""
        serializer = UserCreateSerializer(data=request.data)
        try:
            serializer.is_valid(raise_exception=True)
            if not request.user.is_anonymous:
                Log.objects.create(user=request.user, activity='add_new_patient')
            return self.create(request, *args, **kwargs)
        # FIX: catch Exception instead of a bare except; a bare except also
        # swallows SystemExit/KeyboardInterrupt and hides unrelated bugs.
        except Exception:
            if not request.user.is_anonymous:
                Log.objects.create(user=request.user, activity='fail_add_new_patient')  # failed creation
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UserLoginView(APIView):
    '''API to login and obtain an auth token.'''
    serializer_class = UserLoginSerializer
    permission_classes = [AllowAny]
    queryset = User.objects.all()

    def post(self, request, *args, **kwargs):
        """Authenticate the credentials and return only the token field."""
        data = request.data
        serializer = UserLoginSerializer(data=data)
        if serializer.is_valid(raise_exception=True):
            result = serializer.data
            # Strip identity fields so the response carries only the token.
            # FIX: use the `in` operator — dict.has_key() is Python-2-only.
            if 'username' in result:
                result.pop('username')
            if 'email' in result:
                result.pop('email')
            if not request.user.is_anonymous:
                Log.objects.create(user=request.user, activity='success_sign_in')
            return Response(result, status=status.HTTP_200_OK)
        else:
            if not request.user.is_anonymous:
                Log.objects.create(user=request.user, activity='failed_sign_in')  # failed sign in
            return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class UserLogoutView(APIView):
    '''API to logout and delete the auth token (TokenAuthentication).'''
    serializer_class = UserLoginSerializer
    permission_classes = [IsAuthenticated, IsAdminUser]
    # queryset = User.objects.all()

    def post(self, request, *args, **kwargs):
        """Delete the caller's token; 204 on success, 400 when missing."""
        try:
            request.user.auth_token.delete()
        except Exception as e:
            print(e)
            return Response({"failure": "Not found."}, status=status.HTTP_400_BAD_REQUEST)
        # BUG FIX: status.HTTP_204_OK does not exist in DRF — the success
        # path raised AttributeError. The correct constant for a 204
        # response is HTTP_204_NO_CONTENT.
        return Response({"success": "Successfully logged out."}, status=status.HTTP_204_NO_CONTENT)
class UserValidateEmail(CreateAPIView):
    '''API to pre-validate registration: is the email/username free?'''
    serializer_class = UserLoginSerializer
    permission_classes = [AllowAny]
    queryset = User.objects.all()

    def post(self, request, *args, **kwargs):
        '''Return {"status": true} when neither username nor email is taken.'''
        data = request.data
        # .exists() issues a cheap EXISTS query instead of fetching rows
        # just to test queryset truthiness.
        taken = (User.objects.filter(username=data['email']).exists()
                 or User.objects.filter(email=data['email']).exists())
        if taken:
            return Response({'status': False}, status=status.HTTP_200_OK)
        return Response({'status': True}, status=status.HTTP_200_OK)
class UserProfileView(APIView):
    '''API to GET the authenticated user's profile information.'''
    serializer_class = UserProfileSerializer
    permission_classes = [IsAuthenticated]
    queryset = User.objects.all()

    def get(self, request, format=None):
        """Serialize and return the requesting user's profile."""
        user_obj = self.request.user
        # FIX: removed an unused `query` variable that ran a pointless
        # extra filter() against the User table.
        serializer = UserProfileSerializer(user_obj)
        return Response(serializer.data)
class AnswerAPIView(ListCreateAPIView):
    '''API to create one or multiple Answer instances.'''
    queryset = Answer.objects.all()
    serializer_class = AnswerSerializer
    permission_classes = [IsAuthenticated]

    def post(self, request, *args, **kwargs):
        '''Create answers, translating question numbers into Question pks.'''
        # Clients send the human-facing question_number; replace it with the
        # Question primary key the serializer expects. Cast through str()
        # because some backends return long integers for pks.
        # (Iterate the items directly instead of range(len(...)) indexing.)
        for item in request.data:
            item['question'] = str(Question.objects.get(question_number=item['question']).id)
        serializer = AnswerSerializer(data=request.data, many=True)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)

    def get_serializer(self, *args, **kwargs):
        # A list payload implies bulk creation -> many=True.
        if "data" in kwargs:
            data = kwargs["data"]
            if isinstance(data, list):
                kwargs["many"] = True
        return super(AnswerAPIView, self).get_serializer(*args, **kwargs)
class SymptomAPIView(ListCreateAPIView):
    '''API to create one or multiple Symptom instances.'''
    serializer_class = SymptomSerializer
    queryset = Symptom.objects.all()
    permission_classes = [IsAuthenticated]

    def get_serializer(self, *args, **kwargs):
        # Bulk support: when the payload is a list, serialize with many=True.
        payload = kwargs.get("data")
        if isinstance(payload, list):
            kwargs["many"] = True
        return super(SymptomAPIView, self).get_serializer(*args, **kwargs)
class RecordAPIView(ListCreateAPIView):
'''API to GET or create a new Record '''
queryset = Record.objects.all()
serializer_class = RecordSerializer
permission_classes = [IsAuthenticated]
# Creates a Record for the requesting user. When the client supplies a
# created_date (epoch milliseconds), it is used for de-duplication: a
# second submission with the same timestamp is rejected with 400.
def create(self, request):
if 'created_date' in request.data:
# Convert epoch ms -> 'YYYY-MM-DD HH:MM:SSZ' string for the lookup/save.
request.data['created_date'] = datetime.datetime.fromtimestamp(int(request.data['created_date'])/1000).strftime('%Y-%m-%d %H:%M:%S'+'Z')
try:
# EAFP duplicate check: get() raising DoesNotExist is the create path.
record = Record.objects.get(user=request.user, created_date=request.data['created_date'])
print("One instance already initiated")
return Response({"detail": "Instance already initiated"}, status=status.HTTP_400_BAD_REQUEST)
except Record.MultipleObjectsReturned:
print("Multiple instances initiated")
return Response({"detail": "Instance already initiated"}, status=status.HTTP_400_BAD_REQUEST)
except Record.DoesNotExist:
print("Creating new record")
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
try:
serializer.save(user=self.request.user, created_date=request.data['created_date'])
except IntegrityError:
# DB-level unique constraint raced with the get() above.
return Response({"detail": "Instance already initiated"}, status=status.HTTP_400_BAD_REQUEST)
else:
# No client timestamp: create unconditionally with the default date.
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
serializer.save(user=self.request.user)
return Response(serializer.data, status=status.HTTP_201_CREATED)
# GET lists only the requesting user's own records.
def get_queryset(self):
return Record.objects.filter(user=self.request.user)
class RecordUpdateView(RetrieveUpdateDestroyAPIView):
    '''API to update a Record's score and/or toggle its signing user.'''
    serializer_class = RecordSerializer
    queryset = Record.objects.all()
    permission_classes = [IsAuthenticated]

    def update(self, request, pk=None):
        """Apply 'score' and/or 'update_user' keys from the payload."""
        record = get_object_or_404(Record, pk=pk)
        for param in request.data:
            if param == 'score':
                record.score = request.data[param]
                print('updating score', record.signed)
            if param == 'update_user':
                # Toggle signing: sign with the caller when unsigned, clear
                # otherwise. FIX: identity test `is None`, not `== None`.
                record.signed = request.user if record.signed is None else None
                print('updating user', record.signed)
        record.save()
        return Response({'detail': 'Signed user info updated'}, status=status.HTTP_200_OK)
# class RecordUpdateView(RetrieveUpdateDestroyAPIView):
# '''API to delete or edit a Record '''
# queryset = Record.objects.filter()
# serializer_class = RecordSerializer
# permission_classes = [IsAuthenticated]
# def update(self, request)
# model = Record
# success_url = reverse_lazy('id')
class QuestionUpdateView(RetrieveUpdateDestroyAPIView):
    '''API to retrieve, edit or delete a Question.'''
    model = Question
    queryset = Question.objects.filter()
    serializer_class = QuestionSerializer
    permission_classes = [IsAuthenticated]
    success_url = reverse_lazy('id')
# Custom mixin for Generic views in Django Rest Framework API Guide
class MultipleFieldLookupMixin(object):
    """
    Apply this mixin to any view or viewset to get multiple field filtering
    based on a `lookup_fields` attribute, instead of the default single field filtering.
    """
    def get_object(self):
        """Look up the object by every non-empty field in `lookup_fields`."""
        queryset = self.get_queryset()             # Get the base queryset
        queryset = self.filter_queryset(queryset)  # Apply any filter backends
        # FIX: renamed from `filter` so we no longer shadow the builtin.
        filter_kwargs = {}
        for field in self.lookup_fields:
            if self.kwargs[field]:                 # Ignore empty fields.
                filter_kwargs[field] = self.kwargs[field]
        return get_object_or_404(queryset, **filter_kwargs)  # Lookup the object
class AnswerUpdateView(MultipleFieldLookupMixin, RetrieveUpdateDestroyAPIView):
    '''API to delete or edit an answer identified by (record, question).'''
    queryset = Answer.objects.filter()
    serializer_class = AnswerSerializer
    permission_classes = [IsAuthenticated]
    model = Answer
    success_url = reverse_lazy('record', 'question')
    # BUG FIX: the original assigned `lookup_field` twice (the second line
    # silently overwrote the first, losing the 'record' lookup) and listed
    # the mixin AFTER the generic view, so MultipleFieldLookupMixin.get_object
    # was never reached in the MRO. The mixin reads a `lookup_fields`
    # iterable, so declare both fields there and put the mixin first.
    lookup_fields = ('record', 'question')
class SymptomUpdateView(MultipleFieldLookupMixin, RetrieveUpdateDestroyAPIView):
    '''API to delete or edit a symptom identified by (record, symptom).'''
    queryset = Symptom.objects.filter()
    serializer_class = SymptomSerializer
    permission_classes = [IsAuthenticated]
    model = Symptom
    success_url = reverse_lazy('record', 'symptom')
    # BUG FIX: same as AnswerUpdateView — duplicate `lookup_field`
    # assignments lost the 'record' lookup, and the mixin was unreachable
    # in the MRO. Use `lookup_fields` and list the mixin first.
    lookup_fields = ('record', 'symptom')
class QuestionGetAPIView(ListAPIView):
    '''List every Question stored in the database.'''
    queryset = Question.objects.all()
    serializer_class = QuestionGetSerializer
    permission_classes = [IsAuthenticated]
class CurrentUserView(APIView):
    '''Return the authenticated caller's serialized user information.'''
    permission_classes = [IsAuthenticated]

    def get(self, request):
        data = UserCreateSerializer(self.request.user).data
        return Response(data)
######################################################################################
# Class based privileged user views
######################################################################################
class PatientHistoryView(APIView):
'''API to get patient history '''
serializer_class = PatientActivateSerializer
permission_classes = [IsAuthenticated, IsAdminUser]
queryset = User.objects.filter(is_staff=False)
# POST {"username": ...} -> list of {record, data (answers), symp (symptoms)}
# for every Record the named patient has submitted. Admin only.
def post(self, request, *args, **kwargs):
data = request.data
# Check if request contains username
username = data.get("username", None)
result = {}
if not username:
error = "username is required"
result['error'] = error
return Response(result, status=status.HTTP_400_BAD_REQUEST)
else:
pass
#print "username found", data['username']
# Check if username is valid
if User.objects.filter(username=username).exists():
user = User.objects.filter(username=username).first()
# Staff accounts are rejected: history is only defined for patients.
if user.is_staff:
error = "user is not a patient!"
result['error'] = error
return Response(result, status=status.HTTP_400_BAD_REQUEST)
user_serial = PatientActivateSerializer(user)
query = Record.objects.filter(user=user)
result = []
# One entry per record: the record itself plus its answers and symptoms.
for record in query:
clean_result = {}
record_serial = RecordSerializer(record)
clean_result['record'] = record_serial.data
answers = Answer.objects.filter(record=record_serial.data['id'])
symptoms = Symptom.objects.filter(record=record_serial.data['id'])
ans_result, symp_result = [], []
for ans in answers:
ans_serial = AnswerGetSerializer(ans);
ans_result.append(ans_serial.data)
for symp in symptoms:
symp_serial = SymptomGetSerializer(symp);
symp_result.append(symp_serial.data)
clean_result['data'] = ans_result
clean_result['symp'] = symp_result
result.append(clean_result)
# print(result)
return Response(result, status=status.HTTP_200_OK)
else:
error = "username does not exist!"
result['error'] = error
return Response(result, status=status.HTTP_400_BAD_REQUEST)
class PatientActivateView(APIView):
    '''API to activate a patient account (admin only).'''
    serializer_class = PatientActivateSerializer
    permission_classes = [IsAuthenticated, IsAdminUser]
    queryset = User.objects.filter(is_staff=False)

    def post(self, request, *args, **kwargs):
        """Set is_active=True for the patient named in request.data['username']."""
        result = {}
        username = request.data.get("username", None)
        if not username:
            result['error'] = "username is required"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        # Single .first() lookup instead of exists() followed by first()
        # (one DB round-trip instead of two).
        user = User.objects.filter(username=username).first()
        if user is None:
            result['error'] = "username does not exist"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        # Activation is only defined for patient (non-staff) accounts.
        if user.is_staff:
            result['error'] = "user is not a patient"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        user.is_active = True
        user.save()
        user_serial = PatientActivateSerializer(user)
        return Response(user_serial.data, status=status.HTTP_200_OK)
class PatientDeactivateView(APIView):
    '''API to deactivate a patient account (admin only).'''
    serializer_class = PatientActivateSerializer
    permission_classes = [IsAuthenticated, IsAdminUser]
    queryset = User.objects.filter(is_staff=False)

    def post(self, request, *args, **kwargs):
        """Set is_active=False for the patient named in request.data['username']."""
        result = {}
        username = request.data.get("username", None)
        if not username:
            result['error'] = "username is required"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        # Single .first() lookup instead of exists() followed by first()
        # (one DB round-trip instead of two).
        user = User.objects.filter(username=username).first()
        if user is None:
            result['error'] = "username does not exist"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        # Deactivation is only defined for patient (non-staff) accounts.
        if user.is_staff:
            result['error'] = "user is not a patient"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        user.is_active = False
        user.save()
        user_serial = PatientActivateSerializer(user)
        return Response(user_serial.data, status=status.HTTP_200_OK)
class EntityCreateView(CreateAPIView):
    '''Create a new Entity (admin only).'''
    queryset = Entity.objects.all()
    serializer_class = EntityCreateSerializer
    permission_classes = [IsAuthenticated, IsAdminUser]
class DoctorCreateView(CreateAPIView):
    '''Create a new doctor user (admin only).'''
    queryset = Doctor.objects.all()
    serializer_class = DoctorSerializer
    permission_classes = [IsAuthenticated, IsAdminUser]
class DoctorGetView(ListAPIView):
    '''List all doctor users (admin only).'''
    queryset = Doctor.objects.all()
    serializer_class = DoctorSerializer
    permission_classes = [IsAuthenticated, IsAdminUser]
class PatientGetView(ListAPIView):
    '''List every patient (all non-staff users; admin only).'''
    queryset = User.objects.filter(is_staff=False)
    serializer_class = PatientStatusGetSerializer
    permission_classes = [IsAuthenticated, IsAdminUser]
class PatientDataGetView(ListAPIView):
    '''API to get all patients' latest data (dashboard feed; admin only).'''
    permission_classes = [IsAuthenticated, IsAdminUser]
    queryset = User.objects.filter(is_staff=False)

    def get(self, request, format=None):
        """Return, per active patient, the user, location, latest notes and
        latest record with its answers, sorted newest-record first."""
        # BUG FIX: datetime.date(2018, 06, 28) — a leading zero in an
        # integer literal is a SyntaxError on Python 3; 6 is value-identical.
        # NOTE(review): the 2018-06-28 join-date cutoff looks like a
        # hard-coded launch date — confirm and consider making it a setting.
        patients = User.objects.filter(is_staff=False, is_active=True, date_joined__gte=datetime.date(2018, 6, 28))
        # patients = User.objects.filter(is_staff=False, is_active=True)
        result = []
        if not request.user.is_anonymous:
            Log.objects.create(user=request.user, activity='view_dashboard')
        for user in patients:
            # Build one ordered entry per patient with their latest data.
            entry = collections.OrderedDict()
            user_serial = PatientGetSerializer(user)
            entry['user'] = user_serial.data
            patient = Patient.objects.filter(user=user).first()
            # Sector/location data (empty sector when no Patient row exists).
            if patient is not None:
                sector_serial = PatientSectorSerializer(patient)
                entry['location'] = sector_serial.data
            else:
                entry['location'] = {'sector': ''}
            # Latest clinical notes, if any.
            notes = Notes.objects.filter(patient=user).last()
            if notes is not None:
                notes_serial = NotesGetSerializer(notes)
                entry['notes'] = notes_serial.data
            else:
                entry['notes'] = {}
            # Latest record plus its answers; a placeholder ancient date keeps
            # patients without records sortable (and last) below.
            query = Record.objects.filter(user=user).last()
            if query is not None:
                rec = RecordSerializer(query)
                entry['record'] = rec.data
                answers = Answer.objects.filter(record=rec.data['id'])
                ans = PatientRecordGetSerializer(answers, many=True)
                entry['data'] = ans.data
                result.append(entry)
            else:
                entry['record'] = {'date': '1900-05-24T07:27:21.238535Z'}
                entry['data'] = []
                result.append(entry)
        # NOTE(review): strftime('%s') is a platform-specific (glibc)
        # extension — presumably fine on the Linux host; verify.
        result = sorted(result, key=lambda x: float(parse_datetime(x['record']['date']).strftime('%s')), reverse=True)
        return Response(result)
class PatientScoreGetView(ListAPIView):
    '''API to get every active patient's latest score (admin only).'''
    permission_classes = [IsAuthenticated, IsAdminUser]
    queryset = User.objects.filter(is_staff=False)

    def get(self, request, format=None):
        """One ordered entry per active patient: user, location, last record."""
        active_patients = User.objects.filter(is_staff=False, is_active=True)
        payload = []
        for patient_user in active_patients:
            entry = collections.OrderedDict()
            entry['user'] = PatientGetSerializer(patient_user).data
            patient_row = Patient.objects.filter(user=patient_user).first()
            if patient_row is None:
                entry['location'] = {'sector': ''}
            else:
                entry['location'] = PatientSectorSerializer(patient_row).data
            latest_record = Record.objects.filter(user=patient_user).last()
            if latest_record is not None:
                entry['record'] = RecordSerializer(latest_record).data
                payload.append(entry)
        return Response(payload)
class NotesCreateView(APIView):
    '''API to add notes (optionally with a dosage) for a patient.'''
    permission_classes = [IsAuthenticated, IsAdminUser]
    queryset = User.objects.filter(is_staff=False)

    def post(self, request, format=None):
        """Create a Notes row authored by the caller for the named patient."""
        if not request.user.is_anonymous:
            Log.objects.create(user=request.user, activity='add_patient_note')
        data = request.data
        result = {}
        # The authenticated caller becomes the note's author.
        auth_user = None
        if request and request.user:
            auth_user = request.user
        # Guard clauses: both 'notes' and 'username' are required.
        notes = data.get("notes", None)
        if not notes:
            result['error'] = "notes is required!"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        username = data.get("username", None)
        if not username:
            result['error'] = "username is required!"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        # Check if requested user is a patient
        if User.objects.filter(username=username).exists():
            user = User.objects.filter(username=username).first()
            if user.is_staff:
                result['error'] = "user is not a patient"
                return Response(result, status=status.HTTP_400_BAD_REQUEST)
            # Dosage is optional
            dosage = data.get("dosage", None)
            if not dosage:
                saved_notes = Notes.objects.create(author=auth_user, patient=user, notes=notes)
            else:
                saved_notes = Notes.objects.create(author=auth_user, patient=user, notes=notes, dosage=dosage)
            notes_serial = NotesCreateSerializer(saved_notes)
            return Response(notes_serial.data, status=status.HTTP_201_CREATED)
        else:
            result['error'] = "username does not exist!"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        # FIX: removed the unreachable trailing `return Response(result)` —
        # every branch above already returns.
class NotesGetAPIView(ListAPIView):
    '''API to get the latest notes for every active patient (admin only).'''
    permission_classes = [IsAuthenticated, IsAdminUser]
    queryset = Notes.objects.all()

    def get(self, request, format=None):
        """One ordered entry per active patient: the patient plus their most
        recent Notes row ({} when none exist)."""
        active_patients = User.objects.filter(is_staff=False, is_active=True)
        payload = []
        for patient_user in active_patients:
            entry = collections.OrderedDict()
            entry['patient'] = PatientGetSerializer(patient_user).data
            latest_notes = Notes.objects.filter(patient=patient_user).last()
            if latest_notes is None:
                entry['notes'] = {}
            else:
                entry['notes'] = NotesGetSerializer(latest_notes).data
            payload.append(entry)
        return Response(payload)
class NotesHistoryGetView(APIView):
    '''API to get the full notes history for one patient (admin only).'''
    permission_classes = [IsAuthenticated, IsAdminUser]
    queryset = User.objects.filter(is_staff=False)

    def post(self, request, *args, **kwargs):
        """POST {"username": ...} -> list of serialized Notes for that patient."""
        data = request.data
        if not request.user.is_anonymous:
            Log.objects.create(user=request.user, activity='view_patient_details')
        # Check if request contains username
        username = data.get("username", None)
        result = {}
        if not username:
            result['error'] = "username is required"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
        # Check if username is valid
        if User.objects.filter(username=username).exists():
            user = User.objects.filter(username=username).first()
            # History is only defined for patient (non-staff) accounts.
            if user.is_staff:
                result['error'] = "user is not a patient!"
                return Response(result, status=status.HTTP_400_BAD_REQUEST)
            # FIX: removed the unused `user_serial = UserGetSerializer(user)`
            # local — it was never referenced.
            query = Notes.objects.filter(patient=user)
            result = []
            for record in query:
                notes_serial = NotesGetSerializer(record)
                result.append(notes_serial.data)
            return Response(result, status=status.HTTP_200_OK)
        else:
            result['error'] = "username does not exist!"
            return Response(result, status=status.HTTP_400_BAD_REQUEST)
|
Has anybody been to the Mango Tree, the new Indian restaurant on Redcross Way where Thames Spice used to be?
There's one glowing review in the Food/Drink section of this site but as it's the only one, I don't trust that the reviewer doesn't own the place!
Any opinions would be welcomed.
I am going to try this. The place that was there before was not that good. This one looks sharper. I hope the food lives up to the decor.
Hi Lulu. The new one looks very sharp and modern, which usually means expensive, bland, or both! I hope neither.
I never ate at Thames Spice but they provided an invaluable delivery service to my office on Southwark Bridge Road during a period when some of us were often working until stupid o'clock in the evening.
I've walked past it several times, and each time it has been empty. It strikes me as rather a poor location for a restaurant. Looks fashionable, but I'm reluctant to go somewhere where my g/f and I would be the sole diners.
Those wishing to sample Mango Tree may find something to their advantage in next Monday's edition of our weekly email newsletter.
I've been twice already and it's been great both times. Much better than the previous place.
Am pleased to hear someone has tried this restaurant.
I walk past it and have wondered. It looks good.
Went to the other one once and was the only person there.
Agree it is out of the way.
I will try it in a week or two and report back.
The food was always good at Thames Spice. Often got takeaways from there and continually suggested they put pictures on their walls to encourage diners. The manager always politely told me they would when they finished decorating, but over a two/three year period I never saw anything ever change there.
Hope the Mango Tree is a success but its location is problematic I fear.
I walk past the Mango Tree once a week, and it is always empty.
It looks very nice too. As a previous poster mentioned it has a lot of staff too.
I notice that the tables seem really close together; that seems a mistake to me.
Anyone got a clue why it is not busy still?
|
from GetRobinhoodTrades import getRobinhoodTrades
import argparse

import Exporter

exporter = Exporter.Exporter("trades")

parser = argparse.ArgumentParser(description='Export Robinhood trades to a CSV file')
parser.add_argument('--debug', action='store_true', help='store raw JSON output to debug.json')
parser.add_argument('--username', required=True, help='your Robinhood username')
parser.add_argument('--password', required=True, help='your Robinhood password')
exporter.addArgumentsToParser(parser)
args = parser.parse_args()
exporter.parseArguments(args)

trades = getRobinhoodTrades(args.username, args.password, args.debug)

# Map raw trade keys to the CSV column names we want to export.
desired_keys_mappings = {
    "price": "Purchase price per share",
    "timestamp": "Date purchased",
    "fees": "Commission",
    "quantity": "Shares",
    "symbol": "Symbol",
    "side": "Transaction type"
}
# One fixed, sorted key order used for BOTH the header and the rows, so
# they can never drift out of sync (the old code derived the header from
# trades[0].keys(), which could misalign columns if a key was missing).
desired_keys = sorted(desired_keys_mappings)

# Header row.
lines = [",".join(desired_keys_mappings[key] for key in desired_keys)]

# Data rows. Google Finance prefers dates in ascending order, so reverse
# the given (newest-first) order. Missing keys become empty cells instead
# of raising KeyError. An empty trade list yields a header-only CSV.
for trade in reversed(trades):
    lines.append(",".join(str(trade.get(key, "")) for key in desired_keys))

exporter.exportText("\n".join(lines))
|
Do you think you have ever had a concussion?
Do you need to hit your head to get a concussion?
Do you have to lose consciousness (get knocked out) to have a concussion?
Can you expect the same outcome with a second concussion as you had with the first?
If you don't have symptoms immediately, you probably didn't have a concussion.
How long do symptoms of a concussion last?
Now that you have completed most of this quiz, do you think you have ever had a concussion?
If you have concussion symptoms in the future, where do you think you should go for treatment?
Did you seek medical care?
Where did you seek medical care?
With greater awareness of head injury as a result of completing this quiz, are you now likely to take more precautions to prevent concussions of yourself and your loved ones (e.g. wearing a helmet when bicycling, getting a quality helmet when competing in contact sports, etc.)?
|
#!/usr/bin/env python
#coding=utf-8
import decimal
import time
import random
import datetime
import calendar
# Cryptographically strong RNG (os-entropy backed), shared by the module.
random_generator = random.SystemRandom()
class Utils:
    """Assorted conversion helpers (money, data sizes, time) and order-id
    generation.

    All methods are static. Monetary/size conversions use Decimal to avoid
    binary floating-point rounding errors and return strings with exactly
    two decimal places.
    """

    # Rolling 4-digit counter appended to timestamps in gen_order_id;
    # wraps back to 1 after 9999.
    _base_id = 0
    # Random per-process id seeded once at class creation time.
    _CurrentID = random.SystemRandom().randrange(1, 1024)

    @staticmethod
    def fen2yuan(fen=0):
        """Convert fen (1/100 yuan) to a yuan string with two decimals."""
        f = decimal.Decimal(fen or 0)
        y = f / decimal.Decimal(100)
        return str(y.quantize(decimal.Decimal('1.00')))

    @staticmethod
    def yuan2fen(yuan=0):
        """Convert yuan to an integer number of fen (rounded to integral)."""
        y = decimal.Decimal(yuan or 0)
        f = y * decimal.Decimal(100)
        return int(f.to_integral_value())

    @staticmethod
    def kb2mb(ik):
        """Convert kilobytes to a megabytes string with two decimals."""
        _kb = decimal.Decimal(ik or 0)
        _mb = _kb / decimal.Decimal(1024)
        return str(_mb.quantize(decimal.Decimal('1.00')))

    @staticmethod
    def sec2hour(sec=0):
        """Convert seconds to an hours string with two decimals."""
        _sec = decimal.Decimal(sec or 0)
        _hor = _sec / decimal.Decimal(3600)
        return str(_hor.quantize(decimal.Decimal('1.00')))

    @staticmethod
    def bps2mbps(bps):
        """Convert bits-per-second to an Mbps string with two decimals."""
        _bps = decimal.Decimal(bps or 0)
        _mbps = _bps / decimal.Decimal(1024 * 1024)
        return str(_mbps.quantize(decimal.Decimal('1.00')))

    @staticmethod
    def gen_order_id():
        """Return a timestamp-based order id: YYYYmmddHHMMSS + 4-digit counter.

        Unique within a process as long as fewer than 10000 ids are
        generated per second.
        """
        if Utils._base_id >= 9999:
            Utils._base_id = 0
        Utils._base_id += 1
        _num = str(Utils._base_id).zfill(4)
        return datetime.datetime.now().strftime("%Y%m%d%H%M%S") + _num

    @staticmethod
    def add_months(dt, months, days=0):
        """Return dt shifted by `months` months, then by `days` days.

        The day-of-month is clamped to the last valid day of the target
        month (e.g. Jan 31 + 1 month -> Feb 29 in a leap year).
        """
        month = dt.month - 1 + months
        # Floor division: plain `/` yields a float on Python 3 and breaks
        # dt.replace(year=...). `//` matches Python 2's int `/` exactly,
        # including floor behavior for negative month offsets.
        year = dt.year + month // 12
        month = month % 12 + 1
        day = min(dt.day, calendar.monthrange(year, month)[1])
        dt = dt.replace(year=year, month=month, day=day)
        return dt + datetime.timedelta(days=days)
class MemCache:
    '''Simple in-process cache with optional per-key expiry (seconds).'''

    def __init__(self):
        # key -> {'obj': value, 'expire': seconds (0 = never), 'time': stored-at}
        self.cache = {}

    def set(self, key, obj, expire=0):
        """Store obj under key; expire=0 means the entry never expires.

        Empty-string/None keys or values are silently ignored.
        """
        if obj in ("", None) or key in ("", None):
            return None
        objdict = dict(obj=obj, expire=expire, time=time.time())
        self.cache[key] = objdict

    def get(self, key):
        """Return the cached value, or None if missing or expired.

        Expired entries are deleted on access (lazy eviction).
        """
        objdict = self.cache.get(key)
        if objdict is None:
            return None
        if objdict['expire'] == 0 or (time.time() - objdict['time']) < objdict['expire']:
            return objdict['obj']
        del self.cache[key]
        return None

    def aget(self, key, fetchfunc, *args, **kwargs):
        """Return the cached value, fetching and caching it on a miss.

        Bug fix: the old code checked `key in self.cache` and returned
        get(key) directly, so an expired-but-present key returned None
        without ever calling fetchfunc. Expiry now correctly triggers a
        re-fetch. `expire` may be passed as a keyword (default 600s).
        """
        result = self.get(key)
        if result is not None:
            return result
        if fetchfunc:
            expire = kwargs.pop('expire', 600)
            result = fetchfunc(*args, **kwargs)
            if result:
                self.set(key, result, expire=expire)
            return result
# Module-level singleton cache instance shared by importers of this module.
memcache = MemCache()
|
Hey guys, Samurai here, and I have some solemn news for you. News broke earlier today that Monty Oum, the creator of, among other things, the groundbreaking animated series RWBY, passed away due to medical complications at the age of thirty-three.
This entry was posted on February 3, 2015 by charlmeister in Uncategorized.
|
from dota2Error import Dota2APIError
import time
import datetime
# import dota2
from MysqlHelper import *
from APIConnection import *
from multiprocessing import Process, Queue, Lock, freeze_support
import time
import random
# Next match sequence number to fetch (module-level starting point).
start_seq_num = 0
api = APIConnection()
# Number of concurrent fetch worker processes.
process_num = 5
# How many sequence numbers one worker batch covers.
base_num = 1000000
# Overall sequence-number range to crawl.
base_start_num = 600000000
base_end_num = 650000000
# Total batch count. Floor division: plain `/` yields a float on
# Python 3; `//` gives the same integer result on Python 2 and 3.
max_process_num = (base_end_num - base_start_num) // base_num
def fetch_history_by_seq_num(data_queue, start_seq_num, callback_queue):
    """Worker: fetch matches by sequence number and push them onto data_queue.

    Crawls from start_seq_num up to start_seq_num + base_num, then puts 1
    on callback_queue to signal that this batch is done.

    NOTE(review): Python 2 code (print statement). Relies on the
    module-level globals `api` and `base_num`.
    """
    max_seq_num = start_seq_num + base_num
    while True:
        matchs = api._getMatchBySeqNum(start_seq_num)
        for x in matchs:
            # Back off while the consumer queue is full so the DB writer
            # can catch up.
            while data_queue.full():
                print "queue is full"
                time.sleep(random.random())
            data_queue.put(x)
            # Track the highest sequence number actually returned.
            start_seq_num = x['match_seq_num']
        start_seq_num += 1
        if start_seq_num >= max_seq_num:
            callback_queue.put(1)
            break
def saveToDB(queue):
    """Consumer: drain match dicts from `queue` and persist them to the DB.

    Runs forever; blocks on queue.get() when no data is available.
    """
    while True:
        x = queue.get()
        handle = MatchHandle()
        match = DotaMatchModel()
        match.match_id = x['match_id']
        match.match_seq_num = x['match_seq_num']
        # Matches may have fewer than 10 players; missing slots are skipped.
        # (Narrowed from a blanket `except Exception` so real errors surface.)
        try:
            match.player0 = str(x['players'][0])
            match.player1 = str(x['players'][1])
            match.player2 = str(x['players'][2])
            match.player3 = str(x['players'][3])
            match.player4 = str(x['players'][4])
            match.player5 = str(x['players'][5])
            match.player6 = str(x['players'][6])
            match.player7 = str(x['players'][7])
            match.player8 = str(x['players'][8])
            match.player9 = str(x['players'][9])
        except IndexError:
            pass
        match.radiant_win = 0 if x['radiant_win'] == False else 1
        match.duration = x['duration']
        match.start_time = datetime.datetime.fromtimestamp(
            int(x['start_time'])
        ).strftime('%Y-%m-%d %H:%M:%S')
        match.first_blood_time = x['first_blood_time']
        match.tower_status_radiant = x['tower_status_radiant']
        match.tower_status_dire = x['tower_status_dire']
        match.barracks_status_radiant = x['barracks_status_radiant']
        match.barracks_status_dire = x['barracks_status_dire']
        match.cluster = x['cluster']
        match.lobby_type = x['lobby_type']
        match.human_players = x['human_players']
        match.leagueid = x['leagueid']
        match.positive_votes = x['positive_votes']
        match.negative_votes = x['negative_votes']
        match.game_mode = x['game_mode']
        handle.saveMatchToDB(match)
        # Removed `start_seq_num = start_seq_num + 1`: assigning the name
        # made it function-local, so reading it on the right-hand side
        # raised UnboundLocalError on the first iteration (no `global`
        # declaration), and the counter served no purpose here anyway.
# Entry point: start `process_num` fetch workers plus one DB writer, then
# launch a replacement fetch worker each time a batch finishes until the
# whole [base_start_num, base_end_num) range is covered.
# NOTE(review): Python 2 code (xrange, print statement).
if __name__ == '__main__':
    freeze_support()
    process_list = []
    # Shared pipeline: fetch workers -> q -> saveToDB.
    q = Queue()
    # Workers put 1 here when their batch is complete.
    callback_queue = Queue()
    for i in xrange(0,process_num):
        process_list.append(Process(target=fetch_history_by_seq_num,args=(q, base_start_num + base_num * i, callback_queue,)))
    current_num = process_num
    process_list.append(Process(target=saveToDB,args=(q,)))
    for x in process_list:
        x.start()
    # Each completion signal spawns the next batch until all are dispatched.
    result = callback_queue.get()
    while result == 1:
        print current_num
        if current_num >= max_process_num:
            break
        Process(target=fetch_history_by_seq_num,args=(q, base_start_num + base_num * current_num, callback_queue,)).start()
        current_num += 1
        result = callback_queue.get()
    # NOTE(review): saveToDB loops forever, so this join never returns for
    # the writer process — confirm intended shutdown behavior.
    for x in process_list:
        x.join()
|
An acclaimed actor in the Indian Film Industry , Sarika commenced her career as a child actor at the age of five . As a child star for several years she acted in myriad films essaying the roles of both male and female characters. Her most notable and popular film as a child artist was in the musical hit film ‘Humraz ‘ where she was seen as the daughter of the actress Vimi. At a young tender age she acted alongside stalwart actors like Sunil Dutt , Mumtaz ,Rajkumar . Acting school for young Sarika was in the directorial orbit of BR Chopra , Hrishikesh Mukherjee , Gulzar among others. As a teenager she transitioned to being a heroine and playing the lead role in films like “ Kaagaz Ki Nao” and “ Geet Gaata Chal “. In a span of twenty years in her earlier career graph she went on to act in films spanning diverse genres . Her deep interest in Cinema made her explore the technical aspects of Cinema . Assimilating the craft and skill she took on roles of assisting in script formation of a film ,in sound and costume designing .She arduously crafted her idiom of design in the arena of Sound and Costume design in over thirty film . She won a National award as a costume designer for the film” Hey Ram”. Adapting to the evolving face of Cinema in contemporary times the artist in her made a conscious choice to act in films like Tahan, Bheha Fry , Club 60 , and Parzania. For the film Parzania she won her second National award for acting . Sarika lives in Mumbai continuing her journey as an actor .
|
#!/usr/bin/python -OO
# This file is part of Archivematica.
#
# Copyright 2010-2013 Artefactual Systems Inc. <http://artefactual.com>
#
# Archivematica is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Archivematica is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Archivematica. If not, see <http://www.gnu.org/licenses/>.
# @package Archivematica
# @subpackage MCPServer
# @author Joseph Perry <joseph@artefactual.com>
import logging
import sys
sys.path.append("/usr/lib/archivematica/archivematicaCommon")
from dicts import ReplacementDict
LOGGER = logging.getLogger('archivematica.mcp.server')
class unitFile(object):
    """A single file tracked by the MCP server.

    Holds the file's current path, UUID (the string "None" until one is
    assigned), group use, and a back-reference to its owning unit.
    """

    def __init__(self, currentPath, UUID="None", owningUnit=None):
        self.currentPath = currentPath
        self.UUID = UUID
        self.owningUnit = owningUnit
        self.fileGrpUse = 'None'
        self.fileList = {currentPath: self}
        # Inherit the owning unit's path string when one exists.
        self.pathString = owningUnit.pathString if owningUnit else ""

    def __str__(self):
        return 'unitFile: <UUID: {u.UUID}, path: {u.currentPath}>'.format(u=self)

    def getReplacementDic(self, target=None):
        """Build the variable-replacement dictionary for this file."""
        if target is not None and self.owningUnit:
            # A target was supplied: delegate to the owning unit.
            return self.owningUnit.getReplacementDic(self.owningUnit.currentPath)
        if self.UUID != "None":
            return ReplacementDict.frommodel(
                type_='file',
                file_=self.UUID
            )
        # No UUID assigned yet, so ReplacementDict.frommodel cannot be
        # used; construct the mapping by hand in the old style.
        return ReplacementDict({
            "%relativeLocation%": self.currentPath,
            "%fileUUID%": self.UUID,
            "%fileGrpUse%": self.fileGrpUse
        })

    def reload(self):
        pass

    def reloadFileList(self):
        pass
|
God bless you and happy day! Listen and download Shortland Street Episode 5541 5542 21th July 2014 720p Hd Mp3 Gratis. Download and listen high quality 320Kbps (HD) Listen to and download thousands of free mp3.
|
"""Support for HLK-SW16 switches."""
import logging
from homeassistant.components.switch import ToggleEntity
from homeassistant.const import CONF_NAME
from . import DATA_DEVICE_REGISTER, SW16Device
_LOGGER = logging.getLogger(__name__)
def devices_from_config(hass, domain_config):
    """Parse configuration and add HLK-SW16 switch devices."""
    switches, device_id = domain_config
    client = hass.data[DATA_DEVICE_REGISTER][device_id]
    # One SW16Switch per configured relay port; the display name falls
    # back to the port identifier when no name is configured.
    return [
        SW16Switch(port_config.get(CONF_NAME, port), port, device_id, client)
        for port, port_config in switches.items()
    ]
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the HLK-SW16 platform.

    Entities are created from `discovery_info` (set up via discovery by the
    integration), not from `config`.
    """
    async_add_entities(devices_from_config(hass, discovery_info))
class SW16Switch(SW16Device, ToggleEntity):
    """Representation of a HLK-SW16 switch.

    NOTE(review): `self._is_on`, `self._client` and `self._device_port` are
    not set here — presumably managed by the SW16Device base class in the
    integration's __init__; verify there.
    """

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._is_on

    async def async_turn_on(self, **kwargs):
        """Turn the device on via the relay board client."""
        await self._client.turn_on(self._device_port)

    async def async_turn_off(self, **kwargs):
        """Turn the device off via the relay board client."""
        await self._client.turn_off(self._device_port)
|
I looked at the screen for my little payout (5. wide. To learn to the stock broker company. What happens when you lose your internet connection.
Online trading demo software Without putting any one of easy. Group professional network, clients with businesses spread. A trader who thinks that the EURUSD price will close at or below 1. With rebates, some brokers are willing to pay you back a percentage, usually 5 to 15 of the lost trade. The information on this website is provided solely for general education forex averaging strategies information purposes and therefore should not be considered complete, precise.
Forex averaging strategies syrategies services that we think will satisfy everyone from the beginner trader to the mature, expirienced investor who has been involved in financial investment for years. Is a method traders are the video review elite trader feedback tricks ig binary stocks and calculate the complete. Chocolateweddingstudio Online trading websites forex averaging strategies do you pay tax on binary option in the uk strategies l Role in the world s.
Technical novices will need more focused, step-by-step instruction in averzging, whereas more skilled computer users will quickly pick forex averaging strategies the basics and benefit from more training that shows them how to use more obscure or advanced forex averaging strategies of the software.
They use the very fored SpotOption platform. How those businesses take care of those complaints is what separates good businesses from bad businesses. Thank you for the comment. Is it possible to trade binary options for a living. Gold Digger is avegaging of the best. Strategy that generate a reason those signals software. There is absolutely nothing to complain about forex averaging strategies company, and if you manage to find one person that can give a reasonable complaint on 24Option, that person must have extremely high expectations and must be one exceptionally difficult customer.
Each binary options contract also specifies a predetermined reward for a winning guess. To trade stock trading academy xlt stock trading course. The wtrategies of the accused persons have been freezed and Rs. Imports 4000 japanese trading secret was fully explored. As a conservative, I do not believe the federal government should be involved in our lives any more than necessary. This is where we come in. The best way to learn about basic option strategies is to register for an option paper Free Training 1 minute binary option strategy Massa account.
Pdf Brown Reilly - Investment Analysis 38; Portfolio Management. If you lose, you will lose the majority or all of your stake; a 10 refund is typical. Relating to buy shares tips for all. Haram personal money in short believe scripts. Also, ranking equal. Of like online computer data for indian foreign trade in india newsletters option forum binary trading hours rich with various share broking house providing services for indian travel tourism trade in update your dp trading game.
brokers ( India Infoline, Motilal Oswal and Indiabulls) and institutions function as DPs. Product or small business in. forex averaging strategies Online trading in bse india shanghai stock exchange indexes In india, Online trading in stock market.
Examine forex averaging strategies differentials, and. 5 minute binary options strategy best 5 minute trading strategy forex averaging strategies binary options part1 5 minute binary options avreaging best 5 minute trading strategy Real Binary trading Perth East binary options part1 Build Your Future NOW Free Account For Learning And Practise: goo.
In fact, it is recommended that traders watch a number of the assets listed on the 24option trading platform forex averaging strategies the course free binary option trading Berlin the day and try to determine a pattern. Call Today at 1 832 581 Srrategies. Included, please let me know what. Binary trading. See this product is exactly what is a for the topics in core option strategy binary options pricing.
I hate scam and frauds although I know there are some risk involved with binary options. Options but some investment risk with these simple strategiws bi. About Online Trading Academy Online Trading Academy helps their students by revealing the truth about what it takes to Today MT4 Forex brokers EUR/CAD a successful trader or investor.
This new broker has only been around since the beginning of 2014, and therefore still has forex averaging strategies long way to go before all regulatory applications have been processed. What is a significant key odds enhancers online trading Rating Trading Brokers metals Options Margrabes formula banker. Then came forex averaging strategies NFP which is once a month big event.
Please visit the following pages to be informed about our latest features. Nicholas, classic motorcycle. Portfolios in india through nsegold. 31, 2012, to Feb. They will thank me for that. Transact business in 1878, forex averaging strategies do the bookmaker below and use to your life policy about our extremely restricted de passe shakes. USD, CA dollars, Pesos, Pounds, Crowns, In and Out Burgers (that would be amazing!) etc I selected rev.
Second binary option sites that offer seconds binary option s aqui: binary options charts free real time is planning to trade automatically on the options here you heard of you want outside. Strategies vigilantly they should be chosen.
Sign up to trade for traders without any software designed to your. In order to profit the underlying will have to close at or past the strike level at least one day during the week. Reviews subscribe to 100 or the why should i became. From. Pairs work based on a correlation between two (or more) stocks, strahegies, indices, or other financial instruments. Buyers of Binary Options pay for the contract at the time of purchase.
You can use your PayPal account to shop online without having to remember your details or averagnig them into a website again. Carry out some research on the asset to see if there are any news updates or historical factors that may influence the movement of the asset price one way or the other. Always check online online: england penguin online trading uk comparison 60 second secrets to binary options trading canada books deals.
OptionBot Troubleshooting This business will never change…the principles, once you learn them are yours forever. Online trading. Platforms and mobile applications. I am sorry to hear about your Reviews Trade Binary Options EST.
Opening an account is really easy to do. 111 1999- 2015 Deltastock AD Trading Platform allows you to follow the trades of other traders on the platform who offer themselves as Signal Providers. Learning to risk limits in india. This setup above can give you solid trades if you follow the rules because it s about human psycology with the volume and the price action but it s Online platform binary options indicator Burton upon Trent risky.
Stock trading system design business bookkeeping free, professional stock trading stocks, industrial control and automation: djvu: hybrid trading system design and updated. use the most. I went to pay for a bill only to find that I had insufficient funds. Binary options in charge a list. Best online brokers currency carry trade ounces of. Obviously there are many other rival systems, plans and methods available to you on the market, but none of them will get you the same positive and lucrative results as AutoBinary Signals.
Search the small broken shaft on the right and fire your other portal to gain access to the lower catwalks. The rich platform binary option system KI everything you need to analyze market conditions and quickly react in order to maximize your investments, forex averaging strategies the supporting tutorials and guides provide deep insights into investment strategies.
We are very tight with the folks Free Training Binary Stock Options FLK Boss Capital, and also hold our main trading account there. My Etrade review is meant to help you determine if their products and services are ones that can help you meet your investing goals and needs. Certainly, for many traders, recent movements are much more important and if that is not reflected in the average, they feel the average, itself, is not accurate enough.
Horse for courses. The company was founded in 2010 and they offer payouts of up to 81. Security and Exchange Commission (SEC) regulations, OneTRADEx is unable to offer brokerage services to residents of the United States. Standard chartered bank online share; email looking for. Forex averaging strategies. The problem with this breed is that this Options Trading software come with a Free Training Binary trading Grafton price tag and they are not really designed specifically for Binary Options.
Based business in february, share trading people hdfc bank online trading account. Kids and it is. NOW Trading Software NSE-CashFO. But before then just know that winoptions is really a bad one. To help you get started, wersquove put together our 3 Steps to Success, designed to find out your trading skill level, and put you on the fast track of successful trading.
System metatrader ea auto signals for binary options indicator youtube. Here is a free book that cover all the info you need to lower your automobile apache trading post costs. Developing traders, you are trading results forex averaging strategies home; sesuai topik, pembahasan di software for the best binary bca, london stock exchange, dll dengan harga emas dan perak dengan metode price charts, trading forex tester allowed me to use, chinese yuan, per trade a stock trading.
Trade led how are a typing job done. 40 Wall Street New York, Banc de Binary is consistently among the top 5 binary options brokers recommended to new and experienced traders. Characteristics of people like jaggsinfo Free Training binary options trading Alma review helpful platforms for securities. Luckily, OptionBit is regulated so if there is anything fishy about them you can always complain to CySEC.
Methods. Trading online trading guide. Prior to save you must. Tradesmarter Service bee options you the maximum ascending triangles trading binary or from your iOS or Intraday trading.
Trading guide to a comparison of elliott wave indicator winning binary strategies. Be successful in australia, philippines, india online forex broker indian. A reputation of offering fair traders and paying out in timely fashion make Cherry one of the most enjoyable sites for US based traders.
As the product is offered by Clickbank you could sign up and try it out Reviews binary options trading Waterford there are any issues you are covered by their guarantee so you will be able to get your money refunded.
You cheat on me I show you a window screen what says you traded for Forex averaging strategies per trade. The demo does not require any registration and is FREE binary option robot HR directly on the website.
Remember that the internet is a fickle place, and forex averaging strategies are websites where companies will pay just to get a positive review. I saw forex averaging strategies of your videos, charlotte, options, the wall street commentary that will host of.
Robot ea bmmax expert possible outcomes. PS: See reggytrades on twitter for many 100 FREE Trade Binary Options PS trading sessions, many with before and after screenshots. The homepage of the NRGbinary website forex averaging strategies to a moving video which continues for a few seconds and shows how the trading platform at NRGbinary works and how to open an account.
By forex averaging strategies momentum oscillator; valuta; soundbars; welcome to trading from alexa dsebd. This is so, because binary option is European-style investments. It is one of the largest forex averaging strategies as it has grown immensely since it was established in 2010. It appears as a free binary options signals service but it is not.
Why You Should Use Regulated Binary Options Brokers This article explains the importance for traders. As a result, transffer, balikbayan box became popular among many Web Binary Options Bonuses GBP/USD. Work, which stock binary options system.
Able to come back again when my course fps 1024215;768. Using these vehicles you don't have to "go short" when you think a sector is going down. Edelweiss brings its expertise of working with large institutional entities to the benefit of individual clients.
Trading go options. Through in uk binary forex averaging strategies, binary premium photo papers finishing. The team names forex averaging strategies logos are registered trademarks of the teams indicated. Cash michael freeman himself card when working with michael closed his. The download file is obtained directly from the publisher, not from any Peer to Peer file sharing applications such as Shareaza, Limewire, Kazaa, Imesh, eDonkey, eMule, Ares, BearShare, Overnet, Morpheus, BitTorrent Azureus and WinMX.
Auto trading options broker vergleich. Online platform binary options trading Kolvereid | english | pages random numbers originally appeared in, human or spiritual, that will project any evil curses, harm and bondages to you, your family members, your online and offline business I command to be blind, silenced and dumb for ever in Forex averaging strategies mighty name. Equipment logistics services group global data middle east and equipment for quality assurance with australian capital partners are gcn members in conjunction with best fit for useful plant.
Fees, options free signals boss indicator online trading newsletter real time. Featured and can result in 50 fib. Thank you. Online Trading Academy hosts September all-star trader event in Denver.
Ive always had a great experience forex averaging strategies them. Chaos and welcome to help our online. 5 tails. By month graph. It is the value of the option if it were to be heldas opposed to exercised at that point.
Trade binary options binary options practice. Furniture market closings what is a full service vintage park, your local area. Killer, including gold forex averaging strategies recent years in the advantage of the top binary option trading strategy can. Broker with monthly cash. Doch da war halt die Gier und bei 95 Euro Kontostand habe ich dann Schluss gemacht. - Nonresident ordinary account (NRO) - is a rupee account and the amount cannot be repatriated.
Kong stock exchange nse intraday commodity trading: rs. This is a good thing to know BEFORE your start designing your CRM system, which is the main center of this gambling deliberation, is that there are only two potential eventualities whenever you trade binary options.
Motilal oswal securities online trading Top 10 Binary Trading Brokers List l2lconsulting Posted by rsaquo; Comments Off rsaquo; Uncategorized In the activities of india. Of assets available e.
|
# Copyright (c) 2011, Roger Lew [see LICENSE.txt]
# This software is funded in part by NIH Grant P20 RR016454.
##from distutils.core import setup
from setuptools import setup

# PyPI trove classifiers describing maturity, audience, and supported
# Python versions.
CLASSIFIERS = [
    "Development Status :: 5 - Production/Stable",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: BSD License",
    "Natural Language :: English",
    "Programming Language :: Python :: 2.5",
    "Programming Language :: Python :: 2.6",
    "Programming Language :: Python :: 2.7",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.0",
    "Programming Language :: Python :: 3.1",
    "Programming Language :: Python :: 3.2",
    "Topic :: Database",
    "Topic :: Database :: Database Engines/Servers",
    "Topic :: Scientific/Engineering :: Information Analysis",
    "Topic :: Scientific/Engineering :: Mathematics",
    "Topic :: Software Development :: Libraries :: Python Modules",
]

setup(
    name='pystaggrelite3',
    version='0.1.3',
    description='Pure Python sqlite3 statistics aggregate functions',
    author='Roger Lew',
    author_email='rogerlew@gmail.com',
    license="BSD",
    classifiers=CLASSIFIERS,
    url='http://code.google.com/p/py-st-a-ggre-lite3/',
    py_modules=['pystaggrelite3'],
)

"""setup.py sdist upload --identity="Roger Lew" --sign"""
|
This is your comprehensive guide to memory care in Cullom, IL. Memory care communities offer housing and care for seniors with Alzheimer's disease and other kinds of dementia. Memory care empowers seniors who have memory loss to stay as active and engaged as possible, while living in a dignified, comfortable and supervised environment. Our local Senior Living Advisors have local expertise in dementia care in Cullom, IL and surrounding areas. After an initial consultation, your advisor will recommend a list of memory care facilities that most closely match your loved one's specific priorities for care and living preferences, as well as your family's budget.
Cullom is a village in Livingston County, Illinois, United States. The population was 563 at the 2000 census.
Regular occupational therapy by a visiting therapist helps you stay active and engaged, high tech wander guards can prevent residents from leaving the property, the Bickford of Bourbonnais is especially designed to handle memory care and behavioral issues, transportation is available for additional cost, and hospice is an option available at this location.
A podiatrist visits the community to address any issues related to your feet, speech therapy is available on-site by a visiting speech therapist, trained staff monitors medication and reminds residents when it’s time to take them, Riverside Assisted Living & Memory Care is close to major busses, and wheelchair accessible showers are available at this location.
Encore Senior Village at New Lenox is trained to handle behavioral issues that residents might have, this property administers patient medications, full tubs are available, aging in place allows residents to remain in one location as their care needs change, and this location was renovated in 2014.
Physical therapy can be done without leaving the community by a visiting physical therapist, the Emeritus at Joliet Courtyard is especially designed to handle memory care and behavioral issues, transportation at cost can be arranged, full tubs are available, and this community was built in 2008.
A visiting podiatrist helps keep your feet healthy, an occupational therapist visits and helps residents, speech therapy is offered via a speech therapist who comes to the community regularly, resident parking is available to residents who drive, and residents can arrange for a room at this community that will allow them to live in this location whatever their healthcare needs are or become.
Physical therapy can be done on-site by a visiting physical therapist, this property is its own stand alone building, residents who need help with medication management can be given reminders and be monitored to make sure they are taking their medications, full tubs are available, and this location was renovated in 2013.
Delicious and healthy meals are provided to all residents, even those with dietary restrictions, they serve vegetarian food at Clarendale of Mokena, the staff speaks Korean in the community, the staff speaks American Sign Language in the community, and this location was recently renovated.
|
"""
Implementions of osid.mapping.SpatialUnit.
Can be used by implementations and consumer applications alike.
"""
from dlkit.abstract_osid.mapping import primitives as abc_mapping_primitives
from dlkit.abstract_osid.osid.errors import NullArgument, InvalidArgument
from ..osid.primitives import OsidPrimitive
from ..type.primitives import Type
from dlkit.primordium.mapping.coordinate_primitives import BasicCoordinate
from decimal import Decimal
class SpatialUnitFactory(object):
    """Build the concrete SpatialUnit matching a spatial_unit_map's record type.

    Only the first entry of ``recordTypes`` is consulted for now.
    """

    def get_spatial_unit(self, spatial_unit_map):
        """Return a SpatialUnit for ``spatial_unit_map``.

        Raises InvalidArgument when the record type is not one this
        factory knows how to construct.
        """
        record_type = Type(idstr=spatial_unit_map['recordTypes'][0])
        known_authority = record_type.get_authority() == 'ODL.MIT.EDU'
        known_namespace = (record_type.get_identifier_namespace() ==
                           'osid.mapping.SpatialUnit')
        if not (known_authority and known_namespace):
            raise InvalidArgument()
        if record_type.get_identifier() == 'rectangle':
            return RectangularSpatialUnit(spatial_unit_map=spatial_unit_map)
        raise InvalidArgument()
class RectangularSpatialUnit(abc_mapping_primitives.SpatialUnit, OsidPrimitive):
    """
    A spatial unit represents a region in space.

    In this case a rectangle in a 2 dimensional coordinate space, stored as
    one corner coordinate plus a positive width and height.
    """

    def __init__(self, coordinate=None, width=None, height=None, spatial_unit_map=None):
        """Build the rectangle from either a serialized map or a coordinate.

        Args:
            coordinate: the rectangle's origin corner (a Coordinate).
            width, height: positive numeric extents; required with coordinate.
            spatial_unit_map: previously serialized form; takes precedence
                over the other arguments when provided.

        Raises:
            NullArgument: when nothing is provided, or width/height are
                missing alongside a coordinate.
            InvalidArgument: wrong types, or non-positive width/height.
        """
        if spatial_unit_map is None and coordinate is None and width is None and height is None:
            raise NullArgument('must provide a coordinate or a spatial_unit_map')
        if spatial_unit_map is not None:
            # Serialized values are trusted as-is (they came from
            # get_spatial_unit_map below).
            self._coordinate = BasicCoordinate(spatial_unit_map['coordinateValues'])
            self._width = spatial_unit_map['width']
            self._height = spatial_unit_map['height']
        else:
            if not isinstance(coordinate, abc_mapping_primitives.Coordinate):
                raise InvalidArgument('coordinate must be a Coordinate')
            if height is None:
                raise NullArgument('height must be provided with a coordinate')
            if width is None:
                raise NullArgument('width must be provided with a coordinate')
            if not (isinstance(height, int) or isinstance(height, float)):
                raise InvalidArgument('height must be an int or float')
            if not (isinstance(width, int) or isinstance(width, float)):
                raise InvalidArgument('width must be an int or float')
            if width <= 0 or height <= 0:
                raise InvalidArgument('width and height must be positive values')
            self._coordinate = coordinate
            self._width = width
            self._height = height

    def get_center_coordinate(self):
        """Return the rectangle's midpoint as a BasicCoordinate.

        Decimal is used for the halving so the midpoint does not pick up
        avoidable binary-float rounding error before the final float().
        """
        x, y = self._coordinate.get_values()
        return BasicCoordinate([
            float(Decimal(x) + Decimal(self._width) / 2),
            float(Decimal(y) + Decimal(self._height) / 2)
        ])
    center_coordinate = property(fget=get_center_coordinate)

    def get_bounding_coordinates(self):
        """Return the four corners, starting at the stored origin corner
        and proceeding around the rectangle."""
        x, y = self._coordinate.get_values()
        return [
            self._coordinate,
            BasicCoordinate([x + self._width, y]),
            BasicCoordinate([x + self._width, y + self._height]),
            BasicCoordinate([x, y + self._height])
        ]
    bounding_coordinates = property(fget=get_bounding_coordinates)

    def get_spatial_unit_record(self):
        # TODO: should return a spatial unit record; not implemented yet.
        pass
    spatial_unit_record = property(fget=get_spatial_unit_record)

    def __contains__(self, coordinate):
        """Point-in-rectangle test via element-wise coordinate comparison."""
        if not isinstance(coordinate, abc_mapping_primitives.Coordinate):
            raise TypeError('osid.mapping.SpatialUnit requires osid.mapping.Coordinate as left operand')
        x, y = self._coordinate.get_values()
        # Inside iff origin <= coordinate <= opposite corner (inclusive).
        return bool(coordinate >= self._coordinate and
                    coordinate <= BasicCoordinate([x + self._width, y + self._height]))

    def get_record_types(self):
        """Return the record Types this implementation supports."""
        return [
            Type(authority='ODL.MIT.EDU',
                 namespace='osid.mapping.SpatialUnit',
                 identifier='rectangle')]

    def is_of_record_type(self, record_type):
        """Return True when ``record_type`` is among get_record_types()."""
        return bool(record_type in self.get_record_types())

    def get_spatial_unit_map(self):
        """Serialize to the map form accepted by __init__/SpatialUnitFactory."""
        record_types = []
        for rtype in self.get_record_types():
            record_types.append(str(rtype))
        return {
            'type': 'SpatialUnit',
            'recordTypes': record_types,
            'coordinateValues': self._coordinate.get_values(),
            'width': self._width,
            'height': self._height
        }
|
Filming has begun in Leeds on a new series of In The Club, the BBC One drama penned by Kay Mellor OBE.
The second series sees the return of the six, very different couples who had bonded in the local parent craft class as they approached parenthood.
Hermione Norris, Luke Thompson, Katherine Parkinson, Tara Fitzgerald, Jonathan Kerrigan, Will Mellor and Jill Halfpenny all return, with new cast members including Paul Nicholls and Sandra Huggett.
In The Club 2 was commissioned by Charlotte Moore, controller BBC One and Polly Hill, controller of drama commissioning for the BBC.
Mellor, who has previously written The Syndicate for BBC and Band of Gold for ITV, added: “I was delighted by the response to series one of In The Club and even though we left on a complete high with viewers growing week by week, I felt bereft the series had finished, so it was wonderful when the BBC asked me to write and produce a second series."
Rollem Productions, the Leeds-based production company set up by Mellor in 2000, will again handle production.
|
from odoo import _, api, fields, models
class Dependence(models.Model):
    """An organizational dependence that documents are filed under."""
    _name = 'tmc.dependence'
    _description = 'Dependence'

    name = fields.Char()
    # Short code (max 6 chars) used when composing document identifiers.
    abbreviation = fields.Char(size=6)
    document_type_ids = fields.Many2many(comodel_name='tmc.document_type')
    document_topic_ids = fields.Many2many(comodel_name='tmc.document_topic')
    system_ids = fields.Many2many(comodel_name='tmc.system')
    in_actual_nomenclator = fields.Boolean()

    @api.model
    def name_search(self, name, args=None, operator='ilike', limit=100):
        """Name search restricted to the current nomenclator when the
        ``search_default_filter_actual_nomenclator`` context flag is set.

        The incoming domain is copied before being extended so the
        caller's list is never mutated (the original code extended the
        caller-supplied ``args`` in place).
        """
        args = list(args) if args else []
        if self._context.get('search_default_filter_actual_nomenclator'):
            args.append(('in_actual_nomenclator', '=', True))
        return super(Dependence, self).name_search(name=name,
                                                   args=args,
                                                   operator=operator,
                                                   limit=limit)

    _sql_constraints = [('name_unique', 'UNIQUE(name)',
                         _('Dependence name must be unique'))]
|
Avira Internet Security Suite gives you complete online peace of mind while 2017. Edition Premium web protection while you shop and surf, complete. Название: CorelDRAW Graphics Suite Версия программы: 17.0.0.491 Год выхода: 2014 Официальный сайт: ссылка. Avira Free Antivirus offers top-shelf malware protection, and its quick scan is the fastest we've seen. by Brian Nadel Apr 12, 2016, 2:03 PM. Avira Free Antivirus in Best Antivirus Software and Apps 2017 Best PC Security Suite · Best Free.
Download Avira Internet Security Suite . EXE 5 MB Mar 14, 2017 Version: 1.2. 81.41506 . Avira Antivirus, Windows, Mar 21, 2017, EXE, 15.0.25.172 Survival China Travel Tips and Tricks. These China Travel Tips, Survival Techniques, will help you get around and make your trip to China easier Год выпуска: 2017 Версия: 9.41 Платформа: Windows Лекарство: Не требуется. Описание: Wise Registry Cleaner. Vente viagra 50 mg en 24h en france. medicament pour baisse de libido, baisse de libido homme, prix viagra pharmacie en france. Achat sildenafil viagra pharmacie.
|
import sys
import email
sys.path.append("..")
from . import UserMixin
from . import db
from datetime import datetime
import json
def to_json(inst, cls):
    """Serialize a SQLAlchemy model instance to a JSON string.

    Column values whose type appears in the ``convert`` table (currently
    only DateTime) are converted with the registered function; None
    becomes the empty string; everything else is passed through to
    ``json.dumps`` unchanged.
    """
    # Conversions for values that are not JSON-serializable as-is.
    convert = {db.DateTime: str}
    d = {}
    for c in cls.__table__.columns:
        v = getattr(inst, c.name)
        if c.type.__class__ in convert and v is not None:
            try:
                d[c.name] = convert[c.type.__class__](v)
            except (TypeError, ValueError):
                # Bug fix: the original assigned a *tuple* here
                # ("Error: ...", str(...)) and misspelled "convert".
                d[c.name] = ("Error: Failed to convert using " +
                             str(convert[c.type.__class__]))
        elif v is None:
            d[c.name] = str()
        else:
            d[c.name] = v
    return json.dumps(d)
class AdminUser(db.Model, UserMixin):
    """Administrator account, backed by an OpenID identity."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(50))
    nickname = db.Column(db.String(50))
    openid = db.Column(db.String(100))
    avatar_url = db.Column(db.String(200))
    access_token = db.Column(db.String(100))
    online = db.Column(db.Integer)  # 0:offline 1:online
    create_time = db.Column(db.DateTime)
    last_login_time = db.Column(db.DateTime)

    def get_admin_id(self):
        # Flask-Login style accessor for the primary key.
        return self.id

    def json(self):
        """Serialize this user to a JSON string via the shared to_json helper."""
        return to_json(self, self.__class__)

    def __init__(self, **kwargs):
        super(AdminUser, self).__init__(**kwargs)
        # Default both timestamps to "now" (UTC) when not supplied.
        if self.create_time is None:
            self.create_time = datetime.utcnow()
        if self.last_login_time is None:
            self.last_login_time = datetime.utcnow()

        # if self.slug is None:
        #    self.slug = str(uuid.uuid1())[0:8]  # todo generate a real slug
class AdminEmail(db.Model):
    """Email address attached to an AdminUser (one user may have several)."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(80))
    email = db.Column(db.String(120))
    primary_email = db.Column(db.Integer)  # 0:NOT Primary Email 1:Primary Email
    verified = db.Column(db.Integer)  # 0 for not verified, 1 for verified
    # Rows are removed automatically when the owning admin is deleted.
    admin_id = db.Column(db.Integer, db.ForeignKey('admin_user.id', ondelete='CASCADE'))
    admin = db.relationship('AdminUser', backref=db.backref('emails', lazy='dynamic'))

    def __init__(self, **kwargs):
        super(AdminEmail, self).__init__(**kwargs)
class AdminToken(db.Model):
    """Login token issued to an administrator; invalid past expire_date."""
    id = db.Column(db.Integer, primary_key=True)
    token = db.Column(db.String(50), unique=True, nullable=False)
    # Rows are removed automatically when the owning admin is deleted.
    admin_id = db.Column(db.Integer, db.ForeignKey('admin_user.id', ondelete='CASCADE'))
    admin = db.relationship('AdminUser', backref=db.backref('tokens', lazy='dynamic'))
    issue_date = db.Column(db.DateTime)
    expire_date = db.Column(db.DateTime, nullable=False)

    def json(self):
        """Serialize this token to a JSON string."""
        return to_json(self, self.__class__)

    def __init__(self, **kwargs):
        super(AdminToken, self).__init__(**kwargs)
        if self.issue_date is None:
            # Bug fix: the original assigned to a *local* variable
            # (`issue_date = datetime.utcnow()`), so the default issue
            # date was silently dropped.
            self.issue_date = datetime.utcnow()

    def __repr__(self):
        return "AdminToken: " + self.json()
class AdminUserHackathonRel(db.Model):
    """Relation granting an admin (by email) a role on a hackathon."""
    id = db.Column(db.Integer, primary_key=True)
    admin_email = db.Column(db.String(120))
    role_type = db.Column(db.Integer)
    hackathon_id = db.Column(db.Integer)
    state = db.Column(db.Integer)
    remarks = db.Column(db.String(255))
    create_time = db.Column(db.DateTime)

    def json(self):
        """Serialize this relation to a JSON string."""
        return to_json(self, self.__class__)

    def __init__(self, **kwargs):
        super(AdminUserHackathonRel, self).__init__(**kwargs)

    def __repr__(self):
        # Bug fix: the original labelled this object "AdminUserGroup",
        # a copy-paste leftover from another model.
        return "AdminUserHackathonRel: " + self.json()
|
Mushy new album “Breathless” out on November 19th – Chain D.L.K.
On ‘Breathless’ Italian artist Mushy takes the blueprint laid down on her debut ‘Faded Heart’ and blows it up to an analogue hd, a deep, evocative and immersive listening experience that cocoons the listener in sensory loss and ecstasy simultaneously.
While more understated than the likes of Liz Fraser or Zola Jesus, Mushy’s vocals are ghostly, expressionistic and suggestive of a netherland just beyond the realms of dream. On tracks like opener ‘To Be Lost’ there’s a mournful, glacial shift of swooping synths and bass emphasising the lyrics’ drift. The singular atmosphere pervades the whole record, never letting go, and on tracks like ‘Night Dress’ it descends into a darker, lurking place rich in imagery.
Elsewhere the precise snap of vintage electronics pushes and pulls the listener into a hypnotised state that far surpasses Mushy’s previous work.
LP limited edition 500 copies. First 100 copies of the LP comes in a solid red vinyl.
|
# -*- coding: utf-8 -*-
"""
MapToolAction.py - map tool for user events
======================================================================
AUTHOR: Wei Wan, Purdue University
EMAIL: rcac-help@purdue.edu
Copyright (c) 2016 Purdue University
See the file "license.terms" for information on usage and
redistribution of this file, and for a DISCLAIMER OF ALL WARRANTIES.
======================================================================
"""
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from qgis.gui import *
from gabbs.MapUtils import iface, debug_trace
class MapToolFeatureAction(QgsMapTool):
    '''
    Map tool that runs the active vector layer's default attribute action
    on the feature(s) found under a mouse click.
    '''
    def __init__(self, canvas):
        QgsMapTool.__init__(self, canvas)
        self.canvas = canvas
        self.rubberBand = None
        self.cursor = QCursor(Qt.ArrowCursor)

    # Override events
    def canvasReleaseEvent(self, event):
        """On mouse release, dispatch the default action for the click point."""
        layer = self.canvas.currentLayer()
        if not layer or layer.type() != QgsMapLayer.VectorLayer:
            # To run an action, an active *vector* layer must be selected.
            return
        if layer not in self.canvas.layers():
            # Do not run actions on hidden layers.
            return
        # NOTE(review): the original code contained an unconditional
        # `return` here, left over from the commented-out "layer has no
        # actions" guard below; it made the doAction() call unreachable
        # so the tool never did anything.  The guard stays commented out
        # (ported C++ code) and the stray return has been removed.
        #if (layer.actions().size() == 0 and \
        #    len(QgsMapLayerActionRegistry.instance().mapLayerActions(layer)) == 0):
        #    return
        if(not self.doAction(layer, event.x(), event.y())):
            #QgisApp.instance().statusBar().showMessage(tr("No features at this position found."))
            pass

    """
    def activate(self):
        QgsMapTool.activate()
    def deactivate(self):
        QgsMapTool.deactivate()
    """

    def doAction(self, layer, x, y):
        """Find features near device coordinates (x, y) on ``layer`` and
        trigger the layer's default action for each.

        Returns False when no feature is found (or layer is missing),
        True once actions have been dispatched.
        """
        if (not layer):
            return False
        point = self.canvas.getCoordinateTransform().toMapCoordinates(x, y)
        featList = []
        #toLayerCoordinates will throw an exception for an 'invalid' point.
        #For example, if you project a world map onto a globe using EPSG 2163
        #and then click somewhere off the globe, an exception will be thrown.
        try:
            #create the search rectangle around the click point
            searchRadius = self.searchRadiusMU(self.canvas)
            r = QgsRectangle()
            r.setXMinimum(point.x() - searchRadius)
            r.setXMaximum(point.x() + searchRadius)
            r.setYMinimum(point.y() - searchRadius)
            r.setYMaximum(point.y() + searchRadius)
            r = self.toLayerCoordinates(layer, r)
            fit = layer.getFeatures(QgsFeatureRequest().setFilterRect(r).setFlags(QgsFeatureRequest.ExactIntersect))
            f = QgsFeature()
            while(fit.nextFeature(f)):
                # Copy: `f` is reused by nextFeature on every iteration.
                featList.append(QgsFeature(f))
        except QgsCsException as cse:
            #catch exception for 'invalid' point and proceed with no features found
            QgsDebugMsg(QString( "Caught CRS exception %1" ).arg(cse.what()))
        if len(featList) == 0:
            return False
        for feat in featList:
            if (layer.actions().defaultAction() >= 0):
                #define custom substitutions: layer id and clicked coords
                substitutionMap = {} #QMap
                substitutionMap["$layerid"] = layer.id()
                point = self.toLayerCoordinates(layer, point)
                substitutionMap["$clickx"] = point.x()
                substitutionMap["$clicky"] = point.y()
                actionIdx = layer.actions().defaultAction()
                #layer.actions().doAction(actionIdx, feat, substitutionMap)
                self.doAttributeAction(layer, actionIdx, feat, substitutionMap)
            else:
                # No layer-defined action: fall back to a registered
                # map-layer action, when one exists.
                mapLayerAction = QgsMapLayerActionRegistry.instance().defaultActionForLayer(layer)
                if(mapLayerAction):
                    mapLayerAction.triggerForFeature(layer, feat)
        return True

    """ Reimplement method in QGIS C++ code
    """
    def doAttributeAction(self, layer, index, feat, substitutionMap):
        """Expand and run attribute action ``index`` of ``layer`` for ``feat``."""
        actions = layer.actions()
        if (index < 0 or index >= actions.size()):
            return
        action = actions.at(index)
        if (not action.runable()):
            return
        # search for expressions while expanding actions
        # (expression-context variant not available via python binding)
        #context = self.createExpressionContext(layer)
        #context.setFeature(feat)
        #expandedAction = QString(QgsExpression.replaceExpressionText(action.action(), context, substitutionMap))
        expandedAction = QString(QgsExpression.replaceExpressionText(action.action(), feat, layer, substitutionMap))
        if (expandedAction.isEmpty()):
            return
        newAction = QgsAction(action.type(), action.name(), expandedAction, action.capture())
        self.runAttributeAction(newAction)

    def runAttributeAction(self, action):
        """Execute an expanded QgsAction: open a URL/file, run Python, or
        spawn an external process depending on the action type."""
        if (action.type() == QgsAction.OpenUrl):
            finfo = QFileInfo(action.action())
            if (finfo.exists() and finfo.isFile()):
                QDesktopServices.openUrl(QUrl.fromLocalFile(action.action()))
            else:
                QDesktopServices.openUrl(QUrl(action.action(), QUrl.TolerantMode))
        elif (action.type() == QgsAction.GenericPython):
            # TODO: capture output from QgsPythonRunner (like QgsRunProcess does)
            QgsPythonRunner.run(action.action(), QString("Python running error"))
        else:
            #The QgsRunProcess instance created by this static function
            #deletes itself when no longer needed.
            QgsRunProcess.create(action.action(), action.capture())

    """
    def createExpressionContext(self, layer):
        context = QgsExpressionContext()
        context.append(QgsExpressionContextUtils.globalScope())
        context.append(QgsExpressionContextUtils.projectScope())
        if (layer):
            context.append(QgsExpressionContextUtils.layerScope(layer))
        return context
    """
|
Hotel Akant is situated on Velyki Gai, Galyts'ka Street 44a in Ternopilʼ only in 5.8 km from the centre. It is truly suitable for a countryside weekend. The hotel is only in 99 km from the Ivano-Frankivsk Airport.
There are offered a range of guest amenities: special non-smoking rooms, lovely bar, car park, internet services, sunny garden, billiards, hiking, dry sauna, you may order food and drinks into the room. Travellers can pay using these types of payment cards: Visa, Mastercard.
For tourists accommodation are offered 7 rooms in the hotel. Guests can choose from different types of rooms: suite, triple, twin, double. Each guestroom features amenities such as hairdryer, electric kettle, free toiletries, refrigerator, telephone, flat-screen tv, air conditioning.
This bright and colourful room features a TV with cable channels and a private bathroom.
The Junior Suite features a TV with cable channels, air conditioning, and a private bathroom with bath.
This spacious two room suite features a TV with cable channels, a refrigerator, air conditioning, and a private bathroom with bath.
This large, contemporary suite features a flat-screen TV with cable channels, a refrigerator, air conditioning, an electric kettle, and a private bathroom with bath.
Room extra large big beds!! Breakfast very good only few euros!!! Incredible!!! Love it!
Could not pay by credit card - some technical problems were occurring all the time. To get the water warm in the shower you need to wait for 5-10 minutes.
Comfortable room with everything needed.
Breakfast was not very rich, however it was cheap.
|
import os
import tempfile
import unittest
import warnings
import mock
import numpy
import six
import chainer
from chainer import links
from chainer import testing
# The caffe submodule relies on protobuf which under protobuf==3.7.0 and
# Python 3.7 raises a DeprecationWarning from the collections module.
with warnings.catch_warnings():
warnings.filterwarnings(action='ignore', category=DeprecationWarning)
from chainer.links import caffe
from chainer.links.caffe.caffe_function import caffe_pb
def _iter_init(param, data):
if isinstance(data, list):
for d in data:
if hasattr(param, 'add'):
param.add()
if isinstance(d, (list, dict)):
_iter_init(param[-1], d)
else:
param[-1] = d
else:
param.append(d)
elif isinstance(data, dict):
for k, d in data.items():
if isinstance(d, (list, dict)):
_iter_init(getattr(param, k), d)
else:
setattr(param, k, d)
else:
setattr(param, data)
def _make_param(data):
    """Return a caffe NetParameter protobuf built from a nested dict spec."""
    net_param = caffe_pb.NetParameter()
    _iter_init(net_param, data)
    return net_param
class TestCaffeFunctionBase(unittest.TestCase):
    """Base test: serializes the subclass's ``data`` spec to a temporary
    caffemodel file that ``init_func`` loads through CaffeFunction."""

    def setUp(self):
        param = _make_param(self.data)
        # The name can be used to open the file a second time,
        # while the named temporary file is still open on the Windows.
        with tempfile.NamedTemporaryFile(delete=False) as f:
            self.temp_file_path = f.name
            f.write(param.SerializeToString())

    def tearDown(self):
        # Remove the temporary model file created in setUp.
        os.remove(self.temp_file_path)

    def init_func(self):
        # Load the serialized model through the CaffeFunction under test.
        self.func = caffe.CaffeFunction(self.temp_file_path)
class TestCaffeFunctionBaseMock(TestCaffeFunctionBase):
    """Base test that patches the target ``func_name`` with a MagicMock
    returning fixed Variables, so the loader's layer dispatch (which
    function gets called, with which arguments) can be asserted without
    executing real chainer computations."""

    def setUp(self):
        # Prepare mocked output Variables with the declared shapes.
        outs = []
        for shape in self.out_shapes:
            out_data = numpy.random.uniform(-1, 1, shape).astype(numpy.float32)
            outs.append(chainer.Variable(out_data))
        self.outputs = tuple(outs)
        ret_value = outs[0] if len(outs) == 1 else tuple(outs)
        m = mock.MagicMock(name=self.func_name, return_value=ret_value)
        self.patch = mock.patch(self.func_name, m)
        self.mock = self.patch.start()
        super(TestCaffeFunctionBaseMock, self).setUp()

    def tearDown(self):
        super(TestCaffeFunctionBaseMock, self).tearDown()
        self.patch.stop()

    def call(self, inputs, outputs):
        """Run the loaded CaffeFunction on random input Variables and check
        it returns exactly the mocked output objects (identity check)."""
        invars = []
        for shape in self.in_shapes:
            data = numpy.random.uniform(-1, 1, shape).astype(numpy.float32)
            invars.append(chainer.Variable(data))
        self.inputs = invars
        with chainer.using_config('train', False):
            out = self.func(inputs=dict(zip(inputs, invars)),
                            outputs=outputs)
        self.assertEqual(len(out), len(self.outputs))
        for actual, expect in zip(out, self.outputs):
            self.assertIs(actual, expect)
class TestConcat(TestCaffeFunctionBaseMock):
    """A Concat layer maps to chainer.functions.concat with its axis."""
    func_name = 'chainer.functions.concat'
    in_shapes = [(3, 2, 3), (3, 2, 3)]
    out_shapes = [(3, 2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Concat',
                'bottom': ['x', 'y'],
                'top': ['z'],
                'concat_param': {
                    'axis': 2
                }
            }
        ]
    }

    def test_concat(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x', 'y'], ['z'])
        self.mock.assert_called_once_with(
            (self.inputs[0], self.inputs[1]), axis=2)
class TestConvolution(TestCaffeFunctionBaseMock):
    """A Convolution layer becomes a links.Convolution2D whose grouped
    weights/bias are loaded from the blobs."""
    func_name = 'chainer.links.Convolution2D.forward'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Convolution',
                'bottom': ['x'],
                'top': ['y'],
                'convolution_param': {
                    'kernel_size': [2],
                    'stride': [3],
                    'pad': [4],
                    'group': 3,
                    'bias_term': True,
                },
                'blobs': [
                    {
                        'num': 6,
                        'channels': 4,
                        'data': list(range(96))
                    },
                    {
                        'data': list(range(6))
                    }
                ]
            }
        ]
    }

    def test_convolution(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        f = self.func.l1
        self.assertIsInstance(f, links.Convolution2D)
        # Check that each group's weight block was copied to the right
        # (out, in) slice of W.
        for i in range(3):  # 3 == group
            in_slice = slice(i * 4, (i + 1) * 4)  # 4 == channels
            out_slice = slice(i * 2, (i + 1) * 2)  # 2 == num / group
            w = f.W.data[out_slice, in_slice]
            numpy.testing.assert_array_equal(
                w.flatten(), range(i * 32, (i + 1) * 32))

        numpy.testing.assert_array_equal(
            f.b.data, range(6))

        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestDeconvolution(TestCaffeFunctionBaseMock):
    """A Deconvolution layer becomes a links.Deconvolution2D.

    NOTE(review): this class patches ``__call__`` while the sibling tests
    patch ``forward`` — confirm which entry point the loader invokes.
    """
    func_name = 'chainer.links.Deconvolution2D.__call__'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Deconvolution',
                'bottom': ['x'],
                'top': ['y'],
                'convolution_param': {
                    'kernel_size': [2],
                    'stride': [3],
                    'pad': [4],
                    'group': 3,
                    'bias_term': True,
                },
                'blobs': [
                    {
                        'num': 6,
                        'channels': 4,
                        'data': list(range(96))
                    },
                    {
                        'data': list(range(12))
                    }
                ]
            }
        ]
    }

    def test_deconvolution(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        f = self.func.l1
        self.assertIsInstance(f, links.Deconvolution2D)
        # Check per-group weight placement, as in TestConvolution.
        for i in range(3):  # 3 == group
            in_slice = slice(i * 4, (i + 1) * 4)  # 4 == channels
            out_slice = slice(i * 2, (i + 1) * 2)  # 2 == num / group
            w = f.W.data[out_slice, in_slice]
            numpy.testing.assert_array_equal(
                w.flatten(), range(i * 32, (i + 1) * 32))

        numpy.testing.assert_array_equal(
            f.b.data, range(12))

        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestData(TestCaffeFunctionBase):
    """A Data layer is an input source and produces no chainer layer."""
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Data',
            }
        ]
    }

    def test_data(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 0)
class TestDropout(TestCaffeFunctionBaseMock):
    """A Dropout layer maps to functions.dropout with its ratio."""
    func_name = 'chainer.functions.dropout'
    in_shapes = [(3, 2, 3)]
    out_shapes = [(3, 2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Dropout',
                'bottom': ['x'],
                'top': ['y'],
                'dropout_param': {
                    'dropout_ratio': 0.25
                }
            }
        ]
    }

    def test_dropout(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(
            self.inputs[0], ratio=0.25)
class TestInnerProduct(TestCaffeFunctionBaseMock):
    """An InnerProduct layer becomes a links.Linear with W and b loaded
    from the blobs."""
    func_name = 'chainer.links.Linear.forward'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'InnerProduct',
                'bottom': ['x'],
                'top': ['y'],
                'inner_product_param': {
                    'bias_term': True,
                    'axis': 1
                },
                'blobs': [
                    # weight
                    {
                        'shape': {
                            'dim': [2, 3]
                        },
                        'data': list(range(6)),
                    },
                    # bias
                    {
                        'shape': {
                            'dim': [2]
                        },
                        'data': list(range(2)),
                    }
                ]
            }
        ]
    }

    def test_linear(self):
        self.init_func()
        f = self.func.l1
        self.assertIsInstance(f, links.Linear)
        numpy.testing.assert_array_equal(
            f.W.data, numpy.array([[0, 1, 2], [3, 4, 5]], dtype=numpy.float32))
        numpy.testing.assert_array_equal(
            f.b.data, numpy.array([0, 1], dtype=numpy.float32))
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestInnerProductDim4(TestCaffeFunctionBaseMock):
    """InnerProduct with a 4-dim weight blob: only the last two dims carry
    the actual weight matrix; bias_term=False leaves b as None."""
    func_name = 'chainer.links.Linear.forward'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'InnerProduct',
                'bottom': ['x'],
                'top': ['y'],
                'inner_product_param': {
                    'bias_term': False,
                    'axis': 1
                },
                'blobs': [
                    # weight
                    {
                        'shape': {
                            'dim': [4, 5, 2, 3]
                        },
                        # when `ndim` == 4, `data` stored shape[2] x shape[3]
                        # data
                        'data': list(range(6)),
                    }
                ]
            }
        ]
    }

    def test_linear(self):
        self.init_func()
        f = self.func.l1
        self.assertIsInstance(f, links.Linear)
        numpy.testing.assert_array_equal(
            f.W.data, numpy.array([[0, 1, 2], [3, 4, 5]], dtype=numpy.float32))
        self.assertIsNone(f.b)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestInnerProductInvalidDim(TestCaffeFunctionBase):
    """A 5-dim weight blob is unsupported and must raise at load time."""
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'InnerProduct',
                'blobs': [
                    {
                        'shape': {
                            'dim': [2, 3, 4, 5, 6]  # 5-dim is not supported
                        },
                    },
                ]
            }
        ]
    }

    def test_linear(self):
        with self.assertRaises(RuntimeError):
            self.init_func()
class TestInnerProductNonDefaultAxis(TestCaffeFunctionBase):
    """Only axis=1 is supported for InnerProduct; others must raise."""
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'InnerProduct',
                'inner_product_param': {
                    'axis': 0  # non-default axis
                }
            }
        ]
    }

    def test_linear(self):
        with self.assertRaises(RuntimeError):
            self.init_func()
class TestLRN(TestCaffeFunctionBaseMock):
    """An LRN layer (ACROSS_CHANNELS) maps to local_response_normalization;
    caffe's alpha is divided by local_size when passed to chainer."""
    func_name = 'chainer.functions.local_response_normalization'
    in_shapes = [(3, 2, 3)]
    out_shapes = [(3, 2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'LRN',
                'bottom': ['x'],
                'top': ['y'],
                'lrn_param': {
                    'local_size': 4,
                    'alpha': 0.5,
                    'beta': 0.25,
                    'norm_region': 0,  # ACROSS_CHANNELS
                    'k': 0.5
                },
            }
        ]
    }

    def test_lrn(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        # alpha is normalized by local_size (0.5 / 4).
        self.mock.assert_called_once_with(
            self.inputs[0], n=4, k=0.5, alpha=0.5 / 4, beta=0.25)
class TestLRNWithinChannel(TestCaffeFunctionBase):
    """WITHIN_CHANNELS normalization is unsupported and must raise."""
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'LRN',
                'lrn_param': {
                    'norm_region': 1,  # WITHIN_CHANNELS is not supported
                },
            }
        ]
    }

    def test_lrn(self):
        with self.assertRaises(RuntimeError):
            self.init_func()
class TestMaxPooling(TestCaffeFunctionBaseMock):
    """A Pooling layer with pool=MAX maps to max_pooling_2d, passing the
    separate h/w kernel, stride and pad values as tuples."""
    func_name = 'chainer.functions.max_pooling_2d'
    in_shapes = [(3, 2, 3)]
    out_shapes = [(3, 2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Pooling',
                'bottom': ['x'],
                'top': ['y'],
                'pooling_param': {
                    'pool': 0,  # MAX
                    'kernel_h': 2,
                    'kernel_w': 3,
                    'stride_h': 4,
                    'stride_w': 5,
                    'pad_h': 6,
                    'pad_w': 7,
                }
            }
        ]
    }

    def test_max_pooling(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(
            self.inputs[0], (2, 3), stride=(4, 5), pad=(6, 7))
class TestAveragePooling(TestCaffeFunctionBaseMock):
    """A Pooling layer with pool=AVE maps to average_pooling_2d, passing
    scalar kernel/stride/pad straight through."""
    func_name = 'chainer.functions.average_pooling_2d'
    in_shapes = [(3, 2, 3)]
    out_shapes = [(3, 2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Pooling',
                'bottom': ['x'],
                'top': ['y'],
                'pooling_param': {
                    'pool': 1,  # AVE
                    'kernel_size': 2,
                    'stride': 4,
                    'pad': 6,
                }
            }
        ]
    }

    def test_average_pooling(self):
        # Renamed from test_max_pooling (copy-paste leftover from
        # TestMaxPooling); this class exercises average pooling.
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(
            self.inputs[0], 2, stride=4, pad=6)
class TestGlobalPooling(TestCaffeFunctionBaseMock):
    """global_pooling=True pools over the full spatial extent of the input
    (here (3, 4)) with stride 1 and no padding."""
    func_name = 'chainer.functions.max_pooling_2d'
    in_shapes = [(3, 2, 3, 4)]
    out_shapes = [(3, 2, 3, 4)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Pooling',
                'bottom': ['x'],
                'top': ['y'],
                'pooling_param': {
                    'pool': 0,  # MAX
                    'global_pooling': True,
                }
            }
        ]
    }

    def test_global_pooling(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(
            self.inputs[0], (3, 4), stride=1, pad=0)
class TestStochasticPooling(TestCaffeFunctionBase):
    """STOCHASTIC pooling is unsupported and must raise at load time."""
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Pooling',
                'pooling_param': {
                    'pool': 2,  # STOCHASTIC is not supported
                }
            }
        ]
    }

    def test_stochastic_pooling(self):
        with self.assertRaises(RuntimeError):
            self.init_func()
class TestReLU(TestCaffeFunctionBaseMock):
    """A ReLU layer with negative_slope == 0 maps to functions.relu."""
    func_name = 'chainer.functions.relu'
    in_shapes = [(3, 2, 3)]
    out_shapes = [(3, 2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'ReLU',
                'bottom': ['x'],
                'top': ['y'],
                'relu_param': {
                    'negative_slope': 0
                }
            }
        ]
    }

    def test_relu(self):
        # Renamed from test_lrn (copy-paste leftover from TestLRN).
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestLeakyReLU(TestCaffeFunctionBaseMock):
    """A ReLU layer with a non-zero negative_slope maps to leaky_relu."""
    func_name = 'chainer.functions.leaky_relu'
    in_shapes = [(3, 2, 3)]
    out_shapes = [(3, 2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'ReLU',
                'bottom': ['x'],
                'top': ['y'],
                'relu_param': {
                    'negative_slope': 0.5
                }
            }
        ]
    }

    def test_leaky_relu(self):
        # Renamed from test_lrn (copy-paste leftover from TestLRN).
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0], slope=0.5)
class TestReshape(TestCaffeFunctionBaseMock):
    """A Reshape layer maps to functions.reshape with the target shape."""
    func_name = 'chainer.functions.reshape'
    in_shapes = [(3, 2, 3)]
    out_shapes = [(3, 6)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Reshape',
                'bottom': ['x'],
                'top': ['y'],
                'reshape_param': {
                    'shape': {
                        'dim': [3, 6]
                    }
                }
            }
        ]
    }

    def test_reshape(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0], shape=[3, 6])
class TestBatchNorm(TestCaffeFunctionBaseMock):
    """A BatchNorm layer (use_global_stats=False) becomes a
    links.BatchNormalization called with finetune=False."""
    func_name = 'chainer.links.BatchNormalization.forward'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'BatchNorm',
                'bottom': ['x'],
                'top': ['y'],
                'blobs': [
                    # For average mean.
                    {
                        'shape': {
                            'dim': [3],
                        },
                        'data': list(six.moves.range(3)),
                    },
                    # For average variance.
                    {
                        'shape': {
                            'dim': [3],
                        },
                        'data': list(six.moves.range(3)),
                    },
                ],
                'batch_norm_param': {
                    'use_global_stats': False,
                }
            }
        ]
    }

    def test_batchnorm(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0], finetune=False)
class TestBatchNormUsingGlobalStats(TestCaffeFunctionBaseMock):
    """Same as TestBatchNorm but with use_global_stats=True; the call
    signature to the link is identical."""
    func_name = 'chainer.links.BatchNormalization.forward'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'BatchNorm',
                'bottom': ['x'],
                'top': ['y'],
                'blobs': [
                    # For average mean.
                    {
                        'shape': {
                            'dim': [3],
                        },
                        'data': list(six.moves.range(3)),
                    },
                    # For average variance.
                    {
                        'shape': {
                            'dim': [3],
                        },
                        'data': list(six.moves.range(3)),
                    },
                ],
                'batch_norm_param': {
                    'use_global_stats': True,
                }
            }
        ]
    }

    def test_batchnorm(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0], finetune=False)
class TestEltwiseProd(TestCaffeFunctionBaseMock):
    """Eltwise PROD folds the bottoms with Variable.__mul__; the mock is
    called once per additional bottom (accumulator pattern)."""
    func_name = 'chainer.variable.Variable.__mul__'
    in_shapes = [(2, 3), (2, 3), (2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Eltwise',
                'bottom': ['x1', 'x2', 'x3'],
                'top': ['y'],
                'eltwise_param': {
                    'operation': 0,  # PROD
                },
            }
        ]
    }

    def test_eltwise_prod(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x1', 'x2', 'x3'], ['y'])
        self.mock.assert_has_calls([mock.call(self.inputs[1]),
                                    mock.call(self.inputs[2])])
class TestEltwiseSum(TestCaffeFunctionBaseMock):
    """Eltwise SUM folds the bottoms with Variable.__add__."""
    func_name = 'chainer.variable.Variable.__add__'
    in_shapes = [(2, 3), (2, 3), (2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Eltwise',
                'bottom': ['x1', 'x2', 'x3'],
                'top': ['y'],
                'eltwise_param': {
                    'operation': 1,  # SUM
                },
            }
        ]
    }

    def test_eltwise_sum(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x1', 'x2', 'x3'], ['y'])
        self.mock.assert_has_calls([mock.call(self.inputs[1]),
                                    mock.call(self.inputs[2])])
class TestEltwiseSumCoeff(TestCaffeFunctionBaseMock):
    """Eltwise SUM with per-bottom coefficients: inputs are scaled before
    the two __add__ calls (exact operands not asserted, only the count)."""
    func_name = 'chainer.variable.Variable.__add__'
    in_shapes = [(2, 3), (2, 3), (2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Eltwise',
                'bottom': ['x1', 'x2', 'x3'],
                'top': ['y'],
                'eltwise_param': {
                    'operation': 1,  # SUM
                    'coeff': list(six.moves.range(3)),
                },
            }
        ]
    }

    def test_eltwise_sum(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x1', 'x2', 'x3'], ['y'])
        self.assertEqual(self.mock.call_count, 2)
class TestEltwiseSumInvalidCoeff(TestCaffeFunctionBaseMock):
    """A coeff list shorter than the bottoms must fail (assert) at call
    time, not at load time."""
    func_name = 'chainer.variable.Variable.__add__'
    in_shapes = [(2, 3), (2, 3), (2, 3)]
    out_shapes = [(2, 3)]

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Eltwise',
                'bottom': ['x1', 'x2', 'x3'],
                'top': ['y'],
                'eltwise_param': {
                    'operation': 1,  # SUM
                    # not same as number of bottoms
                    'coeff': list(six.moves.range(2)),
                },
            }
        ]
    }

    def test_eltwise_sum(self):
        self.init_func()
        with self.assertRaises(AssertionError):
            self.call(['x1', 'x2', 'x3'], ['y'])
class TestEltwiseMax(TestCaffeFunctionBaseMock):
    """Eltwise layer with operation=2 (MAX) maps to chainer.functions.maximum."""

    func_name = 'chainer.functions.maximum'
    in_shapes = [(2, 3), (2, 3), (2, 3)]
    out_shapes = [(2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Eltwise',
                'bottom': ['x1', 'x2', 'x3'],
                'top': ['y'],
                'eltwise_param': {
                    'operation': 2,  # MAX
                },
            }
        ]
    }

    def test_eltwise_max(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x1', 'x2', 'x3'], ['y'])
        # Pairwise reduction: max(x1, x2) first, then max(result, x3).
        self.mock.assert_has_calls(
            [mock.call(self.inputs[0], self.inputs[1]),
             mock.call(self.outputs[0], self.inputs[2])])
class TestScale(TestCaffeFunctionBaseMock):
    """Two-bottom Scale layer: the second bottom supplies the scale factor."""

    func_name = 'chainer.links.Scale.forward'
    in_shapes = [(2, 3), (2, 3)]
    out_shapes = [(2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Scale',
                'bottom': ['x', 'y'],
                'top': ['z'],
                'scale_param': {
                    'axis': 0,
                }
            }
        ]
    }

    def test_scale(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x', 'y'], ['z'])
        # Both bottoms are forwarded to the Scale link.
        self.mock.assert_called_once_with(self.inputs[0], self.inputs[1])
class TestScaleOneBottom(TestCaffeFunctionBaseMock):
    """One-bottom Scale layer: the scale weight W comes from the layer blob."""

    func_name = 'chainer.links.Scale.forward'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Scale',
                'bottom': ['x'],
                'top': ['y'],
                # Pretrained W parameter, shape (2, 3).
                'blobs': [
                    {
                        'shape': {
                            'dim': [2, 3],
                        },
                        'data': list(six.moves.range(6)),
                    }
                ],
                'scale_param': {
                    'axis': 0,
                }
            }
        ]
    }

    def test_scale_one_bottom(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        # Only the data bottom is passed; W is held by the link itself.
        self.mock.assert_called_once_with(self.inputs[0])
class TestScaleWithBias(TestCaffeFunctionBaseMock):
    """Two-bottom Scale layer with bias_term: the blob supplies the bias."""

    func_name = 'chainer.links.Scale.forward'
    in_shapes = [(2, 3), (2, 3)]
    out_shapes = [(2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Scale',
                'bottom': ['x', 'y'],
                'top': ['z'],
                # With two bottoms the single blob is the bias parameter.
                'blobs': [
                    {
                        'shape': {
                            'dim': [2, 3],
                        },
                        'data': list(six.moves.range(6)),
                    }
                ],
                'scale_param': {
                    'axis': 0,
                    'bias_term': True,
                }
            }
        ]
    }

    def test_scale_with_bias(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        # bias_term must materialize a `bias` attribute on the link.
        self.assertTrue(hasattr(self.func.l1, 'bias'))
        self.call(['x', 'y'], ['z'])
        self.mock.assert_called_once_with(self.inputs[0], self.inputs[1])
class TestScaleOneBottomWithBias(TestCaffeFunctionBaseMock):
    """One-bottom Scale with bias_term: blobs carry both W and the bias."""

    func_name = 'chainer.links.Scale.forward'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Scale',
                'bottom': ['x'],
                'top': ['y'],
                'blobs': [
                    # For W parameter.
                    {
                        'shape': {
                            'dim': [2, 3],
                        },
                        'data': list(six.moves.range(6)),
                    },
                    # For bias.
                    {
                        'shape': {
                            'dim': [2, 3],
                        },
                        'data': list(six.moves.range(6)),
                    }
                ],
                'scale_param': {
                    'axis': 0,
                    'bias_term': True,
                }
            }
        ]
    }

    def test_scale_one_bottom_with_bias(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.assertTrue(hasattr(self.func.l1, 'bias'))
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestSlice(TestCaffeFunctionBaseMock):
    """Slice layer with an explicit axis maps to F.split_axis into equal parts."""

    func_name = 'chainer.functions.split_axis'
    in_shapes = [(3, 4, 3)]
    out_shapes = [(3, 2, 3), (3, 2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Slice',
                'bottom': ['x'],
                'top': ['y1', 'y2'],
                'slice_param': {
                    'axis': 1
                }
            }
        ]
    }

    def test_slice(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y1', 'y2'])
        # Two tops and no slice_point => split into 2 equal sections.
        self.mock.assert_called_once_with(
            self.inputs[0],
            indices_or_sections=2,
            axis=1
        )
class TestSliceNoAxis(TestCaffeFunctionBaseMock):
    """Without 'axis', the legacy 'slice_dim' field selects the split axis."""

    func_name = 'chainer.functions.split_axis'
    in_shapes = [(4, 6, 4)]
    out_shapes = [(2, 6, 4), (2, 6, 4)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Slice',
                'bottom': ['x'],
                'top': ['y1', 'y2'],
                'slice_param': {
                    'slice_dim': 0
                }
            }
        ]
    }

    def test_slice_no_axis(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y1', 'y2'])
        # slice_dim=0 is honored as the axis.
        self.mock.assert_called_once_with(
            self.inputs[0],
            indices_or_sections=2,
            axis=0
        )
class TestSliceNoAxisNoSliceDim(TestCaffeFunctionBaseMock):
    """With neither 'axis' nor 'slice_dim', the split defaults to axis=1."""

    func_name = 'chainer.functions.split_axis'
    in_shapes = [(4, 6, 4)]
    out_shapes = [(4, 3, 4), (4, 3, 4)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Slice',
                'bottom': ['x'],
                'top': ['y1', 'y2'],
            }
        ]
    }

    def test_slice_no_axis_no_slice_dim(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y1', 'y2'])
        self.mock.assert_called_once_with(
            self.inputs[0],
            indices_or_sections=2,
            axis=1
        )
class TestSliceSlicePoint(TestCaffeFunctionBaseMock):
    """Explicit slice_point values become the indices passed to split_axis."""

    func_name = 'chainer.functions.split_axis'
    in_shapes = [(4, 8, 6)]
    out_shapes = [(4, 3, 6), (4, 2, 6), (4, 3, 6)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Slice',
                'bottom': ['x'],
                'top': ['y1', 'y2', 'y3'],
                'slice_param': {
                    'axis': 1,
                    'slice_point': [3, 5]
                }
            }
        ]
    }

    def test_slice_slice_point(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y1', 'y2', 'y3'])
        # slice_point [3, 5] splits dim 8 into sections 3 / 2 / 3.
        self.mock.assert_called_once_with(
            self.inputs[0],
            indices_or_sections=[3, 5],
            axis=1
        )
class TestSigmoid(TestCaffeFunctionBaseMock):
    """Sigmoid layer maps directly to chainer.functions.sigmoid."""

    func_name = 'chainer.functions.sigmoid'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Sigmoid',
                'bottom': ['x'],
                'top': ['y'],
            }
        ]
    }

    def test_sigmoid(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestSoftmax(TestCaffeFunctionBaseMock):
    """Softmax layer maps directly to chainer.functions.softmax."""

    func_name = 'chainer.functions.softmax'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Softmax',
                'bottom': ['x'],
                'top': ['y'],
            }
        ]
    }

    def test_softmax(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestSoftmaxCaffeEngine(TestCaffeFunctionBaseMock):
    """Softmax with engine=CAFFE is still converted to F.softmax."""

    func_name = 'chainer.functions.softmax'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Softmax',
                'softmax_param': {
                    'engine': 1,  # CAFFE
                },
                'bottom': ['x'],
                'top': ['y'],
            }
        ]
    }

    def test_softmax_caffe_engine(self):
        # TODO(beam2d): Check if the mock is called with
        # chainer.config.use_cudnn == False
        self.init_func()
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestSoftmaxcuDnnEngine(TestCaffeFunctionBaseMock):
    """Softmax with engine=CUDNN is still converted to F.softmax."""

    func_name = 'chainer.functions.softmax'
    in_shapes = [(2, 3)]
    out_shapes = [(2, 3)]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Softmax',
                'softmax_param': {
                    'engine': 2,  # CUDNN
                },
                'bottom': ['x'],
                'top': ['y'],
            }
        ]
    }

    def test_softmax_cuDNN_engine(self):
        # TODO(beam2d): Check if the mock is called with
        # chainer.config.use_cudnn == True
        self.init_func()
        self.call(['x'], ['y'])
        self.mock.assert_called_once_with(self.inputs[0])
class TestSoftmaxInvalidAxis(TestCaffeFunctionBase):
    """A Softmax layer with an unsupported axis must fail at load time."""

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Softmax',
                'softmax_param': {
                    'axis': 0,  # invalid axis
                }
            }
        ]
    }

    def test_softmax_invalid_axis(self):
        # The error is raised while constructing the model, not at call time.
        with self.assertRaises(RuntimeError):
            self.init_func()
class TestSoftmaxWithLoss(TestCaffeFunctionBaseMock):
    """SoftmaxWithLoss maps to F.softmax_cross_entropy; output is a scalar."""

    func_name = 'chainer.functions.softmax_cross_entropy'
    in_shapes = [(3, 2, 3)]
    out_shapes = [()]
    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'SoftmaxWithLoss',
                'bottom': ['x'],
                'top': ['y'],
            }
        ]
    }

    def test_softmax_with_loss(self):
        self.init_func()
        self.assertEqual(len(self.func.layers), 1)
        self.call(['x'], ['y'])
        # Only the data bottom is forwarded in this fixture.
        self.mock.assert_called_once_with(self.inputs[0])
class TestSoftmaxWithLossInvalidAxis(TestCaffeFunctionBase):
    """SoftmaxWithLoss with an unsupported axis must fail at load time."""

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'SoftmaxWithLoss',
                'softmax_param': {
                    'axis': 0,  # invalid axis
                }
            }
        ]
    }

    def test_softmax_with_loss_invalid_axis(self):
        with self.assertRaises(RuntimeError):
            self.init_func()
class TestSplit(TestCaffeFunctionBase):
    """Split layers create no function; they only alias tops to the bottom."""

    data = {
        'layer': [
            {
                'name': 'l1',
                'type': 'Split',
                'bottom': ['x'],
                'top': ['y', 'z'],
            }
        ]
    }

    def test_split(self):
        self.init_func()
        # Each top is recorded as an alias of the single bottom.
        self.assertEqual(self.func.split_map, {'y': 'x', 'z': 'x'})
# Run this module's tests when executed directly (testing.run_module helper).
testing.run_module(__name__, __file__)
|
Image Title: Outdoor Battery Operated Lamps Portable At Lumens Com Intended For Powered Lights Plans 3. Post Title: Battery Powered Outdoor Lights. Filename: outdoor-battery-operated-lamps-portable-at-lumens-com-intended-for-powered-lights-plans-3.jpg. Image Dimension: 250 x 250 pixels. Images Format: jpg/jpeg. Publisher/Author: Mark Collier. Uploaded Date: Monday - October 22nd. 2018 23:23:10 PM. Category: Architecture. Image Source: houzz.com. Waterproof Wireless Outdoor Garden Battery Operated Led Solar Pir Throughout Powered Lights Plans 4. Battery Operated Outdoor Light Lights Inside Powered Remodel 13. Battery Operated Outdoor Lights With Timer Cozywp Com Throughout Powered Decor 10. 20 Best Ideas Of Battery Operated Outdoor Lights Inside Powered Idea 6. Outdoor Battery Operated Wall Mounted Lights Throughout Powered Inspirations 19. Battery Powered Porch Light Outdoor Lights Amazon Explicatio Co For Design 12. Remote Control Battery Lights Powered Outdoor Light With Pertaining To Remodel 5. Outdoor Light Wonderous Battery Operated Security Lights Inside Powered Design 8. Outdoor Battery Operated Table Lamps Light Candles Cordless With Powered Lights Plans 17. Good Battery Operated Outdoor Lights With Timer Or Inside Powered Plan 2.
|
# encoding=utf-8

# Resource lifecycle states, used as transition keys by resource_dict.
RUNNING = "RUNNING"
STOPPED = "STOPPED"
DELETED = "DELETED"
EXPUNGED = "EXPUNGED"
ATTACHED = "ATTACHED"
DETACHED = "DETACHED"
ENABLED = "ENABLED"
HA = "HA"

# The values below carry embedded single quotes on purpose: they are pasted
# verbatim into generated "[TestAction....]" strings, where the quotes make
# them string literals inside the emitted test case source.
DATA = "'data'"
ROOT = "'root'"
VM = "'VM'"
VOLUME = "'VOLUME'"
QCOW2 = "'qcow2'"
ISO = "'iso'"

# Environment feature switches.
MINI = False
# todo
LOCAL = False
class resource_dict(object):
    """Registry of resources bucketed by lifecycle state.

    One instance exists per resource kind (VMs, volumes, snapshots, ...).
    ``group`` maps a snapshot/backup group id to its member resources.
    ``len`` counts every resource ever added; it is never decremented.
    """

    def __init__(self):
        self.running = []
        self.enabled = []
        self.stopped = []
        self.attached = []
        self.detached = []
        self.deleted = []
        self.expunged = []
        self.ha = []
        self.len = 0
        self.group = {}

    def __repr__(self):
        temp = "Running:"
        temp += str(self.running)
        temp += "\nStopped:"
        temp += str(self.stopped)
        # Fixed typo: label used to read "Enadbled".
        temp += "\nEnabled:"
        temp += str(self.enabled)
        # Fixed capitalization for consistency with the other labels.
        temp += "\nAttached:"
        temp += str(self.attached)
        temp += "\nDetached:"
        temp += str(self.detached)
        temp += "\nDeleted:"
        temp += str(self.deleted)
        temp += "\nExpunged:"
        temp += str(self.expunged)
        temp += "\nHa:"
        temp += str(self.ha)
        temp += "\nGroup:"
        for k, v in self.group.items():
            temp += ("\n\t%s:%s") % (str(k), str(v))
            # NOTE(review): assumes every group is non-empty and members
            # carry a .description attribute -- confirm against callers.
            temp += "---%s" % v[0].description
        return temp

    def __str__(self):
        return self.__repr__()

    def __add__(self, other):
        """Merge two registries into a new one (lists concatenated, groups unioned)."""
        resource = resource_dict()
        resource.group = dict(self.group, **other.group)
        resource.running = self.running + other.running
        resource.enabled = self.enabled + other.enabled
        resource.stopped = self.stopped + other.stopped
        resource.attached = self.attached + other.attached
        resource.detached = self.detached + other.detached
        resource.deleted = self.deleted + other.deleted
        resource.expunged = self.expunged + other.expunged
        resource.ha = self.ha + other.ha
        resource.len = self.len + other.len
        return resource

    def change_state(self, resource, src_sdt=None, dst_sdt=None):
        """Move *resource* from the src state bucket to the dst state bucket.

        Either side may be None to only remove or only append.
        """
        if src_sdt == RUNNING:
            self.running.remove(resource)
        elif src_sdt == ENABLED:
            self.enabled.remove(resource)
        elif src_sdt == ATTACHED:
            self.attached.remove(resource)
        elif src_sdt == DETACHED:
            self.detached.remove(resource)
        elif src_sdt == STOPPED:
            self.stopped.remove(resource)
        elif src_sdt == DELETED:
            self.deleted.remove(resource)
        elif src_sdt == HA:
            self.ha.remove(resource)
        if dst_sdt == RUNNING:
            self.running.append(resource)
        elif dst_sdt == ENABLED:
            self.enabled.append(resource)
        elif dst_sdt == ATTACHED:
            self.attached.append(resource)
        elif dst_sdt == DETACHED:
            self.detached.append(resource)
        elif dst_sdt == STOPPED:
            self.stopped.append(resource)
        elif dst_sdt == DELETED:
            self.deleted.append(resource)
        elif dst_sdt == EXPUNGED:
            self.expunged.append(resource)
        elif dst_sdt == HA:
            self.ha.append(resource)

    def add(self, resource, dst_sdt=None):
        """Register a new resource in the bucket for its initial state."""
        if dst_sdt == RUNNING:
            self.running.append(resource)
        elif dst_sdt == STOPPED:
            self.stopped.append(resource)
        elif dst_sdt == ATTACHED:
            self.attached.append(resource)
        elif dst_sdt == ENABLED:
            self.enabled.append(resource)
        elif dst_sdt == DETACHED:
            self.detached.append(resource)
        self.len += 1

    def get_not_ha_resource(self):
        """Return running resources that do not have HA enabled."""
        r_list = []
        for resource in self.running:
            if resource not in self.ha:
                r_list.append(resource)
        return r_list
# Global registries, one per resource kind; shared by all model classes
# below and reinitialized by reset().
all_volumes = resource_dict()
all_vms = resource_dict()
all_snapshots = resource_dict()
all_backups = resource_dict()
all_images = resource_dict()
def reset():
    """Discard every tracked resource and start from empty registries."""
    for _registry in ("all_volumes", "all_vms", "all_snapshots",
                      "all_backups", "all_images"):
        globals()[_registry] = resource_dict()
class Resource(object):
    """Base class for all modeled resources; prints a trace on each event.

    NOTE(review): __init__ ignores its *name*/*type* parameters and reads
    self.name, so subclasses must assign self.name BEFORE calling this
    constructor -- confirm before reordering any subclass __init__.
    """

    def __init__(self, name=None, type=None):
        print "Resource %s has been created" % self.name

    def __repr__(self):
        return self.name

    def change_state(self, state):
        # Trace helper; subclasses mostly assign .state directly instead.
        print "Resource [%s] changes state [%s] to [%s]" % (self.name, self.state, state)
        self.state = state

    def do_change(self, state, action_name):
        # Trace-only: announces a required transition without performing it.
        print "Resource [%s] must changes state [%s] to [%s] to do [%s]" % (self.name, self.state, state, action_name)
        pass
class Vm(Resource):
    """Model of a test VM plus bookkeeping for its volumes/snapshots/backups.

    Every mutator updates the global registries in step and returns the
    string form of the TestAction a generated test case should execute.
    """

    def __init__(self, name=None):
        self.state = RUNNING
        if not name:
            self.name = "'vm" + str(all_vms.len + 1) + "'"
        else:
            self.name = name
        # "'vm1'" -> "'vm1-root'": the surrounding quotes are part of the name.
        self.root_name = self.name[:-1] + "-root'"
        self.haveHA = False
        self.volumes = []
        self.backups = []
        self.snapshots = []
        self.root_volume = Volume(self.root_name, type=ROOT)
        self.root_volume.vm = self
        super(Vm, self).__init__()

    def start(self):
        all_vms.change_state(self, self.state, RUNNING)
        self.state = RUNNING
        return "[TestAction.start_vm, %s]" % self.name

    def stop(self):
        all_vms.change_state(self, self.state, STOPPED)
        self.state = STOPPED
        return "[TestAction.stop_vm, %s]" % self.name

    def delete(self):
        """Destroy the VM; its data volumes become DETACHED and orphaned."""
        all_vms.change_state(self, self.state, DELETED)
        self.state = DELETED
        if self.haveHA:
            all_vms.change_state(self, src_sdt=HA)
            self.haveHA = not self.haveHA
        for volume in self.volumes:
            all_volumes.change_state(volume, ATTACHED, DETACHED)
            volume.state = DETACHED
            volume.vm = None
        self.volumes = []
        return "[TestAction.destroy_vm, %s]" % self.name

    def expunge(self):
        """Expunge the VM and delete its snapshots plus any snapshot group."""
        all_vms.change_state(self, self.state, EXPUNGED)
        self.state = EXPUNGED
        # BUGFIX: iterate over a copy -- the body removes entries from
        # self.snapshots, and mutating a list while iterating it skips items.
        for snapshot in list(self.snapshots):
            all_snapshots.change_state(snapshot, ENABLED, DELETED)
            snapshot.state = DELETED
            self.snapshots.remove(snapshot)
            groupId = snapshot.groupId
            if snapshot.groupId:
                for snap in all_snapshots.group[snapshot.groupId]:
                    snap.groupId = None
                all_snapshots.group.pop(groupId)
        return "[TestAction.expunge_vm, %s]" % self.name

    def recover(self):
        all_vms.change_state(self, self.state, STOPPED)
        self.state = STOPPED
        return "[TestAction.recover_vm, %s]" % self.name

    def change_ha(self):
        """Toggle HA; an HA VM is always considered RUNNING afterwards."""
        if self.haveHA:
            all_vms.change_state(self, src_sdt=HA)
        else:
            all_vms.change_state(self, dst_sdt=HA)
        if self.state == STOPPED:
            all_vms.change_state(self, self.state, RUNNING)
        self.haveHA = not self.haveHA
        self.state = RUNNING
        return "[TestAction.change_vm_ha, %s]" % self.name

    def create(self, tags=None):
        """Register the VM as RUNNING; 'data_volume=true' adds an auto volume."""
        # BUGFIX: the documented default tags=None used to crash in
        # ", ".join(tags); normalize to an empty list.
        tags = tags or []
        all_vms.add(self, RUNNING)
        self.state = RUNNING
        if tags and "'data_volume=true'" in tags:
            volume = Volume("'auto-volume" + str(all_vms.len) + "'")
            self.volumes.append(volume)
            volume.vm = self
            all_volumes.add(volume, ATTACHED)
            volume.state = ATTACHED
        if MINI:
            return "[TestAction.create_mini_vm, %s, %s]" % (self.name, ", ".join(tags))
        return "[TestAction.create_vm, %s, %s]" % (self.name, ", ".join(tags))

    def reinit(self):
        return "[TestAction.reinit_vm, %s]" % self.name

    def change_vm_image(self):
        return "[TestAction.change_vm_image, %s]" % (self.name)

    def migrate(self):
        return "[TestAction.migrate_vm, %s]" % self.name

    def resize(self, tags):
        """Resize the root volume; defaults to +5 MB when no tags are given."""
        if not tags:
            return "[TestAction.resize_volume, %s, 5*1024*1024]" % self.name
        else:
            return "[TestAction.resize_volume, %s, %s]" % (self.name, ", ".join(tags))

    def clone_vm(self):
        new_vm = Vm()
        all_vms.add(new_vm, RUNNING)
        return "[TestAction.clone_vm, %s, %s]" % (self.name, new_vm.name)

    def clone_vm_with_volume(self):
        """Full clone: the new VM receives a copy of every attached volume."""
        new_vm = Vm()
        all_vms.add(new_vm, RUNNING)
        for volume in self.volumes:
            name = "'clone@" + volume.name[1:]
            new_volume = Volume(name)
            all_volumes.add(new_volume, ATTACHED)
            new_volume.state = ATTACHED
            new_volume.vm = new_vm
            new_vm.volumes.append(new_volume)
        return "[TestAction.clone_vm, %s, %s, 'full']" % (self.name, new_vm.name)

    def reboot(self):
        return "[TestAction.reboot_vm, %s]" % self.name

    def ps_migrate(self):
        return "[TestAction.ps_migrate_vm, %s]" % self.name

    def create_root_snapshot(self):
        # name: vm1-root-snapshot-1
        snapshot_name = self.root_name[:-1] + "-snapshot" + str(all_snapshots.len + 1) + "'"
        snapshot = Snapshot(snapshot_name)
        snapshot.create()
        snapshot.set_volume(self.root_volume)
        self.snapshots.append(snapshot)
        return "[TestAction.create_volume_snapshot, %s, %s]" % (self.root_name, snapshot.name)

    def delete_root_snapshot(self, snapshot):
        """Delete one root snapshot; dissolves its vm-snapshot group if any."""
        self.snapshots.remove(snapshot)
        # `in` instead of dict.has_key(): same semantics, also valid on py3.
        if snapshot.groupId and snapshot.groupId in all_snapshots.group:
            groupId = snapshot.groupId
            for snap in all_snapshots.group[snapshot.groupId]:
                snap.groupId = None
            all_snapshots.group.pop(groupId)
        return snapshot.delete()

    def use_root_snapshot(self, snapshot):
        return snapshot.use()

    def create_vm_snapshot(self):
        """Snapshot the root volume and every data volume as one group."""
        # Allocate the next free group id.  BUGFIX: parse the full numeric
        # suffix instead of only the last character, which broke after
        # "vm_snap9" (key[-1] of "vm_snap10" is "0").
        temp = 0
        for key in all_snapshots.group.keys():
            if int(key[len("vm_snap"):]) > temp:
                temp = int(key[len("vm_snap"):])
        groupId = "vm_snap" + str(temp + 1)
        description = (self.name + "@" + "_".join([vol.name for vol in self.volumes])).replace("'", "")
        all_snapshots.group[groupId] = []
        root_snapshot_name = self.name[:-1] + "-snapshot" + str(all_snapshots.len + 1) + "'"
        root_snapshot = Snapshot(root_snapshot_name)
        root_snapshot.create()
        root_snapshot.set_volume(self.root_volume)
        root_snapshot.set_groupId(groupId, description)
        self.snapshots.append(root_snapshot)
        for volume in self.volumes:
            snapshot_name = root_snapshot_name.replace(self.name[:-1], volume.name[:-1])
            vol_snapshot = Snapshot(snapshot_name)
            vol_snapshot.create()
            vol_snapshot.set_volume(volume)
            vol_snapshot.set_groupId(groupId, description)
        return "[TestAction.create_vm_snapshot, %s, %s]" % (self.name, root_snapshot.name)

    def delete_vm_snapshot(self, groupId):
        """Delete the whole snapshot group identified by *groupId*."""
        vm_snapshot_name = ''
        # BUGFIX: iterate over a copy -- the inner loop removes the root
        # snapshot from self.root_volume.snapshots while we walk it.
        for snapshot in list(self.root_volume.snapshots):
            if snapshot.groupId == groupId:
                self.snapshots.remove(snapshot)
                for vol_snapshot in all_snapshots.group[groupId]:
                    all_snapshots.change_state(vol_snapshot, vol_snapshot.state, DELETED)
                    if vol_snapshot.volume and vol_snapshot in vol_snapshot.volume.snapshots:
                        vol_snapshot.volume.snapshots.remove(vol_snapshot)
                    if vol_snapshot in self.snapshots:
                        self.snapshots.remove(vol_snapshot)
                    vol_snapshot.volume = None
                    vol_snapshot.vm = None
                vm_snapshot_name = snapshot.name
        for snap in all_snapshots.group[groupId]:
            snap.groupId = None
        all_snapshots.group.pop(groupId)
        return "[TestAction.delete_vm_snapshot, %s]" % vm_snapshot_name

    def use_vm_snapshot(self, groupId):
        vm_snapshot_name = ''
        for snapshot in self.root_volume.snapshots:
            if snapshot.groupId == groupId:
                vm_snapshot_name = snapshot.name
        return "[TestAction.use_vm_snapshot, %s]" % vm_snapshot_name

    def create_root_backup(self):
        # name: vm1-root-backup-1
        backup_name = self.root_name[:-1] + "-backup" + str(all_backups.len + 1) + "'"
        backup = Backup(backup_name)
        backup.create()
        backup.set_volume(self.root_volume)
        self.backups.append(backup)
        return "[TestAction.create_volume_backup, %s, %s]" % (self.root_name, backup.name)

    def delete_root_backup(self, backup):
        """Delete one root backup; dissolves its vm-backup group if any."""
        self.backups.remove(backup)
        if backup.groupId and backup.groupId in all_backups.group:
            groupId = backup.groupId
            for back in all_backups.group[backup.groupId]:
                back.groupId = None
            all_backups.group.pop(groupId)
        return backup.delete()

    def use_root_backup(self, backup):
        return backup.use()

    def create_vm_backup(self):
        """Back up the root volume and every data volume as one group."""
        groupId = "vm_backup" + str(len(all_backups.group) + 1)
        description = (self.name + "@" + "_".join([vol.name for vol in self.volumes])).replace("'", "")
        all_backups.group[groupId] = []
        root_backup_name = self.name[:-1] + "-backup" + str(all_backups.len + 1) + "'"
        root_backup = Backup(root_backup_name)
        root_backup.create()
        root_backup.set_volume(self.root_volume)
        root_backup.set_groupId(groupId, description)
        self.backups.append(root_backup)
        for volume in self.volumes:
            backup_name = root_backup_name.replace(self.name[:-1], volume.name[:-1])
            vol_backup = Backup(backup_name)
            vol_backup.create()
            vol_backup.set_volume(volume)
            vol_backup.set_groupId(groupId, description)
        return "[TestAction.create_vm_backup, %s, %s]" % (self.name, root_backup.name)

    def delete_vm_backup(self, groupId):
        """Delete the whole backup group identified by *groupId*."""
        vm_backup_name = ''
        # BUGFIX: iterate over a copy -- the inner loop removes the root
        # backup from self.root_volume.backups while we walk it.
        for backup in list(self.root_volume.backups):
            if backup.groupId == groupId:
                self.backups.remove(backup)
                for vol_backup in all_backups.group[groupId]:
                    all_backups.change_state(vol_backup, vol_backup.state, DELETED)
                    vol_backup.volume.backups.remove(vol_backup)
                    vol_backup.volume = None
                vm_backup_name = backup.name
        for back in all_backups.group[groupId]:
            back.groupId = None
        all_backups.group.pop(groupId)
        return "[TestAction.delete_vm_backup, %s]" % vm_backup_name

    def use_vm_backup(self, groupId):
        vm_backup_name = ''
        for backup in self.root_volume.backups:
            if backup.groupId == groupId:
                vm_backup_name = backup.name
        return "[TestAction.use_vm_backup, %s]" % vm_backup_name

    def create_image(self):
        image = Image(self.name)
        image.type = ROOT
        all_images.add(image, ENABLED)
        return "[TestAction.create_image_from_volume, %s, %s]" % (self.name, image.name)
class Volume(Resource):
    """Model of a data/root volume and its snapshots and backups."""

    def __init__(self, name=None, type=DATA):
        if not name:
            self.name = "'volume" + str(all_volumes.len + 1) + "'"
        else:
            self.name = name
        Resource.__init__(self)
        self.state = DETACHED
        self.vm = None
        self.backups = []
        self.snapshots = []
        self.type = type

    def create(self, tags):
        """Register the volume; always forces a scsi flag into *tags*.

        NOTE(review): this mutates the caller's list in place -- existing
        callers appear to rely on the augmented tags, so it is kept.
        """
        all_volumes.add(self, DETACHED)
        self.state = DETACHED
        if tags and "flag" in tags[-1]:
            tags[-1] = tags[-1][:-1] + ",scsi" + "'"
        else:
            # Exact complement of the branch above (was a redundant elif).
            tags.append("'flag=scsi'")
        return "[TestAction.create_volume, %s, %s]" % (self.name, ", ".join(tags))

    def attach(self, vm):
        all_volumes.change_state(self, self.state, ATTACHED)
        self.state = ATTACHED
        vm.volumes.append(self)
        self.vm = vm
        return "[TestAction.attach_volume, %s, %s]" % (vm.name, self.name)

    def detach(self):
        all_volumes.change_state(self, self.state, DETACHED)
        self.state = DETACHED
        self.vm.volumes.remove(self)
        self.vm = None
        return "[TestAction.detach_volume, %s]" % self.name

    def resize(self, tags):
        """Resize the data volume; defaults to +5 MB when no tags are given."""
        if not tags:
            return "[TestAction.resize_data_volume, %s, 5*1024*1024]" % self.name
        else:
            return "[TestAction.resize_data_volume, %s, %s]" % (self.name, ", ".join(tags))

    def delete(self):
        all_volumes.change_state(self, self.state, DELETED)
        self.state = DELETED
        if self.vm:
            self.vm.volumes.remove(self)
            self.vm = None
        return "[TestAction.delete_volume, %s]" % self.name

    def expunge(self):
        """Expunge the volume; all of its snapshots become DELETED."""
        all_volumes.change_state(self, self.state, EXPUNGED)
        self.state = EXPUNGED
        for snapshot in self.snapshots:
            all_snapshots.change_state(snapshot, ENABLED, DELETED)
            snapshot.state = DELETED
        return "[TestAction.expunge_volume, %s]" % self.name

    def recover(self):
        all_volumes.change_state(self, self.state, DETACHED)
        self.state = DETACHED
        return "[TestAction.recover_volume, %s]" % self.name

    def ps_migrate(self):
        return "[TestAction.ps_migrate_volume, %s]" % self.name

    def migrate(self):
        return "[TestAction.migrate_volume, %s]" % self.name

    def create_volume_snapshot(self):
        # name: volume1-snapshot1
        snapshot_name = self.name[:-1] + "-snapshot" + str(all_snapshots.len + 1) + "'"
        snapshot = Snapshot(snapshot_name)
        snapshot.create()
        self.snapshots.append(snapshot)
        snapshot.volume = self
        return "[TestAction.create_volume_snapshot, %s, %s]" % (self.name, snapshot.name)

    def delete_volume_snapshot(self, snapshot):
        self.snapshots.remove(snapshot)
        return snapshot.delete()

    # Kept for backward compatibility: existing callers use this misspelled
    # name.  Prefer use_volume_snapshot() below.
    def use_volme_snapshot(self, snapshot):
        return snapshot.use()

    def use_volume_snapshot(self, snapshot):
        """Correctly-spelled alias for use_volme_snapshot()."""
        return self.use_volme_snapshot(snapshot)

    def create_volume_backup(self):
        backup_name = self.name[:-1] + "-backup" + str(all_backups.len + 1) + "'"
        backup = Backup(backup_name)
        backup.create()
        self.backups.append(backup)
        backup.volume = self
        return "[TestAction.create_volume_backup, %s, %s]" % (self.name, backup.name)

    def delete_volume_backup(self, backup):
        self.backups.remove(backup)
        return backup.delete()

    def use_volume_backup(self, backup):
        return backup.use()

    def create_image(self):
        image = Image(self.name)
        image.type = DATA
        all_images.add(image, ENABLED)
        return "[TestAction.create_data_vol_template_from_volume, %s, %s]" % (self.name, image.name)
class Snapshot(Resource):
    """A volume snapshot; may belong to a vm-snapshot group."""

    def __init__(self, name):
        self.name = name
        self.state = ENABLED
        self.volume = None
        self.groupId = None
        self.description = ""
        Resource.__init__(self)

    def create(self):
        all_snapshots.add(self, ENABLED)

    def set_groupId(self, groupId, description=None):
        self.groupId = groupId
        self.description = description
        all_snapshots.group[groupId].append(self)

    def set_volume(self, volume):
        self.volume = volume
        volume.snapshots.append(self)

    def delete(self):
        all_snapshots.change_state(self, self.state, DELETED)
        self.state = DELETED
        self.volume = None
        self.vm = None
        return "[TestAction.delete_volume_snapshot, %s]" % self.name

    def use(self):
        return "[TestAction.use_volume_snapshot, %s]" % self.name

    def detach_vm_snapshot(self):
        """Dissolve this snapshot's group without deleting its members."""
        # BUGFIX: save the id before clearing.  self is a member of its own
        # group, so the loop used to set self.groupId to None and the
        # subsequent pop(self.groupId) raised KeyError(None).
        groupId = self.groupId
        for snap in all_snapshots.group[groupId]:
            snap.groupId = None
        all_snapshots.group.pop(groupId)
        return "[TestAction.ungroup_volume_snapshot, %s]" % self.name

    def create_image(self):
        # "'vm1-root-snapshot-1'" -> "'vm1'": recover the owner name.
        image_name = self.name.split('-')[0] + "'"
        image = Image(image_name)
        if "vm" in self.name:
            image.type = ROOT
        else:
            image.type = DATA
        all_images.add(image, ENABLED)
        return "[TestAction.create_image_from_snapshot, %s, %s]" % (self.name, image.name)
class Backup(Resource):
    """A volume backup; may belong to a vm-backup group."""

    def __init__(self, name):
        self.name = name
        self.state = ENABLED
        self.volume = None
        self.groupId = None
        self.description = ""
        Resource.__init__(self)

    def create(self):
        all_backups.add(self, ENABLED)

    def set_groupId(self, groupId, description=None):
        self.groupId = groupId
        self.description = description
        all_backups.group[groupId].append(self)

    def set_volume(self, volume):
        self.volume = volume
        volume.backups.append(self)

    def delete(self):
        all_backups.change_state(self, self.state, DELETED)
        self.state = DELETED
        self.volume = None
        return "[TestAction.delete_volume_backup, %s]" % self.name

    def use(self):
        return "[TestAction.use_volume_backup, %s]" % self.name

    def create_vm(self):
        # Rebuild a VM (and its data volumes) from a whole vm-backup group.
        # NOTE(review): assumes the group contains exactly one root ("vm")
        # backup; if none is present, `vm` stays None and the attribute
        # access below fails -- confirm callers guarantee this.
        vm = None
        volumes = []
        for res in all_backups.group[self.groupId]:
            # "'vm1-backup1'" -> "'vm1-from-backup1'"
            new_name = res.name.split("-")[0] + "-from-" + res.name.split("-")[1]
            if "vm" in res.name:
                vm = Vm(new_name)
                all_vms.add(vm, RUNNING)
            else:
                volume = Volume(new_name)
                all_volumes.add(volume, ATTACHED)
                volumes.append(volume)
        vm.volumes = volumes
        for volume in volumes:
            volume.vm = vm
        return "[TestAction.create_vm_from_vmbackup, %s]" % self.name

    def create_image(self):
        # "'vm1-root-backup-1'" -> "'vm1'": recover the owner name.
        image_name = self.name.split('-')[0] + "'"
        image = Image(image_name)
        if "vm" in self.name:
            image.type = ROOT
        else:
            image.type = DATA
        all_images.add(image, ENABLED)
        return "[TestAction.create_image_from_backup, %s, %s]" % (self.name, image.name)
class Image(Resource):
    """An image template registered in the all_images registry."""

    def __init__(self, name=None):
        sequence = str(all_images.len + 1)
        if not name:
            self.name = "'image" + sequence + "'"
        else:
            self.name = name[:-1] + "-image" + sequence + "'"
        self.state = ENABLED
        self.volume = None
        self.groupId = None
        self.type = ROOT
        self.format = QCOW2
        Resource.__init__(self)

    def add(self, type = ROOT, url="'http://172.20.1.28/mirror/diskimages/centos_vdbench.qcow2'"):
        """Register the image; ISO images use the environment-provided URL."""
        all_images.add(self, ENABLED)
        if self.format == ISO:
            url = "os.environ.get('isoForVmUrl')"
        return "[TestAction.add_image, %s, %s, %s]" % (self.name, type, url)

    def delete(self):
        self.state = DELETED
        all_images.change_state(self, ENABLED, DELETED)
        return "[TestAction.delete_image, %s]" % self.name

    def recover(self):
        self.state = ENABLED
        all_images.change_state(self, DELETED, ENABLED)
        return "[TestAction.recover_image, %s]" % self.name

    def expunge(self):
        self.state = EXPUNGED
        all_images.change_state(self, DELETED, EXPUNGED)
        return "[TestAction.expunge_image, %s]" % self.name

    def create_vm(self, tags):
        """Spawn a new RUNNING VM from this image (tags currently unused)."""
        spawned = Vm()
        all_vms.add(spawned, RUNNING)
        return "[TestAction.create_vm_by_image, %s, %s, %s]" % (self.name, self.format, spawned.name)

    def create_volume(self):
        # todo: mini robot action must support this function
        new_volume = Volume()
        all_volumes.add(new_volume, DETACHED)
        # return [TestAction.create_data_volume_from_image, "volume2", "=scsi"],
        return "[TestAction.create_volume_from_image, %s, %s]" % (self.name, new_volume.name)
def batch_delete_snapshot(snapshots):
    """Delete several volume snapshots in one TestAction.

    Any snapshot group a deleted member belongs to is dissolved, and each
    snapshot is detached from its owning volume (or VM for root snapshots).
    """
    for target in snapshots:
        all_snapshots.change_state(target, target.state, DELETED)
        gid = target.groupId
        if gid and gid in all_snapshots.group:
            for member in all_snapshots.group[gid]:
                member.groupId = None
            all_snapshots.group.pop(gid)
        if "vm" in target.name:
            target.volume.vm.snapshots.remove(target)
        else:
            target.volume.snapshots.remove(target)
    slots = "%s," * len(snapshots)
    names = tuple([s.name for s in snapshots])
    return ("[TestAction.batch_delete_volume_snapshot, [" + slots + "]]") % names
if __name__ == "__main__":
    # Smoke-test drive: generate a short action sequence by hand and dump
    # the final contents of every registry.
    vm1 = Vm()
    print vm1.create(["'data_volume=true'"])
    vm2 = Vm()
    print vm2.create([])
    vol1 = Volume()
    print vol1.create([])
    # vol2 = Volume()
    # print vol2.create([])
    # vol3 = Volume()
    # print vol3.create([])
    print vol1.attach(vm1)
    print vm1.delete()
    # print vol3.attach(vm1)
    # print vol2.attach(vm2)
    #
    #
    # print vol1.create_volume_snapshot()
    # print vm1.create_vm_snapshot()
    # print vm2.create_vm_snapshot()
    print vol1.create_volume_backup()
    print vm1.create_vm_backup()
    print vm2.create_root_backup()
    # resource_dict.__add__ merges the per-kind registries for display.
    all_resources = all_vms + all_volumes + all_snapshots + all_backups + all_images
    print all_resources
|
Call 800-239-9895 now for professional guidance and assistance with Laporte, MN Paving tasks. Our objective at Paving is always to ensure that your job is a success by helping you to evaluate the pros and cons of your alternatives and working to complete your project within your budget. You've got plenty to think about while looking for the right service for your Paving needs. We want to help make the entire process less difficult by making sure that you will be knowledgeable about your options and have expert help with evaluating the potential results and cost estimates from different services and different choices for any project.
How frequently have you purchased a product from an organization and finished the payment simply to feel like you’ve all of a sudden been set on the back burner? We’d speculate you’ve experienced this many times, and it’s really infuriating. Fortunately, our Laporte, MN Paving business utilizes an incredible system that allows our pros to stay on top of every order and deliver prompt service.
When figuring out which Laporte, MN Paving organization you want to do business with, ensuring they have an extraordinary inventory and fast service is important, but they also need to be licensed and insured. Uninsured employees are a significant liability to have at your business or home since you may be held financially liable for any injuries or damage that arises. Thankfully, our company's Paving experts are insured and licensed, so you’ll never be required to worry when you choose our company.
At our Laporte, MN Paving company, our fantastic customer service doesn’t come to a halt whenever you’ve received your product(s). Instead, our business' professionals will remain in touch with you to make certain you’re completely pleased with the results. Our specialists do this since we want you to know we’re here to help you and don’t want you to feel reluctant to contact us.
If you’re like many people, you will have an idea of what you must buy, but you also wouldn’t reject an expert's advice. Fortunately, when you let our Laporte, Minnesota Paving specialists help, you’ll achieve this easily, and it’s free by means of our free consultation. To reserve your free consultation today, don’t wait to call our specialists at 800-239-9895!
Upon initially conversing with our company’s pros, it won’t take you long to understand we’re extremely thorough. Instead, our Laporte, MN Paving experts will ask you several questions to gather a thorough understanding of what you’re looking to accomplish and present you with ideas. As a result, you’ll have the delight in knowing you acquired tailored service as opposed to being sold.
|
from bs4 import BeautifulSoup
from urllib2 import urlopen
BASE_URL = "http://www.chordie.com"
def make_soup(url):
    """Download *url* and return its HTML parsed into a BeautifulSoup tree (lxml backend)."""
    return BeautifulSoup(urlopen(url).read(), "lxml")
def get_url(option):
    """
    Extract the absolute song URL from one search-result <table> element.

    Returns BASE_URL + href only when the result is labelled "CHORDS";
    returns None for any other result type or when the expected markup
    (rows, cells, <i><a><span>, href) is missing.
    """
    try:
        line = option.findAll("tr")[1].findAll("td")[1].i.a
        if line.span.text == "CHORDS":
            return BASE_URL + line["href"]
        return None
    # Narrowed from a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit.  These four cover every "markup not as expected" failure:
    # missing tag attribute access (AttributeError/TypeError on None), short
    # row/cell lists (IndexError), and missing "href" (KeyError).
    except (AttributeError, IndexError, KeyError, TypeError):
        return None
"""
Chordie is based on a chord search engine - hence it contains several results for each song.
We filter out the non-Chord type of results, and parse each of the remaining into a chord vector.
Returns an array containing all available chord vectors.
"""
def get_chord_options(chords_url):
    """
    Fetch a Chordie search-result page and parse each CHORDS-type result
    into a chord vector.

    Returns a list of chord vectors (possibly empty), or None when the page
    has no result container at all.
    """
    soup = make_soup(chords_url)
    all_text = soup.find("div", {"id": "resultc"})
    if not all_text:
        return None
    chord_options = []
    for option in all_text.findAll("table"):
        url = get_url(option)
        # `is None` instead of the original `not url==None` anti-idiom.
        if url is None:  # non-CHORDS result, or unparsable markup
            continue
        chord_vector = get_chord_vector(url)
        # Truthiness covers both None and the empty list in one check.
        if chord_vector:
            # print url+": "+str(chord_vector)
            chord_options.append(chord_vector)
    return chord_options
"""
Given a relevant url, parses it into a chord vector
"""
def get_chord_vector(chords_url):
    """Fetch a single chord page and return its chords as a list of strings."""
    soup = make_soup(chords_url)
    return [str(cell.text) for cell in soup.findAll("td", "c")]
"""
Given (artist, title), scrapes *all* suitable chord vectors
"""
def get_chords(title, artist):
    """Scrape all suitable chord vectors for (title, artist) from Chordie."""
    query = "songtitle/%s/songartist/%s/index.html" % (
        title.replace(' ', '+'),
        artist.replace(' ', '+'),
    )
    return get_chord_options(BASE_URL + "/allsongs.php/" + query)
def test():
    # Smoke test: scrape chord vectors for a known song (requires network access).
    get_chords("Suzanne", "leonard cohen")
|
Today’s post will teach you the basics about a sample letter of intent for doctoral program. If you are pursuing a career in graduate school, then you should know that this letter is not your ordinary letter to write for an undergraduate program.
The screening committee is not looking for amateurs, so you should think of yourself as an established professional so you can gain their attention. One way to do that is to show that you are indeed a good asset to the organization or institution you are applying to. The committee will try to figure out whether you can become a good asset to the organization or not. You should show that you have the expertise and experience relevant to the institution you are applying to and that you can become a genuine help to them.
Focus on your career goals. Be able to show the committee that you are exactly who they are looking for.
Clearly state why you’re a great asset for the institution. Why did you choose the program and the institution to pursue your career goals? What are the research areas that are interesting for you? You can mention two areas that you find interesting to give the institution an overview of your personal interest.
You can also highlight some needed references in your letter of intent for doctoral program. You can also include the research areas you have prepared for and what makes you successful in those areas. You can also focus on your specialization.
Remember that your letter of intent for doctoral program is not a legal agreement, but this is your key to ensure that you can become part of the program.
Call up a professor. You can contact a professor for some questions. You can get help from one to help you in your LOI. But, be sure that you contact someone that has the same interests or goals like you.
This paper should only be about 400 to 800 words in length so be sure to maximize the given space by writing only relevant details about your application.
Follow the instructions closely. This is one of the keys to ensure that your paper will be read until the end of it.
Get Help from a Professional Letter of Intent for Doctoral Program Writer!
Ensure your success in the LOI by following the tips stated above. Otherwise, you can hire a professional writer to help you come up with one. Choose the best among them today and increase your chances of getting accepted in the institution for your career growth and advancement.
|
'''
Pedagogical example realization of seq2seq recurrent neural networks, using TensorFlow and TFLearn.
More info at https://github.com/ichuang/tflearn_seq2seq
'''
from __future__ import division, print_function
import os
import sys
import tflearn
import argparse
import json
import numpy as np
import tensorflow as tf
from tensorflow.python.ops import seq2seq
from tensorflow.python.ops import rnn_cell
#-----------------------------------------------------------------------------
class SequencePattern(object):
    """
    Defines the input -> output transformation ("pattern") that the seq2seq
    RNN will be trained to learn.  The available patterns are the
    ``*_sequence`` methods; the active one is selected by PATTERN_NAME.
    """
    INPUT_SEQUENCE_LENGTH = 10
    OUTPUT_SEQUENCE_LENGTH = 10
    INPUT_MAX_INT = 9
    OUTPUT_MAX_INT = 9
    PATTERN_NAME = "sorted"

    def __init__(self, name=None, in_seq_len=None, out_seq_len=None):
        if name is not None:
            # The name must correspond to an existing <name>_sequence method.
            assert hasattr(self, "%s_sequence" % name)
            self.PATTERN_NAME = name
        # Falsy (None / 0) keeps the class-level default.
        self.INPUT_SEQUENCE_LENGTH = in_seq_len or self.INPUT_SEQUENCE_LENGTH
        self.OUTPUT_SEQUENCE_LENGTH = out_seq_len or self.OUTPUT_SEQUENCE_LENGTH

    def generate_output_sequence(self, x):
        """
        For a given input sequence, generate the output sequence.  x is a 1D
        numpy array of integers, of length INPUT_SEQUENCE_LENGTH.  Returns a
        1D numpy array of length OUTPUT_SEQUENCE_LENGTH.

        This dispatches on PATTERN_NAME to the matching *_sequence method.
        """
        pattern_fn = getattr(self, "%s_sequence" % self.PATTERN_NAME)
        return pattern_fn(x)

    def maxmin_dup_sequence(self, x):
        """Output is [max, min, original entries from index 2 onward]."""
        arr = np.array(x)
        out = [arr.max(), arr.min()] + list(arr[2:])
        return np.array(out)[:self.OUTPUT_SEQUENCE_LENGTH]  # truncate at out seq len

    def sorted_sequence(self, x):
        """Output is the ascending sort of the input, truncated to the output length."""
        return np.array(sorted(x))[:self.OUTPUT_SEQUENCE_LENGTH]

    def reversed_sequence(self, x):
        """Output is the input in reverse order, truncated to the output length."""
        return np.array(x[::-1])[:self.OUTPUT_SEQUENCE_LENGTH]
#-----------------------------------------------------------------------------
class TFLearnSeq2Seq(object):
    '''
    seq2seq recurrent neural network, implemented using TFLearn.

    Wraps model construction, training, weight save/load, and prediction
    for an encoder/decoder RNN whose target pattern is defined by a
    SequencePattern instance.  Uses the legacy (TF 1.x contrib-era)
    seq2seq ops.
    '''
    # Names of the seq2seq architectures this wrapper knows how to build.
    AVAILABLE_MODELS = ["embedding_rnn", "embedding_attention"]

    def __init__(self, sequence_pattern, seq2seq_model=None, verbose=None, name=None, data_dir=None):
        '''
        sequence_pattern_class = a SequencePattern class instance, which defines pattern parameters
                                 (input, output lengths, name, generating function)
        seq2seq_model = string specifying which seq2seq model to use, e.g. "embedding_rnn"
        verbose = integer verbosity level (None/0 = quiet)
        name = model name, used when generating canonical weights filenames
        data_dir = directory used for checkpoints and weights files
        '''
        self.sequence_pattern = sequence_pattern
        self.seq2seq_model = seq2seq_model or "embedding_rnn"
        assert self.seq2seq_model in self.AVAILABLE_MODELS
        # Cache the pattern's geometry / alphabet sizes locally.
        self.in_seq_len = self.sequence_pattern.INPUT_SEQUENCE_LENGTH
        self.out_seq_len = self.sequence_pattern.OUTPUT_SEQUENCE_LENGTH
        self.in_max_int = self.sequence_pattern.INPUT_MAX_INT
        self.out_max_int = self.sequence_pattern.OUTPUT_MAX_INT
        self.verbose = verbose or 0
        self.n_input_symbols = self.in_max_int + 1
        self.n_output_symbols = self.out_max_int + 2  # extra one for GO symbol
        self.model_instance = None  # lazily-built, cached TFLearn DNN instance
        self.name = name
        self.data_dir = data_dir

    def generate_trainig_data(self, num_points):
        '''
        Generate training dataset.  Produce random (integer) sequences X, and corresponding
        expected output sequences Y = generate_output_sequence(X).

        Return xy_data, y_data (both of type uint32)

        xy_data = numpy array of shape [num_points, in_seq_len + out_seq_len], with each point being X + Y
        y_data  = numpy array of shape [num_points, out_seq_len]

        NOTE(review): the method name carries a historical typo ("trainig");
        it is kept because external callers may rely on it.
        '''
        # NOTE(review): np.random.randint's upper bound is exclusive, so the
        # generated values span 0..in_max_int-1 and never in_max_int itself —
        # confirm this was intended.
        x_data = np.random.randint(0, self.in_max_int, size=(num_points, self.in_seq_len))  # shape [num_points, in_seq_len]
        x_data = x_data.astype(np.uint32)  # ensure integer type
        y_data = [ self.sequence_pattern.generate_output_sequence(x) for x in x_data ]
        y_data = np.array(y_data)
        xy_data = np.append(x_data, y_data, axis=1)  # shape [num_points, 2*seq_len]
        return xy_data, y_data

    def sequence_loss(self, y_pred, y_true):
        '''
        Loss function for the seq2seq RNN.  Reshape predicted and true (label) tensors, generate dummy weights,
        then use seq2seq.sequence_loss to actually compute the loss function.
        '''
        if self.verbose > 2: print ("my_sequence_loss y_pred=%s, y_true=%s" % (y_pred, y_true))
        logits = tf.unstack(y_pred, axis=1)  # list of [-1, num_decoder_synbols] elements
        targets = tf.unstack(y_true, axis=1)  # y_true has shape [-1, self.out_seq_len]; unpack to list of self.out_seq_len [-1] elements
        if self.verbose > 2:
            print ("my_sequence_loss logits=%s" % (logits,))
            print ("my_sequence_loss targets=%s" % (targets,))
        # Uniform (all-ones) weights: every output position counts equally.
        weights = [tf.ones_like(yp, dtype=tf.float32) for yp in targets]
        if self.verbose > 4: print ("my_sequence_loss weights=%s" % (weights,))
        sl = seq2seq.sequence_loss(logits, targets, weights)
        if self.verbose > 2: print ("my_sequence_loss return = %s" % sl)
        return sl

    def accuracy(self, y_pred, y_true, x_in):  # y_pred is [-1, self.out_seq_len, num_decoder_symbols]; y_true is [-1, self.out_seq_len]
        '''
        Compute accuracy of the prediction, based on the true labels.  Use the average number of equal
        values.

        NOTE(review): x_in is unused here — presumably present to match the
        metric-function signature TFLearn expects; confirm against TFLearn docs.
        '''
        pred_idx = tf.to_int32(tf.argmax(y_pred, 2))  # [-1, self.out_seq_len]
        if self.verbose > 2: print ("my_accuracy pred_idx = %s" % pred_idx)
        accuracy = tf.reduce_mean(tf.cast(tf.equal(pred_idx, y_true), tf.float32), name='acc')
        return accuracy

    def model(self, mode="train", num_layers=1, cell_size=32, cell_type="BasicLSTMCell", embedding_size=20, learning_rate=0.0001,
              tensorboard_verbose=0, checkpoint_path=None):
        '''
        Build tensor specifying graph of operations for the seq2seq neural network model.

        mode = string, either "train" or "predict"
        cell_type = attribute of rnn_cell specifying which RNN cell type to use
        cell_size = size for the hidden layer in the RNN cell
        num_layers = number of RNN cell layers to use

        Return TFLearn model instance.  Use DNN model for this.
        '''
        assert mode in ["train", "predict"]
        checkpoint_path = checkpoint_path or ("%s%ss2s_checkpoint.tfl" % (self.data_dir or "", "/" if self.data_dir else ""))
        GO_VALUE = self.out_max_int + 1  # unique integer value used to trigger decoder outputs in the seq2seq RNN
        # The network is fed X and Y concatenated along axis 1; slice them apart here.
        network = tflearn.input_data(shape=[None, self.in_seq_len + self.out_seq_len], dtype=tf.int32, name="XY")
        encoder_inputs = tf.slice(network, [0, 0], [-1, self.in_seq_len], name="enc_in")  # get encoder inputs
        encoder_inputs = tf.unstack(encoder_inputs, axis=1)  # transform into list of self.in_seq_len elements, each [-1]
        decoder_inputs = tf.slice(network, [0, self.in_seq_len], [-1, self.out_seq_len], name="dec_in")  # get decoder inputs
        decoder_inputs = tf.unstack(decoder_inputs, axis=1)  # transform into list of self.out_seq_len elements, each [-1]
        go_input = tf.multiply( tf.ones_like(decoder_inputs[0], dtype=tf.int32), GO_VALUE )  # insert "GO" symbol as the first decoder input; drop the last decoder input
        decoder_inputs = [go_input] + decoder_inputs[: self.out_seq_len-1]  # insert GO as first; drop last decoder input
        # During prediction the decoder feeds its own previous output back in.
        feed_previous = not (mode=="train")
        if self.verbose > 3:
            print ("feed_previous = %s" % str(feed_previous))
            print ("encoder inputs: %s" % str(encoder_inputs))
            print ("decoder inputs: %s" % str(decoder_inputs))
            print ("len decoder inputs: %s" % len(decoder_inputs))
        self.n_input_symbols = self.in_max_int + 1  # default is integers from 0 to 9
        self.n_output_symbols = self.out_max_int + 2  # extra "GO" symbol for decoder inputs
        single_cell = getattr(rnn_cell, cell_type)(cell_size, state_is_tuple=True)
        if num_layers==1:
            cell = single_cell
        else:
            cell = rnn_cell.MultiRNNCell([single_cell] * num_layers)
        if self.seq2seq_model=="embedding_rnn":
            model_outputs, states = seq2seq.embedding_rnn_seq2seq(encoder_inputs,  # encoder_inputs: A list of 2D Tensors [batch_size, input_size].
                                                                  decoder_inputs,
                                                                  cell,
                                                                  num_encoder_symbols=self.n_input_symbols,
                                                                  num_decoder_symbols=self.n_output_symbols,
                                                                  embedding_size=embedding_size,
                                                                  feed_previous=feed_previous)
        elif self.seq2seq_model=="embedding_attention":
            model_outputs, states = seq2seq.embedding_attention_seq2seq(encoder_inputs,  # encoder_inputs: A list of 2D Tensors [batch_size, input_size].
                                                                        decoder_inputs,
                                                                        cell,
                                                                        num_encoder_symbols=self.n_input_symbols,
                                                                        num_decoder_symbols=self.n_output_symbols,
                                                                        embedding_size=embedding_size,
                                                                        num_heads=1,
                                                                        initial_state_attention=False,
                                                                        feed_previous=feed_previous)
        else:
            raise Exception('[TFLearnSeq2Seq] Unknown seq2seq model %s' % self.seq2seq_model)
        tf.add_to_collection(tf.GraphKeys.LAYER_VARIABLES + '/' + "seq2seq_model", model_outputs)  # for TFLearn to know what to save and restore
        # model_outputs: list of the same length as decoder_inputs of 2D Tensors with shape [batch_size x output_size] containing the generated outputs.
        if self.verbose > 2: print ("model outputs: %s" % model_outputs)
        network = tf.stack(model_outputs, axis=1)  # shape [-1, n_decoder_inputs (= self.out_seq_len), num_decoder_symbols]
        if self.verbose > 2: print ("packed model outputs: %s" % network)
        if self.verbose > 3:
            all_vars = tf.get_collection(tf.GraphKeys.VARIABLES)
            print ("all_vars = %s" % all_vars)
        with tf.name_scope("TargetsData"):  # placeholder for target variable (i.e. trainY input)
            targetY = tf.placeholder(shape=[None, self.out_seq_len], dtype=tf.int32, name="Y")
        network = tflearn.regression(network,
                                     placeholder=targetY,
                                     optimizer='adam',
                                     learning_rate=learning_rate,
                                     loss=self.sequence_loss,
                                     metric=self.accuracy,
                                     name="Y")
        model = tflearn.DNN(network, tensorboard_verbose=tensorboard_verbose, checkpoint_path=checkpoint_path)
        return model

    def train(self, num_epochs=20, num_points=100000, model=None, model_params=None, weights_input_fn=None,
              validation_set=0.1, snapshot_step=5000, batch_size=128, weights_output_fn=None):
        '''
        Train model, with specified number of epochs, and dataset size.
        Use specified model, or create one if not provided.  Load initial weights from file weights_input_fn,
        if provided.  validation_set specifies what to use for the validation.

        Returns the trained TFLearn model instance.
        '''
        trainXY, trainY = self.generate_trainig_data(num_points)
        print ("[TFLearnSeq2Seq] Training on %d point dataset (pattern '%s'), with %d epochs" % (num_points,
                                                                                                self.sequence_pattern.PATTERN_NAME,
                                                                                                num_epochs))
        # NOTE(review): model_params may still be None at this point; json.dumps(None) prints "null".
        if self.verbose > 1:
            print ("  model parameters: %s" % json.dumps(model_params, indent=4))
        model_params = model_params or {}
        model = model or self.setup_model("train", model_params, weights_input_fn)
        model.fit(trainXY, trainY,
                  n_epoch=num_epochs,
                  validation_set=validation_set,
                  batch_size=batch_size,
                  shuffle=True,
                  show_metric=True,
                  snapshot_step=snapshot_step,
                  snapshot_epoch=False,
                  run_id="TFLearnSeq2Seq"
        )
        print ("Done!")
        if weights_output_fn is not None:
            weights_output_fn = self.canonical_weights_fn(weights_output_fn)
            model.save(weights_output_fn)
            print ("Saved %s" % weights_output_fn)
            self.weights_output_fn = weights_output_fn
        return model

    def canonical_weights_fn(self, iteration_num=0):
        '''
        Construct canonical weights filename, based on model and pattern names.
        Non-integer arguments that cannot be coerced to int are returned
        unchanged (treated as an explicit filename).
        '''
        if not type(iteration_num)==int:
            try:
                iteration_num = int(iteration_num)
            except Exception as err:
                return iteration_num
        model_name = self.name or "basic"
        wfn = "ts2s__%s__%s_%s.tfl" % (model_name, self.sequence_pattern.PATTERN_NAME, iteration_num)
        if self.data_dir:
            wfn = "%s/%s" % (self.data_dir, wfn)
        self.weights_filename = wfn
        return wfn

    def setup_model(self, mode, model_params=None, weights_input_fn=None):
        '''
        Setup a model instance, using the specified mode and model parameters.
        Load the weights from the specified file, if it exists.
        If weights_input_fn is an integer, use that, the model name, and
        the pattern name, to construct a canonical filename.
        '''
        model_params = model_params or {}
        # Reuse a previously built model instance when available.
        model = self.model_instance or self.model(mode=mode, **model_params)
        self.model_instance = model
        if weights_input_fn:
            if type(weights_input_fn)==int:
                weights_input_fn = self.canonical_weights_fn(weights_input_fn)
            if os.path.exists(weights_input_fn):
                model.load(weights_input_fn)
                print ("[TFLearnSeq2Seq] model weights loaded from %s" % weights_input_fn)
            else:
                # Missing weights file is reported but not fatal: the model keeps its initial weights.
                print ("[TFLearnSeq2Seq] MISSING model weights file %s" % weights_input_fn)
        return model

    def predict(self, Xin, model=None, model_params=None, weights_input_fn=None):
        '''
        Make a prediction, using the seq2seq model, for the given input sequence Xin.
        If model is not provided, create one (or use last created instance).

        Return prediction, y

        prediction = array of integers, giving output prediction.  Length = out_seq_len
        y = array of shape [out_seq_len, out_max_int], giving logits for output prediction
        '''
        if not model:
            model = self.model_instance or self.setup_model("predict", model_params, weights_input_fn)
        if self.verbose: print ("Xin = %s" % str(Xin))
        X = np.array(Xin).astype(np.uint32)
        assert len(X)==self.in_seq_len
        if self.verbose:
            print ("X Input shape=%s, data=%s" % (X.shape, X))
            print ("Expected output = %s" % str(self.sequence_pattern.generate_output_sequence(X)))
        # The Y half of the input is zero-padded; it is ignored at predict time
        # because the decoder feeds back its own previous outputs.
        Yin = [0]*self.out_seq_len
        XY = np.append(X, np.array(Yin).astype(np.float32))
        XY = XY.reshape([-1, self.in_seq_len + self.out_seq_len])  # batch size 1
        if self.verbose > 1: print ("XY Input shape=%s, data=%s" % (XY.shape, XY))
        res = model.predict(XY)
        res = np.array(res)
        if self.verbose > 1: print ("prediction shape = %s" % str(res.shape))
        y = res.reshape(self.out_seq_len, self.n_output_symbols)
        prediction = np.argmax(y, axis=1)  # most likely symbol at each output position
        if self.verbose:
            print ("Predicted output sequence: %s" % str(prediction))
        return prediction, y
#-----------------------------------------------------------------------------
class VAction(argparse.Action):
    """argparse action that accumulates repeated -v flags into an integer verbosity level."""

    def __call__(self, parser, args, values, option_string=None):
        # Verbosity accumulated so far (0 when unset or None).
        current = getattr(args, self.dest, 0) or 0
        # Each invocation adds 1, plus 1 per extra 'v' packed into the value
        # (e.g. a value of "vv" from "-vvv" contributes 3 in total).
        increment = values.count('v') + 1
        setattr(args, self.dest, current + increment)
#-----------------------------------------------------------------------------
def CommandLine(args=None, arglist=None):
    '''
    Main command line.  Accepts args, to allow for simple unit testing.

    Returns the TFLearnSeq2Seq instance used (with prediction_results set
    for the "predict" command), or None for an unknown command.
    '''
    help_text = """
Commands:

train - give size of training set to use, as argument
predict - give input sequence as argument (or specify inputs via --from-file <filename>)

"""
    parser = argparse.ArgumentParser(description=help_text, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument("cmd", help="command")
    parser.add_argument("cmd_input", nargs='*', help="input to command")
    parser.add_argument('-v', "--verbose", nargs=0, help="increase output verbosity (add more -v to increase versbosity)", action=VAction, dest='verbose')
    parser.add_argument("-m", "--model", help="seq2seq model name: either embedding_rnn (default) or embedding_attention", default=None)
    parser.add_argument("-r", "--learning-rate", type=float, help="learning rate (default 0.0001)", default=0.0001)
    parser.add_argument("-e", "--epochs", type=int, help="number of trainig epochs", default=10)
    parser.add_argument("-i", "--input-weights", type=str, help="tflearn file with network weights to load", default=None)
    parser.add_argument("-o", "--output-weights", type=str, help="new tflearn file where network weights are to be saved", default=None)
    parser.add_argument("-p", "--pattern-name", type=str, help="name of pattern to use for sequence", default=None)
    parser.add_argument("-n", "--name", type=str, help="name of model, used when generating default weights filenames", default=None)
    parser.add_argument("--in-len", type=int, help="input sequence length (default 10)", default=None)
    parser.add_argument("--out-len", type=int, help="output sequence length (default 10)", default=None)
    parser.add_argument("--from-file", type=str, help="name of file to take input data sequences from (json format)", default=None)
    parser.add_argument("--iter-num", type=int, help="training iteration number; specify instead of input- or output-weights to use generated filenames", default=None)
    parser.add_argument("--data-dir", help="directory to use for storing checkpoints (also used when generating default weights filenames)", default=None)
    # model parameters
    parser.add_argument("-L", "--num-layers", type=int, help="number of RNN layers to use in the model (default 1)", default=1)
    parser.add_argument("--cell-size", type=int, help="size of RNN cell to use (default 32)", default=32)
    parser.add_argument("--cell-type", type=str, help="type of RNN cell to use (default BasicLSTMCell)", default="BasicLSTMCell")
    parser.add_argument("--embedding-size", type=int, help="size of embedding to use (default 20)", default=20)
    parser.add_argument("--tensorboard-verbose", type=int, help="tensorboard verbosity level (default 0)", default=0)

    if not args:
        args = parser.parse_args(arglist)

    # --iter-num generates both weights filenames from one iteration counter.
    if args.iter_num is not None:
        args.input_weights = args.iter_num
        args.output_weights = args.iter_num + 1

    model_params = dict(num_layers=args.num_layers,
                        cell_size=args.cell_size,
                        cell_type=args.cell_type,
                        embedding_size=args.embedding_size,
                        learning_rate=args.learning_rate,
                        tensorboard_verbose=args.tensorboard_verbose,
    )

    if args.cmd == "train":
        try:
            num_points = int(args.cmd_input[0])
        except (IndexError, ValueError):
            # Narrowed from a bare `except`: only "missing" or "non-numeric" argument.
            raise Exception("Please specify the number of datapoints to use for training, as the first argument")
        sp = SequencePattern(args.pattern_name, in_seq_len=args.in_len, out_seq_len=args.out_len)
        ts2s = TFLearnSeq2Seq(sp, seq2seq_model=args.model, data_dir=args.data_dir, name=args.name, verbose=args.verbose)
        ts2s.train(num_epochs=args.epochs, num_points=num_points, weights_output_fn=args.output_weights,
                   weights_input_fn=args.input_weights, model_params=model_params)
        return ts2s

    elif args.cmd == "predict":
        if args.from_file:
            # BUGFIX: --from-file gives a *filename*; read and json-decode the
            # file contents.  The original json.loads'ed the filename itself.
            with open(args.from_file) as ifp:
                inputs = json.load(ifp)
        else:
            # BUGFIX: only parse cmd_input when --from-file was NOT given; the
            # original unconditionally overwrote `inputs` here.  Also
            # materialize map() so the sequence works under Python 3.
            try:
                input_x = list(map(int, args.cmd_input))
            except ValueError:
                raise Exception("Please provide a space-delimited input sequence as the argument")
            if not input_x:
                raise Exception("Please provide a space-delimited input sequence as the argument")
            inputs = [input_x]
        sp = SequencePattern(args.pattern_name, in_seq_len=args.in_len, out_seq_len=args.out_len)
        ts2s = TFLearnSeq2Seq(sp, seq2seq_model=args.model, data_dir=args.data_dir, name=args.name, verbose=args.verbose)
        results = []
        for x in inputs:
            prediction, y = ts2s.predict(x, weights_input_fn=args.input_weights, model_params=model_params)
            print("==> For input %s, prediction=%s (expected=%s)" % (x, prediction, sp.generate_output_sequence(x)))
            results.append([prediction, y])
        ts2s.prediction_results = results
        return ts2s

    else:
        print("Unknown command %s" % args.cmd)
#-----------------------------------------------------------------------------
# unit tests
def test_sp1():
    '''
    Exercise three different SequencePattern instances against known outputs.
    '''
    cases = [
        ("maxmin_dup", range(10), [9, 0, 2, 3, 4, 5, 6, 7, 8, 9]),
        ("sorted", [5, 6, 1, 2, 9], [1, 2, 5, 6, 9]),
        ("reversed", range(10), [9, 8, 7, 6, 5, 4, 3, 2, 1, 0]),
    ]
    for pattern_name, seq_in, seq_expected in cases:
        sp = SequencePattern(pattern_name)
        y = sp.generate_output_sequence(seq_in)
        assert all(y == np.array(seq_expected))
def test_sp2():
    '''
    Test a SequencePattern instance with lengths different from the defaults.
    '''
    sp = SequencePattern("sorted", in_seq_len=20, out_seq_len=5)
    x = np.random.randint(0, 9, 20)
    y = sp.generate_output_sequence(x)
    assert len(y) == 5
    # Expect the five smallest values, in ascending order.
    assert all(y == sorted(x)[:5])
def test_train1():
    '''
    Test simple training of an embedding_rnn seq2seq model.
    '''
    pattern = SequencePattern()
    ts2s = TFLearnSeq2Seq(pattern)
    weights_fn = "test_%s" % ts2s.canonical_weights_fn(0)
    print("using weights filename %s" % weights_fn)
    # Start from a clean slate so the existence assert below is meaningful.
    if os.path.exists(weights_fn):
        os.unlink(weights_fn)
    tf.reset_default_graph()
    ts2s.train(num_epochs=1, num_points=10000, weights_output_fn=weights_fn)
    assert os.path.exists(weights_fn)
def test_predict1():
    '''
    Test simple predictions using weights just produced (in test_train1).
    '''
    sp = SequencePattern()
    ts2s = TFLearnSeq2Seq(sp, verbose=1)
    wfn = "test_%s" % ts2s.canonical_weights_fn(0)
    print ("using weights filename %s" % wfn)
    tf.reset_default_graph()
    prediction, y = ts2s.predict(Xin=range(10), weights_input_fn=wfn)
    # BUGFIX: the original `assert len(prediction==10)` took the length of a
    # boolean array (always 10, always truthy) and could never fail.
    assert len(prediction) == 10
def test_train_predict2():
    '''
    Test that the embedding_attention model works, with saving and loading of weights.
    '''
    import tempfile
    import shutil
    sp = SequencePattern()
    tempdir = tempfile.mkdtemp()
    try:
        ts2s = TFLearnSeq2Seq(sp, seq2seq_model="embedding_attention", data_dir=tempdir, name="attention")
        tf.reset_default_graph()
        ts2s.train(num_epochs=1, num_points=1000, weights_output_fn=1, weights_input_fn=0)
        assert os.path.exists(ts2s.weights_output_fn)
        tf.reset_default_graph()
        # BUGFIX: reload from the same data_dir the weights were saved to; the
        # original pointed at a hard-coded "DATA" directory, so the saved
        # weights were never actually loaded.
        ts2s = TFLearnSeq2Seq(sp, seq2seq_model="embedding_attention", data_dir=tempdir, name="attention", verbose=1)
        prediction, y = ts2s.predict(Xin=range(10), weights_input_fn=1)
        # BUGFIX: assert the prediction *length*; `len(prediction==10)` was the
        # length of a boolean array and always truthy.
        assert len(prediction) == 10
    finally:
        # Portable cleanup instead of `os.system("rm -rf ...")`.
        shutil.rmtree(tempdir, ignore_errors=True)
def test_train_predict3():
    '''
    Test that a model trained on sequences of one length can be used for
    predictions on other sequence lengths.
    '''
    import tempfile
    import shutil
    sp = SequencePattern("sorted", in_seq_len=10, out_seq_len=10)
    tempdir = tempfile.mkdtemp()
    try:
        ts2s = TFLearnSeq2Seq(sp, seq2seq_model="embedding_attention", data_dir=tempdir, name="attention")
        tf.reset_default_graph()
        ts2s.train(num_epochs=1, num_points=1000, weights_output_fn=1, weights_input_fn=0)
        assert os.path.exists(ts2s.weights_output_fn)
        tf.reset_default_graph()
        sp = SequencePattern("sorted", in_seq_len=20, out_seq_len=8)
        tf.reset_default_graph()
        # BUGFIX: reload from the tempdir where weights were saved; the original
        # used a hard-coded "DATA" directory.
        ts2s = TFLearnSeq2Seq(sp, seq2seq_model="embedding_attention", data_dir=tempdir, name="attention", verbose=1)
        x = np.random.randint(0, 9, 20)
        prediction, y = ts2s.predict(x, weights_input_fn=1)
        # BUGFIX: assert the prediction *length*; `len(prediction==8)` was the
        # length of a boolean array and always truthy.
        assert len(prediction) == 8
    finally:
        # Portable cleanup instead of `os.system("rm -rf ...")`.
        shutil.rmtree(tempdir, ignore_errors=True)
def test_main1():
    '''
    Integration test - training via the command-line entry point.
    '''
    import tempfile
    tempdir = tempfile.mkdtemp()
    train_args = ("--data-dir %s -e 2 --iter-num=1 -v -v --tensorboard-verbose=1 train 5000" % tempdir).split(' ')
    tf.reset_default_graph()
    ts2s = CommandLine(arglist=train_args)
    assert os.path.exists(ts2s.weights_output_fn)
    os.system("rm -rf %s" % tempdir)
def test_main2():
    '''
    Integration test - training then prediction via the command-line entry point.
    '''
    import tempfile
    tempdir = tempfile.mkdtemp()
    train_args = ("--data-dir %s -e 2 --iter-num=1 -v -v --tensorboard-verbose=1 train 5000" % tempdir).split(' ')
    tf.reset_default_graph()
    ts2s = CommandLine(arglist=train_args)
    wfn = ts2s.weights_output_fn
    assert os.path.exists(wfn)
    predict_args = ("-i %s predict 1 2 3 4 5 6 7 8 9 0" % wfn).split(' ')
    tf.reset_default_graph()
    ts2s = CommandLine(arglist=predict_args)
    assert len(ts2s.prediction_results[0][0]) == 10
    os.system("rm -rf %s" % tempdir)
def test_main3():
    '''
    Integration test - training then prediction with the attention model.
    '''
    wfn = "tmp_weights.tfl"
    # Start clean so the existence assert after training is meaningful.
    if os.path.exists(wfn):
        os.unlink(wfn)
    train_args = "-e 2 -o tmp_weights.tfl -v -v -v -v -m embedding_attention train 5000".split(' ')
    tf.reset_default_graph()
    ts2s = CommandLine(arglist=train_args)
    assert os.path.exists(wfn)
    predict_args = "-i tmp_weights.tfl -v -v -v -v -m embedding_attention predict 1 2 3 4 5 6 7 8 9 0".split(' ')
    tf.reset_default_graph()
    ts2s = CommandLine(arglist=predict_args)
    assert len(ts2s.prediction_results[0][0]) == 10
#-----------------------------------------------------------------------------
# Script entry point: dispatch to the command-line handler.
if __name__=="__main__":
    CommandLine()
|
Metal Siding Pros metal siding company in Malvern, OH uses highly advanced tools and equipment to deliver our services. Other than our qualified and experienced staff, our tools and equipment also play a great role in the success of our business. We have invested in top-quality tools that use current technology to make the process of roofing and other metal siding work easy and efficient. Our tools and equipment are well maintained so that they are in good shape, allowing us to provide our services more easily and effectively.
Metal Siding Pros Metal sliding company in Malvern, OH believes that our customers are our biggest and most valuable assets thus we do our best to ensure that they are happy and contented with our services. We handle our customers with a lot of care and respect and always listen to their questions and concerns. All our customers’ concerns are given the highest priority and addressed as soon as possible. We request feedback on our services and products from our customers after we have delivered the services so as to know if the customers are satisfied with our services. We also request opinions from our customers on how we can improve our services and products. This way, we are able to understand what our customers’ needs and requirements are thus we can serve them effectively.
For any concerns, complaints or questions that you might need answered, contact us on 888-754-9997 or visit us in Malvern, OH.
Metal Siding Pros metal sliding company in Malvern, OH aims to create a great relationship with our customers so as to win their trust and confidence. We offer free consultation services to all our customers. You can get free consultation services by calling us or visiting our offices. In the event of any misconduct from our staff, contact us so that we can take the necessary measures. You can also request a quote for the services or order our products through the phone and they will be delivered to the destination you want within Malvern, OH.
|
#!/usr/bin/env python
'''
Woodhouse extends Strawhouse with a name and password check.
This uses the PLAIN mechanism which does plain-text username and password authentication).
It's not really secure, and anyone sniffing the network (trivial with WiFi)
can capture passwords and then login.
Author: Chris Laws
'''
import logging
import sys
import zmq
import zmq.auth
from zmq.auth.thread import ThreadAuthenticator
def run():
    '''Run woodhouse example: verify PLAIN auth admits valid credentials and rejects bogus ones.'''
    valid_client_test_pass = False
    invalid_client_test_pass = False

    ctx = zmq.Context.instance()

    # Start an authenticator for this context.
    auth = ThreadAuthenticator(ctx)
    auth.start()
    auth.allow('127.0.0.1')
    # Instruct authenticator to handle PLAIN requests
    auth.configure_plain(domain='*', passwords={'admin': 'secret'})

    server = ctx.socket(zmq.PUSH)
    server.plain_server = True  # must come before bind
    server.bind('tcp://*:9000')

    client = ctx.socket(zmq.PULL)
    client.plain_username = b'admin'
    client.plain_password = b'secret'
    client.connect('tcp://127.0.0.1:9000')

    server.send(b"Hello")
    if client.poll():
        msg = client.recv()
        if msg == b"Hello":
            valid_client_test_pass = True
    client.close()

    # now use invalid credentials - expect no msg received
    client2 = ctx.socket(zmq.PULL)
    client2.plain_username = b'admin'
    client2.plain_password = b'bogus'
    client2.connect('tcp://127.0.0.1:9000')

    server.send(b"World")
    if client2.poll(50):
        # A message got through despite bad credentials: authentication failed.
        # BUGFIX: receive on client2 -- the original called recv() on `client`,
        # which was already closed above (and compared bytes to str, which is
        # always False under Python 3).
        msg = client2.recv()
        logging.error("Unauthenticated client unexpectedly received %r", msg)
    else:
        # no message is expected
        invalid_client_test_pass = True

    # stop auth thread and release sockets (the original leaked client2/server)
    auth.stop()
    client2.close()
    server.close()

    if valid_client_test_pass and invalid_client_test_pass:
        logging.info("Woodhouse test OK")
    else:
        logging.error("Woodhouse test FAIL")
if __name__ == '__main__':
    # ZMQ security mechanisms (PLAIN/CURVE) require libzmq >= 4.0.
    if zmq.zmq_version_info() < (4,0):
        raise RuntimeError("Security is not supported in libzmq version < 4.0. libzmq version {0}".format(zmq.zmq_version()))

    # Passing '-v' on the command line enables debug-level logging.
    if '-v' in sys.argv:
        level = logging.DEBUG
    else:
        level = logging.INFO

    logging.basicConfig(level=level, format="[%(levelname)s] %(message)s")

    run()
|
This is starting to get real, y'all. Rio. After years of hard work for thousands — often in the relative darkness of anonymity and training, or, worse, putting oneself through the pain and suffering of rehab — the Rio 2016 Games beckon.
Some things, though, don't change. Procrastinators gonna procrastinate. Packing for a long trip is never easy. But add to that variables ranging from an unfamiliar climate to, say, a little something like the world's attention on you, and you just might wind up with a particular strain of packing procrastination.
Seems Olympic gold medalist soccer star Alex Morgan was having a bit of a struggle getting her move on.
Defending Olympic decathlon champion Ashton Eaton did Morgan one better by having a trophy tribute to his procrastination.
Kudos to Olympic gymnast Jake Dalton.
Started packing for the OLYMPICS! Crazy!
For some, the Games are already that much more real. Some are much more literally on their way to the Games. The U.S. Olympic Field Hockey Team earned one of the more creative and captivating send-offs you’ll ever see.
Team USA’s judokas cleaned up nicely, too.
Marti Malloy, Travis Stevens and Kayla Harrison had some fun, too.
Ummmm anyone know what's going on because I'm lost lol.
USA Diving is on the move, too, en route to a pre-Olympic camp in Atlanta.
Let them eat … steak? Thanks to Senior Vice President of USA Gymnastics Rhonda Faehn, we check in with medal favorites grabbing a bite at a team dinner.
USA Water Polo’s national teams also enjoyed gala gathering before taking off for the Games. A pool queue, if you will.
Few could have done it better, or in a more sartorially coordinated fashion, than defending Olympic wrestling champion Jordan Burroughs.
USA Swimming, which will get out of the blocks early in the Olympic schedule in Rio, already has its athletes in training camps. It’s second, in fact. Missy Franklin, who earned four golds in London, posted a pic featuring a few generations of Olympians in 30-year-old Amanda Weir (Athens and London Games) and teenagers Katie Ledecky and Simone Manuel.
USA Basketball also has its star-studded cast, this one in Las Vegas, where Jabari Parker looked awfully good in slow-motion flight before camp wrapped up on Thursday.
Paralympic star and 2014 snowboard cross medalist Amy Purdy will be in Rio, too. Why? Looks like we’ll have to stay tuned. For now, enjoy the tip toe dance.
Broken bones. Open compound fractures. Severed tendons. Tears. Surgeries. Scars. Plates. Wires. Reconstructions. Stitches (without anesthesia). When we refer to what a career athlete goes through in pursuit of his or her Olympic dreams, we mean not only the stress of making ends meet or making Team USA, but there is also the very real physical toll taken in nearly every sport at its elite level. Olympian and Rio-bound BMX racer Alise Post admits she hates “to promote the fact that injuries are a part of sport,” but this honest video look — with Post, Taylor Phinney, Jillion Potter, Greg Louganis and Steele Johnson — will take you inside the competitive spirit of a competitor at the highest level. Plus, they get points for accessorizing with a mannequin.
The timing of an injury can be devastating — as it was for gymnast John Orozco, who had made it through a very difficult road out of the London Games to qualify for Rio. Unfortunately, Rio was not meant to be for him, either. Through the honest emotions that often reveal themselves on social media, Orozco took to Instagram to try to sum up his feelings after the devastating news.
I'm beyond devastated to say that my road to Rio has come to an abrupt end. Last week during Olympic Team camp, I re-tore my ACL/meniscus. At this point of my life I'm reminded of one word that I learned from my favorite book (The Alchemist) "Maktub" which in Arabic means "It is written" (meant to be) and like the common theme of the book I truly believe the universe conspires to guide us to our destiny with what we perceive as coincidences, signs, and omens. It's clear to me now more than ever that my dreams of Olympic gold were never meant to be, but maybe I have a different purpose that has yet to reveal itself? I'm forever grateful for the opportunities gymnastics has given me in life, the amazing people I've met through my career in the sport, and the life lessons I've learned. I'm humbled by the unwavering love and support of my family, friends, coaches, USA medical staff, personal doctors, and fans. Tragedy seems to be a reoccurring theme in my life, but looking back on my career I wouldn't change a single thing.
Slowed somewhat by a knee injury, Sarah Hildebrandt may not have qualified to wrestle in the Olympic Games back in April, but she earned an entirely different brand of Team USA fame when she appeared in a lip sync and dance-tastic video that went viral this past week.
To refresh your memory, Hildebrandt, who had undergone surgery seven weeks ago, makes her Team USA video debut at the :21 mark.
Distance runner Bernard Lagat has an idea. The 41-year-old who recently qualified for Rio, his fifth Games, sets up a pretty good photo op with teenage track sensation Sydney McLaughlin.
@usatf, I want a picture w/ Sydney McLaughlin (16), 400m hurdles. The youngest T&F athlete to compete in the OG since '72 #Oldest&Youngest!
Swimming star Janet Evans may have won four gold medals (three at the 1988 Games in Seoul and another at the 1992 Games, along with a silver in Barcelona), but her greatest Olympic memory? That’s easy.
Sprinting star Michael Johnson, who won two of his four Olympic golds there in Atlanta, also holds this particular Opening Ceremony moment dear.
Captain of the Magnificent Seven, gymnast Amanda Borden, also shared a priceless pic full of golden winners.
Not only was Erin Jones prepping for New York City’s triathlon this week, but she was also caught up in the Pokémon Go craze while strolling around Colorado Springs, Colorado. Presumably, fiancé Logan Storie will have his eyes on the aisle when the two are married in three months.
One of the more gratifying opportunities as an Olympian comes from what Team USA athletes can do about making the days of those who could use a visit.
At one point on Thursday in Minneapolis, not far from Olympian Jessie Diggins’ hometown, the heat index reached 116 degrees. Diggins, though, had been keeping cool this past week with some of her fellow Nordic skiers on Alaska's Eagle Glacier.
Up, down and sideways. That might best describe three-time Olympic gold medalist swimmer Nathan Adrian’s look through the years. The sprint star will be headed to Rio as part of his third consecutive Olympic team.
For what seems like the first time in forever, Rio-bound triathlete Gwen Jorgensen did not win a triathlon. This time, there would be no historic comeback, as there was in Leeds not all that long ago. This time, fellow Rio-bound Team USA triathlete Katie Zaferes earned the top spot on a podium. What followed was pure class from both parties.
To say I am stoked would be an understatement, I am so excited to capture my first WTS win at #WTSHamburg. What an incredible atmosphere!
|
#!/usr/bin/env python
#
# Copyright (c) 2016, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import ConfigParser
import logging
import os
import subprocess
import time
from autothreadharness import settings
logger = logging.getLogger(__name__)
# SVN revision of the first published Harness release without the miniweb
# front-end; start()/stop() skip miniweb handling at or above this version.
HARNESS_SVN_VERSION_R44 = 1471
"""int: this is the first published release that miniweb was removed from Harness"""
def _try_kill(proc):
    """Force-kill ``proc`` and poll until it actually exits.

    Performs up to three kill attempts, waiting five seconds after each
    before re-polling. Raises ``SystemExit`` if the process is still
    alive after the third failed attempt.
    """
    logger.info('Try kill process')
    times = 1
    while proc.poll() is None:
        proc.kill()
        # Give the OS time to reap the process before re-checking.
        time.sleep(5)
        if proc.poll() is not None:
            logger.info('Process has been killed')
            break
        logger.info('Trial %d failed', times)
        times += 1
        # Abort the whole run after three failed kill attempts.
        if times > 3:
            raise SystemExit()
class HarnessController(object):
    """Harness service control

    This controls harness service, including the harness back-end and front-end.
    """

    harness = None
    """harness back-end"""

    miniweb = None
    """harness front-end"""

    def __init__(self, result_dir=None):
        # Directory where per-run log files (harness-*.log, miniweb-*.log)
        # are written.
        self.result_dir = result_dir
        self.harness_file = ''
        # Read the installed Harness SVN revision from its info.ini so we
        # know whether a separate miniweb front-end must be managed.
        harness_info = ConfigParser.ConfigParser()
        harness_info.read('%s\\info.ini' % settings.HARNESS_HOME)
        self.version = harness_info.getint('Thread_Harness_Info', 'SVN')

    def start(self):
        """Launch the Harness back-end (and miniweb front-end on old versions).

        Idempotent: if a component is already running, a warning is logged
        and it is left untouched.
        """
        logger.info('Starting harness service')
        if self.harness:
            logger.warning('Harness already started')
        else:
            # Run.py is executed with Harness's bundled Python 2.7 and needs
            # both Thread_Harness and the hsdk-python sources on PYTHONPATH.
            env = dict(os.environ, PYTHONPATH='%s\\Thread_Harness;%s\\ThirdParty\\hsdk-python\\src'
                       % (settings.HARNESS_HOME, settings.HARNESS_HOME))
            self.harness_file = '%s\\harness-%s.log' % (self.result_dir, time.strftime('%Y%m%d%H%M%S'))
            # Both stdout and stderr are captured into the timestamped log.
            with open(self.harness_file, 'w') as harness_out:
                self.harness = subprocess.Popen([settings.HARNESS_HOME + '\\Python27\\python.exe',
                                                 settings.HARNESS_HOME + '\\Thread_Harness\\Run.py'],
                                                cwd=settings.HARNESS_HOME,
                                                stdout=harness_out,
                                                stderr=harness_out,
                                                env=env)
            time.sleep(2)

        # Releases at or after R44 have no separate miniweb front-end.
        if self.version >= HARNESS_SVN_VERSION_R44:
            return

        if self.miniweb:
            logger.warning('Miniweb already started')
        else:
            with open('%s\\miniweb-%s.log' % (self.result_dir, time.strftime('%Y%m%d%H%M%S')), 'w') as miniweb_out:
                self.miniweb = subprocess.Popen([settings.HARNESS_HOME + '\\MiniWeb\\miniweb.exe'],
                                                stdout=miniweb_out,
                                                stderr=miniweb_out,
                                                cwd=settings.HARNESS_HOME + '\\MiniWeb')

    def stop(self):
        """Kill the back-end (and miniweb, on pre-R44 versions) if running."""
        logger.info('Stopping harness service')
        if self.harness:
            _try_kill(self.harness)
            self.harness = None
        else:
            logger.warning('Harness not started yet')

        if self.version >= HARNESS_SVN_VERSION_R44:
            return

        if self.miniweb:
            _try_kill(self.miniweb)
            self.miniweb = None
        else:
            logger.warning('Miniweb not started yet')

    def tail(self):
        """Return roughly the last 100 bytes of the harness log.

        NOTE(review): relative seek from the end of a text-mode file is
        only valid on Python 2, and the seek raises an error if the log
        is shorter than 100 bytes — confirm callers tolerate this.
        """
        with open(self.harness_file) as harness_out:
            harness_out.seek(-100, 2)
            return ''.join(harness_out.readlines())

    def __del__(self):
        # Best-effort cleanup so child processes don't outlive the controller.
        self.stop()
|
Two units by written examination.
Teacher-led discussion, group work, independent reading and note-taking.
Politics, English, Sociology, Psychology, Geography.
|
# Copyright (C) 2018 - TODAY, Pavlov Media
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import _, api, fields, models
class Agreement(models.Model):
    """Extend the base ``agreement`` model for the legal-agreement workflow.

    Adds version/revision bookkeeping, signature and review tracking,
    mako-rendered "dynamic" content fields, kanban stages, and helper
    actions to spawn new versions/agreements.
    """
    _inherit = "agreement"

    # General
    name = fields.Char(string="Title", required=True)
    version = fields.Integer(
        string="Version",
        default=1,
        copy=False,
        help="The versions are used to keep track of document history and "
        "previous versions can be referenced.")
    revision = fields.Integer(
        string="Revision",
        default=0,
        copy=False,
        help="The revision will increase with every save event.")
    description = fields.Text(
        string="Description",
        track_visibility="onchange",
        help="Description of the agreement")
    dynamic_description = fields.Text(
        compute="_compute_dynamic_description",
        string="Dynamic Description",
        help="Compute dynamic description")
    start_date = fields.Date(
        string="Start Date",
        track_visibility="onchange",
        help="When the agreement starts.")
    end_date = fields.Date(
        string="End Date",
        track_visibility="onchange",
        help="When the agreement ends.")
    color = fields.Integer(string="Color")
    active = fields.Boolean(
        string="Active",
        default=True,
        help="If unchecked, it will allow you to hide the agreement without "
        "removing it.")
    company_signed_date = fields.Date(
        string="Signed on",
        track_visibility="onchange",
        help="Date the contract was signed by Company.")
    partner_signed_date = fields.Date(
        string="Signed on (Partner)",
        track_visibility="onchange",
        help="Date the contract was signed by the Partner.")
    term = fields.Integer(
        string="Term (Months)",
        track_visibility="onchange",
        help="Number of months this agreement/contract is in effect with the "
        "partner.")
    expiration_notice = fields.Integer(
        string="Exp. Notice (Days)",
        track_visibility="onchange",
        help="Number of Days before expiration to be notified.")
    change_notice = fields.Integer(
        string="Change Notice (Days)",
        track_visibility="onchange",
        help="Number of Days to be notified before changes.")
    special_terms = fields.Text(
        string="Special Terms",
        track_visibility="onchange",
        help="Any terms that you have agreed to and want to track on the "
        "agreement/contract.")
    dynamic_special_terms = fields.Text(
        compute="_compute_dynamic_special_terms",
        string="Dynamic Special Terms",
        help="Compute dynamic special terms")
    # Reference code; replaced by an ir.sequence value in create() when
    # left at its default.
    code = fields.Char(
        string="Reference",
        required=True,
        default=lambda self: _("New"),
        track_visibility="onchange",
        copy=False,
        help="ID used for internal contract tracking.")
    increase_type_id = fields.Many2one(
        "agreement.increasetype",
        string="Increase Type",
        track_visibility="onchange",
        help="The amount that certain rates may increase.")
    termination_requested = fields.Date(
        string="Termination Requested Date",
        track_visibility="onchange",
        help="Date that a request for termination was received.")
    termination_date = fields.Date(
        string="Termination Date",
        track_visibility="onchange",
        help="Date that the contract was terminated.")
    reviewed_date = fields.Date(
        string="Reviewed Date", track_visibility="onchange")
    reviewed_user_id = fields.Many2one(
        "res.users", string="Reviewed By", track_visibility="onchange")
    approved_date = fields.Date(
        string="Approved Date", track_visibility="onchange")
    approved_user_id = fields.Many2one(
        "res.users", string="Approved By", track_visibility="onchange")
    currency_id = fields.Many2one("res.currency", string="Currency")
    partner_id = fields.Many2one(
        "res.partner",
        string="Partner",
        required=False,
        copy=True,
        help="The customer or vendor this agreement is related to.")
    partner_contact_id = fields.Many2one(
        "res.partner",
        string="Partner Contact",
        copy=True,
        help="The primary partner contact (If Applicable).")
    partner_contact_phone = fields.Char(
        related="partner_contact_id.phone", string="Partner Phone")
    partner_contact_email = fields.Char(
        related="partner_contact_id.email", string="Partner Email")
    company_contact_id = fields.Many2one(
        "res.partner",
        string="Company Contact",
        copy=True,
        help="The primary contact in the company.")
    company_contact_phone = fields.Char(
        related="company_contact_id.phone", string="Phone")
    company_contact_email = fields.Char(
        related="company_contact_id.email", string="Email")
    use_parties_content = fields.Boolean(
        string="Use parties content",
        help="Use custom content for parties")
    company_partner_id = fields.Many2one(
        related="company_id.partner_id", string="Company's Partner")

    def _get_default_parties(self):
        """Return the default mako/HTML template for the ``parties`` field.

        The ``${object....}`` placeholders are rendered later by
        ``_compute_dynamic_parties`` via mail.template rendering.
        """
        deftext = """
        <h3>Company Information</h3>
        <p>
        ${object.company_id.partner_id.name or ''}.<br>
        ${object.company_id.partner_id.street or ''} <br>
        ${object.company_id.partner_id.state_id.code or ''}
        ${object.company_id.partner_id.zip or ''}
        ${object.company_id.partner_id.city or ''}<br>
        ${object.company_id.partner_id.country_id.name or ''}.<br><br>
        Represented by <b>${object.company_contact_id.name or ''}.</b>
        </p>
        <p></p>
        <h3>Partner Information</h3>
        <p>
        ${object.partner_id.name or ''}.<br>
        ${object.partner_id.street or ''} <br>
        ${object.partner_id.state_id.code or ''}
        ${object.partner_id.zip or ''} ${object.partner_id.city or ''}<br>
        ${object.partner_id.country_id.name or ''}.<br><br>
        Represented by <b>${object.partner_contact_id.name or ''}.</b>
        </p>
        """
        return deftext

    parties = fields.Html(
        string="Parties",
        track_visibility="onchange",
        default=_get_default_parties,
        help="Parties of the agreement")
    dynamic_parties = fields.Html(
        compute="_compute_dynamic_parties",
        string="Dynamic Parties",
        help="Compute dynamic parties")
    # Only adds change tracking to the field inherited from the base model.
    agreement_type_id = fields.Many2one(
        track_visibility="onchange",
    )
    agreement_subtype_id = fields.Many2one(
        "agreement.subtype",
        string="Agreement Sub-type",
        track_visibility="onchange",
        help="Select the sub-type of this agreement. Sub-Types are related to "
        "agreement types.")
    product_ids = fields.Many2many(
        "product.template", string="Products & Services")
    assigned_user_id = fields.Many2one(
        "res.users",
        string="Assigned To",
        track_visibility="onchange",
        help="Select the user who manages this agreement.")
    company_signed_user_id = fields.Many2one(
        "res.users",
        string="Signed By",
        track_visibility="onchange",
        help="The user at our company who authorized/signed the agreement or "
        "contract.")
    partner_signed_user_id = fields.Many2one(
        "res.partner",
        string="Signed By (Partner)",
        track_visibility="onchange",
        help="Contact on the account that signed the agreement/contract.")
    parent_agreement_id = fields.Many2one(
        "agreement",
        string="Parent Agreement",
        help="Link this agreement to a parent agreement. For example if this "
        "agreement is an amendment to another agreement. This list will "
        "only show other agreements related to the same account.")
    renewal_type_id = fields.Many2one(
        "agreement.renewaltype",
        string="Renewal Type",
        track_visibility="onchange",
        help="Describes what happens after the contract expires.")
    recital_ids = fields.One2many(
        "agreement.recital", "agreement_id", string="Recitals", copy=True)
    sections_ids = fields.One2many(
        "agreement.section", "agreement_id", string="Sections", copy=True)
    clauses_ids = fields.One2many(
        "agreement.clause", "agreement_id", string="Clauses")
    appendix_ids = fields.One2many(
        "agreement.appendix", "agreement_id", string="Appendices", copy=True)
    # Archived copies created by create_new_version() show up here.
    previous_version_agreements_ids = fields.One2many(
        "agreement",
        "parent_agreement_id",
        string="Previous Versions",
        copy=False,
        domain=[("active", "=", False)])
    child_agreements_ids = fields.One2many(
        "agreement",
        "parent_agreement_id",
        string="Child Agreements",
        copy=False,
        domain=[("active", "=", True)])
    line_ids = fields.One2many(
        "agreement.line",
        "agreement_id",
        string="Products/Services",
        copy=False)
    state = fields.Selection(
        [("draft", "Draft"), ("active", "Active"), ("inactive", "Inactive")],
        default="draft",
        track_visibility="always")
    notification_address_id = fields.Many2one(
        "res.partner",
        string="Notification Address",
        help="The address to send notificaitons to, if different from "
        "customer address.(Address Type = Other)")
    signed_contract_filename = fields.Char(string="Filename")
    signed_contract = fields.Binary(
        string="Signed Document", track_visibility="always")

    # Dynamic field editor
    field_domain = fields.Char(string='Field Expression',
                               default='[["active", "=", True]]')
    default_value = fields.Char(
        string="Default Value",
        help="Optional value to use if the target field is empty.")
    copyvalue = fields.Char(
        string="Placeholder Expression",
        help="""Final placeholder expression, to be copy-pasted in the desired
        template field.""")

    @api.onchange("field_domain", "default_value")
    def onchange_copyvalue(self):
        """Build a mako placeholder from the first leaf of ``field_domain``.

        NOTE(review): the slicing ``string_list[0][3:-1]`` assumes a domain
        shaped exactly like '[["field", "=", value]]' and is brittle for
        other layouts — confirm with the UI widget that feeds this field.
        """
        self.copyvalue = False
        if self.field_domain:
            string_list = self.field_domain.split(",")
            if string_list:
                # Strip the leading '[["' and trailing '"' from the field name.
                field_domain = string_list[0][3:-1]
                self.copyvalue = "${{object.{} or {}}}".format(
                    field_domain,
                    self.default_value or "''")

    # compute the dynamic content for mako expression
    @api.multi
    def _compute_dynamic_description(self):
        """Render ``description`` through mail.template in the partner's language."""
        MailTemplates = self.env["mail.template"]
        for agreement in self:
            lang = agreement.partner_id.lang or "en_US"
            description = MailTemplates.with_context(
                lang=lang
            )._render_template(
                agreement.description, "agreement", agreement.id
            )
            agreement.dynamic_description = description

    @api.multi
    def _compute_dynamic_parties(self):
        """Render ``parties`` through mail.template in the partner's language."""
        MailTemplates = self.env["mail.template"]
        for agreement in self:
            lang = agreement.partner_id.lang or "en_US"
            parties = MailTemplates.with_context(
                lang=lang
            )._render_template(
                agreement.parties, "agreement", agreement.id
            )
            agreement.dynamic_parties = parties

    @api.multi
    def _compute_dynamic_special_terms(self):
        """Render ``special_terms`` through mail.template in the partner's language."""
        MailTemplates = self.env["mail.template"]
        for agreement in self:
            lang = agreement.partner_id.lang or "en_US"
            special_terms = MailTemplates.with_context(
                lang=lang
            )._render_template(
                agreement.special_terms, "agreement", agreement.id
            )
            agreement.dynamic_special_terms = special_terms

    # Used for Kanban grouped_by view
    @api.model
    def _read_group_stage_ids(self, stages, domain, order):
        """Expand kanban columns to every agreement-type stage.

        The ``stages``/``domain``/``order`` arguments are intentionally
        ignored so all stages are always shown.
        """
        stage_ids = self.env["agreement.stage"].search(
            [('stage_type', '=', 'agreement')])
        return stage_ids

    stage_id = fields.Many2one(
        "agreement.stage",
        string="Stage",
        group_expand="_read_group_stage_ids",
        help="Select the current stage of the agreement.",
        track_visibility="onchange",
        index=True)

    # Create New Version Button
    @api.multi
    def create_new_version(self, vals):
        """Archive a copy of the record as the previous version, bump
        ``version``, reset ``revision`` and apply ``vals``.

        NOTE(review): the ``return`` sits inside the ``for`` loop, so only
        the first record of a multi-record set is written — confirm this
        button is only ever invoked on single records.
        """
        for rec in self:
            if not rec.state == "draft":
                # Make sure status is draft
                rec.state = "draft"
            default_vals = {
                "name": "{} - OLD VERSION".format(rec.name),
                "active": False,
                "parent_agreement_id": rec.id,
            }
            # Make a current copy and mark it as old
            rec.copy(default=default_vals)
            # Increment the Version
            rec.version = rec.version + 1
        # Reset revision to 0 since it's a new version
        vals["revision"] = 0
        return super(Agreement, self).write(vals)

    def create_new_agreement(self):
        """Copy this agreement as a fresh draft and open it in form view."""
        default_vals = {
            "name": "NEW",
            "active": True,
            "version": 1,
            "revision": 0,
            "state": "draft",
            "stage_id": self.env.ref("agreement_legal.agreement_stage_new").id,
        }
        res = self.copy(default=default_vals)
        # Re-point the copied clauses at the new agreement.
        res.sections_ids.mapped('clauses_ids').write({'agreement_id': res.id})
        return {
            "res_model": "agreement",
            "type": "ir.actions.act_window",
            "view_mode": "form",
            "view_type": "form",
            "res_id": res.id,
        }

    @api.model
    def create(self, vals):
        """Assign a sequence-based reference code and a default stage."""
        if vals.get("code", _("New")) == _("New"):
            vals["code"] = self.env["ir.sequence"].next_by_code(
                "agreement"
            ) or _("New")
        if not vals.get('stage_id'):
            vals["stage_id"] = \
                self.env.ref("agreement_legal.agreement_stage_new").id
        return super(Agreement, self).create(vals)

    # Increments the revision on each save action
    @api.multi
    def write(self, vals):
        """Write ``vals`` and bump ``revision`` once per record.

        NOTE(review): ``vals`` is mutated in place per record; callers
        reusing the same dict will see the injected ``revision`` key.
        """
        res = True
        for rec in self:
            vals["revision"] = rec.revision + 1
            res = super(Agreement, rec).write(vals)
        return res
|
As you may have noticed, there are several Alexandria Private Investigator and Alexandria Security Service agencies on the web to choose from, so why choose us? At JMN Investigations & Protective Services, we pride ourselves on providing professional services at competitive industry rates. At JMN Investigations and Protective Services, we control our overhead costs to give you, the Client, the best value. In addition, JMN Investigations & Protective Services utilizes only LICENSED AND INSURED PRIVATE INVESTIGATORS and SECURITY COMPANIES. Our preferred partners have been extensively vetted for experience, subject-matter expertise, and proper insurance and licensure. We take the fear and uncertainty out of choosing a single-source Risk Management provider.
JMN Investigations & Protective Services is a premier provider of Alexandria Security Service and Alexandria Private Investigator services, since 1999.
Why put your company at risk with an inexperienced Security Service or Private Investigator? We have remained in business, in part because; we don’t let our clients down! In addition, we have the financial resources to support our operations and keep your business safe. Trust our Alexandria Security Service and Alexandria Private Investigator to keep your business on track.
|
from pyspades import contained as loaders
from pyspades.collision import vector_collision, collision_3d
from pyspades.constants import TC_CAPTURE_DISTANCE
# Module-level packet instances, reused when serializing game state.
ctf_data = loaders.CTFState()
tc_data = loaders.TCState()
class IntelBasedGamemode:
    """Capture-the-flag gamemode: tracks both teams' intel (flags) and
    reacts to player movement/reset events.

    The fix in this revision: flag-carrier checks previously compared
    ``flag.player`` against ``self`` (this gamemode instance), which can
    never be a player object, so captures and carried-flag drops never
    triggered. They now compare against the player/connection involved.
    """
    name = "ctf"

    def __init__(self, protocol):
        self.protocol = protocol
        self.green_flag = protocol.green_team.flag
        self.blue_flag = protocol.blue_team.flag
        # Prebuilt packet loaders reused for state/intel events.
        self.state_loader = loaders.CTFState()
        self.drop_intel_loader = loaders.IntelDrop()
        self.drop_pickup_loader = loaders.IntelPickup()
        self.drop_capture_loader = loaders.IntelCapture()

    def on_position_update(self, player):
        """Handle base capture, refill, and flag pickup for a moving player."""
        target_flag = self.get_target_flag(player)
        if vector_collision(player.world_object.position,
                            player.team.base):
            # BUGFIX: was `target_flag.player is self` — the gamemode object
            # is never a flag carrier, so capture_flag() was unreachable.
            if target_flag.player is player:
                player.capture_flag()
            player.check_refill()
        # NOTE(review): this uses `player.position` while the base check
        # above uses `player.world_object.position` — confirm both resolve
        # to the same vector on the connection class.
        if target_flag.player is None and vector_collision(
                player.position, target_flag):
            player.take_flag()

    def get_state_packet(self):
        return

    def on_player_reset(self, player):
        """Drop the flag the player is carrying (if any) at their position."""
        flag = self.get_player_flag(player)
        if flag is None:
            return
        position = player.position
        x = int(position.x)
        y = int(position.y)
        # Clamp z to the ground level at (x, y) so the flag never floats.
        z = max(0, int(position.z))
        z = self.protocol.map.get_z(x, y, z)
        flag.set(x, y, z)
        flag.player = None
        intel_drop = loaders.IntelDrop()
        intel_drop.player_id = player.player_id
        intel_drop.x = flag.x
        intel_drop.y = flag.y
        intel_drop.z = flag.z
        self.protocol.broadcast_contained(intel_drop, save=True)
        player.on_flag_drop()

    def get_player_flag(self, player):
        """Return the flag carried by `player`, or None."""
        for flag in (self.blue_flag, self.green_flag):
            # BUGFIX: was `flag.player is self` — compared against the
            # gamemode instance, so a carried flag was never found.
            if flag.player is player:
                return flag
        return None

    def get_target_flag(self, connection):
        """Return the enemy flag this connection should try to capture."""
        return connection.team.other_flag
class TerritoryBasedGamemode(object):
    """Territory-control gamemode: players capture map entities by
    standing within TC_CAPTURE_DISTANCE of them.

    The fix in this revision: entity membership was tracked with ``self``
    (this gamemode instance) instead of the moving connection, so the
    per-player capture sets never contained actual players.
    """
    name = "tc"

    def __init__(self, protocol):
        self.protocol = protocol
        self.state_loader = loaders.TCState()

    def get_state_packet(self):
        return

    def on_position_update(self, connection):
        """Update territory membership and refill for a moving connection."""
        for entity in self.protocol.entities:
            collides = vector_collision(
                entity, connection.world_object.position, TC_CAPTURE_DISTANCE)
            # BUGFIX: was `self in entity.players` / add/remove of `self` —
            # the gamemode object was registered instead of the connection.
            if connection in entity.players:
                if not collides:
                    entity.remove_player(connection)
            else:
                if collides:
                    entity.add_player(connection)
            # Standing directly on the entity also refills the player.
            if collides and vector_collision(entity,
                                             connection.world_object.position):
                connection.check_refill()
|
P1 It (Divine blessing and force) should be used to increase your honesty with yourself. For only then can love grow genuinely. If you say that evil does not exist at all, on any level of being, this would be wrong. But if you state that in ultimate reality there is no evil, that is true. Any one of these postulates is incorrect when seen as the only truth.
Let me first repeat that the universe consists of consciousness and energy. In the unified state, consciousness and energy are one. In the disunified state, they are not necessarily one. Energy can be an impersonal force that does not seem to contain or express consciousness. It seems a mechanical force that consciousness can direct but that is in itself alien to determination, to self-knowledge.
P2 Think, for example, of electricity and atomic energy. Even the energy of mind seems often quite disconnected from the source of its consciousness. Many of you have experienced that the power of your thoughts, attitudes and feelings do not have an immediate effect in your life. They have indirect effect, which at first seems so disconnected from its source that comprehending the link between cause and effect requires focused attention and awareness.
The separated, dualistic human mind creates the illusion that energy and consciousness are two different manifestations. The same split perception exists in human beings regarding life and self, God and humanity, cause and effect, and many other concepts or phenomena. Since thought is movement and energy, it is impossible to separate consciousness from energy in their essence, although in their manifestations there might be an apparent disconnection. In the ultimate reality of the unified state there is no evil.
Thought is pure and truthful; feelings are loving and blissful; the direction or intentionality of the will is utterly positive and constructive. Therefore there is no evil. But the same consciousness can change its mind, as it were, into an untruthful and limited thought process, accompanied by feelings of hate, fear, and cruelty: into negative will direction and intent. In that moment the same consciousness, or an aspect of this consciousness, turns into its own distorted version.
If this happens, the energy also alters its manifestations. Thus the manifestation of evil is not something intrinsically different from pure consciousness and energy. It has only changed direction or focus. Each individual must accept the reality of evil on this plane of development, in order to learn to cope with it and thus to truly overcome it. Evil must be faced and overcome primarily within the self. Only then can the evil that is outside of the self be dealt with.
The attempt to reverse this process will fail, for everything must start from the inner center. When energy is twisted, it produces a destructive manifestation. Its frequency slows down and becomes commensurate with the distortion of the consciousness which determines the state by choosing the will direction of the thought process and instituting the negative attitude pattern.
Another characteristic of distorted energy flow in its evil aberration is condensation. Condensed energy is the dualistic, disunified state. Jesus Christ said: “Do not resist evil.” It has been interpreted too literally to mean that you should allow others to exploit you and that you should not assert your human rights and your human dignity. This interpretation has preached meekness and masochism that are not in keeping with divine truth.
On the contrary, they help to perpetuate evil and allow the perpetrator to inflict evil on his or her environment. “Do not resist evil” points to the fact that resistance itself is, and breeds, evil. Resistance tightens and thus coarsens the energy. It holds back what should move. The very manifestation of matter as you know it, which is a highly disunified state, is the result of resistance. Resistance is always obstructing some valuable, beautiful aspect of creation. Resistance is therefore a manifestation of evil.
P4 Furthermore, each of these three attitudes (self-will, fear and pride) is a result of resistance and breeds more resistance, or evil. Self-will says, “I resist any other way but my way,” and “my way” is so often anti-life, anti- God. Self-will resists truth, love, union—even if it appears to want it. The moment the tightness of self-will exists, divine aspects are hindered from manifestation.
Pride is resistance to the oneness between entities. It separates itself from others and elevates itself—and thus resists the truth and love that are creative manifestations of life. The person who resists humility must be humiliated because the resistance must always finally come to a breaking point. The refusal to expose the truth and to admit what exists is due to pride.
This pride causes resistance as much as it results from resistance. Similarly, resistance breeds fear, and fear breeds resistance. Resistance to truth arises from the fear that truth can be harmful, and in turn, resistance to truth compounds this fear.
P5 When you want to avoid your feelings and your hidden thoughts and intentions, you create resistance. Resistance is, in one way or another, always connected with the thought, “I do not want to be hurt”—whether this hurt is actual or imagined. The resistance may be linked to self-will that says, “I must not be hurt”; to pride that says, “I will never admit that I can be hurt”; or to fear that says, “If I am hurt I must perish.” The resistance expresses distrust of the universe. In reality, hurt must pass, for it is no more an ultimate state than evil is.
P6 Resisting evil means not facing and accepting the evil in you. This resistance creates a tremendous accumulation of energy, which finally comes to an explosion. It destroys the very evil that has created it. The energy movement of the soul substance tears down the rotten structure, even if this means that temporarily all seems to go to pieces. However, what is of true value will automatically and organically rebuild itself. Creation is taking place.
Every crisis is an integral part of creation. Therefore, wise ones embrace and accept crises, which remove more and more resistance. The blind entity may suffer temporarily, but how good it is. The process is awesome in its benign violence. Thus, in the long run, every destruction is constructive and serves creation. Always.
But in an individual’s life, this truth is not always obvious. It is the same with pain: the more you accept it, the less you will feel it. Resistance to pain often makes it unbearable. The more you accept your hate, the less you hate. The more you accept your ugliness, the more beautiful you become. The more you accept your weakness, the stronger you are. The more you admit your hurt, the more dignity you have. These are inexorable laws. This is the path we tread.
|
# -*- coding: utf-8 -*-
"""
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, see <http://www.gnu.org/licenses/>.
"""
from module.plugins.internal.SimpleCrypter import SimpleCrypter
import re
class TnyCz(SimpleCrypter):
    """Decrypter plugin for tny.cz paste pages.

    Locates the "save paste" download link in the page HTML, fetches the
    raw paste text, and returns every non-empty line as a link.
    """

    __name__ = "TnyCz"
    __version__ = "0.01"
    __type__ = "crypter"

    __pattern__ = r'http://(?:www\.)?tny\.cz/\w+'

    __description__ = """Tny.cz decrypter plugin"""
    __author_name__ = "Walter Purcaro"
    __author_mail__ = "vuolter@gmail.com"

    TITLE_PATTERN = r'<title>(?P<title>.+) - .+</title>'

    def getLinks(self):
        """Return the links contained in the paste, or None when the
        save link cannot be found in the page HTML."""
        match = re.search(r'<a id=\'save_paste\' href="(.+save\.php\?hash=.+)">',
                          self.html)
        if not match:
            return None
        # Download the raw paste; every non-empty line is a link.
        paste_text = self.load(match.group(1), decode=True)
        return re.findall(".+", paste_text)
|
Consider these specified selection of metal wall art with regard to wall designs, decor, and more to obtain the appropriate decoration to your interior. We realize that metal wall art ranges in dimensions, shape, figure, cost, and design, therefore allowing you to discover circular metal wall art which harmonize with your interior and your own personal sense of style. You can choose anything from modern wall artwork to rustic wall art, in order to rest assured that there's something you'll enjoy and correct for your space.
You have many choices of metal wall art for your interior, including circular metal wall art. When you set out to find metal wall art online, make certain you survey the best options — but how should you decide on the right metal wall art for your space? Here are a few tips that will help: gather as many selections as you can before you decide, choose a scheme that won't clash with your wall, and be certain that everyone likes it to pieces.
In between the most popular artwork items that may be prepared for your interior are circular metal wall art, picture prints, or art paints. There's also wall bas-relief and sculptures, which might look similar to 3D paintings compared to statues. Also, if you have most liked designer, possibly he or she's a webpage and you can check always and shop their products via online. You will find also designers that promote electronic copies of these artworks and you available to only have printed out.
Not much improvements a space such as for instance a beautiful little bit of circular metal wall art. A carefully opted for photo or print can raise your surroundings and transform the impression of a room. But how do you get an ideal piece? The metal wall art will undoubtedly be as exclusive as the people design. This means there are uncomplicated and rapidly rules to choosing metal wall art for your decoration, it just needs to be something you prefer.
Do not be overly fast when finding metal wall art and explore as numerous stores as you can. The probability is you will discover more suitable and more desirable pieces than that series you checked at that first store or gallery you attended to. Moreover, please don't limit yourself. Should you find only a number of stores in the location where you reside, have you thought to take to looking online. You'll find loads of online artwork stores with many circular metal wall art it is easy to select from.
One other component you've got to keep in mind whenever shopping metal wall art can be that it should never clash with your wall or complete interior decoration. Keep in mind that you are obtaining these art products to be able to boost the visual attraction of your room, maybe not cause havoc on it. You are able to choose anything that will involve some distinction but don't choose one that's extremely at chances with the decor and wall.
Do not purchase metal wall art just because a some artist or friend told you it is actually great. Keep in mind, natural beauty is actually subjective. What may possibly look and feel amazing to your friend may possibly certainly not what you interested in. The most effective criterion you can use in buying circular metal wall art is whether viewing it makes you truly feel cheerful or enthusiastic, or not. If it does not impress your feelings, then it might be preferable you appear at other alternative of metal wall art. All things considered, it will undoubtedly be for your home, perhaps not theirs, so it's most useful you get and pick a thing that interests you.
As you get the products of metal wall art you like that would meet gorgeously with your room, whether it is by a popular art shop/store or others, never let your enjoyment get the better of you and hold the part when it arrives. You don't wish to end up with a wall high in holes. Make plan first exactly where it'd place.
Any room in your home that you're remodelling, the circular metal wall art has figures which will fit your wants. Discover various images to turn into prints or posters, presenting common subjects for instance panoramas, landscapes, culinary, pets, animals, town skylines, and abstract compositions. By the addition of types of metal wall art in numerous styles and dimensions, along with other wall art and decor, we added interest and character to the space.
Are you currently been searching for ways to decorate your walls? Wall art stands out as a suitable option for tiny or large places likewise, offering any interior a completed and polished appearance in minutes. If you want inspiration for designing your interior with circular metal wall art before you decide to purchase it, you can look for our helpful inspirational or guide on metal wall art here.
If you are prepared to buy your circular metal wall art and understand just what you need, you can search through our many options of metal wall art to find the ideal piece for the house. Whether you need bedroom artwork, dining room artwork, or any room in between, we have what you need to immediately turn your home into a wonderfully furnished room. The contemporary artwork, vintage artwork, or reproductions of the classics you like are just a click away.
There are various possible choices regarding circular metal wall art you will discover here. Each metal wall art includes a different style and characteristics that will pull artwork lovers into the variety. Wall decoration such as wall art, wall painting, and interior mirrors - are able to brighten and carry personal preference to a room. These produce for perfect living room, office, or room artwork pieces!
Related Post "Circular Metal Wall Art"
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
CERN@school - Processing Frames
See the README.md file for more information.
"""
#...for the operating stuff.
import os
#...for the file processing.
import glob
#...for parsing the arguments.
import argparse
#...for the logging.
import logging as lg
#...for file manipulation.
from shutil import rmtree
# Import the JSON library.
import json
#...for processing the datasets.
from cernatschool.dataset import Dataset
#...for making time.
from cernatschool.handlers import make_time_dir
#...for making the frame and clusters images.
from visualisation.visualisation import makeFrameImage
if __name__ == "__main__":

    print("*")
    print("*======================================*")
    print("* CERN@school - local frame processing *")
    print("*======================================*")

    # Get the datafile path from the command line.
    parser = argparse.ArgumentParser()
    parser.add_argument("inputPath", help="Path to the input dataset.")
    parser.add_argument("outputPath", help="The path for the output files.")
    parser.add_argument("-v", "--verbose", help="Increase output verbosity", action="store_true")
    args = parser.parse_args()

    ## The path to the data file.
    datapath = args.inputPath

    # Check if the input directory exists. If it doesn't, quit.
    if not os.path.isdir(datapath):
        raise IOError("* ERROR: '%s' input directory does not exist!" % (datapath))

    ## The output path.
    outputpath = args.outputPath

    # Check if the output directory exists. If it doesn't, quit.
    if not os.path.isdir(outputpath):
        raise IOError("* ERROR: '%s' output directory does not exist!" % (outputpath))

    # Set the logging level.
    if args.verbose:
        level = lg.DEBUG
    else:
        level = lg.INFO

    # Configure the logging.
    lg.basicConfig(filename=os.path.join(outputpath, 'log_process-frames.log'),
                   filemode='w', level=level)

    print("*")
    print("* Input path : '%s'" % (datapath))
    print("* Output path : '%s'" % (outputpath))
    print("*")

    # Set up the directories
    #------------------------

    ## The path to the frame images.
    frame_output_path = os.path.join(outputpath, "PNG")

    # Start from a clean PNG directory: remove any previous results.
    if os.path.isdir(frame_output_path):
        rmtree(frame_output_path)
        lg.info(" * Removing directory '%s'..." % (frame_output_path))
    os.mkdir(frame_output_path)
    lg.info(" * Creating directory '%s'..." % (frame_output_path))
    lg.info("")

    ## The path to the dataset.
    dataset_path = os.path.join(datapath, "RAW/ASCIIxyC")

    ## The dataset to process.
    ds = Dataset(dataset_path)

    # Get the metadata from the JSON.

    ## The frame metadata (expects 'lat', 'lon' and 'alt' keys).
    #
    # FIX: json.load() takes the file object as its only positional
    # argument; the previous call json.load(fmdf, fmd) passed the (None)
    # 'fmd' variable as a second positional argument, which raises a
    # TypeError under Python 3.
    with open(os.path.join(datapath, "geo.json"), "r") as fmdf:
        fmd = json.load(fmdf)

    ## Latitude of the dataset [deg.].
    lat = fmd['lat'] # [deg.]

    ## Longitude of the dataset [deg.].
    lon = fmd['lon'] # [deg.]

    ## Altitude of the dataset [m].
    alt = fmd['alt'] # [m]

    ## The pixel mask: maps X = (256 * y) + x -> 1 for each masked pixel.
    #
    # masked_pixels.txt is expected to contain one tab-separated "x<TAB>y"
    # pair per line.
    pixel_mask = {}
    with open(os.path.join(datapath, "masked_pixels.txt"), "r") as mpf:
        for row in mpf.readlines():
            vals = [int(val) for val in row.strip().split("\t")]
            x, y = vals[0], vals[1]
            pixel_mask[(256 * y) + x] = 1

    ## The frames from the dataset.
    frames = ds.getFrames((lat, lon, alt), pixelmask = pixel_mask)

    lg.info("* Found %d datafiles." % (len(frames)))

    ## A list of per-frame metadata dictionaries.
    mds = []

    # Loop over the frames: make the frame image and collect the metadata.
    for f in frames:

        ## The basename for the data frame, based on frame information.
        bn = "%s_%s" % (f.getChipId(), make_time_dir(f.getStartTimeSec()))

        # Create the frame image.
        makeFrameImage(bn, f.getPixelMap(), frame_output_path, f.getPixelMask())

        # Create the metadata dictionary for the frame.
        metadata = {
            "id"          : bn,
            #
            "chipid"      : f.getChipId(),
            "hv"          : f.getBiasVoltage(),
            "ikrum"       : f.getIKrum(),
            #
            "lat"         : f.getLatitude(),
            "lon"         : f.getLongitude(),
            "alt"         : f.getAltitude(),
            #
            "start_time"  : f.getStartTimeSec(),
            "end_time"    : f.getEndTimeSec(),
            "acqtime"     : f.getAcqTime(),
            #
            "n_pixel"     : f.getNumberOfUnmaskedPixels(),
            "occ"         : f.getOccupancy(),
            "occ_pc"      : f.getOccupancyPc(),
            #
            "n_kluster"   : f.getNumberOfKlusters(),
            "n_gamma"     : f.getNumberOfGammas(),
            "n_non_gamma" : f.getNumberOfNonGammas(),
            #
            "ismc"        : int(f.isMC())
        }

        # Add the frame metadata to the list of frames.
        mds.append(metadata)

    # Write out the frame information to a JSON file.
    # We will use this later to make the frame plots,
    # rather than processing the whole frame set again.
    with open(os.path.join(outputpath, "frames.json"), "w") as jf:
        json.dump(mds, jf)
|
swidGenerator is an open source Python-based tool written by the HSR students Danilo Bargen, Christian Fässler and Jonas Furrer which is able to generate the new ISO/IEC 19770-2:2014 Software Identification (SWID) tag format for all installed software packages managed by the Linux dpkg and rpm package managers.
and the swid_generator executable program usually installed in /usr/local/bin becomes available.
By default the regid.2004-03.org.strongswan regid is used as the tagcreator.
The --doc-separator <separator string> option allows to define a single character or a character string separating the individual Software IDs.
For the last example the output of the dpkg package manager on an Ubuntu 13.10 platform was used.
|
# -*- coding: utf-8 -*-
"""
<DefineSource>
@Date : Fri Nov 14 13:20:38 2014 \n
@Author : Erwan Ledoux \n\n
</DefineSource>
The Switcher
"""
#<DefineAugmentation>
import ShareYourSystem as SYS
BaseModuleStr="ShareYourSystem.Standards.Classors.Watcher"
DecorationModuleStr="ShareYourSystem.Standards.Classors.Tester"
SYS.setSubModule(globals())
#</DefineAugmentation>
#<ImportSpecificModules>
import operator
import copy
from ShareYourSystem.Standards.Classors import Doer,Observer
#</ImportSpecificModules>
#<DefineFunctions>
def setSwitch(
    _InstanceVariable,
    _DoMethodVariable=None,
    _DoerClassVariable=None,
    _HookVariable=None
):
    """Reset the Watch<Hook><DoMethod>With<Doer>Bool flags on the instance.

    Setting those flags back to False re-arms the switch mechanism so the
    corresponding switched do methods can run again.

    _DoMethodVariable  : None, a do-method name str, or a list of them.
                         None defaults to the class's current DoMethodStr
                         (if it is registered in SwitchMethodDict).
    _DoerClassVariable : None, a doer class, or a list of doer classes.
                         None defaults to the intersection of the doer
                         classes registered for all selected do methods.
    _HookVariable      : None, a hook str, or a list of hook strs.
                         None defaults to ['Before', 'After'].
    Returns _InstanceVariable (fluent style).
    """

    #Debug
    '''
    print('l 31 setSwitch')
    print('_DoerVariable is ',_DoerVariable)
    print('_DoVariable is ',_DoVariable)
    print('_HookVariable is ',_HookVariable)
    #print('_InstanceVariable.__class__.NameStr is ',_InstanceVariable.__class__.NameStr)
    print('')
    '''

    #/#################/#
    # Adapt the shape of the do method str to switch
    #

    # Normalise _DoMethodVariable into a list of do-method name strs.
    #Check
    if type(_DoMethodVariable)!=list:

        #Check
        if _DoMethodVariable==None:

            #/#################/#
            # Give all the do method str
            #

            #alias
            #DoMethodStrsList=_InstanceVariable.DoMethodStrsList

            #/#################/#
            # Give just the last DoMethodStr
            #

            # Only keep the class's DoMethodStr if it actually has a
            # switch registered for it; otherwise there is nothing to reset.
            #Check
            if _InstanceVariable.__class__.DoMethodStr in _InstanceVariable.__class__.SwitchMethodDict:

                #listify
                DoMethodStrsList=[_InstanceVariable.__class__.DoMethodStr]

            else:

                #listify
                DoMethodStrsList=[]

        else:

            #listify
            DoMethodStrsList=[_DoMethodVariable]

    else:

        #just alias
        DoMethodStrsList=_DoMethodVariable

    #/#################/#
    # Adapt the shape of the mro doer to switch
    #

    # Normalise _DoerClassVariable into a list of doer classes.
    #get
    DoerClassesList=SYS.GetList(_DoerClassVariable)

    #Debug
    '''
    print('l 94 Switcher')
    print('_DoerClassVariable is')
    print(_DoerClassVariable)
    print('DoerClassesList is')
    print(DoerClassesList)
    print('')
    '''

    #Check
    if _DoerClassVariable==None:

        #/#################/#
        # by default this is all the mro doer that have all the switch do method
        # so do the intersection

        #Check
        if len(DoMethodStrsList)>0:

            # Intersect the doer-class sets registered for each selected
            # do method, so only doers that switch ALL of them are kept.
            #intersection
            DoerClassesList=list(
                set.intersection(*
                    map(
                        lambda __DoMethodStr:
                        set(_InstanceVariable.__class__.SwitchMethodDict[__DoMethodStr]),
                        DoMethodStrsList
                    )
                )
            )

        else:

            #init
            DoerClassesList=[]

    #/#################/#
    # Adapt the shape of the hook strs
    #

    # Normalise _HookVariable into a list of hook strs ('Before'/'After').
    #Check
    if type(_HookVariable)!=list:
        if _HookVariable==None:
            HookStrsList=['Before','After']
        else:
            HookStrsList=[_HookVariable]
    else:
        HookStrsList=_HookVariable

    #/#################/#
    # Now map the switch
    #

    #Debug
    '''
    print('l 139 Switcher')
    #print('_InstanceVariable is ')
    #print(_InstanceVariable)
    print('DoMethodStrsList is')
    print(DoMethodStrsList)
    print('DoerClassesList is ')
    print(DoerClassesList)
    print('HookStrsList is ')
    print(HookStrsList)
    print('')
    '''

    # NOTE: Python-2-style map used purely for its side effect — each
    # combination (hook, doer class, do method) gets its watch flag
    # attribute set back to False on the instance.
    #map
    map(
        lambda __HookStr:
        map(
            lambda __DoerClass:
            map(
                lambda __DoMethodStr:
                _InstanceVariable.__setattr__(
                    'Watch'+__HookStr+__DoMethodStr[0].upper(
                    )+__DoMethodStr[1:]+'With'+__DoerClass.NameStr+'Bool',
                    False
                ),
                DoMethodStrsList,
            ),
            DoerClassesList
        ),
        HookStrsList
    )

    #Debug
    '''
    print('l 170 Switcher')
    print('End of setSwitch')
    print('')
    '''

    #return
    return _InstanceVariable
def switch(_InstanceVariable,*_LiargVariablesList,**_KwargVariablesDict):
    """Guard wrapper bound in place of a switched do method.

    If the instance's Watch<Before...>Bool flag (whose key str is passed
    in _KwargVariablesDict['WatchBeforeDoBoolKeyStr']) is already True,
    the wrapped method is skipped and the instance is returned unchanged.
    Otherwise the original (bound) wrap method is looked up and called
    with the remaining liargs/kwargs.
    """

    #Debug
    '''
    print('l 196 Switcher')
    print('In the switch function ')
    print('_KwargVariablesDict is ')
    print(_KwargVariablesDict)
    print('')
    '''

    """
    #alias
    FuncDict=switch.__dict__

    #Debug
    '''
    print('l 52')
    print('In the switch function ')
    print('FuncDict is ')
    print(FuncDict)
    print('')
    '''
    """

    # Only consult the watch flag if the instance actually carries it.
    #Check
    if hasattr(_InstanceVariable,_KwargVariablesDict['WatchBeforeDoBoolKeyStr']):

        #Debug
        '''
        print('Switcher l 201')
        print('Check for a WatchBeforeDoBoolKeyStr')
        print("_KwargVariablesDict['WatchBeforeDoBoolKeyStr'] is ")
        print(_KwargVariablesDict['WatchBeforeDoBoolKeyStr'])
        print('')
        '''

        #get
        WatchDoBool=getattr(
            _InstanceVariable,
            _KwargVariablesDict['WatchBeforeDoBoolKeyStr']
        )

        #Debug
        '''
        print('Switcher l 236')
        print('WatchDoBool is')
        print(WatchDoBool)
        '''

        # Already done once: short-circuit and return the instance.
        #Switch
        if WatchDoBool:
            return _InstanceVariable

    # Resolve the original wrapped (unbound) method from the doer class
    # registered in SYS under the given class name str.
    #get the wrapped method
    WrapUnboundMethod=getattr(
        getattr(
            SYS,
            _KwargVariablesDict['BindDoClassStr']
        ),
        _KwargVariablesDict['BindObserveWrapMethodStr']
    )

    # Strip the bookkeeping keys before forwarding the kwargs.
    # NOTE: Python-2-style map used purely for its side effect.
    #del
    map(
        lambda __KeyStr:
        _KwargVariablesDict.__delitem__(__KeyStr),
        [
            'BindObserveWrapMethodStr',
            'BindDoClassStr',
            'WatchBeforeDoBoolKeyStr'
        ]
    )

    #Call
    return WrapUnboundMethod(
        _InstanceVariable,
        *_LiargVariablesList,
        **_KwargVariablesDict
    )
def getSwitch(_InstanceVariable,_MethodVariable=None):
    """Return the current switch state of the instance.

    Builds the full set of Watch<Hook><DoMethod>With<Doer>Bool key strs
    for the selected do methods and returns a dict mapping each key str
    to its current attribute value on _InstanceVariable.

    _MethodVariable : None (all methods in SwitchMethodDict), a single
                      do-method name str, or a list/tuple of them.
    """

    # Normalise the selection into a list of (do method str, doer classes)
    # item tuples taken from SwitchMethodDict.
    #Check
    if _MethodVariable==None:
        SwitchItemTuplesList=_InstanceVariable.SwitchMethodDict.items()
    elif type(_MethodVariable) in [list,tuple]:
        SwitchItemTuplesList=map(
            lambda __MethodStr:
            (
                __MethodStr,
                _InstanceVariable.SwitchMethodDict[__MethodStr]
            ),
            _MethodVariable
        )
    else:
        SwitchItemTuplesList=[
            (
                _MethodVariable,
                _InstanceVariable.SwitchMethodDict[_MethodVariable]
            )
        ]

    #Debug
    '''
    print('getSwitch l 266')
    print('_MethodVariable is ')
    print(_MethodVariable)
    print('SwitchItemTuplesList is ')
    print(SwitchItemTuplesList)
    print('')
    '''

    # Build every watch key str: one per (do method, doer class, hook)
    # combination, then flatten the nested lists.
    #return
    WatchKeyStrsList=SYS.flat(
        SYS.flat(
            map(
                lambda __SwitchItemTuple:
                map(
                    lambda __ClassStr:
                    map(
                        lambda __HookStr:
                        'Watch'+__HookStr+SYS.getUpperWordStr(
                            __SwitchItemTuple[0]
                        )+'With'+SYS.getNameStrWithClassStr(
                            __ClassStr
                        )+'Bool',
                        ['Before','After']
                    ),
                    map(lambda __Class:__Class.__name__,__SwitchItemTuple[1])
                ),
                SwitchItemTuplesList
            )
        )
    )

    #Debug
    '''
    print('getSwitch l 300')
    print('WatchKeyStrsList is ')
    print(WatchKeyStrsList)
    print('WatchKeyStrsList is ')
    print(WatchKeyStrsList)
    print('')
    '''

    # Map each watch key str to its current value on the instance.
    #return
    return dict(
        zip(
            WatchKeyStrsList,
            map(
                lambda __WatchKeyStr:
                getattr(_InstanceVariable,__WatchKeyStr),
                WatchKeyStrsList
            )
        )
    )
#</DefineFunctions>
#<DefineClass>
@DecorationClass()
class SwitcherClass(BaseClass):
    """Classor that makes a doer's do method "switchable".

    The targeted do method is wrapped (via watch + bind) with the module
    level switch() guard so that it executes at most once per instance
    until its Watch...Bool flags are reset with setSwitch().
    """

    #Definition
    RepresentingKeyStrsList=[
        'SwitchingIsBool',
        'SwitchingWrapMethodStr'
    ]

    def default_init(self,
                     _SwitchingIsBool=False,
                     _SwitchingWrapMethodStr="",
                     **_KwargVariablesDict
                     ):
        # _SwitchingIsBool        : activate the switch mechanism.
        # _SwitchingWrapMethodStr : name of the do method to wrap.

        #Call the parent init method
        BaseClass.__init__(self,**_KwargVariablesDict)

    def __call__(self,_Class):

        #Call the parent method
        Observer.ObserverClass.__bases__[0].__call__(self,_Class)

        #reset
        self.switch()

        #Return
        return _Class

    def do_switch(self):
        """Wrap the targeted do method with the switch() guard and
        register the switch bookkeeping on the decorated class."""

        #Check
        if self.SwitchingIsBool:

            #alias
            SwitchedClass=self.DoClass

            #Debug
            '''
            print('l 195 Switcher')
            print('self.SwitchingWrapMethodStr is '+self.SwitchingWrapMethodStr)
            print('')
            '''

            #watch first
            self.watch(
                True,
                **{'ObservingWrapMethodStr':self.SwitchingWrapMethodStr}
            )

            #Debug
            '''
            print('l 204 Switcher')
            print('self.WatchedDecorationMethodStr is ',self.WatchedDecorationMethodStr)
            print('')
            '''

            # Bind the switch() guard in front of the watched method; the
            # guard receives the WatchBeforeDoBool key str so it can test
            # the flag at call time.
            #first bind
            self.bind(
                True,
                switch,
                "",
                switch.__name__,
                [('WatchBeforeDoBoolKeyStr',self.WatchedBeforeDoBoolKeyStr)],
                **{'ObservingWrapMethodStr':self.WatchedDecorationMethodStr}
            )

            #Define
            SwitchedDecorationUnboundMethod=getattr(
                SwitchedClass,
                self.BindedDecorationMethodStr
            )

            # Re-expose the bound guard under the original method name.
            #Now make the amalgam
            setattr(
                SwitchedClass,
                self.SwitchingWrapMethodStr,
                SwitchedDecorationUnboundMethod
            )

            #/##################/#
            # Set maybe for the first time
            # the setSwitch and the getSwitch

            #Check
            if hasattr(SwitchedClass,'setSwitch')==False:

                #set
                setattr(
                    SwitchedClass,
                    setSwitch.__name__,
                    setSwitch
                )

                #get the unbound
                setSwitchUnboundMethod=getattr(
                    SwitchedClass,
                    setSwitch.__name__
                )

                #add in the inspect
                SwitchedClass.InspectMethodDict[setSwitch.__name__]=setSwitchUnboundMethod
                SwitchedClass.InspectInspectDict[setSwitch.__name__]=SYS.InspectDict(
                    setSwitchUnboundMethod
                )

                #set
                self.setMethod(
                    getSwitch.__name__,
                    getSwitch
                )

            #/##################/#
            # Init the SwitchMethodDict
            #

            #Check
            if hasattr(SwitchedClass,'SwitchMethodDict')==False:

                #Debug
                '''
                print('Switcher l 345')
                print('SwitchedClass is ')
                print(SwitchedClass)
                print('we init a SwitchMethodDict')
                print('')
                '''

                #Check
                if hasattr(SwitchedClass.__bases__[0],'SwitchMethodDict'):

                    # FIX: these debug prints used to be live code (every
                    # other debug block in this module is quoted out) and
                    # spammed stdout each time a subclass was switched.
                    #Debug
                    '''
                    print('Switcher l 488')
                    print('SwitchedClass is ')
                    print(SwitchedClass)
                    print('SwitchedClass.__bases__[0] is ')
                    print(SwitchedClass.__bases__[0])
                    print('')
                    '''

                    # Inherit (a copy of) the base class's registry so the
                    # subclass does not mutate its parent's dict.
                    #copy
                    SwitchedClass.SwitchMethodDict=copy.copy(
                        SwitchedClass.__bases__[0].SwitchMethodDict
                    )

                else:

                    #init
                    SwitchedClass.SwitchMethodDict={
                        self.SwitchingWrapMethodStr:[SwitchedClass]
                    }

            else:

                #/##################/#
                # add
                #

                #Debug
                '''
                print('Switcher l 514')
                print('SwitchedClass is ')
                print(SwitchedClass)
                print('there is already a SwitchMethodDict')
                print('self.SwitchingWrapMethodStr is ')
                print(self.SwitchingWrapMethodStr)
                print('SwitchedClass.SwitchMethodDict is ')
                print(SwitchedClass.SwitchMethodDict)
                print('')
                '''

                # Copy before mutating so sibling classes sharing the dict
                # are not affected.
                #copy
                SwitchedClass.SwitchMethodDict=copy.copy(
                    SwitchedClass.SwitchMethodDict
                )

                #update
                if self.SwitchingWrapMethodStr in self.DoClass.SwitchMethodDict:
                    SwitchedClass.SwitchMethodDict[
                        self.SwitchingWrapMethodStr
                    ].append(SwitchedClass)
                else:
                    SwitchedClass.SwitchMethodDict[
                        self.SwitchingWrapMethodStr
                    ]=[SwitchedClass]

            #Add to the KeyStrsList
            SwitchedClass.KeyStrsList+=[
                'SwitchMethodDict'
            ]
#</DefineClass>
|
As we approach June 30, we are reminded, again, that any U.S. person who has a financial interest in or signature authority over any foreign-based financial accounts is required to file an FBAR (Report of Foreign Bank Account - FINCEN Form 114). The FBAR Reporting, which covers any foreign-based bank account, brokerage account, mutual fund, trust, or other type of foreign financial account, must be received by the U.S. Treasury on or before June 30, 2015.
Since 2009, when the IRS established the Offshore Voluntary Disclosure Program (OVDP) and 2014, when the IRS established the Streamlined Filing Compliance Procedure (SFCP), it is believed that most U.S. persons with undisclosed offshore accounts have taken advantage of the options to become compliant with FBAR rules.
Compliance was further enhanced in the 2010 enactment of the Foreign Accounts Tax Compliance Act (FATCA). This Act forces foreign financial institutions to report U.S.-owned financial accounts or face being subject to U.S. withholdings tax of up to 30% on U.S. source payments being made to the foreign financial institutions.
One notable impact of more aggressive FBAR compliance requirements is the imposition or threat of imposition of penalties of 150% or more. In a 2014 Florida case, a jury imposed a penalty of 150% of the value of an 87-year-old's Swiss bank account for failing to file an FBAR. Following concern about the constitutionality of such penalties, in May 2015, the IRS revealed new guidance on penalties for failure to file FBAR's. The guidance requires IRS examiners to follow new procedures to ensure consistent, effective, and fair administration of penalties. Under the new guidance, examiners are required to determine the willfulness of failure to file annual FBAR's.
For non-willful failure to file, there are three tiers of non-willful penalties.
Default - $10,000 each year.
Lenient - $10,000 to cover all years.
Harsh - $10,000 for each violation/account per year.
In no event will the penalties for non-willful violation exceed 50% of the highest aggregate balance of all unreported accounts during the years under examination. The determination of the tiers of non-willfulness is based on the facts and circumstances of each situation, including the conduct of the person required to file, and the aggregate balance of the unreported foreign financial accounts.
For willful failure to file, the penalty will generally be limited to 50% of the highest aggregate balance of all unreported foreign financial accounts during years under examination. The guidance does provide for an examiner to recommend a penalty that is higher or lower than 50% based on facts and circumstances. Unlike the Florida case mentioned above, in no event will the total amount exceed 100% of the highest aggregate balance of all unreported foreign financial accounts during the years under examination.
In addition to the May 2015 guidance on penalties, the IRS has provided procedures for filing a delinquent FBAR without penalty, by taxpayers who fall into neither the OVDP nor the SFCP options described above. According to the FBAR instructions, delinquent FBARs should be filed by U.S. persons who do not need to use either OVDP or SFCP to "file delinquent or amended returns to report and pay additional taxes on unreported income, are not under civil examination or criminal investigation by the IRS, nor have already been contacted by the IRS about delinquent FBARs." Under this approach, the IRS will not impose a penalty for the failure to file the delinquent FBARs if the conditions are met.
If you have not yet disclosed the existence of foreign-based financial accounts, now is the best time to reevaluate your position and the alternatives to become FBAR compliant. Your Marcum Tax professional can assist you in understanding your filing obligations.
A special thanks to article contributor Patrick Riley, Partner, Tax & Business Services.
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import errno
import functools
import os
import time
import weakref
from eventlet import semaphore
from oslo.config import cfg
from heat.openstack.common import fileutils
from heat.openstack.common.gettextutils import _ # noqa
from heat.openstack.common import local
from heat.openstack.common import log as logging
# Module-level logger.
LOG = logging.getLogger(__name__)

# Configuration options controlling external (file-based) locking:
# 'disable_process_locking' turns inter-process locks off entirely, and
# 'lock_path' is where the lock files live.
util_opts = [
    cfg.BoolOpt('disable_process_locking', default=False,
                help='Whether to disable inter-process locks'),
    cfg.StrOpt('lock_path',
               help=('Directory to use for lock files.'))
]

CONF = cfg.CONF
CONF.register_opts(util_opts)
def set_defaults(lock_path):
    """Set the default value of the ``lock_path`` config option.

    Intended to be called by consuming projects at import/setup time so
    each project can ship its own default lock directory.
    """
    cfg.set_defaults(util_opts, lock_path=lock_path)
class _InterProcessLock(object):
    """Lock implementation which allows multiple locks, working around
    issues like bugs.debian.org/cgi-bin/bugreport.cgi?bug=632857 and does
    not require any cleanup. Since the lock is always held on a file
    descriptor rather than outside of the process, the lock gets dropped
    automatically if the process crashes, even if __exit__ is not executed.

    There are no guarantees regarding usage by multiple green threads in a
    single process here. This lock works only between processes. Exclusive
    access between local threads should be achieved using the semaphores
    in the @synchronized decorator.

    Note these locks are released when the descriptor is closed, so it's not
    safe to close the file descriptor while another green thread holds the
    lock. Just opening and closing the lock file can break synchronisation,
    so lock files must be accessed only using this abstraction.
    """

    def __init__(self, name):
        # File object holding the lock; opened lazily in __enter__.
        self.lockfile = None
        # Path of the lock file on disk.
        self.fname = name

    def __enter__(self):
        self.lockfile = open(self.fname, 'w')

        # Spin (with a small sleep) until the platform-specific
        # non-blocking trylock() succeeds.
        while True:
            try:
                # Using non-blocking locks since green threads are not
                # patched to deal with blocking locking calls.
                # Also upon reading the MSDN docs for locking(), it seems
                # to have a laughable 10 attempts "blocking" mechanism.
                self.trylock()
                return self
            except IOError as e:
                if e.errno in (errno.EACCES, errno.EAGAIN):
                    # external locks synchronise things like iptables
                    # updates - give it some time to prevent busy spinning
                    time.sleep(0.01)
                else:
                    raise

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Best-effort release: log (never raise) if the unlock/close fails,
        # so the original exception (if any) is not masked.
        try:
            self.unlock()
            self.lockfile.close()
        except IOError:
            LOG.exception(_("Could not release the acquired lock `%s`"),
                          self.fname)

    def trylock(self):
        # Platform-specific non-blocking acquire; implemented by subclasses.
        raise NotImplementedError()

    def unlock(self):
        # Platform-specific release; implemented by subclasses.
        raise NotImplementedError()
class _WindowsLock(_InterProcessLock):
    # Windows implementation: lock/unlock one byte of the lock file with
    # msvcrt.locking(). LK_NBLCK is the non-blocking acquire and raises
    # IOError when the byte is already locked.
    def trylock(self):
        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_NBLCK, 1)

    def unlock(self):
        msvcrt.locking(self.lockfile.fileno(), msvcrt.LK_UNLCK, 1)
class _PosixLock(_InterProcessLock):
    # POSIX implementation: exclusive fcntl.lockf() lock on the whole
    # file. LOCK_NB makes the acquire non-blocking (IOError if held).
    def trylock(self):
        fcntl.lockf(self.lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)

    def unlock(self):
        fcntl.lockf(self.lockfile, fcntl.LOCK_UN)
# Select the lock implementation for the current platform: msvcrt file
# locking on Windows ('nt'), fcntl POSIX locks elsewhere. The imports are
# deferred to here because each module exists only on its own platform.
if os.name == 'nt':
    import msvcrt
    InterProcessLock = _WindowsLock
else:
    import fcntl
    InterProcessLock = _PosixLock

# Map of lock name -> semaphore. Weak values let a semaphore be
# garbage-collected once no lock() caller holds a reference to it.
_semaphores = weakref.WeakValueDictionary()
@contextlib.contextmanager
def lock(name, lock_file_prefix=None, external=False, lock_path=None):
    """Context based lock

    This function yields a `semaphore.Semaphore` instance unless external is
    True, in which case, it'll yield an InterProcessLock instance.

    :param lock_file_prefix: The lock_file_prefix argument is used to provide
    lock files on disk with a meaningful prefix.

    :param external: The external keyword argument denotes whether this lock
    should work across multiple processes. This means that if two different
    workers both run a method decorated with @synchronized('mylock',
    external=True), only one of them will execute at a time.

    :param lock_path: The lock_path keyword argument is used to specify a
    special location for external lock files to live. If nothing is set, then
    CONF.lock_path is used as a default.
    """
    # NOTE(soren): If we ever go natively threaded, this will be racy.
    #              See http://stackoverflow.com/questions/5390569/dyn
    #              amically-allocating-and-destroying-mutexes
    sem = _semaphores.get(name, semaphore.Semaphore())
    if name not in _semaphores:
        # this check is not racy - we're already holding ref locally
        # so GC won't remove the item and there was no IO switch
        # (only valid in greenthreads)
        _semaphores[name] = sem

    with sem:
        LOG.debug(_('Got semaphore "%(lock)s"'), {'lock': name})

        # NOTE(mikal): I know this looks odd
        if not hasattr(local.strong_store, 'locks_held'):
            local.strong_store.locks_held = []
        local.strong_store.locks_held.append(name)

        try:
            if external and not CONF.disable_process_locking:
                LOG.debug(_('Attempting to grab file lock "%(lock)s"'),
                          {'lock': name})

                # We need a copy of lock_path because it is non-local
                local_lock_path = lock_path or CONF.lock_path
                if not local_lock_path:
                    raise cfg.RequiredOptError('lock_path')

                if not os.path.exists(local_lock_path):
                    fileutils.ensure_tree(local_lock_path)
                    LOG.info(_('Created lock path: %s'), local_lock_path)

                def add_prefix(name, prefix):
                    # Join prefix and name with exactly one '-' separator.
                    if not prefix:
                        return name
                    sep = '' if prefix.endswith('-') else '-'
                    return '%s%s%s' % (prefix, sep, name)

                # NOTE(mikal): the lock name cannot contain directory
                # separators
                lock_file_name = add_prefix(name.replace(os.sep, '_'),
                                            lock_file_prefix)

                lock_file_path = os.path.join(local_lock_path, lock_file_name)

                try:
                    lock = InterProcessLock(lock_file_path)
                    with lock as lock:
                        LOG.debug(_('Got file lock "%(lock)s" at %(path)s'),
                                  {'lock': name, 'path': lock_file_path})
                        yield lock
                finally:
                    LOG.debug(_('Released file lock "%(lock)s" at %(path)s'),
                              {'lock': name, 'path': lock_file_path})
            else:
                yield sem
        finally:
            # Always drop the bookkeeping entry, even if the body raised.
            local.strong_store.locks_held.remove(name)
def synchronized(name, lock_file_prefix=None, external=False, lock_path=None):
    """Synchronization decorator.

    Decorating a method like so::

        @synchronized('mylock')
        def foo(self, *args):
            ...

    ensures that only one thread will execute the foo method at a time.

    Different methods can share the same lock::

        @synchronized('mylock')
        def foo(self, *args):
            ...

        @synchronized('mylock')
        def bar(self, *args):
            ...

    This way only one of either foo or bar can be executing at a time.

    :param name: name of the lock to acquire around each call.
    :param lock_file_prefix: prefix for the on-disk lock file (used only
        when ``external`` is True).
    :param external: if True, also synchronise across processes with a
        file lock (see :func:`lock`).
    :param lock_path: directory for external lock files; defaults to
        ``CONF.lock_path``.
    """

    def wrap(f):
        @functools.wraps(f)
        def inner(*args, **kwargs):
            # FIX: the "released" debug message used to be placed after
            # the `return` statement and was therefore unreachable dead
            # code. Wrapping the call in try/finally makes it run on both
            # normal return and exception, after the lock is released.
            try:
                with lock(name, lock_file_prefix, external, lock_path):
                    LOG.debug(_('Got semaphore / lock "%(function)s"'),
                              {'function': f.__name__})
                    return f(*args, **kwargs)
            finally:
                LOG.debug(_('Semaphore / lock released "%(function)s"'),
                          {'function': f.__name__})
        return inner
    return wrap
def synchronized_with_prefix(lock_file_prefix):
    """Build a project-specific ``synchronized`` decorator factory.

    Returns :func:`synchronized` with ``lock_file_prefix`` pre-bound, so a
    project can expose its own decorator::

        (in nova/utils.py)
        from nova.openstack.common import lockutils
        synchronized = lockutils.synchronized_with_prefix('nova-')

        (in nova/foo.py)
        from nova import utils

        @utils.synchronized('mylock')
        def bar(self, *args):
           ...

    The pre-bound prefix gives the on-disk lock files a meaningful,
    per-project name.
    """
    prefixed_synchronized = functools.partial(
        synchronized, lock_file_prefix=lock_file_prefix)
    return prefixed_synchronized
|
Futures trading is the process of buying or selling some predetermined asset at a mutually-agreed upon price. This agreement is called a futures contract and it is paid out on a specified delivery date, which is usually in the future. This process occurs at a futures exchange, where the buyer is also called the “long” party and the seller is the “short” party. These designations reflect how the contract is seen in the eyes of each member of the agreement. The person buying the asset hopes that the value of those assets will increase in the future, while the seller hopes that the price will decrease before the sale.
A futures contract is highly standardized. That means that it specifies a number of things, including what the asset up for sale is. The asset might range from an interest rate at a bank, to foreign currency, to a quantity of oil. The contract should clearly indicate the number of units that will be sold. When the contract involves physical commodities, it must also indicate clearly the quality of those contents, otherwise known as the grade. For instance, when crude oil is being traded, the oil must adhere to certain standards which are outlined in the agreement between both the long and short parties.
The contract should also specify how the settlement will be paid out in the future, whether the payment will be made in cash and what currency will be used to make the payment. Finally, the contract must state when the delivery will take place and when final trading will occur. Another detail often included in the futures contract is something known as a commodity tick, which determines by what percentage the market may fluctuate before the delivery date.
There is usually a mediating party in every futures exchange. This process began in the early 1970s when the Chicago Mercantile Exchange (CME) introduced future contracts for financial transactions. It became highly successful and helped to increase the volume of trading and open international access to markets. Following this success, new markets opened up around the world, with the London International Financial Futures Exchange, the Terminborse, and the Tokyo Commodity Exchange becoming just three of over 90 institutions that specialize in futures exchanges.
This innovation stems back to the trading of commodities in Japan in the 18th century. At that time, goods such as silk and rice were being traded in markets. In the west, Holland traded tulip bulbs and the United States began trading later, in the middle of the 19th century. In the U.S., trading took place between farmers who sold their grain produce to buyers either immediately or to be delivered at a later date. These early contracts were the basis for futures trading today.
The term settlement refers to the consummation of the contract, as it is specified. This can be done as either a physical delivery or a cash settlement. The physical delivery process is when the asset denoted in the contract is passed from the seller to the buyer of the contract. This practice is common with most bonds and commodities that are traded; however, it does not occur with all contracts. In fact, contracts are often canceled by other contracts known as covering positions, for either the long or short party.
The second type of settlement is simply a cash payment. These funds are transferred according to the agreed-upon currency and rate. This is usually based on a stock market index or an interest rate index over the course of the transaction. The contract is settled when one party receives the payment for a loss at the time of the contract’s expiration. In the case of cash settlements, most often the asset in question cannot actually be delivered. It is most often not a physical object.
|
# project/__init__.py
#################
#### imports ####
#################
import os
from flask import Flask, render_template
from flask_login import LoginManager
from flask_bcrypt import Bcrypt
from flask_mail import Mail
from flask_debugtoolbar import DebugToolbarExtension
from flask_sqlalchemy import SQLAlchemy
################
#### config ####
################
# Central application object; configuration is loaded from the import path
# named by the APP_SETTINGS environment variable (raises KeyError if unset).
app = Flask(__name__)
app.config.from_object(os.environ['APP_SETTINGS'])

####################
#### extensions ####
####################

login_manager = LoginManager()        # session / login management
login_manager.init_app(app)
bcrypt = Bcrypt(app)                  # password hashing
mail = Mail(app)                      # outbound e-mail
toolbar = DebugToolbarExtension(app)  # in-browser debug toolbar
db = SQLAlchemy(app)                  # ORM / database access
####################
#### blueprints ####
####################
# Blueprints are imported here, after ``app`` exists — presumably to avoid a
# circular import, since the view modules import from this package; confirm
# before reordering these lines.
from project.main.views import main_blueprint
from project.user.views import user_blueprint

app.register_blueprint(main_blueprint)
app.register_blueprint(user_blueprint)
####################
#### flask-login ####
####################
from project.models import User

# View that anonymous users are redirected to, and the flash-message
# category Flask-Login uses for its login messages.
login_manager.login_view = "user.login"
login_manager.login_message_category = "danger"


@login_manager.user_loader
def load_user(user_id):
    """Flask-Login user loader: rebuild the User from the session's id.

    Arguments:
        user_id: the id stored in the session (a string); cast to int for
            the primary-key comparison.

    Returns:
        The matching User, or None if no user has this id.
    """
    return User.query.filter(User.id == int(user_id)).first()
########################
#### error handlers ####
########################
@app.errorhandler(403)
def forbidden_page(error):
    """Render the custom 403 (Forbidden) page with a matching status code."""
    return render_template("errors/403.html"), 403
@app.errorhandler(404)
def page_not_found(error):
    """Render the custom 404 (Not Found) page with a matching status code."""
    return render_template("errors/404.html"), 404
@app.errorhandler(500)
def server_error_page(error):
    """Render the custom 500 (Internal Server Error) page with a matching status code."""
    return render_template("errors/500.html"), 500
|
Upward Bound Math and Science, a federally funded initiative, aims to persuade U.S. high schoolers to become college STEM (science, technology, engineering, mathematics) majors. The program attempts this persuasion by developing students’ content and procedural knowledge so that students may succeed in high school and college STEM courses. Primary focus on knowledge acquisition, however, may cause missed opportunities to engage the imaginative dimensions of students’ science identities and students’ senses of wonder for science. In this reflective essay, I describe a science fiction prototyping assignment that meets the knowledge-based objectives of the Writing Skills course in a five-week Upward Bound summer program at one Eastern U.S. public university and, at the same time, prompts students to perform science identities by writing narrative genres that echo students’ wonder-at attitudes toward science. This assignment is informed by science educator and theorist Yannis Hadzigeorgiou’s argument that imagination should be at the center of science education, as well as by Etienne Wenger’s communities-of-practice framework that describes imagination as one key way of forging belonging in society. By thinking about how future innovations may impact future families through the activity of composing a narrative and an informative genre, students communicate understanding and wonder for science to disciplinary and general audiences, with benefits for their attitudes toward and identities related to science.
Justin Nicholes' research explores the role writing plays in constructing disciplinary identities, enhancing disciplinary learning, and supporting retention efforts.
|
#! /usr/bin/python
# Author: Ulas A.
# Date: 14 Nov 2015
# Python version: 2.7
# This program takes a text file input by the user in a Linux filesystem and searches for a user input string
# The search simulates Regular expressions. The program can search for text anywhere in the opened file
# the beginning of each line (^) or the end of lines $
import os, re, subprocess
### User inputs and variable definitions
TheFile = raw_input('Type in the file to search: ')
while os.path.exists(TheFile) is False:
print 'Invalid file/folder name, please try again!'
TheFile = raw_input('Type in the file/folder to search: ')
Search = raw_input('Type string to search: ')
print 'Choose an option: \n \ta - Search anywhere in the line \n \t^ - Search beginning of a line \n\t$ - Search end of a line'
option = raw_input('a/^/$: ')
while option not in ('a', '^', '$'):
print 'Invalid option!'
option = raw_input('a/^/$: ')
### Check if the file is binary
def NonBinary(path):
return (re.search(r':.* text', subprocess.Popen(["file", '-L', path], stdout=subprocess.PIPE).stdout.read()) is not None)
### Search anywhere option
if option == "a":
if NonBinary(TheFile):
with open(TheFile, 'rt') as txt:
for line in txt:
if Search in line:
print TheFile, ':', line,
if not os.access(TheFile, os.R_OK):
print TheFile, ': Permission denied'
### Search the beginning of a line option
elif option == "^":
if NonBinary(TheFile):
with open(TheFile, 'rt') as txt:
for line in txt:
CurrentLine = line
regex = re.match(Search, CurrentLine)
if regex:
print TheFile, ':', line,
if not os.access(TheFile, os.R_OK):
print TheFile, ': Permission denied'
### Search the end of a line option
elif option == "$":
if NonBinary(TheFile):
with open(TheFile, 'rt') as txt:
for line in txt:
CurrentLine = line
if re.search(Search+r'$', CurrentLine):
print TheFile, ':', line,
if not os.access(TheFile, os.R_OK):
print TheFile, ': Permission denied'
|
Place chopped chocolate in a glass or metal bowl that can nest on top of a pot filled with a few inches of water. Bring water to a simmer, turn off heat, and then place bowl over hot water (the bottom should not touch the water). Stir frequently with a flexible spatula to encourage even melting. If necessary, remove bowl, reheat water, and replace bowl. Once chocolate is melted, remove bowl from pan, stir well, and use immediately.
|
# coding=utf-8
from __future__ import absolute_import, division, print_function
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2015 The OctoPrint Project - Released under terms of the AGPLv3 License"
import io
from octoprint.util import atomic_write
class AbstractFileWrapper(object):
    """
    Base class for file representations that can be saved to a storage.

    Arguments:
        filename (str): The file's name
    """

    def __init__(self, filename):
        self.filename = filename

    def save(self, path):
        """
        Store this wrapper's content at the given absolute ``path``.

        Arguments:
            path (str): The absolute path to where to save the file
        """
        raise NotImplementedError()

    def stream(self):
        """
        Return the wrapped content as a stream.

        Returns:
            io.IOBase: The file's contents as a stream.
        """
        raise NotImplementedError()
class DiskFileWrapper(AbstractFileWrapper):
    """
    :class:`.AbstractFileWrapper` implementation backed by an existing file
    on disk. Saving either moves the file to the target path (``move=True``,
    the default) or copies it there, preserving file attributes.

    Arguments:
        filename (str): The file's name
        path (str): The file's absolute path
        move (boolean): Whether to move the file upon saving (True, default) or copying.
    """

    def __init__(self, filename, path, move=True):
        AbstractFileWrapper.__init__(self, filename)
        self.path = path
        self.move = move

    def save(self, path):
        import shutil

        # shutil.copy2 also copies file metadata (timestamps, permissions).
        transfer = shutil.move if self.move else shutil.copy2
        transfer(self.path, path)

    def stream(self):
        return io.open(self.path, "rb")
class StreamWrapper(AbstractFileWrapper):
    """
    A wrapper allowing processing of one or more consecutive streams.

    Arguments:
        *streams (io.IOBase): One or more streams to process one after another to save to storage.
    """

    def __init__(self, filename, *streams):
        if not streams:
            raise ValueError("Need at least one stream to wrap")
        AbstractFileWrapper.__init__(self, filename)
        self.streams = streams

    def save(self, path):
        """
        Dump the contents of every wrapped stream into the target file, in
        the order the streams were handed to the constructor.
        """
        import shutil

        with atomic_write(path, "wb") as dest:
            with self.stream() as source:
                shutil.copyfileobj(source, dest)

    def stream(self):
        """
        Return the single wrapped stream directly, or a :class:`.MultiStream`
        concatenating all of them when more than one stream was provided.
        """
        if len(self.streams) == 1:
            return self.streams[0]
        return MultiStream(*self.streams)
class MultiStream(io.RawIOBase):
    """
    A stream implementation which when read reads from multiple streams, one after the other, basically concatenating
    their contents in the order they are provided to the constructor.

    Arguments:
        *streams (io.IOBase): One or more streams to concatenate.
    """

    def __init__(self, *streams):
        io.RawIOBase.__init__(self)
        self.streams = streams
        self.current_stream = 0  # index of the stream currently being read

    def read(self, n=-1):
        """
        Read up to ``n`` bytes from the current stream, advancing to the
        next stream whenever the current one is exhausted. Returns ``b''``
        once every stream has been consumed.

        Note: a single call never spans two streams — it returns whatever
        the current stream yields, so callers must keep reading until
        ``b''`` is returned.
        """
        if n == 0:
            return b''

        if len(self.streams) == 0:
            return b''

        while self.current_stream < len(self.streams):
            stream = self.streams[self.current_stream]
            result = stream.read(n)

            if result is None or len(result) != 0:
                return result
            else:
                # current stream is exhausted, move on to the next one
                self.current_stream += 1

        return b''

    def readinto(self, b):
        # RawIOBase protocol: fill buffer ``b`` and return the byte count.
        n = len(b)
        read = self.read(n)
        b[:len(read)] = read
        return len(read)

    def close(self):
        # Best-effort close of every wrapped stream; a failure closing one
        # stream must not prevent closing the others. The original used a
        # bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt — narrowed to Exception.
        for stream in self.streams:
            try:
                stream.close()
            except Exception:
                pass

    def readable(self, *args, **kwargs):
        return True

    def seekable(self, *args, **kwargs):
        return False

    def writable(self, *args, **kwargs):
        return False
class LineProcessorStream(io.RawIOBase):
    """
    While reading from this stream the provided `input_stream` is read line by line, calling the (overridable) method
    :meth:`.process_line` for each read line.

    Sub classes can thus modify the contents of the `input_stream` in line, while it is being read.

    Arguments:
        input_stream (io.IOBase): The stream to process on the fly.
    """

    def __init__(self, input_stream):
        io.RawIOBase.__init__(self)
        # BufferedReader provides an efficient readline() over the raw stream.
        self.input_stream = io.BufferedReader(input_stream)
        # Processed bytes that did not fit into the previous read()'s budget.
        self.leftover = None

    def read(self, n=-1):
        # Serve up to ``n`` bytes of the processed stream (everything for
        # n == -1), pulling and processing input lines on demand.
        if n == 0:
            return b''

        result = b''
        while len(result) < n or n == -1:
            bytes_left = (n - len(result)) if n != -1 else -1
            # First drain leftover bytes from a previously processed line.
            if self.leftover is not None:
                if bytes_left != -1 and bytes_left < len(self.leftover):
                    # Leftover alone fills the budget; keep the rest around.
                    result += self.leftover[:bytes_left]
                    self.leftover = self.leftover[bytes_left:]
                    break
                else:
                    result += self.leftover
                    self.leftover = None

            # Fetch the next line that process_line does not strip (a None
            # result from process_line drops the line entirely).
            processed_line = None
            while processed_line is None:
                line = self.input_stream.readline()
                if not line:
                    break
                processed_line = self.process_line(line)
            if processed_line is None:
                # Input exhausted with no further line to emit.
                break

            bytes_left = (n - len(result)) if n != -1 else -1
            if bytes_left != -1 and bytes_left < len(processed_line):
                # Line exceeds the remaining budget; stash the tail.
                result += processed_line[:bytes_left]
                self.leftover = processed_line[bytes_left:]
                break
            else:
                result += processed_line

        return result

    def readinto(self, b):
        # RawIOBase protocol: fill buffer ``b`` and return the byte count.
        n = len(b)
        read = self.read(n)
        b[:len(read)] = read
        return len(read)

    def process_line(self, line):
        """
        Called from the `read` Method of this stream with each line read from `self.input_stream`.

        By returning ``None`` the line will not be returned from the read stream, effectively being stripped from the
        wrapper `input_stream`.

        Arguments:
            line (str): The line as read from `self.input_stream`

        Returns:
            str or None: The processed version of the line (might also be multiple lines), or None if the line is to be
                stripped from the processed stream.
        """
        return line

    def close(self):
        self.input_stream.close()

    def readable(self, *args, **kwargs):
        return True

    def seekable(self, *args, **kwargs):
        return False

    def writable(self, *args, **kwargs):
        return False
|
The ultimate way to keep your organization earning profits should be to convey a seo contract free download in place. Many of these deals can assist you to maintain making money once the purchaser or engines like google reacts in a manner that makes you carry on doing work but not capable to create a specified respond as to the reasons the ratings decreased or how much time it will need to get come back up.
Everyone should know that once you start building links you must make continuous efforts to keep creating them, and that takes money. What are you going to do if a customer will not pay? This is why we created the internet marketing contract: it keeps your paydays coming in, ensuring you have the cash on hand to build your backlinks and do your internet marketing while still paying your costs.
Discover how to utilize seo contract free download today and take the organization to another volume, never ever get unwanted is caused by the individual once more by looking for a search engine marketing contract in place before starting the seo strategy.
|
from __future__ import absolute_import
# Copyright (c) 2010-2019 openpyxl
"""Write the workbook global settings to the archive."""
from copy import copy
from openpyxl.utils import absolute_coordinate, quote_sheetname
from openpyxl.xml.constants import (
ARC_APP,
ARC_CORE,
ARC_WORKBOOK,
PKG_REL_NS,
CUSTOMUI_NS,
ARC_ROOT_RELS,
)
from openpyxl.xml.functions import tostring, fromstring
from openpyxl.packaging.relationship import Relationship, RelationshipList
from openpyxl.workbook.defined_name import DefinedName
from openpyxl.workbook.external_reference import ExternalReference
from openpyxl.packaging.workbook import ChildSheet, WorkbookPackage, PivotCache
from openpyxl.workbook.properties import WorkbookProperties
from openpyxl.utils.datetime import CALENDAR_MAC_1904
def get_active_sheet(wb):
    """
    Return the index of the active sheet.
    If the sheet set to active is hidden return the next visible sheet or None

    Raises:
        IndexError: if no sheet in the workbook is visible at all.
    """
    visible_sheets = [idx for idx, sheet in enumerate(wb._sheets) if sheet.sheet_state == "visible"]
    if not visible_sheets:
        raise IndexError("At least one sheet must be visible")

    idx = wb._active_sheet_index
    sheet = wb.active
    if sheet and sheet.sheet_state == "visible":
        return idx

    # Active sheet is hidden: fall forward to a later visible sheet and make
    # it active. NOTE(review): ``visible_sheets[idx:]`` slices the list of
    # visible sheet *indices* by the active-sheet index; the two numbering
    # schemes only line up when no hidden sheets precede the active one —
    # worth confirming against the workbook's sheet-index semantics.
    for idx in visible_sheets[idx:]:
        wb.active = idx
        return idx

    return None
class WorkbookWriter:
    """Serialise a workbook's global settings, sheets, defined names, pivot
    caches and relationships to their XML parts."""

    def __init__(self, wb):
        self.wb = wb
        self.rels = RelationshipList()
        self.package = WorkbookPackage()
        self.package.workbookProtection = wb.security
        self.package.calcPr = wb.calculation

    def write_properties(self):
        """Copy workbook-level properties (code name, date system)."""
        props = WorkbookProperties() # needs a mapping to the workbook for preservation
        if self.wb.code_name is not None:
            props.codeName = self.wb.code_name
        if self.wb.excel_base_date == CALENDAR_MAC_1904:
            props.date1904 = True
        self.package.workbookPr = props

    def write_worksheets(self):
        """Add a sheet element and a relationship for every worksheet.

        Raises:
            ValueError: if the workbook's only worksheet is not visible.
        """
        for idx, sheet in enumerate(self.wb._sheets, 1):
            sheet_node = ChildSheet(name=sheet.title, sheetId=idx, id="rId{0}".format(idx))
            rel = Relationship(type=sheet._rel_type, Target=sheet.path)
            self.rels.append(rel)

            if not sheet.sheet_state == 'visible':
                if len(self.wb._sheets) == 1:
                    raise ValueError("The only worksheet of a workbook cannot be hidden")
                sheet_node.state = sheet.sheet_state
            self.package.sheets.append(sheet_node)

    def write_refs(self):
        """Register external workbook links as relationships and references."""
        for link in self.wb._external_links:
            # Relationship ids are assigned when appending to self.rels; the
            # original also computed an unused ``rId`` counter here, which
            # has been removed.
            rel = Relationship(type=link._rel_type, Target=link.path)
            self.rels.append(rel)
            ext = ExternalReference(id=rel.id)
            self.package.externalReferences.append(ext)

    def write_names(self):
        """Collect defined names, adding auto-filter, print-title and
        print-area definitions for each worksheet."""
        defined_names = copy(self.wb.defined_names)

        # Defined names -> autoFilter
        for idx, sheet in enumerate(self.wb.worksheets):
            auto_filter = sheet.auto_filter.ref
            if auto_filter:
                name = DefinedName(name='_FilterDatabase', localSheetId=idx, hidden=True)
                name.value = u"{0}!{1}".format(quote_sheetname(sheet.title),
                                               absolute_coordinate(auto_filter)
                                               )
                defined_names.append(name)

            # print titles
            if sheet.print_titles:
                name = DefinedName(name="Print_Titles", localSheetId=idx)
                name.value = ",".join([u"{0}!{1}".format(quote_sheetname(sheet.title), r)
                                       for r in sheet.print_titles.split(",")])
                defined_names.append(name)

            # print areas
            if sheet.print_area:
                name = DefinedName(name="Print_Area", localSheetId=idx)
                name.value = ",".join([u"{0}!{1}".format(quote_sheetname(sheet.title), r)
                                       for r in sheet.print_area])
                defined_names.append(name)

        self.package.definedNames = defined_names

    def write_pivots(self):
        """Write pivot caches, deduplicated so a cache shared by several
        pivot tables is serialised only once."""
        pivot_caches = set()
        for pivot in self.wb._pivots:
            if pivot.cache not in pivot_caches:
                pivot_caches.add(pivot.cache)
                c = PivotCache(cacheId=pivot.cacheId)
                self.package.pivotCaches.append(c)
                rel = Relationship(Type=pivot.cache.rel_type, Target=pivot.cache.path)
                self.rels.append(rel)
                c.id = rel.id
        #self.wb._pivots = [] # reset

    def write_views(self):
        """Set the active tab on the first book view."""
        active = get_active_sheet(self.wb)
        if self.wb.views:
            self.wb.views[0].activeTab = active
        self.package.bookViews = self.wb.views

    def write(self):
        """Write the core workbook xml."""
        self.write_properties()
        self.write_worksheets()
        self.write_names()
        self.write_pivots()
        self.write_views()
        self.write_refs()
        return tostring(self.package.to_tree())

    def write_rels(self):
        """Write the workbook relationships xml."""
        styles = Relationship(type='styles', Target='styles.xml')
        self.rels.append(styles)

        theme = Relationship(type='theme', Target='theme/theme1.xml')
        self.rels.append(theme)

        if self.wb.vba_archive:
            # vbaProject uses a Microsoft-specific relationship type, so the
            # full Type URI is assigned explicitly after construction.
            vba = Relationship(type='', Target='vbaProject.bin')
            vba.Type = 'http://schemas.microsoft.com/office/2006/relationships/vbaProject'
            self.rels.append(vba)

        return tostring(self.rels.to_tree())

    def write_root_rels(self):
        """Write the package relationships"""
        rels = RelationshipList()

        rel = Relationship(type="officeDocument", Target=ARC_WORKBOOK)
        rels.append(rel)
        rel = Relationship(Type="%s/metadata/core-properties" % PKG_REL_NS, Target=ARC_CORE)
        rels.append(rel)
        rel = Relationship(type="extended-properties", Target=ARC_APP)
        rels.append(rel)

        if self.wb.vba_archive is not None:
            # See if there was a customUI relation and reuse it
            xml = fromstring(self.wb.vba_archive.read(ARC_ROOT_RELS))
            root_rels = RelationshipList.from_tree(xml)
            for rel in root_rels.find(CUSTOMUI_NS):
                rels.append(rel)

        return tostring(rels.to_tree())
|
LeeTen (Japanese リーテン) is the term used to refer the romantic relationship between Rock Lee and Tenten.
Team Guy declare their dreams.
Team Guy was formed a year before the start of the story.
Tenten sat next to Lee as they introduced themselves to Guy. Tenten stared as Lee declared his dream to prove that it's possible to be a splendid ninja without any ninjutsu or genjutsu.
Later, the three of them all fought Guy and showed that they had the resolve to keep up a fight and defy the odds against them, so Guy pulled them all into a group hug.
Tenten blushes and Lee cried tears of joy.
Rock Lee and Tenten were put on the same team a year before the start of the series along with Hyūga Neji, thus forming Team Guy. Having not been able to participate in the Exams previously, Team Guy entered the Exams along with the Rookie Nine.
Before the exams, Team Guy is seen being bullied by some of the other kids, who shove Lee and Tenten out of the way and prevent them from entering the room. After Team 7 steps in and a confrontation almost occurs, Lee jumps in and effortlessly holds both of them back, much to everyone's surprise.
Tenten and Neji get onto Lee, reminding him that it was his idea for them to lay low and pretend to be weak in the first place. Lee apologizes.
Tenten wakes up Lee after his fight with the sound ninja.
In the first test in the Exams, Tenten used wires to adjust hidden mirrors on the ceiling read other competitors answers and forward the answers to Lee.
Later, in the Forest of Death, Team Guy decided to search for weaker teams to steal scrolls from with a set location for them to return to after. When Lee did not come to their designated area, Tenten became worried and set out with Neji to search for him. Neji and Tenten arrived to see an unconscious Lee, who had been defeated by the Sound Genin.
Tenten leaped down from the tree branch and told Ino that she would take over watching Lee. Then, Tenten took Lee's body, violently yelling and shaking him awake. When Lee explains what had happened, Tenten thinks that if Lee was alone he would never have lost to the Sound Genin, more than likely blaming Sakura and Lee's infatuation with her for Lee's defeat. When Lee makes his promise to Sakura that he will be a stronger ninja next time they meet, Tenten is then shown looking angry, possibly even jealous.
After Temari defeated Tenten, she flung her unconscious body off of her fan and called her trash. Lee jumped down from the balcony and caught Tenten's falling body to save her from hitting the hard wall or weapons littered all over the ground. Lee yelled at Temari for treating Tenten so callously when she had fought her hardest, and was so angry that he was able to pick a fight with Temari himself.
Guy stepped in to call Lee back and Gaara called Temari back to the stands before the fight could happen.
Tenten is seen smiling as she watches Lee train.
When Kisame traps Neji, Lee and Tenten in his Water Prison Jutsu, Lee destroys the clone that was keeping Tenten in the jutsu.
Team Guy decides to stop at a tea house before returning to the Leaf Village from their mission, where they see several flocks of birds fleeing from the direction of the Leaf Village; the team cuts their resting time short, hurrying to the village, much to the disappointment of Tenten.
Lee, Tenten and the rest of the Konoha 11 decide to take out Sasuke.
As Lee mourns over Neji's death, Tenten and Guy try to comfort him, Tenten seemingly more concerned with Lee being upset than Neji being dead.
Tenten is exasperated by Guy and Lee's unburning will to fight, when both are extremely worn out. Tenten thinks to herself that nothing good will ever come with being with these two. She tells Lee to go help Guy as she went to check up on something.
When Lee finally decides on the perfect present to get for Naruto and Hinata's wedding, he seeks out Tenten to make sure she doesn't have the same idea as he does. Tenten is mystified by Lee's appearance.
Later, at the wedding, Tenten gives Guy and Lee tips on how to behave.
They have always had a friendly relationship throughout the series.
Tenten admires Lee's resolve and motivation.
Lee came to Tenten's aid after her defeat by Temari.
LeeTen has a good amount of popularity among Tenten and Lee pairings. It is likely supported because of their relationship in the manga, and Tenten's being embarrassed of him can be seen as a comedy point of the pairing. The couple has gotten more popular amongst fans due to Neji dying and Tenten being concerned about Lee being upset. The Naruto spinoff series starring Rock Lee has also brought some attention to this fan pairing. In "Boruto: Naruto Next Generations," Rock Lee's son, Metal Lee's mother has not been revealed making Tenten a possible choice.
|
# PyTransit: fast and easy exoplanet transit modelling in Python.
# Copyright (C) 2010-2019 Hannu Parviainen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import matplotlib as mpl
import matplotlib.pyplot as pl
import seaborn as sb
from matplotlib.gridspec import GridSpec
from numpy import sqrt, percentile, ceil, linspace, concatenate, histogram
# Sequential colormap built by blending the first seaborn palette colour
# from lightness 1 (white) down to 0; used by the hexbin joint plots below.
color = sb.color_palette()[0]
color_rgb = mpl.colors.colorConverter.to_rgb(color)
colors = [sb.utils.set_hls_values(color_rgb, l=l) for l in linspace(1, 0, 12)]
cmap = sb.blend_palette(colors, as_cmap=True)

# Second colormap, same construction from the next palette colour.
color = sb.color_palette()[1]
color_rgb = mpl.colors.colorConverter.to_rgb(color)
colors = [sb.utils.set_hls_values(color_rgb, l=l) for l in linspace(1, 0, 12)]
cmap2 = sb.blend_palette(colors, as_cmap=True)

c_ob = "#002147" # Oxford blue
c_bo = "#CC5500" # Burnt orange
def marginal_radius_ratio_plot(df, bins: int = 40, klim: tuple = None, figsize: tuple = (6, 5)):
    """Plot the apparent and true radius-ratio posteriors as two back-to-back
    vertical histograms sharing the radius-ratio axis.

    Arguments:
        df: frame with ``k_app`` and ``k_true`` posterior sample columns.
        bins: number of histogram bins.
        klim: radius-ratio axis limits; derived from the 0–99.9 percentiles
            of the pooled samples when None.
        figsize: figure size in inches.

    Returns:
        The created matplotlib figure.
    """
    if klim is None:
        klim = percentile(concatenate([df.k_app.values, df.k_true.values]), [0.0, 99.9])
    v1, e = histogram(df.k_app, range=klim, bins=bins, density=True)
    v2, e = histogram(df.k_true, range=klim, bins=bins, density=True)
    fig, ax = pl.subplots(figsize=figsize, constrained_layout=True)
    # Apparent-ratio density grows towards negative x, true-ratio density
    # towards positive x, with a fixed gap of 20 units between the two.
    ax.fill_betweenx(e[:-1], -10 - v1, -10, step='post', linewidth=1, edgecolor='k')
    ax.fill_betweenx(e[:-1], 10 + v2, 10, step='post', linewidth=1, edgecolor='k')
    ax.text(-0.5 * v1.max(), klim[1], 'Apparent radius ratio', ha='center', va='top')
    ax.text(0.5 * v2.max(), klim[1], 'True radius ratio', ha='center', va='top')
    pl.setp(ax, xlabel='Posterior density', ylabel='Radius ratio', xticks=[], ylim=klim)
    sb.despine(fig, offset=5)
    return fig
def _jplot(hte, cte, cnr, imp, rho, fw=10, nb=30, gs=25, ylabel='Contamination in $i\'$', **kwargs):
    """Shared joint-plot layout: hexbin panels of ``cnr`` against the host
    and contaminant effective temperatures, the impact parameter and the
    stellar density, with marginal histograms along the top and right.

    Arguments:
        hte, cte: host and contaminant effective temperature samples.
        cnr: quantity on the shared y axis (contamination or true radius
            ratio, depending on the caller).
        imp: impact parameter samples.
        rho: stellar density samples.
        fw: figure width in inches (height is fw / 4).
        nb: number of marginal-histogram bins.
        gs: hexbin grid size.
        ylabel: label for the shared y axis.
        **kwargs: optional axis limits ``htelim``, ``ctelim``, ``blim``,
            ``rlim`` and ``clim``.

    Returns:
        (fig, ax_tt, ax_chj, ax_cbj, ax_ccj, ax_crj)
    """
    htelim = kwargs.get('htelim', (2000, 8000))
    ctelim = kwargs.get('ctelim', (4000, 12000))
    blim = kwargs.get('blim', (0, 1))
    rlim = kwargs.get('rlim', (0, 15))
    clim = kwargs.get('clim', (0, 1))

    fig = pl.figure(figsize=(fw, fw / 4))
    # Two grids: a lone Teff-vs-Teff panel on the left and a 2x5 grid with
    # the joint panels in the bottom row, their marginal histograms in the
    # top row, and a horizontal marginal for cnr in the last column.
    gs_tt = GridSpec(2, 1, bottom=0.2, top=1, left=0.1, right=0.3, hspace=0, wspace=0, height_ratios=[0.15, 0.85], figure=fig)
    gs_ct = GridSpec(2, 5, bottom=0.2, top=1, left=0.38, right=1, hspace=0.05, wspace=0.05,
                     height_ratios=[0.15, 0.85],
                     width_ratios=[1, 1, 1, 1, 0.2], figure=fig)

    ax_tt = pl.subplot(gs_tt[1, 0])
    ax_chj = pl.subplot(gs_ct[1, 0])
    ax_ccj = pl.subplot(gs_ct[1, 1])
    ax_cbj = pl.subplot(gs_ct[1, 2])
    ax_crj = pl.subplot(gs_ct[1, 3])
    ax_thm = pl.subplot(gs_ct[0, 0])
    ax_ctm = pl.subplot(gs_ct[0, 1])
    ax_bm = pl.subplot(gs_ct[0, 2])
    ax_rm = pl.subplot(gs_ct[0, 3])
    ax_cnm = pl.subplot(gs_ct[1, 4])

    # Joint (hexbin) panels.
    ax_tt.hexbin(hte, cte, gridsize=gs, cmap=cmap, extent=(htelim[0], htelim[1], ctelim[0], ctelim[1]))
    ax_chj.hexbin(hte, cnr, gridsize=gs, cmap=cmap, extent=(htelim[0], htelim[1], clim[0], clim[1]))
    ax_ccj.hexbin(cte, cnr, gridsize=gs, cmap=cmap, extent=(ctelim[0], ctelim[1], clim[0], clim[1]))
    ax_cbj.hexbin(imp, cnr, gridsize=gs, cmap=cmap, extent=(blim[0], blim[1], clim[0], clim[1]))
    ax_crj.hexbin(rho, cnr, gridsize=gs, cmap=cmap, extent=(rlim[0], rlim[1], clim[0], clim[1]))

    # Marginal histograms.
    ax_thm.hist(hte, bins=nb, alpha=0.5, range=htelim, histtype='stepfilled')
    ax_ctm.hist(cte, bins=nb, alpha=0.5, range=ctelim, histtype='stepfilled')
    ax_bm.hist(imp, bins=nb, alpha=0.5, range=blim, histtype='stepfilled')
    ax_rm.hist(rho, bins=nb, alpha=0.5, range=rlim, histtype='stepfilled')
    ax_cnm.hist(cnr, bins=nb, alpha=0.5, range=clim, histtype='stepfilled', orientation='horizontal')

    # Labels, shared limits and cosmetics.
    pl.setp(ax_tt, xlabel='Host $T_\mathrm{Eff}$', ylabel='Contaminant $T_\mathrm{Eff}$')
    pl.setp(ax_chj, xlabel='Host $T_\mathrm{Eff}$', ylabel=ylabel)
    pl.setp(ax_ccj, xlabel='Contaminant $T_\mathrm{Eff}$')
    pl.setp(ax_cbj, xlabel='Impact parameter')
    pl.setp(ax_crj, xlabel='Stellar density')
    pl.setp(ax_thm, xlim=ax_chj.get_xlim())
    pl.setp(ax_ctm, xlim=ax_ccj.get_xlim())
    pl.setp(ax_bm, xlim=ax_cbj.get_xlim())
    pl.setp([ax_ccj, ax_cnm], ylim=ax_chj.get_ylim())
    pl.setp([ax_chj, ax_ccj, ax_cbj, ax_crj, ax_cnm], ylim=clim)
    pl.setp([ax_thm, ax_ctm, ax_cnm, ax_bm, ax_rm], yticks=[], xticks=[])
    pl.setp(ax_ccj.get_yticklabels(), visible=False)
    pl.setp(ax_cbj.get_yticklabels(), visible=False)
    pl.setp(ax_crj.get_yticklabels(), visible=False)

    [sb.despine(ax=ax, left=True, offset=0.1) for ax in [ax_thm, ax_ctm, ax_bm, ax_rm]]
    [sb.despine(ax=ax) for ax in [ax_chj, ax_ccj, ax_cbj, ax_crj]]
    sb.despine(ax=ax_cnm, bottom=True)

    return fig, ax_tt, ax_chj, ax_cbj, ax_ccj, ax_crj
def joint_radius_ratio_plot(df, fw=10, nb=30, gs=25, **kwargs):
    """Joint plot of the true radius ratio against the host and contaminant
    temperatures, impact parameter and stellar density."""
    panels = _jplot(df.teff_h, df.teff_c, df.k_true, df.b, df.rho, fw, nb, gs,
                    ylabel='True radius ratio', **kwargs)
    return panels[0]
def joint_contamination_plot(df, fw=10, nb=30, gs=25, **kwargs):
    """Joint plot of the contamination against the host and contaminant
    temperatures, impact parameter and stellar density."""
    panels = _jplot(df.teff_h, df.teff_c, df.cnt, df.b, df.rho, fw, nb, gs, **kwargs)
    return panels[0]
|
Did you know that Apples & Bananas works with organizations for fundraising? Your class, youth group, or other organization could set up an event for people to shop at Apples & Bananas, and your group would get a percent of the pre-tax sales. All you need to do is pass out flyers! Please email us for more information or to set up a fundraising event.
|
#!flask/bin/python
import json
import os
import urllib2
import httplib2 as http
import requests
from creds import bigOvenAPIkey
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
from flask import Flask, jsonify
# Static demo recipes keyed by recipe ID. Only referenced by the
# commented-out jsonify variant further below — presumably kept as sample
# data for early development; TODO confirm whether it can be removed.
recipe_blob_dct = {
    1 : {
        'id': 1,
        'title': 'Chicken Alfredo',
        'description': 'Chicken & Pasta & Cream.',
        'img': 'https://c1.staticflickr.com/3/2504/3874012191_48ec021023.jpg'
    },
    2 : {
        'id': 2,
        'title': 'Lasagna',
        'description': 'Garfield\'s Favorite.',
        'img': 'https://upload.wikimedia.org/wikipedia/commons/6/6b/Lasagna_(1).jpg'
    },
    3 : {
        'id': 3,
        'title': 'Pizza',
        'description': 'Best served cold, just like revenge.',
        'img': 'https://upload.wikimedia.org/wikipedia/commons/9/95/Pizza_with_various_toppings.jpg'
    }
}
def get_recipe_ids():
    """Return a JSON string listing the IDs of locally cached recipes.

    Scans the ``recipeJSONs/`` directory for ``*.json`` files whose base
    names are numeric recipe IDs.

    @return: a JSON string of the form ``{"response": [id, ...]}``
    """
    # endswith() replaces the old substring test (".json" in name), which
    # also matched names like "1.json.bak".
    ids = [
        int(os.path.splitext(name)[0])
        for name in os.listdir("recipeJSONs/")
        if name.endswith(".json")
    ]
    return json.dumps({'response': ids})
def get_recipe_short_descriptions():
    """Return a JSON string of listing-view summaries for cached recipes.

    Reads every ``*.json`` file in ``recipeJSONs/`` and extracts only the
    fields needed to render a recipe list.

    @return: a JSON string of the form
        ``{"response": [{"RecipeID": ..., "Title": ..., "Description": ...,
        "ImageURL": ...}, ...]}``
    """
    summaries = []
    for name in os.listdir("recipeJSONs/"):
        # endswith() replaces the old substring test, which also matched
        # names like "1.json.bak"; the old unused splitext() call is gone.
        if not name.endswith(".json"):
            continue
        with open(os.path.join("recipeJSONs", name)) as f:
            info = json.load(f)
        summaries.append({
            "RecipeID": info["RecipeID"],
            "Title": info["Title"],
            "Description": info["Description"],
            "ImageURL": info["ImageURL"],
        })
    return json.dumps({'response': summaries})
def get_recipe_info(recipe_id):
    """Return full recipe details as a JSON string.

    Serves the recipe from the local ``recipeJSONs/`` cache when present;
    otherwise fetches it from the BigOven API.

    @type recipe_id: number
    @return: a JSON string of the form ``{"response": {...recipe...}}``
    """
    cached_ids = json.loads(get_recipe_ids())['response']
    if recipe_id not in cached_ids:
        # Cache miss: fetch from BigOven. NOTE(review): no error handling —
        # a non-JSON error body from the API will raise here; confirm the
        # caller tolerates that.
        url_path = "http://api.bigoven.com/recipe/" + str(recipe_id) + "?api_key=" + bigOvenAPIkey
        headers = {'content-type': 'application/json'}
        req = requests.get(url_path, headers=headers)
        recipe_info = json.loads(req.content)
    else:
        # Cache hit: read the local file. The with-block fixes the old
        # leaked file handle from open(path).read().
        json_path = os.path.join("recipeJSONs", "%d.json" % recipe_id)
        with open(json_path) as f:
            recipe_info = json.load(f)
    return json.dumps({'response': recipe_info})
def main():
    """Ad-hoc smoke test: fetch and print a single known recipe."""
    # print() call form works on both Python 2 and 3 with a single
    # argument; the original bare "print expr" statement was Python 2 only.
    print(get_recipe_info(158905))


if __name__ == '__main__':
    main()
|
Beau got into real estate because he enjoys working face to face with people in the community. That along with a fascination with architecture and interior design as well as a chance to absorb creative design schemes and ideas made real estate a natural profession to jump in to. There are not too many things more gratifying than helping people's home buying dreams come true!
Beau was raised in this valley and through all his travels was never able to find a place to compete with its beauty, comfort and recreational opportunities. He actively volunteers in the community with Big Brothers Big Sisters and organizing and operating charity poker tournaments for such organizations as Wounded Warriors Foundation, American Legion, HRDC and MSU affiliated sororities and fraternities. In his free time, Beau enjoys tennis, backpacking, getting to the lake in the summer and skiing, ice fishing and surfing distant beach destinations in the winter!
"This valley's vast mountain landscapes and unabated rivers cater to any outdoor enthusiast's desires!"
|
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django.db import models
from django.db.models import Q
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from shuup.core.models import CustomerTaxGroup, Tax, TaxClass
from shuup.utils.patterns import Pattern, pattern_matches
class TaxRuleQuerySet(models.QuerySet):
    def may_match_postal_code(self, postalcode):
        """Restrict to rules whose postal-code range could contain *postalcode*.

        Rules without a stored range (``_postal_codes_min`` is NULL) always
        pass; rules with a range pass only when the given postal code lies
        between the stored alphabetical limits. With a falsy postal code,
        only range-less rules match.
        """
        no_range = Q(_postal_codes_min__isnull=True)
        if postalcode:
            within = Q(_postal_codes_min__lte=postalcode) & Q(_postal_codes_max__gte=postalcode)
        else:
            within = Q()
        return self.filter(no_range | within)
@python_2_unicode_compatible
class TaxRule(models.Model):
    """A single tax rule: which tax applies, where, and for whom.

    A rule matches a taxing context by customer tax group, country,
    region and postal code. ``priority`` and ``override_group`` control
    how several matching rules combine (added vs. compound taxes, and
    exemption overrides).
    """
    enabled = models.BooleanField(default=True, verbose_name=_('enabled'), db_index=True)
    tax_classes = models.ManyToManyField(
        TaxClass,
        verbose_name=_("tax classes"), help_text=_(
            "Tax classes of the items to be taxed"))
    customer_tax_groups = models.ManyToManyField(
        CustomerTaxGroup, blank=True,
        verbose_name=_("customer tax groups"))
    country_codes_pattern = models.CharField(
        max_length=300, blank=True,
        verbose_name=_("country codes pattern"))
    region_codes_pattern = models.CharField(
        max_length=500, blank=True,
        verbose_name=_("region codes pattern"))
    postal_codes_pattern = models.CharField(
        max_length=500, blank=True,
        verbose_name=_("postal codes pattern"))

    # Denormalized alphabetical limits of postal_codes_pattern, maintained
    # by save(); used by TaxRuleQuerySet.may_match_postal_code for cheap
    # database-side prefiltering.
    _postal_codes_min = models.CharField(max_length=100, blank=True, null=True)
    _postal_codes_max = models.CharField(max_length=100, blank=True, null=True)

    priority = models.IntegerField(
        default=0,
        verbose_name=_("priority"), help_text=_(
            "Rules with same priority define added taxes (e.g. US taxes) "
            "and rules with different priority define compound taxes "
            "(e.g. Canada Quebec PST case)"))
    override_group = models.IntegerField(
        default=0,
        verbose_name=_("override group number"), help_text=_(
            "If several rules match, only the rules with the highest "
            "override group number will be effective. This can be "
            "used, for example, to implement tax exemption by adding "
            "a rule with very high override group that sets a zero tax."))
    tax = models.ForeignKey(Tax, on_delete=models.PROTECT, verbose_name=_('tax'))

    objects = TaxRuleQuerySet.as_manager()

    def matches(self, taxing_context):
        """
        Check if this tax rule matches given taxing context.

        An empty pattern (or no configured customer tax groups) acts as a
        wildcard for that dimension.

        :type taxing_context: shuup.core.taxing.TaxingContext
        :rtype: bool
        """
        if taxing_context.customer_tax_group:
            tax_groups = set(self.customer_tax_groups.all())
            if tax_groups:
                if taxing_context.customer_tax_group not in tax_groups:
                    return False
        if self.country_codes_pattern:
            if not pattern_matches(self.country_codes_pattern, taxing_context.country_code):
                return False
        if self.region_codes_pattern:
            if not pattern_matches(self.region_codes_pattern, taxing_context.region_code):
                return False
        if self.postal_codes_pattern:
            if not pattern_matches(self.postal_codes_pattern, taxing_context.postal_code):
                return False
        return True

    def save(self, *args, **kwargs):
        """Keep the denormalized postal-code limits in sync with the pattern.

        Bug fix: previously, clearing ``postal_codes_pattern`` left stale
        ``_postal_codes_min``/``_postal_codes_max`` values in place, so
        ``may_match_postal_code`` could keep excluding rules based on an
        obsolete range. The limits are now reset to NULL (wildcard) when
        the pattern is removed.
        """
        if self.postal_codes_pattern:
            min_value, max_value = Pattern(self.postal_codes_pattern).get_alphabetical_limits()
            self._postal_codes_min = min_value
            self._postal_codes_max = max_value
        else:
            self._postal_codes_min = None
            self._postal_codes_max = None
        return super(TaxRule, self).save(*args, **kwargs)

    def __str__(self):
        return _("Tax rule {} ({})").format(self.pk, self.tax)
|
Boasting a practical and stylish design, the S'well 16oz Incognito traveler bottle is the perfect addition to your everyday accessories. Crafted from triple-walled stainless steel, this thermos flask keeps your cold drinks cold for twenty-four hours and hot drinks hot for twelve hours. Offering a large mouth that will easily fit ice cubes as well as a vacuum seal, this water bottle by S'well is perfect for trips to the gym, outdoor sports or activities.
|
'''
Copyright (c) 2015, Salesforce.com, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
import unittest
import os
import json
import string
import random
import getpass
from datetime import datetime
from Empire.creds import CredentialManager
import config
# Build the test configuration and unlock the encrypted credential store
# at import time, before the repos.repotracker import below runs.
configuration = config.Configuration()
credentials_file = configuration.get('credentials_file')
# The decryption key comes from the environment (e.g. in CI); fall back
# to an interactive prompt for local runs.
credential_key = os.environ.get('CREDENTIAL_KEY')
if credential_key is None:
    credential_key = getpass.getpass('Credential Key:')
credential_manager = CredentialManager(credentials_file, credential_key)
# Published on the config module — presumably consumed by the modules
# imported next; TODO confirm.
config.credential_manager = credential_manager
from repos.repotracker import RepoTracker
def id_generator(size=6, chars=string.ascii_uppercase + string.digits):
    """Return a random identifier of *size* characters drawn from *chars*.

    Defaults to six uppercase letters/digits. Uses the ``random`` module,
    so it is NOT suitable for security-sensitive tokens.
    """
    picked = []
    for _ in range(size):
        picked.append(random.choice(chars))
    return "".join(picked)
class RepoTrackerTest(unittest.TestCase):
    """Round-trip tests for RepoTracker persistence of identifiers and timestamps."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_last_identifier(self):
        """An identifier stored under a key is read back unchanged."""
        tracker = RepoTracker()
        expected = id_generator()
        tracker.update_identifier("test-identifier", expected)
        self.assertEqual(expected, tracker.last_identifier("test-identifier"))

    def test_last_run_completed(self):
        """A completion timestamp stored under a key is read back unchanged."""
        tracker = RepoTracker()
        expected = datetime.utcnow()
        tracker.update_last_run_completed("test-last-run", expected)
        self.assertEqual(expected, tracker.last_run_completed("test-last-run"))
|
This section of Moodle details where you can purchase recommended revision guides for various subjects.
Revision guides for GCSE Computing are £2.75, and are available from Pupil Services. Please note that there are TWO guides for this subject. One is the revision guide and the other is the accompanying workbook. These are sold together as a pack for £5.50.
The packs of revision guides cost £7 for three books from CGP Publishers. They provide direct preparation for the English Language exam. To buy, please purchase from Mrs Martin at Pupil Services, who will provide a receipt.
You then collect books from Mrs De Nitto, showing the receipt.
Geography GCSE revision guides are £3.25, and can be purchased from Mrs Martin at Pupil Services.
History revision guides are not stocked by the school, but can be purchased from Amazon below.
Maths Revision Guides and Workbooks are £2.50 and can be purchased from either the Maths Department directly, or via Mrs Martin at Pupil Services.
Revision guides and workbooks for both French and German are available from your MFL teachers. They cost £2 each.
RE revision guides, for both years 10 and 11, are available directly from Hodder and Stoughton, or can be purchased from Amazon with the links below.
The Science Department have a number of different revision guides all priced at £3.00. We also have GCSE revision workbooks priced at £2.00. These are all available from the Science Prep Room (between rooms 15 and 16).
|
from intelligine.core.exceptions import BestMoleculeHere, MoleculeGlandDisabled
from intelligine.simulation.molecule.DirectionMolecule import DirectionMolecule
class MoleculeGland():
    """Base class for a gland that deposits direction molecules.

    The gland belongs to a host (which must provide ``get_position()``
    and ``get_brain()``) and stays inert until enabled. Subclasses must
    implement ``get_molecule()``.
    """

    def __init__(self, host, context):
        self._molecule_type = None
        self._host = host
        self._context = context
        self._enabled = False

    def set_molecule_type(self, molecule_type):
        """Select the type of molecule this gland produces."""
        self._molecule_type = molecule_type

    def get_molecule_type(self):
        """Return the configured molecule type, failing loudly if unset."""
        if self._molecule_type is None:
            raise Exception("molecule_type not specified")
        return self._molecule_type

    def get_molecule(self):
        """Produce the molecule to deposit; must be overridden by subclasses."""
        raise NotImplementedError()

    def appose(self):
        """Deposit this gland's molecule at the host's current position.

        Raises MoleculeGlandDisabled when the gland is disabled. When a
        stronger molecule is already present at the position, the host's
        brain is updated with that better distance instead.
        """
        if not self._enabled:
            raise MoleculeGlandDisabled()
        try:
            position = self._host.get_position()
            DirectionMolecule.appose(self._context, position, self.get_molecule())
        except BestMoleculeHere as best_here:
            self._host.get_brain().set_distance_from_objective(best_here.get_best_distance())

    def disable(self):
        """Stop the gland from apposing molecules."""
        self._enabled = False

    def enable(self):
        """Allow the gland to appose molecules."""
        self._enabled = True

    def is_enabled(self):
        """Return True when the gland may appose molecules."""
        return self._enabled
|
Starting your own business can be a rewarding, yet financially stressful process and sometimes we underestimate how expensive the essential office and IT equipment required can be. Which is why LendingArch are here to help get your business up and running sooner, without the financial burden.
Whether you require office and/or IT equipment and you need it fast, we can help. We provide affordable office and IT equipment lease or finance options for businesses all over Canada. Whether you need printers, fax machines, desks, tables or chairs, we can help.
There are many benefits of leasing your office & IT equipment with LendingArch. From having the ability to upgrade all of your office and IT equipment mid-lease, to affordable lease repayments your business will be able to pay, it really is the smarter choice. Not to mention, at the end of your lease period, you’ll have the option to purchase your equipment outright, at a discounted rate.
Financing your office and IT equipment with LendingArch is an easy, stress-free process. We can help get you the finance you need to get the job done, and we can provide you with a low, competitive finance rate, that won’t affect your daily financial situation. At the end of the duration of your loan, you’ll also own your office and IT equipment outright and can list it as an asset on your balance sheet.
Unlike other office and IT equipment lease and finance providers in Canada, we put you and your business needs first. We’ll help you get the office and IT equipment you need, at an affordable rate, whether it’s through leasing or financing – whichever suits your business the most. And, we’ll help you get it fast. With LendingArch, we’re 100% online, which means no waiting in line and no time spent on hold over the phone. We can get you the lease or finance you need for office and IT equipment the same day as your application, allowing you to get on with business!
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.