content
stringlengths 0
1.05M
| origin
stringclasses 2
values | type
stringclasses 2
values |
|---|---|---|
"""
ipython -i --pdb scripts/train_model.py -- --model cropped_jan02 --data 128_20151029 --use_cropped --as_grey --overwrite --no_test
"""
import numpy as np
from lasagne.layers import dnn
import lasagne as nn
import theano.tensor as T
import theano
from utils.nolearn_net import NeuralNet
from nolearn.lasagne.handlers import SaveWeights
from nolearn_utils.iterators import (
ShuffleBatchIteratorMixin,
BufferedBatchIteratorMixin,
RandomFlipBatchIteratorMixin,
AffineTransformBatchIteratorMixin,
AdjustGammaBatchIteratorMixin,
make_iterator
)
from nolearn_utils.hooks import (
SaveTrainingHistory,
PlotTrainingHistory,
EarlyStopping,
StepDecay
)
from utils import TrainSplit
# from utils.layers import batch_norm
# from utils.iterators import PairBatchIteratorMixin
# from utils.nonlinearities import low_temperature_softmax
# from utils.layers import TiedDropoutLayer
from utils.layer_macros import conv2dbn2 as conv2dbn
from utils.layer_macros import residual_block3 as residual_block
def float32(k):
    """Cast *k* (scalar or array-like) to a float32 ndarray.

    ``np.cast['float32']`` was removed in NumPy 2.0; this is the documented
    equivalent spelling and produces the same result on NumPy 1.x.
    """
    return np.asarray(k).astype('float32')
# Output artefacts for this run: best weights, accuracy snapshot, training
# history (pickle) and its rendered plot.
model_fname = './models/cropped_jan02.pkl'
model_accuracy_fname = './models/cropped_jan02_accuracy.pkl'
model_history_fname = './models/cropped_jan02_history.pkl'
model_graph_fname = './models/cropped_jan02_history.png'
# Input images are 256x256; 447 target classes.
image_size = 256
batch_size = 16
n_classes = 447
# Training batches are shuffled, buffered and augmented (flips, affine
# transforms, gamma adjustment); test batches get no augmentation mixins.
train_iterator_mixins = [
    ShuffleBatchIteratorMixin,
    BufferedBatchIteratorMixin,
    RandomFlipBatchIteratorMixin,
    AffineTransformBatchIteratorMixin,
    AdjustGammaBatchIteratorMixin,
]
TrainIterator = make_iterator('TrainIterator', train_iterator_mixins)
test_iterator_mixins = [
]
TestIterator = make_iterator('TestIterator', test_iterator_mixins)
# Augmentation settings for training batches; the affine choices define the
# random scale/shear/translation/rotation ranges sampled per image.
train_iterator_kwargs = dict(
    batch_size=batch_size,
    buffer_size=16,
    flip_horizontal_p=0.5,
    flip_vertical_p=0.5,
    affine_p=1.,
    affine_scale_choices=np.linspace(0.5, 1.5, 11),
    affine_shear_choices=np.linspace(-0.25, 0.25, 11),
    affine_translation_choices=np.arange(-32, 32, 1),
    affine_rotation_choices=np.arange(-45, 45, 1),
    adjust_gamma_p=0.5,
    # Fixed typo: was "adjust_gamma_chocies", which the iterator would
    # presumably have ignored, leaving gamma choices at their default.
    adjust_gamma_choices=np.linspace(0.8, 1.2, 11)
)
train_iterator = TrainIterator(**train_iterator_kwargs)
test_iterator_kwargs = dict(
    batch_size=batch_size,
)
test_iterator = TestIterator(**test_iterator_kwargs)
# Per-epoch hooks: keep only the best weights, persist and plot the history,
# and stop after 100 epochs without improvement.
save_weights = SaveWeights(model_fname, only_best=True, pickle=False)
save_training_history = SaveTrainingHistory(model_history_fname)
plot_training_history = PlotTrainingHistory(model_graph_fname)
early_stopping = EarlyStopping(patience=100)
# Shared convolution settings: 'same' padding, very leaky ReLU.
conv_kwargs = dict(
    pad='same',
    nonlinearity=nn.nonlinearities.very_leaky_rectify,
)
# Network definition: ResNet-style CNN. The "NxN" comments track the spatial
# resolution of the feature maps after each stage (input is 256x256, 3 channels).
l = nn.layers.InputLayer(name='in', shape=(None, 3, image_size, image_size))
# 256x256
# Stem: strided 7x7 conv+batchnorm halves the resolution.
l = conv2dbn(
    l, name='l1c1', num_filters=32, filter_size=(7, 7), stride=2,
    **conv_kwargs
)
# 128x128
# Stage 2: three residual blocks at constant resolution.
for i in range(3):
    l = residual_block(
        l, name='2c%s' % i,
        # bottleneck=False,
        num_filters=48, filter_size=(3, 3),
        num_layers=2,
        **conv_kwargs
    )
# 128x128
# Stages 3-6: the first block of each stage downsamples with stride 2 while
# the filter count grows (64 -> 80 -> 96 -> 128).
for i in range(3):
    actual_stride = 2 if i == 0 else 1
    l = residual_block(
        l, name='3c%s' % i,
        # bottleneck=True, bottleneck_factor=4,
        num_filters=64, filter_size=(3, 3), stride=actual_stride,
        num_layers=2,
        **conv_kwargs
    )
# 64x64
for i in range(3):
    actual_stride = 2 if i == 0 else 1
    l = residual_block(
        l, name='4c%s' % i,
        # bottleneck=True, bottleneck_factor=4,
        num_filters=80, filter_size=(3, 3), stride=actual_stride,
        num_layers=3,
        **conv_kwargs
    )
# 32x32
for i in range(4):
    actual_stride = 2 if i == 0 else 1
    l = residual_block(
        l, name='5c%s' % i,
        # bottleneck=True, bottleneck_factor=4,
        num_filters=96, filter_size=(3, 3), stride=actual_stride,
        num_layers=3,
        **conv_kwargs
    )
# 16x16
for i in range(5):
    actual_stride = 2 if i == 0 else 1
    l = residual_block(
        l, name='6c%s' % i,
        # bottleneck=True, bottleneck_factor=4,
        num_filters=128, filter_size=(3, 3), stride=actual_stride,
        num_layers=3,
        **conv_kwargs
    )
# 8x8
# 8
# Head: global 8x8 average pooling -> dropout -> softmax classifier.
l = nn.layers.dnn.Pool2DDNNLayer(l, name='gp', pool_size=8, mode='average_inc_pad')
l = nn.layers.DropoutLayer(l, name='gpdrop', p=0.5)
l = nn.layers.DenseLayer(l, name='out', num_units=n_classes, nonlinearity=nn.nonlinearities.softmax)
# Trainer: SGD with Nesterov momentum. The learning rate is a Theano shared
# variable so the StepDecay hook can anneal it from 1e-1 down to 1e-5.
net = NeuralNet(
    layers=l,
    regression=False,
    use_label_encoder=False,
    objective_l2=1e-6,
    # update=nn.updates.adam,
    # update_learning_rate=1e-2,
    update=nn.updates.nesterov_momentum,
    update_learning_rate=theano.shared(float32(1e-1)),
    # 15% of the data is held out for validation (fixed seed, no stratification).
    train_split=TrainSplit(0.15, random_state=42, stratify=False),
    batch_iterator_train=train_iterator,
    batch_iterator_test=test_iterator,
    on_epoch_finished=[
        save_weights,
        save_training_history,
        plot_training_history,
        early_stopping,
        StepDecay('update_learning_rate', start=1e-1, stop=1e-5)
    ],
    verbose=10,
    max_epochs=2000,
)
|
nilq/baby-python
|
python
|
from __future__ import annotations
import abc
import datetime
import decimal
import typing as t
import zoneinfo
# region: Bases
class SpecialValue(abc.ABC):
    """Represents a special value specific to an SQL Type.

    Pairs a Python-side value (or a zero-argument callable producing one)
    with the literal SQL spelling of that value.
    """

    def __init__(self, python_value: t.Any, sql_value: str):
        # The Python value may be a zero-argument callable (e.g.
        # ``datetime.datetime.now``), evaluated lazily on each ``py`` access.
        self._py_value = python_value
        self.sql = sql_value

    @property
    def py(self) -> t.Any:
        """Python representation of the special value."""
        # ``callable()`` replaces ``isinstance(..., t.Callable)``: isinstance
        # checks against typing aliases are discouraged/deprecated at runtime.
        if callable(self._py_value):
            return self._py_value()
        return self._py_value

    def __repr__(self):
        return f'SpecialValue({self.py}, "{self.sql}")'

    def __str__(self):
        return self.sql

    def __eq__(self, other: t.Any):
        if not isinstance(other, SpecialValue):
            return False
        return self.sql == other.sql and self._py_value == other._py_value

    def __hash__(self):
        # Defining __eq__ alone would make instances unhashable; hash on the
        # SQL spelling, which is consistent with __eq__.
        return hash(self.sql)
class SQLTypeMeta(abc.ABCMeta):
    """Metaclass defining the behaviour of non-initialised SQLType classes.

    Gives SQLType *classes* the same repr/str/eq behaviour their instances
    have, and maintains a registry mapping Python types to SQLType classes
    in ``SQLTypeMeta.__types__``.
    """

    # Registry of python type -> SQLType subclass, filled as classes are defined.
    __types__ = dict()
    py: t.Any
    sql: str

    def __init__(cls, *_args, **_kwargs):
        super().__init__(cls)
        # The abstract SQLType base itself is not registered.
        if cls.__name__ == "SQLType":
            return
        # NOTE(review): later definitions overwrite earlier ones for the same
        # ``py`` type (e.g. the several ``py = int`` subclasses), so the
        # last-defined class wins - confirm this is intended.
        SQLTypeMeta.__types__[cls.py] = cls

    def __repr__(self):
        py = getattr(self, "py", None)
        if py:
            py = f"'{py.__name__}'"
        return f"<{self.__name__} python={py} sql='{getattr(self, 'sql', None)}'>"

    def __str__(self):
        return self.sql

    def __eq__(self, other: t.Any):
        # Equal when the other side is an SQLType class or instance with the
        # same SQL spelling. NOTE(review): raises AttributeError if the other
        # side matches the isinstance check but lacks ``sql``.
        if isinstance(other, SQLTypeMeta) or isinstance(type(other), SQLTypeMeta):
            return self.sql == other.sql
        return False

    def __hash__(self):
        return hash(repr(self))
class SQLType(metaclass=SQLTypeMeta):
    """Base class representing an SQL datatype.

    Instances mirror the repr/str/eq behaviour that :class:`SQLTypeMeta`
    provides for the classes themselves.
    """

    py: t.Any
    sql: str

    def __init__(self):
        pass

    def __repr__(self):
        py_name = getattr(self, "py", None)
        if py_name:  # pragma: no cover
            py_name = f"'{py_name.__name__}'"
        return f"<{self.__class__.__name__} python={py_name} sql='{getattr(self, 'sql', None)}'>"

    def __str__(self):
        return self.sql

    def __eq__(self, other: t.Any):
        is_sql_type = isinstance(other, SQLTypeMeta) or isinstance(type(other), SQLTypeMeta)
        return self.sql == other.sql if is_sql_type else False

    def __hash__(self):
        return hash(repr(self))
# endregion
# region: Numeric Types
class Integer(SQLType):
    """
    Whole number from -2147483648 to +2147483647.
    Uses 4 bytes of storage.
    """
    # NOTE(review): docstring range fixed - the original carried the 2-byte
    # SMALLINT range here, but INTEGER is the 4-byte type.
    py = int
    sql = "INTEGER"
class SmallInteger(Integer):
    """
    Whole number from -32768 to +32767.
    Uses 2 bytes of storage.
    """
    # NOTE(review): docstring range fixed - the original carried the 4-byte
    # INTEGER range here, but SMALLINT is the 2-byte type.
    sql = "SMALLINT"
class BigInteger(Integer):
    """
    Whole number from -9223372036854775808 to +9223372036854775807.
    Uses 8 bytes of storage.
    """
    # Inherits py = int from Integer.
    sql = "BIGINT"
class Serial(Integer):
    """
    Auto-incrementing number from 1 to 2147483647.
    Uses 4 bytes of storage.
    """
    # Inherits py = int from Integer.
    sql = "SERIAL"
class SmallSerial(Serial):
    """
    Auto-incrementing number from 1 to 32767.
    Uses 2 bytes of storage.
    """
    sql = "SMALLSERIAL"
class BigSerial(Serial):
    """
    Auto-incrementing number from 1 to 9223372036854775807.
    Uses 8 bytes of storage.
    """
    sql = "BIGSERIAL"
class Numeric(SQLType):
    """
    Precise decimal number with configurable precision and scale.
    Uses 3 to 8 bytes overhead and 2 bytes for every 4 decimal digits.
    """
    py = decimal.Decimal
    sql = "NUMERIC"
    # special values
    not_a_number = SpecialValue(decimal.Decimal("NaN"), "'NaN'")
    def __init__(self, precision: int, scale: int = 0):  # noqa
        # Parameterised form renders as NUMERIC(precision, scale); using the
        # bare class still renders plain NUMERIC via the metaclass.
        self.precision = precision
        self.scale = scale
        self.sql = f"NUMERIC({precision}, {scale})"
class Decimal(Numeric):
    """
    Precise decimal number with configurable precision and scale.
    Uses 3 to 8 bytes storage overhead and 2 bytes for every 4 decimal digits.

    ``DECIMAL`` is an exact alias of ``NUMERIC`` in PostgreSQL.
    """
    sql = "DECIMAL"

    def __init__(self, precision: int, scale: int = 0):  # noqa
        # Override the inherited initialiser: Numeric.__init__ would set
        # ``sql`` to "NUMERIC(...)", contradicting this class's DECIMAL name.
        # (Semantically equivalent in PostgreSQL, but inconsistent.)
        self.precision = precision
        self.scale = scale
        self.sql = f"DECIMAL({precision}, {scale})"
class Real(SQLType):
    """
    Inexact floating-point number with a range of 1E-37 to 1E+37.
    Uses 4 bytes of storage.
    """
    py = float
    sql = "REAL"
    # special values: SQL literal spellings for the IEEE-754 specials.
    not_a_number = SpecialValue(float("NaN"), "'NaN'")
    infinity = SpecialValue(float("inf"), "'Infinity'")
    negative_infinity = SpecialValue(float("-inf"), "'-Infinity'")
class DoublePrecision(Real):
    """
    Inexact floating-point number with a range of 1E-307 to 1E+308.
    Uses 8 bytes of storage.
    """
    # Inherits py = float and the IEEE special values from Real.
    sql = "DOUBLE PRECISION"
class Money(SQLType):
    """
    Currency amount with a fixed precision ranging from -92233720368547758.08 to +92233720368547758.07.
    Uses 8 bytes of storage.
    """
    # Represented as a plain string on the Python side (locale-formatted value).
    py = str
    sql = "MONEY"
# endregion
# region: String Types
class Text(SQLType):
    """
    Variable unlimited string.
    Uses 1 byte of storage overhead for strings under 126 bytes in length, or 4 bytes if over that length.
    """
    py = str
    sql = "TEXT"
class ByteA(SQLType):
    """
    Variable unlimited binary string.
    Uses 1 byte of storage overhead for strings under 126 bytes in length, or 4 bytes if over that length.
    """
    py = bytes
    sql = "BYTEA"
# endregion
# region: DateTime Types
class Timestamp(SQLType):
    """
    Timezone naive datetime.
    Uses 8 bytes of storage.
    """
    py = datetime.datetime
    sql = "TIMESTAMP"
    # special values
    # datetime.utcfromtimestamp() is deprecated (Python 3.12); build the same
    # naive UTC datetime from an aware one instead.
    epoch = SpecialValue(
        datetime.datetime.fromtimestamp(0, datetime.timezone.utc).replace(tzinfo=None),
        "'Epoch'",
    )
    # NOTE(review): datetime.max/min stand in for SQL infinity; they are not
    # true infinities.
    infinity = SpecialValue(datetime.datetime.max, "'Infinity'")
    negative_infinity = SpecialValue(datetime.datetime.min, "'-Infinity'")
    # Callables are evaluated lazily on each ``.py`` access.
    now = SpecialValue(datetime.datetime.now, "Now")
    today = SpecialValue(datetime.datetime.today, "Today")
    tomorrow = SpecialValue(lambda: datetime.datetime.today() + datetime.timedelta(days=1), "Tomorrow")
    yesterday = SpecialValue(lambda: datetime.datetime.today() + datetime.timedelta(days=-1), "Yesterday")

    def __init__(self, precision: int):  # noqa
        # Parameterised form: TIMESTAMP(precision).
        self.precision = precision
        self.sql = f"TIMESTAMP({precision})"
class TimestampTZ(Timestamp):
    """
    Timezone aware datetime.
    Uses 8 bytes of storage.
    """
    # NOTE(review): inherits Timestamp's special values, which are naive
    # datetimes - confirm whether aware equivalents are wanted here.
    sql = "TIMESTAMP WITH TIME ZONE"
    def __init__(self, precision: int):  # noqa
        # Precision goes inside TIMESTAMP(...), before the WITH TIME ZONE
        # qualifier, per PostgreSQL syntax.
        self.precision = precision
        self.sql = f"TIMESTAMP({precision}) WITH TIME ZONE"
class Date(SQLType):
    """
    Date from 4713BC to 5874897AD.
    Uses 4 bytes of storage.
    """
    py = datetime.date
    sql = "DATE"
    # special values
    # datetime.utcfromtimestamp() is deprecated (Python 3.12); derive the same
    # 1970-01-01 date from an aware UTC datetime instead.
    epoch = SpecialValue(
        datetime.datetime.fromtimestamp(0, datetime.timezone.utc).date(),
        "'Epoch'",
    )
    infinity = SpecialValue(datetime.datetime.max.date(), "'Infinity'")
    negative_infinity = SpecialValue(datetime.datetime.min.date(), "'-Infinity'")
    # For dates, 'Now' and 'Today' resolve to the same Python value.
    now = SpecialValue(datetime.date.today, "Now")
    today = SpecialValue(datetime.date.today, "Today")
    tomorrow = SpecialValue(lambda: datetime.date.today() + datetime.timedelta(days=1), "Tomorrow")
    yesterday = SpecialValue(lambda: datetime.date.today() + datetime.timedelta(days=-1), "Yesterday")
class Time(SQLType):
    """
    Timezone naive time of day.
    Uses 8 bytes of storage.
    """
    py = datetime.time
    sql = "TIME"
    # special values
    now = SpecialValue(lambda: datetime.datetime.now().time(), "Now")
    # NOTE(review): 'allballs' (00:00:00.00 UTC) is constructed with a UTC
    # tzinfo even though this type is documented as timezone naive - confirm.
    allballs = SpecialValue(datetime.time(0, 0, 0, 0, zoneinfo.ZoneInfo("UTC")), "Allballs")
    def __init__(self, precision: int):  # noqa
        # Parameterised form: TIME(precision).
        self.precision = precision
        self.sql = f"TIME({precision})"
class Interval(SQLType):
    """
    Time interval.
    Uses 16 bytes of storage.
    """
    py = datetime.timedelta
    sql = "INTERVAL"
# endregion
# region: Boolean Types
class Boolean(SQLType):
    """
    True or False value.
    Uses 1 byte of storage.
    """
    py = bool
    sql = "BOOLEAN"
# endregion
# region: Collection Types
class JSON(SQLType):
    """JSON data objects (stored as text; re-parsed on every access)."""
    py = dict
    sql = "JSON"
class JSONB(SQLType):
    """JSONB data objects (binary, indexable JSON)."""
    py = dict
    sql = "JSONB"
class Array(SQLType):
    """Variable length array containing any supported type.

    ``element_type`` is the contained SQL type (class or instance); ``size``
    is an optional fixed length - the default empty string renders an
    unsized array such as ``INTEGER[]``.
    """
    py = list

    def __init__(self, element_type: "SQLTypes", size: t.Union[int, str] = ''):  # noqa
        # Annotation widened (was ``size: int``) to match the '' default used
        # for unsized arrays; element_type annotated with the class/instance
        # union since callers may pass either.
        self.element_type = element_type
        self.element_size = size
        self.sql = f"{element_type}[{size}]"
# endregion
# Accepted anywhere "an SQL type" is expected: a type class or an instance.
SQLTypes = t.Union[SQLType, SQLTypeMeta]
|
nilq/baby-python
|
python
|
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from collections import OrderedDict
import numpy as np
from oneflow.compatible import single_client as flow
import test_global_storage
from test_util import GenArgList
import unittest
def TestMultiInput(x1, x2):
    """Build and run the custom "TestMultiInput" user op on (x1, x2),
    returning its single output blob ``y``."""
    op = (
        flow.user_op_builder("my_test_multi_input")
        .Op("TestMultiInput")
        .Input("x1", [x1])
        .Input("x2", [x2])
        .Output("y")
        .Build()
    )
    return op.InferAndTryRun().RemoteBlobList()[0]
@flow.unittest.skip_unless_1n1d()
class Test_TestMultiInputGrad(flow.unittest.TestCase):
    """Checks the forward output and the gradients of the TestMultiInput op."""

    @unittest.skipIf(os.getenv("ONEFLOW_TEST_CPU_ONLY"), "only test cpu cases")
    def test_TestMultiInput_grad_mirrored_inplace(test_case):
        func_config = flow.FunctionConfig()
        func_config.default_data_type(flow.float)
        func_config.default_logical_view(flow.scope.mirrored_view())
        shape = (
            3,
            3,
        )

        @flow.global_function(type="train", function_config=func_config)
        def TestMultiInputJob():
            with flow.scope.placement("gpu", "0:0"):
                x1 = flow.get_variable(
                    "x1",
                    shape=shape,
                    dtype=flow.float,
                    initializer=flow.random_uniform_initializer(minval=-10, maxval=10),
                    trainable=True,
                )
                x2 = flow.get_variable(
                    "x2",
                    shape=shape,
                    dtype=flow.float,
                    initializer=flow.random_uniform_initializer(minval=-10, maxval=10),
                    trainable=True,
                )
                loss = TestMultiInput(x1, x2)
                flow.optimizer.SGD(
                    flow.optimizer.PiecewiseConstantScheduler([], [1e-4]), momentum=0
                ).minimize(loss)
                # Capture values and gradients for the host-side checks below.
                flow.watch(x1, test_global_storage.Setter("x1"))
                flow.watch_diff(x1, test_global_storage.Setter("x1_diff"))
                flow.watch(x2, test_global_storage.Setter("x2"))
                flow.watch_diff(x2, test_global_storage.Setter("x2_diff"))
                return loss

        out = TestMultiInputJob().get()
        x1_diff = test_global_storage.Get("x1_diff")
        x2_diff = test_global_storage.Get("x2_diff")
        # Expected: output equals x1, d(loss)/dx1 == 1 and d(loss)/dx2 == 2
        # everywhere - presumably matching the op's registered gradient, which
        # is not visible in this file.
        expect_out = test_global_storage.Get("x1")
        expect_x1_diff = np.ones(shape, dtype=np.float32)
        expect_x2_diff = np.ones(shape, dtype=np.float32) * 2.0
        # print(x1_diff, x2_diff)
        # print(expect_x1_diff, expect_x2_diff)
        assert np.allclose(out.numpy(), expect_out)
        assert np.allclose(x1_diff, expect_x1_diff)
        assert np.allclose(x2_diff, expect_x2_diff)

if __name__ == "__main__":
    unittest.main()
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from obscmd import compat
def init():
    """Create (or reset) the module-level store used by the get/set helpers."""
    global _global_dict
    _global_dict = dict()
def use_lock(key):
    """Return the lock-entry key for *key*, creating the lock on first use.

    The lock itself is stored in the global dict under ``<key>_lock``.
    """
    lock_key = key + '_lock'
    if lock_key not in _global_dict:
        # Lazily create one lock per key. compat.Lock is presumably a
        # thread/process lock - confirm against obscmd.compat.
        _global_dict[lock_key] = compat.Lock()
    return lock_key
def set_value(key, value):
    """Store *value* under *key* in the global dict (no locking)."""
    _global_dict[key] = value
def set_value_lock(key, value):
    """Assign ``.value`` on the object stored under *key*, holding its lock.

    NOTE(review): unlike set_value() this mutates ``_global_dict[key].value``,
    so the entry is expected to be a shared value holder (e.g. a
    multiprocessing Value) - confirm against callers.
    """
    lock_key = use_lock(key)
    with _global_dict[lock_key]:
        _global_dict[key].value = value
def get_value(key, default=None):
    """Return the value stored under *key*, or *default* when absent."""
    return _global_dict.get(key, default)
def append_list_lock(key, value):
    """Append *value* to the list stored under *key*, holding its lock."""
    lock_key = use_lock(key)
    with _global_dict[lock_key]:
        _global_dict[key].append(value)
|
nilq/baby-python
|
python
|
from hill import Hill
from numpy.linalg.linalg import norm
from jumper import Jumper
from jump_result import JumpResult
from physics_simulator import PhysicsSimulator
import numpy as np
import random
# Aerodynamic coefficient samples (kw and ka tables), measured at the angles
# of attack below, in degrees. Each table holds three curves that
# get_aerodynamic_data() blends between based on the jumper's flight stat -
# presumably index 0 = best and index 2 = worst skill level (index 0 is used
# at maximum stat).
angles = [0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20,
          22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44]
kw_pts = [[0.00185, 0.00204, 0.00223, 0.00243, 0.00261, 0.00281, 0.00301, 0.00319,
           0.00338, 0.00355, 0.00372, 0.00388, 0.00403, 0.00418, 0.00432, 0.00447,
           0.00462, 0.00479, 0.00502, 0.00537, 0.00614, 0.00691, 0.00767],
          [0.00232, 0.00245, 0.00258, 0.00272, 0.00285, 0.00298, 0.00311, 0.00325,
           0.00337, 0.00350, 0.00362, 0.00374, 0.00386, 0.00398, 0.00410, 0.00422,
           0.00436, 0.00453, 0.00474, 0.00504, 0.00553, 0.00602, 0.00651],
          [0.00261, 0.00271, 0.00282, 0.00293, 0.00304, 0.00315, 0.00326, 0.00337,
           0.00347, 0.00357, 0.00367, 0.00376, 0.00386, 0.00396, 0.00407, 0.00419,
           0.00432, 0.00449, 0.00471, 0.00503, 0.00555, 0.00606, 0.00658]]
ka_pts = [[0.00093, 0.00139, 0.00185, 0.00231, 0.00275, 0.00316, 0.00354, 0.00390,
           0.00424, 0.00455, 0.00484, 0.00511, 0.00534, 0.00555, 0.00574, 0.00591,
           0.00605, 0.00617, 0.00628, 0.00638, 0.00655, 0.00672, 0.00689],
          [0.00116, 0.00180, 0.00244, 0.00308, 0.00365, 0.00396, 0.00424, 0.00450,
           0.00472, 0.00492, 0.00508, 0.00522, 0.00534, 0.00543, 0.00550, 0.00555,
           0.00560, 0.00565, 0.00571, 0.00582, 0.00606, 0.00629, 0.00652],
          [0.00130, 0.00177, 0.00224, 0.00270, 0.00316, 0.00350, 0.00382, 0.00411,
           0.00436, 0.00459, 0.00479, 0.00496, 0.00510, 0.00521, 0.00531, 0.00538,
           0.00545, 0.00551, 0.00558, 0.00569, 0.00590, 0.00611, 0.00632]]
# Fit a degree-4 polynomial through each sampled curve for fast evaluation at
# arbitrary angles.
kw = [np.poly1d(np.polyfit(angles, kw_pt, 4)) for kw_pt in kw_pts]
ka = [np.poly1d(np.polyfit(angles, ka_pt, 4)) for ka_pt in ka_pts]
def get_aerodynamic_data(stat, max_stat, table, angle):
    """Blend between the three curves in *table* based on *stat*.

    Linear interpolation: at stat == 0 the result equals table[2](angle),
    at stat == max_stat/2 it equals table[1](angle), and at stat == max_stat
    it equals table[0](angle).
    """
    half = max_stat / 2
    if stat <= half:
        # Lower half: blend from the worst curve (index 2) to the middle one.
        return table[2](angle) * (half - stat) / half + table[1](angle) * stat / half
    # Upper half: blend from the middle curve to the best one (index 0).
    return table[1](angle) * (max_stat - stat) / half + table[0](angle) * (stat - half) / half
def get_kw(stat, max_stat, angle):
    """Evaluate the kw coefficient table for a jumper with skill *stat* at *angle*."""
    return get_aerodynamic_data(stat, max_stat, kw, angle)
def get_ka(stat, max_stat, angle):
    """Evaluate the ka coefficient table for a jumper with skill *stat* at *angle*."""
    return get_aerodynamic_data(stat, max_stat, ka, angle)
class JumpSimulator:
    """Simulates a complete ski jump (inrun, flight, landing and judging) on
    a given hill."""

    def __init__(self, hill: "Hill"):
        # Annotations are quoted (lazy) so the class can be defined without
        # the project types being importable.
        self.hill = hill
        self.physics_sim = PhysicsSimulator(self.hill.profile)
        # The physics simulator calls back into us for aerodynamic coefficients.
        self.physics_sim.aero_coeffs_fun = self.get_aero_coeffs

    def simulate_jump(self, jumper: "Jumper", wind, gate, jump_seed):
        """Simulate one jump; returns (JumpResult, (fly_x, fly_y)) where the
        second element is the flight trajectory."""
        self.jumper = jumper
        self.wind = wind
        self.gate = gate
        self.jump_seed = jump_seed
        inrun_vel = self.physics_sim.simulate_inrun(gate, jumper.get_inrun_coeff())
        inrun_speed_kmh = np.linalg.norm(inrun_vel) * 3.6  # m/s -> km/h
        takeoff = jumper.get_takeoff_speed()
        fly_x, fly_y, pos, normal_vel_land = self.physics_sim.simulate_flight(
            inrun_vel, takeoff, wind)
        distance = self.hill.profile.get_distance(pos[0])
        judges_points = self.get_judges_points(jump_seed, normal_vel_land)
        # print(normal_vel_land)
        return (JumpResult(inrun_speed_kmh, distance, 0, wind, judges_points, self.hill), (fly_x, fly_y))

    def get_aero_coeffs(self, angle):
        """Return (ka, kw) for the current jumper at the given angle of attack."""
        stat = self.jumper.get_flight_coeffs()
        return (get_ka(stat, 100, angle), get_kw(stat, 100, angle))

    def get_judges_points(self, jump_seed, normal_speed):
        """Five style scores (0..20, halves allowed), seeded deterministically.

        The landing's normal velocity decides the base score band (telemark /
        two legs / touch / fall); the jumper's style stat nudges the bands.
        """
        normal_speed = -normal_speed
        stat = self.jumper.get_style()
        telemark = 4.2 + stat / 100
        two_legs = 5.6 + stat / 300
        touch = 6.4 + stat / 200
        if normal_speed <= telemark:
            base_score = 19
        elif normal_speed <= two_legs:
            base_score = 16.5
        elif normal_speed <= touch:
            base_score = 12.5
        else:
            base_score = 9.
        base_score *= 2
        random.seed(jump_seed)
        bias = 3  # int(np.round((40 - base_score) / 10))
        # random.randint requires integer bounds; base_score can be a float
        # (e.g. 9. * 2 == 18.0), which raises on Python 3.12+. All band values
        # are whole numbers, so int() is exact here.
        lower_bound = int(max(0, base_score - bias))
        upper_bound = int(min(40, base_score + bias))
        scores = [random.randint(
            lower_bound, upper_bound) / 2 for _ in range(5)]
        return scores
|
nilq/baby-python
|
python
|
"""
Implements a simple HTTP/1.0 Server
"""
import socket
# Define socket host and port
SERVER_HOST = '127.0.0.1'
SERVER_PORT = 7777
# Create socket
# SO_REUSEADDR lets the server rebind the port immediately after a restart.
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind((SERVER_HOST, SERVER_PORT))
server_socket.listen(1)
print('Listening on port %s ...' % SERVER_PORT)
# Serve forever: one request per connection (HTTP/1.0 style, Connection: close).
while True:
    # Wait for client connections
    client_connection, client_address = server_socket.accept()
    print(f'accepted from :{client_address}')
    # Get the client request (single read; assumes it fits in 1024 bytes)
    request = client_connection.recv(1024).decode()
    print(request.split('\n')[0])
    # Send HTTP response echoing the peer address and the raw request back.
    response = f'HTTP/1.0 200 OK\r\nConnection: close\r\n\r\nHello {client_address} \n\n\nrequest: {request}\r\n'
    client_connection.sendall(response.encode())
    client_connection.close()
# Close socket
# NOTE(review): unreachable - the loop above never exits.
server_socket.close()
|
nilq/baby-python
|
python
|
# coding=utf-8
#-----------------------------------------------------------
# IMPORTS
#-----------------------------------------------------------
import enigmus
import messages
import random
from entities import Entity, Player, Room
#-----------------------------------------------------------
# CLASSES
#-----------------------------------------------------------
class DoorCode(Entity):
    """A keypad entity: typing the correct code moves the player into the
    room configured for this keypad. All player-facing text is Swedish."""

    def __init__(self):
        super(DoorCode, self).__init__()
        # Set from room file.
        self.code = ''
        self.room = ''
        # Swedish noun forms (indefinite/definite) for the describe API.
        self.describe('ett', [], ['kodlås' , 'lås' ],
                      '' , [], ['kodlåset', 'låset'],
                      'Det är en liten kodterminal för att trycka in koder '
                      'med. Du förmodar att dörrarna till datasalen låses upp '
                      'om man trycker in rätt kod.')
        # React to commands typed by players near this entity.
        self.on_message('player_command', self.__player_command,
                        filter=messages.for_nearby_entities(self))

    def __player_command(self, player, command):
        # Expected command shape: "tryck kod <code>" ("press code <code>").
        args = command.split(' ')
        if args[0] != 'tryck':
            return
        if len(args) < 2 or args[1] != 'kod':
            player.text('Tryck vad? Kod?')
            return
        if len(args) < 3:
            player.text('Vilken kod vill du trycka?')
            return
        code = args[2]
        player.emote('slår in en kod.')
        # One random beep per typed character, capped at four.
        beeps = random.sample(['*beep*', '*bzzzt*', '*boop*', '*bip*', '*BEEP*'], min(4, len(code)))
        player.text('{} piper terminalen när du trycker på '
                    'knapparna och slår in koden {}'.format(' '.join(beeps), code))
        if code != self.code:
            player.text('Ingenting händer.')
            return
        room = player.container
        # Announce the doors opening to everyone in the keypad's room.
        for p in room.get_entities(Player):
            p.text('Glasdörrarna till datasalen slår upp så snabbt att du hoppar '
                   'bakåt.')
        player.emote('går in i datasalen.')
        player.text('Glasdörrarna slår igen bakom dig.')
        # Move the player into the configured destination room and show it.
        enigmus.instance.database.rooms[self.room].add_entity(player)
        player.text(player.container.get_description(exclude_actor=player))
        # ``room`` is still the original room; the player has been moved out,
        # so presumably only the bystanders left behind receive this message.
        for p in room.get_entities(Player):
            p.text('Lika snabbt som de öppnas slår dörrarna igen, alldeles för '
                   'snabbt för att du skulle hinna gå in utan att vara beredd.')
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-11-23 21:48
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Adds the Script model and reshapes the TestCase/Subtask relation:
    Subtask.testcases is removed and TestCase instead gets nullable foreign
    keys to Subtask and Script."""

    dependencies = [
        ('problems', '0040_auto_20161123_2106'),
    ]

    operations = [
        # New Script model, attached to a problem revision.
        migrations.CreateModel(
            name='Script',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=256, verbose_name='title')),
                ('script', models.TextField(verbose_name='script')),
                ('disabled', models.BooleanField(default=False, verbose_name='disabled')),
                ('problem', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='problems.ProblemRevision', verbose_name='problem')),
            ],
            options={
                'abstract': False,
            },
        ),
        # Invert the relation: drop Subtask.testcases ...
        migrations.RemoveField(
            model_name='subtask',
            name='testcases',
        ),
        # ... and point each TestCase at (at most) one Subtask instead.
        migrations.AddField(
            model_name='testcase',
            name='subtasks',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='problems.Subtask'),
        ),
        # TestCase names become non-editable and auto-managed.
        migrations.AlterField(
            model_name='testcase',
            name='name',
            field=models.CharField(blank=True, editable=False, max_length=20, verbose_name='name'),
        ),
        # Optional link from a TestCase to the Script that produces it.
        migrations.AddField(
            model_name='testcase',
            name='script',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='problems.Script'),
        ),
    ]
|
nilq/baby-python
|
python
|
# Ultroid - UserBot
# Copyright (C) 2021-2022 TeamUltroid
#
# This file is a part of < https://github.com/TeamUltroid/Ultroid/ >
# PLease read the GNU Affero General Public License in
# <https://www.github.com/TeamUltroid/Ultroid/blob/main/LICENSE/>.
#
# Ported by @AyiinXd
# FROM Ayiin-Userbot <https://github.com/AyiinXd/Ayiin-Userbot>
# t.me/AyiinXdSupport & t.me/AyiinSupport
# ========================×========================
# Jangan Hapus Credit Ngentod
# ========================×========================
import os
from telethon import Button, custom
from AyiinXd import CMD_HANDLER as cmd
from AyiinXd import CMD_HELP, bot, tgbot
from AyiinXd.ayiin import ayiin_cmd, eor
from Stringyins import get_languages, language, get_string
from .button import BTN_URL_REGEX
def build_keyboards(buttons):
    """Flatten a button spec into rows of telethon URL buttons.

    NOTE(review): ``btn[0]`` is used as BOTH the label and the URL, and rows
    other than the first are never extended (``keyb[0].append``). Both look
    suspicious - confirm the expected ``buttons`` schema (cf. the imported
    BTN_URL_REGEX) before relying on this helper.
    """
    keyb = []
    for btn in buttons:
        if btn[0] and keyb:
            keyb[0].append(Button.url(btn[0], btn[0]))
        else:
            keyb.append([Button.url(btn[0], btn[0])])
    return keyb
# Inline keyboard shown with the .string command: links to the string-session
# bot, a Repl.it runner, and the support group.
Y_BUTTONS = [
    [
        custom.Button.url("Bᴏᴛ Sᴛʀɪɴɢ", "https://t.me/AyiinStringRobot"),
        custom.Button.url("Rᴇᴘʟɪᴛ Sᴛʀɪɴɢ", "https://repl.it/@AyiinXd/AyiinString?lite=1&outputonly=1"),
    ],
    [
        custom.Button.url("Sᴜᴘᴘᴏʀᴛ", "https://t.me/AyiinXdSupport"),
    ],
]
@ayiin_cmd(pattern=r"lang(?: |$)(.*)")
async def setlang(event):
    """Show the available languages via the assistant bot's inline query."""
    await eor(event, get_string("com_1"))
    languages = get_languages()
    if languages:
        try:
            AyiinUBOT = await tgbot.get_me()
            BOT_USERNAME = AyiinUBOT.username
            # Ask our own assistant bot for the "lang" inline result and post
            # the first match into this chat, hiding the "via" attribution.
            yinslang = await event.client.inline_query(  # pylint:disable=E0602
                BOT_USERNAME, "lang",
            )
            await yinslang[0].click(
                event.chat_id, reply_to=event.reply_to_msg_id, hide_via=True
            )
            await event.delete()
        except Exception as e:
            await eor(event, get_string("error_1").format(e)
            )
@ayiin_cmd(pattern=r"set( id| en|$)(.*)")
async def settt(event):
    """Switch the userbot language; the pattern accepts "id" or "en"."""
    await eor(event, get_string("com_1"))
    lang = event.pattern_match.group(1).strip()
    languages = get_languages()
    language[0] = lang
    if not os.environ.get("lang"):
        os.environ.setdefault("language", "1")
    # The two original branches for "id" and "en" were byte-identical; merged.
    if lang in ("id", "en"):
        try:
            # Fixed: setdefault() never overwrites an existing value, and
            # "language" may already be set to "1" above, which made the
            # original ``os.environ.setdefault("language", lang)`` a no-op.
            # Assign directly so the selected language is actually recorded.
            os.environ["language"] = lang
            await event.edit(get_string("lang_2").format(languages[lang]['asli'], lang)
            )
        except Exception as e:
            await eor(event, get_string("error_1").format(e)
            )
@ayiin_cmd(pattern="string(?:\\s|$)([\\s\\S]*)")
async def test_string(event):
    """Post the assistant bot's string-session inline result into the chat."""
    ayiin = await eor(event, get_string("com_1"))
    buttons = build_keyboards(Y_BUTTONS)
    if buttons:
        try:
            AyiinUBOT = await tgbot.get_me()
            BOT_USERNAME = AyiinUBOT.username
            results = await event.client.inline_query(  # pylint:disable=E0602
                BOT_USERNAME, "string",
            )
            await results[0].click(
                event.chat_id, reply_to=event.reply_to_msg_id, hide_via=True
            )
            await event.delete()
        except Exception as e:
            await eor(event, get_string("error_1").format(e)
            )
# Register the (Indonesian) help text for the .lang/.set/.string commands.
CMD_HELP.update(
    {
        "yinslang": f"**Plugin :** `yinslang`\
\n\n » **Perintah :** `{cmd}lang`\
\n » **Kegunaan : **__Untuk Melihat Daftar Bahasa Yang Tersedia.__\
\n\n » **Perintah :** `{cmd}set <nama_bahasa>`\
\n » **Kegunaan : **__Untuk Mengubah Bahasa.__\
\n\n » **Perintah :** `{cmd}string`\
\n » **Kegunaan : **__Untuk Membuat String Session.__\
"
    }
)
|
nilq/baby-python
|
python
|
# encoding:utf-8
from flask import Flask,request
import json
import time
import sys
import sqlite3
import os
app=Flask(__name__)
#### Reply payload template (webhook response format) ##########
# NOTE(review): the name ``re`` shadows the stdlib regex module name (the
# module is not imported here, so this is safe but confusing).
re={}
result={}
result["type"]="text"
result["content"]=""
re["error_code"]=0
re["error_msg"]=""
re["result"]=result
# Maps a Chinese sensor name to its column in the myhome_nodedata table.
dic={'温度':'temperature','湿度':'humidity','光照':'light','二氧化碳':'co2_simulation','声音':'noise'}
########## Spoken reply text for each recognised intent ############
response={'AC1_OC_OPEN':'请稍等,正在为您打开空调,接下来您可以选择调节温度、风速,改变空调模式等选项,如果我没理解您的命令,\
您可以通过“怎样调节”加目标项来获得标准控制命令',
          'AC1_OC_CLOSE':'请稍等,正在为您关闭空调',
          'AC1_TEMP_UP':'正在帮您升高空调温度,请稍等',
          'AC1_TEMP_DOWN':'正在帮您降低空调温度,请稍等',
          'AC1_WSPEED_UP':'正在帮您提高空调风速,请稍等',
          'AC1_WSPEED_DOWN':'正在帮您降低空调风速,请稍等',
          'AC1_SLEEP_OPEN':'好的,正在为您打开睡眠模式,请稍等',
          'AC1_SLEEP_CLOSE':'好的,正在为您关闭睡眠模式,请稍等',
          'AC1_TIMER_CLOSE':'好的,正在为您取消空调定时,请稍等'}
############## Slot normalisation table: Chinese phrase -> command value #############
normal={'高':'0','中':'1','低':'2','制冷':'0','制热':'1','送风':'2','自动':'3','除湿':'4','关':'close','开':'open','平衡':'0','环保':'1','极致':'2',
        '16度':'16','17度':'17','18度':'18','19度':'19','20度':'20','21度':'21','22度':'22','23度':'23','24度':'24','25度':'25',
        '26度':'26','27度':'27','28度':'28','29度':'29','30度':'30','降低':'lower','热':'lower','升高':'higher','冷':'higher',
        '半小时':'0.5','一小时':'1.0','一个半小时':'1.5','两小时':'2.0','两个半小时':'2.5','三小时':'3.0','温度':'0','湿度':'1','声音':'2','光照':'3','二氧化碳':'4'}
class Response:
    """Builds the JSON reply for a recognised intent and normalised slot word.

    NOTE(review): json_resp() mutates and returns the module-level ``re``
    template rather than per-instance state; the attributes stored in
    __init__ are effectively unused by it.
    """
    def __init__(self,intent,nom_word):
        self.re=re
        self.intent=intent
        self.nom_word=nom_word
    def json_resp(self,intent,nom_word):
        # Fixed replies for simple on/off style intents.
        if intent in response.keys():
            re["result"]["content"]=response[intent]
        elif intent=="AC1_TEMP_TO":
            re["result"]["content"]="好的,正在为您将空调设置为"+nom_word+",请稍等"
        elif intent=="AC1_WSPEED_TO":
            re["result"]["content"]="好的,正在为您调节风速为"+nom_word+",请稍等"
        elif intent=="AC1_TIMER_SET":
            re["result"]["content"]="好的,正在为您设置空调定时"+nom_word
        elif intent=="AC1_COMMOD_SELECT"or intent=="AC1_SMARTMOD_SELECT":
            re["result"]["content"]="好的,正在为您调节到"+nom_word+"模式,请稍等"
        elif intent=="ROOM1_ENV_INFO_QUERY":
            conn = sqlite3.connect("db.sqlite3")
            c = conn.cursor()
            ############ Select the latest value of the queried sensor #########
            c.execute('SELECT '+dic[nom_word]+' FROM myhome_nodedata ORDER BY id desc')
            ######### Fetch the first (newest) row's value for that column ######
            query_result=c.fetchone()[0]
            re["result"]["content"]="您当前的室内"+nom_word+"为"+str(query_result)
        else:
            re["result"]["content"]="请求失败,请重试"
        return re
@app.route("/unit/callback",methods=['POST'])
def callback():
    ######## Load the JSON request body and convert it to a dict ###########
    # NOTE(review): this local ``dic`` shadows the module-level sensor-name
    # mapping; Response.json_resp still sees the global one.
    dic=json.loads(str(request.data,encoding='utf-8'))
    ######## Extract the intent and the normalised slot word ########
    intent=dic["response"]["schema"]["intent"]
    nom_word=dic["response"]["schema"]["slots"][0]["normalized_word"]
    ###### Create the reply object #######
    exp1=Response(intent,nom_word)
    ######## Persist intent/slot into the commands table (created on first run) ############
    if os.path.exists("db.sqlite3"):
        conn = sqlite3.connect("db.sqlite3")
        c = conn.cursor()
    else:
        conn = sqlite3.connect("db.sqlite3")
        c = conn.cursor()
        c.execute('''CREATE TABLE myhome_commands(ID integer NOT NULL PRIMARY KEY AUTOINCREMENT,INTENT
        text NOT NULL ,SLOTS text NOT NULL)''')
        c.execute('INSERT INTO myhome_commands VALUES(1,"0","0")')  # seed the single row that holds intent and normalised slot
    c.execute("UPDATE myhome_commands SET INTENT=?,SLOTS=? where ID=1",(intent,normal[nom_word]))
    conn.commit()
    conn.close()
    ####### Build the reply and return it as JSON ######
    # json.dumps(re) serialises the same object json_resp() returned, since
    # json_resp() mutates and returns the module-level template.
    json_re=exp1.json_resp(intent,nom_word)
    json_re=json.dumps(re)
    return json_re
if __name__=='__main__':
    # Development server bound to a LAN address; debug=True is not for production.
    app.run(host='172.20.10.12',port=9999,debug=True)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#1 - Normalize vector:
def normalize_v(V):
    """Return a unit-length copy of V (each element divided by the norm)."""
    norm = sqrt(sum(x ** 2 for x in V))
    return [x / norm for x in V]
#2 - Find D, euclidian distance
def euclid_dis(V):
    """Return sqrt of the summed squared differences between V's components
    and the weights in the global matrix W.

    NOTE(review): only the first len(V) rows of W are visited, and the sum is
    collapsed into one scalar rather than a per-unit distance; find_att()
    then compares individual weights against that scalar. Unusual for a SOM,
    kept as written.
    """
    d=0
    for i in xrange(len(V)):
        for j in xrange(len(W[i])): #Len of matrix
            d += (V[i]-W[i][j])**2 #sum of (xi - mij)**2
    return sqrt(d)
#3 - Find j*, the winning attractor
def find_att(d):
    """Return [row, col] of the unit whose weight is closest to the scalar d."""
    star=[0,0] #Start with the beginning, keep coordinates in a list
    for A in xrange(Units) : # scan all units
        for B in xrange(Units) :
            if distance(W[A][B], d) < distance(W[star[0]][star[1]], d) :
                star = [A,B] # closest unit
    return star
#4 - set j*'s output signal to 1, all others to 0.
# This function exists because the course algorithm requires an output map;
# its practical use here is unclear (translated from the original French note).
def provide_sortie(sortie,star):
    """Write a one-hot activation map into *sortie*: 1 at j*, 0 elsewhere."""
    for x in xrange(Units):
        for y in xrange(Units):
            if x == star[0] and y == star[1]: sortie[x][y]=1
            else: sortie[x][y]=0
#5 - adjust the weights.
# The neighbourhood is hexagonal; dis_rad measures neighbourhood proximity.
# By convention, every even row of the matrix is shifted right to realise the
# hexagonal layout (translated from the original French notes).
def weighting(signal, alpha, star, radius):
    """Pull the weights of units in a hexagonal neighbourhood of j* (*star*)
    towards *signal*, with strength decaying linearly with distance."""
    left = max(star[0] - radius, 0) # left boundary
    right = min(star[0] + radius+1, Units) # right boundary
    top = max(star[1] - radius, 0) #top boundary
    bottom = min(star[1] + radius+1, Units) #bottom boundary
    for A in xrange(left, right) : # scan neighborhood left-right
        dis_A = abs(A-star[0]) #Horizontal distance to attractor j*
        for B in xrange(top, bottom): # scan neighborhood top-bottom
            dis_B = abs(B-star[1]) #Vertical distance to attractor j*
            dis_rad = dis_A if dis_A >= dis_B else dis_B #Distance between the neighbouring unit and j*: take the larger component
            left_lim = left + dis_B/2 #left limit contingent on vertical distance (integer division under Python 2)
            right_lim = right - dis_B/2 #Same for right limit
            if dis_B%2 == 1: #If the vertical distance to j* is odd
                if B%2 == 0:left_lim += 1 #If the row is even within the matrix, shift the left limit one step right
                else : right_lim -= 1 #else shift the right limit one step left
            if A >= right_lim or A<left_lim: continue #Skip units outside the limits above; this guarantees the hexagonal shape of the neighbourhood
            else:
                for i in xrange(len(signal)): #For both values of the signal vector
                    W[A][B] += (signal[i] - W[A][B]) * (alpha / (1 + dis_rad))#update strength decays linearly with distance
#Driver functions:
from math import fabs, sqrt
def distance(u, v) : return fabs(u - v)
def new_rate(lap) : return 1 - (lap / float(T))
def teach(W) :
    """Run the self-organising-map training loop for T iterations.

    Feeds a random normalized 2-component signal each lap, finds the winning
    unit j*, updates the output layer and the neighborhood weights, then
    decays the learning rate and radius linearly (see new_rate).
    Returns the index of the last completed lap (T - 1).

    NOTE(review): the parameter ``W`` shadows the global weight matrix and is
    never used inside the body -- weighting() updates the global directly.
    Relies on normalize_v/euclid_dis/find_att defined earlier in the file.
    """
    alpha = ALPHAo # learning rate
    radius = Ro # neighborhood
    sortie = [[0]*Units for x in xrange(Units)] # output layer, required by the textbook algorithm
    for lap in xrange(T) : #2 * (10**4)
        signal = normalize_v([randrange(1, 10)for x in xrange(2)]) # random 2-component input signal
        d = euclid_dis(signal) # Euclidean distance of the signal
        star = find_att(d) # locate the winning unit j*
        provide_sortie(sortie, star) # set the output layer
        weighting(signal, alpha, star, radius) # update the neighborhood weights
        alpha = ALPHAo * new_rate(lap) # new learning rate
        radius = int(Ro * new_rate(lap)) # new integral radius
    return lap
# Launch the training run.
print("Here, we are testing the topology map by using a 8*8 matrix, we initiate each processor's attractivity weight between .45 and .55, .2 as a learning coefficient. The entry signals are random and because we iterate 2 * (10**4), all processors' weight end up at the same place:\n")
from random import randrange
Units = 8  # width/height of the square map
W = [[(randrange(450, 550) * .001) for i in xrange(Units)] for x in xrange(Units)] # 0.5 ± 5 %
M = [x[:] for x in W]  # copy of the initial weights, kept for the before/after printout
ALPHAo = .2  # initial learning rate
Ro = Units / 2 # half of the width of the network
T = 2 * (10**4)  # number of training laps
teach(W) # training phase
# Print every unit's initial weight next to its trained weight.
for A in xrange(Units) :
    for B in xrange(Units) : print "%6i%2i\t%4.2f --> %4.2f" % (A,B, M[A][B], W[A][B])
|
nilq/baby-python
|
python
|
from spaceone.inventory.manager.pricing_manager import PricingManager
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
"""Generate a YARA rule fingerprinting the TypeRef metadata table of a .NET PE.

Reads the PE named on the command line, hashes the whole file, extracts the
raw bytes of the TypeRef table, and prints a YARA rule whose byte pattern
matches 2-byte chunks of that table separated by 2-byte wildcards.  The raw
table bytes are also dumped to a .bin file for reference.
"""
import sys
import dnfile
from hashlib import sha256
filename = sys.argv[1]
sha256hash = ''
with open(filename, 'rb') as fh_in:
    # SHA-256 of the entire input file, recorded in the rule's meta section.
    sha256hash = sha256(fh_in.read()).hexdigest()
pe = dnfile.dnPE(filename)
#tbl = pe.net.mdtables.MemberRef
tbl = pe.net.mdtables.TypeRef
# NOTE(review): num_rows and row_size are counts/sizes, not RVAs; passing
# them through get_offset_from_rva looks wrong -- verify against dnfile's
# API before reusing this script.
tbl_num_rows =\
pe.get_offset_from_rva(tbl.num_rows)
tbl_row_size =\
pe.get_offset_from_rva(tbl.row_size)
tbl_bytes = pe.get_data(tbl.rva, (tbl_num_rows*tbl_row_size))
hex_str = ''
# Every 6 table bytes become "aaaa[2]bbbb": two literal bytes, a 2-byte
# wildcard (bytes 2-3 of each group are skipped), two more literal bytes.
for i in range(0, len(tbl_bytes), 6):
    a = tbl_bytes[i:i+2].hex()
    b = tbl_bytes[i+4:i+6].hex()
    hex_str += f'{a}[2]{b}'
# YARA rule template: MZ header, PE signature, a populated COM descriptor
# data directory, and the "BSJB" .NET metadata signature, plus the pattern.
rule = '''
import "pe"
rule DotNet_Tbl_{}
{{
    meta:
        hash = "{}"
    strings:
        $ = {{{}}}
    condition:
        uint16(0) == 0x5A4D
        and (uint32(uint32(0x3C)) == 0x00004550)
        and pe.data_directories[pe.IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR].virtual_address != 0
        and uint32be(
            pe.rva_to_offset(
                uint32(
                    pe.rva_to_offset(pe.data_directories[pe.IMAGE_DIRECTORY_ENTRY_COM_DESCRIPTOR].virtual_address)+8
                )
            )
        ) == 0x42534a42
        and all of them
}}
'''
print(rule.format(tbl.name, sha256hash, hex_str))
# Also dump the raw table bytes next to the script output.
with open(f'(unknown)_tbl_{tbl.name}.bin', 'wb') as fh_out:
    fh_out.write(tbl_bytes)
|
nilq/baby-python
|
python
|
import pandas as pd

fname = "LBW_dataset.csv"
df = pd.read_csv(fname)

# --- cleaning ---
# Drop the unused 'Education' column, then fill missing values by linear
# interpolation along each column.
df = df.drop(columns=['Education'])
df = df.interpolate()
# These columns are categorical codes, so interpolated values must be
# rounded back to whole numbers.
for cat_col in ('Community', 'Delivery phase', 'IFA'):
    df[cat_col] = df[cat_col].round()
#df = df.round()

# Column statistics used for standardisation below.
mean_weight = df['Weight'].mean()
mean_age = df['Age'].mean()
mean_hb = df['HB'].mean()
mean_bp = df['BP'].mean()
std_weight = df['Weight'].std()
std_age = df['Age'].std()
std_hb = df['HB'].std()
std_bp = df['BP'].std()

# Standardise (z-score) each numeric column so no feature dominates the
# neural network through sheer scale.
# mean_x --> mean of column x
# std_x --> standard deviation of column x
df['Weight'] = (df['Weight'] - mean_weight) / std_weight
df['Age'] = (df['Age'] - mean_age) / std_age
df['HB'] = (df['HB'] - mean_hb) / std_hb
df['BP'] = (df['BP'] - mean_bp) / std_bp

df.to_csv("LBW_Dataset_Cleaned.csv")
|
nilq/baby-python
|
python
|
"""
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
import traceback
from timeit import default_timer as timer
import reversion
from django.core.management.base import BaseCommand
from django.db import transaction
from wells.models import Well, ActivitySubmission
from wells.stack import StackWells
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    """
    Run from command line:
    python manage.py legacy_records
    """
    def add_arguments(self, parser):
        """Register CLI options selecting which wells to process."""
        # Arguments added for debugging purposes.
        parser.add_argument('--start', type=int, nargs='?', help='Well to start at', default=1)
        parser.add_argument('--end', type=int, nargs='?', help='Well to end at', default=50)
        parser.add_argument('--next', type=int, nargs='?', help='Process n wells', default=0)
    def handle(self, *args, **options):
        """Create legacy ActivitySubmission records for wells that have none.

        Processes either the --start/--end range or, when --next is given,
        the next N wells lacking submissions; reports successes/failures.
        """
        # pylint: disable=broad-except
        start = options['start']
        end = options['end']
        to_do_amount = options['next']
        # We turn off reversion of ActivitySubmissions as we don't want to bloat the DB
        reversion.unregister(ActivitySubmission)
        reversion.unregister(Well)
        num_wells = 0
        # --next takes precedence over the start/end range.
        if to_do_amount:
            wells = self.find_next_n_wells_without_legacy_records(start, to_do_amount)
        else:
            wells = self.find_wells_without_legacy_records(start, end)
        num_wells = len(wells)
        if num_wells == 0:
            self.stdout.write(self.style.ERROR(f'No records found between well tag number {start} and {end}'))
            return
        print(f'Creating {num_wells} legacy records from well_tag_number {wells[0].well_tag_number} to {wells[len(wells) - 1].well_tag_number}')
        failures = []
        # NOTE: 'start' is rebound here from a well tag number to a timestamp.
        start = timer()
        for well in wells:
            try:
                self.create_legacy_record(well)
            except Exception as err:
                # Record the failure and keep going; each record runs in its
                # own transaction (see create_legacy_record), so one failure
                # does not roll back the others.
                failures.append(well.well_tag_number)
                print(f'Error creating legacy record for well_tag_number {well.well_tag_number}')
                # logger.exception(err)
                print(traceback.format_exc(limit=8))
        end = timer()
        num_fails = len(failures)
        num_created = num_wells - num_fails
        if num_created > 0:
            success_msg = 'Created {} legacy reports in {:.2f}s'.format(num_created, end - start)
            self.stdout.write(self.style.SUCCESS(success_msg))
        if num_fails > 0:
            failed_wells = ', '.join(map(str, failures))
            error_msg = 'Failed to create {} legacy reports for wells: {}' \
                .format(num_fails, failed_wells)
            clues_msg = 'See above stack traces for clues to why these failed'
            self.stdout.write(self.style.ERROR(error_msg))
            self.stdout.write(self.style.ERROR(clues_msg))
    def find_wells_without_legacy_records(self, start, end):
        """Return wells in [start, end] that have no ActivitySubmission."""
        wells = Well.objects \
            .filter(well_tag_number__gte=start,
                    well_tag_number__lte=end,
                    activitysubmission__isnull=True) \
            .order_by('well_tag_number')
        return wells
    def find_next_n_wells_without_legacy_records(self, start, num):
        """Return the next `num` wells at/after `start` lacking a submission."""
        wells = Well.objects \
            .filter(well_tag_number__gte=start, activitysubmission__isnull=True) \
            .order_by('well_tag_number') \
            [0:num]
        return wells
    @transaction.atomic
    def create_legacy_record(self, well):
        """Create a single legacy submission for `well` inside its own transaction."""
        # pylint: disable=protected-access
        # NOTE that _create_legacy_submission() will create the LEGACY activity
        # submission but then when the `submission_serializer.save()` is called
        # inside of `_create_legacy_submission()` this will trigger a
        # `StackWells().process()` call which will in turn call
        # `_update_well_record()` which checks to see if it should create a new
        # legacy record (it shouldn't). Instead it will just call
        # `self._stack(records, submission.well)` for this one legacy record.
        StackWells()._create_legacy_submission(well)
|
nilq/baby-python
|
python
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
import numpy as np
from astropy.io import fits
from astropy.table import Table
from gammapy.data import GTI
from gammapy.maps import MapCoord, Map
from gammapy.estimators.core import FluxEstimate
from gammapy.estimators.flux_point import FluxPoints
from gammapy.utils.table import table_from_row_data
from gammapy.modeling.models import (
SkyModel,
PowerLawSpectralModel,
PointSpatialModel,
Models,
)
from gammapy.utils.scripts import make_path
__all__ = ["FluxMaps"]
# Maps that MUST be present in a FluxMaps container, keyed by SED type.
REQUIRED_MAPS = {
    "dnde": ["dnde"],
    "e2dnde": ["e2dnde"],
    "flux": ["flux"],
    "eflux": ["eflux"],
    "likelihood": ["norm"],
}
#TODO: add an entry for is_ul?
# Maps that MAY additionally be present (errors, upper limits, scans),
# keyed by SED type.
OPTIONAL_MAPS = {
    "dnde": ["dnde_err", "dnde_errp", "dnde_errn", "dnde_ul"],
    "e2dnde": ["e2dnde_err", "e2dnde_errp", "e2dnde_errn", "e2dnde_ul"],
    "flux": ["flux_err", "flux_errp", "flux_errn", "flux_ul"],
    "eflux": ["eflux_err", "eflux_errp", "eflux_errn", "eflux_ul"],
    "likelihood": ["norm_err", "norm_errn", "norm_errp","norm_ul", "norm_scan", "stat_scan"],
}
log = logging.getLogger(__name__)
class FluxMaps(FluxEstimate):
    """A flux map container.
    It contains a set of `~gammapy.maps.Map` objects that store the estimated flux as a function of energy as well as
    associated quantities (typically errors, upper limits, delta TS and possibly raw quantities such counts,
    excesses etc). It also contains a reference model to convert the flux values in different formats. Usually, this
    should be the model used to produce the flux map.
    The associated map geometry can use a `RegionGeom` to store the equivalent of flux points, or a `WcsGeom`/`HpxGeom`
    to store an energy dependent flux map.
    The container relies internally on the 'Likelihood' SED type defined in :ref:`gadf:flux-points`
    and offers convenience properties to convert to other flux formats, namely:
    ``dnde``, ``flux``, ``eflux`` or ``e2dnde``. The conversion is done according to the reference model spectral shape.
    Parameters
    ----------
    data : dict of `~gammapy.maps.Map`
        the maps dictionary. Expected entries are the following:
        * norm : the norm factor
        * norm_err : optional, the error on the norm factor.
        * norm_errn : optional, the negative error on the norm factor.
        * norm_errp : optional, the positive error on the norm factor.
        * norm_ul : optional, the upper limit on the norm factor.
        * norm_scan : optional, the norm values of the test statistic scan.
        * stat_scan : optional, the test statistic scan values.
        * ts : optional, the delta TS associated with the flux value.
        * sqrt_ts : optional, the square root of the TS, when relevant.
    reference_model : `~gammapy.modeling.models.SkyModel`, optional
        the reference model to use for conversions. Default in None.
        If None, a model consisting of a point source with a power law spectrum of index 2 is assumed.
    gti : `~gammapy.data.GTI`
        the maps GTI information. Default is None.
    """
    def __init__(self, data, reference_model=None, gti=None):
        self.geom = data['norm'].geom
        # Use identity comparison: SkyModel.__eq__ may not return a plain bool.
        if reference_model is None:
            log.warning("No reference model set for FluxMaps. Assuming point source with E^-2 spectrum.")
            reference_model = self._default_model()
        self.reference_model = reference_model
        self.gti = gti
        super().__init__(data, spectral_model=reference_model.spectral_model)
    @staticmethod
    def _default_model():
        """Fallback reference model: point source with an E^-2 power law."""
        return SkyModel(spatial_model=PointSpatialModel(), spectral_model=PowerLawSpectralModel(index=2))
    @property
    def _additional_maps(self):
        """Keys of maps that are neither required nor optional likelihood maps."""
        return self.data.keys() - (REQUIRED_MAPS["likelihood"] + OPTIONAL_MAPS["likelihood"])
    @property
    def energy_ref(self):
        """Reference energies (bin centers) of the energy axis."""
        axis = self.geom.axes["energy"]
        return axis.center
    @property
    def energy_min(self):
        """Lower edges of the energy bins."""
        axis = self.geom.axes["energy"]
        return axis.edges[:-1]
    @property
    def energy_max(self):
        """Upper edges of the energy bins."""
        axis = self.geom.axes["energy"]
        return axis.edges[1:]
    @property
    def ts(self):
        """The delta-TS map. Raises `KeyError` if absent."""
        if "ts" not in self.data:
            raise KeyError("No ts map present in FluxMaps.")
        return self.data["ts"]
    @property
    def sqrt_ts(self):
        """The sqrt(TS) map. Raises `KeyError` if absent."""
        if "sqrt_ts" not in self.data:
            raise KeyError("No sqrt_ts map present in FluxMaps.")
        return self.data["sqrt_ts"]
    def __str__(self):
        str_ = f"{self.__class__.__name__}\n"
        str_ += "\t"+ "\t\n".join(str(self.norm.geom).split("\n")[:1])
        str_ += "\n\t"+"\n\t".join(str(self.norm.geom).split("\n")[2:])
        str_ += f"\n\tAvailable quantities : {self._available_quantities}\n\n"
        str_ += f"\tAdditional maps : {self._additional_maps}\n\n"
        str_ += "\tReference model:\n"
        if self.reference_model is not None:
            str_ += "\t" + "\n\t".join(str(self.reference_model).split("\n")[2:])
        return str_.expandtabs(tabsize=2)
    def get_flux_points(self, coord=None):
        """Extract flux point at a given position.
        The flux points are returned in the the form of a `~gammapy.estimators.FluxPoints` object
        (which stores the flux points in an `~astropy.table.Table`)
        Parameters
        ----------
        coord : `~astropy.coordinates.SkyCoord`
            the coordinate where the flux points are extracted.
            If None, the center of the map geometry is used.
        Returns
        -------
        fluxpoints : `~gammapy.estimators.FluxPoints`
            the flux points object
        """
        if coord is None:
            coord = self.geom.center_skydir
        energies = self.energy_ref
        coords = MapCoord.create(dict(skycoord=coord, energy=energies))
        ref = self.dnde_ref.squeeze()
        fp = dict()
        fp["norm"] = self.norm.get_by_coord(coords) * self.norm.unit
        for quantity in self._available_quantities:
            norm_quantity = f"norm_{quantity}"
            res = getattr(self, norm_quantity).get_by_coord(coords)
            res *= getattr(self, norm_quantity).unit
            fp[norm_quantity] = res
        for additional_quantity in self._additional_maps:
            res = self.data[additional_quantity].get_by_coord(coords)
            res *= self.data[additional_quantity].unit
            fp[additional_quantity] = res
        # TODO: add support of norm and stat scan
        rows = []
        for idx, energy in enumerate(self.energy_ref):
            result = dict()
            result["e_ref"] = energy
            result["e_min"] = self.energy_min[idx]
            result["e_max"] = self.energy_max[idx]
            result["ref_dnde"] = ref[idx]
            result["norm"] = fp["norm"][idx]
            for quantity in self._available_quantities:
                norm_quantity = f"norm_{quantity}"
                result[norm_quantity] = fp[norm_quantity][idx]
            for key in self._additional_maps:
                result[key] = fp[key][idx]
            rows.append(result)
        table = table_from_row_data(rows=rows, meta={"SED_TYPE": "likelihood"})
        return FluxPoints(table).to_sed_type('dnde')
    def to_dict(self, sed_type="likelihood"):
        """Return maps in a given SED type in the form of a dictionary.
        Parameters
        ----------
        sed_type : str
            sed type to convert to. Default is `Likelihood`
        Returns
        -------
        map_dict : dict
            dictionary containing the requested maps.
        """
        if sed_type == "likelihood":
            map_dict = self.data
        else:
            map_dict = {}
            for entry in REQUIRED_MAPS[sed_type]:
                map_dict[entry] = getattr(self, entry)
            # Optional maps are converted when present, silently skipped otherwise.
            for entry in OPTIONAL_MAPS[sed_type]:
                try:
                    map_dict[entry] = getattr(self, entry)
                except KeyError:
                    pass
            for key in self._additional_maps:
                map_dict[key] = self.data[key]
        return map_dict
    def write(self, filename, filename_model=None, overwrite=False, sed_type="likelihood"):
        """Write flux map to file.
        Parameters
        ----------
        filename : str
            Filename to write to.
        filename_model : str
            Filename of the model (yaml format).
            If None, keep string before '.' and add '_model.yaml' suffix
        overwrite : bool
            Overwrite file if it exists.
        sed_type : str
            sed type to convert to. Default is `likelihood`
        """
        filename = make_path(filename)
        if filename_model is None:
            name_string = filename.as_posix()
            for suffix in filename.suffixes:
                # str.replace returns a new string -- the original code
                # discarded it, so suffixes were never stripped.
                name_string = name_string.replace(suffix, '')
            filename_model = name_string + '_model.yaml'
        filename_model = make_path(filename_model)
        hdulist = self.to_hdulist(sed_type)
        models = Models(self.reference_model)
        models.write(filename_model, overwrite=overwrite)
        hdulist[0].header['MODEL'] = filename_model.as_posix()
        hdulist.writeto(str(make_path(filename)), overwrite=overwrite)
    def to_hdulist(self, sed_type="likelihood", hdu_bands=None):
        """Convert flux map to list of HDUs.
        For now, one cannot export the reference model.
        Parameters
        ----------
        sed_type : str
            sed type to convert to. Default is `Likelihood`
        hdu_bands : str
            Name of the HDU with the BANDS table. Default is 'BANDS'
            If set to None, each map will have its own hdu_band
        Returns
        -------
        hdulist : `~astropy.io.fits.HDUList`
            Map dataset list of HDUs.
        """
        exclude_primary = slice(1, None)
        hdu_primary = fits.PrimaryHDU()
        hdulist = fits.HDUList([hdu_primary])
        hdu_primary.header["SED_TYPE"] = sed_type
        map_dict = self.to_dict(sed_type)
        for key in map_dict:
            hdulist += map_dict[key].to_hdulist(hdu=key, hdu_bands=hdu_bands)[exclude_primary]
        if self.gti:
            hdu = fits.BinTableHDU(self.gti.table, name="GTI")
            hdulist.append(hdu)
        return hdulist
    @classmethod
    def read(cls, filename):
        """Read map dataset from file.
        Parameters
        ----------
        filename : str
            Filename to read from.
        Returns
        -------
        flux_map : `~gammapy.estimators.FluxMaps`
            Flux map.
        """
        with fits.open(str(make_path(filename)), memmap=False) as hdulist:
            return cls.from_hdulist(hdulist)
    @classmethod
    def from_hdulist(cls, hdulist, hdu_bands=None):
        """Create flux map dataset from list of HDUs.
        Parameters
        ----------
        hdulist : `~astropy.io.fits.HDUList`
            List of HDUs.
        hdu_bands : str
            Name of the HDU with the BANDS table. Default is 'BANDS'
            If set to None, each map should have its own hdu_band
        Returns
        -------
        fluxmaps : `~gammapy.estimators.FluxMaps`
            the flux map.
        """
        try:
            sed_type = hdulist[0].header["SED_TYPE"]
        except KeyError:
            raise ValueError("Cannot determine SED type of flux map from primary header.")
        result = {}
        for map_type in REQUIRED_MAPS[sed_type]:
            if map_type.upper() in hdulist:
                result[map_type] = Map.from_hdulist(hdulist, hdu=map_type, hdu_bands=hdu_bands)
            else:
                raise ValueError(f"Cannot find required map {map_type} for SED type {sed_type}.")
        for map_type in OPTIONAL_MAPS[sed_type]:
            if map_type.upper() in hdulist:
                result[map_type] = Map.from_hdulist(hdulist, hdu=map_type, hdu_bands=hdu_bands)
        # Read additional image hdus
        for hdu in hdulist[1:]:
            if hdu.is_image:
                if hdu.name.lower() not in (REQUIRED_MAPS[sed_type]+OPTIONAL_MAPS[sed_type]):
                    result[hdu.name.lower()] = Map.from_hdulist(hdulist, hdu=hdu.name, hdu_bands=hdu_bands)
        model_filename = hdulist[0].header.get("MODEL", None)
        reference_model = None
        if model_filename:
            try:
                reference_model = Models.read(model_filename)[0]
            except FileNotFoundError:
                raise FileNotFoundError(f"Cannot find {model_filename} model file. Check MODEL keyword.")
        if "GTI" in hdulist:
            gti = GTI(Table.read(hdulist["GTI"]))
        else:
            gti = None
        return cls.from_dict(result, sed_type, reference_model, gti)
    @staticmethod
    def _validate_type(maps, sed_type):
        """Check that map input is valid and correspond to one of the SED type."""
        try:
            required = set(REQUIRED_MAPS[sed_type])
        except KeyError:
            raise ValueError("Unknown SED type.")
        if not required.issubset(maps.keys()):
            missing = required.difference(maps.keys())
            raise ValueError(
                "Missing maps for sed type '{}':" " {}".format(sed_type, missing)
            )
    @classmethod
    def from_dict(cls, maps, sed_type='likelihood', reference_model=None, gti=None):
        """Create FluxMaps from a dictionary of maps.
        Parameters
        ----------
        maps : dict
            dictionary containing the requested maps.
        sed_type : str
            sed type to convert to. Default is `Likelihood`
        reference_model : `~gammapy.modeling.models.SkyModel`, optional
            the reference model to use for conversions. Default in None.
            If None, a model consisting of a point source with a power law spectrum of index 2 is assumed.
        gti : `~gammapy.data.GTI`
            the maps GTI information. Default is None.
        Returns
        -------
        fluxmaps : `~gammapy.estimators.FluxMaps`
            the flux map.
        """
        cls._validate_type(maps, sed_type)
        if sed_type == 'likelihood':
            # Pass gti through (the original dropped it on this path).
            return cls(maps, reference_model, gti)
        e_ref = maps[sed_type].geom.axes["energy"].center
        e_edges = maps[sed_type].geom.axes["energy"].edges
        if reference_model is None:
            log.warning("No reference model set for FluxMaps. Assuming point source with E^-2 spectrum.")
            reference_model = cls._default_model()
        ref_dnde = reference_model.spectral_model(e_ref)
        # Per-energy-bin conversion factor from the requested SED type back
        # to the internal 'norm' representation.
        if sed_type == "dnde":
            factor = ref_dnde
        elif sed_type == "flux":
            factor = reference_model.spectral_model.integral(e_edges[:-1], e_edges[1:])
        elif sed_type == "eflux":
            factor = reference_model.spectral_model.energy_flux(e_edges[:-1], e_edges[1:])
        elif sed_type == "e2dnde":
            factor = e_ref ** 2 * ref_dnde
        # to ensure the units are similar
        factor = factor.to(maps[sed_type].unit)
        data = dict()
        data["norm"] = maps[sed_type]/factor[:,np.newaxis, np.newaxis]
        for map_type in OPTIONAL_MAPS[sed_type]:
            if map_type in maps:
                norm_type = map_type.replace(sed_type, "norm")
                data[norm_type] = maps[map_type]/factor[:,np.newaxis, np.newaxis]
        # We add the remaining maps
        for key in maps.keys() - (REQUIRED_MAPS[sed_type] + OPTIONAL_MAPS[sed_type]):
            data[key] = maps[key]
        return cls(data, reference_model, gti)
|
nilq/baby-python
|
python
|
from flask import Flask
from driver import get_final_kmeans
# WSGI application object; the module must be importable by the Flask server.
app = Flask(__name__)
@app.route("/")
def hello():
    """Root endpoint: return the k-means driver's final result as the response."""
    return get_final_kmeans()
|
nilq/baby-python
|
python
|
from typing import Any, Dict, List, Optional, Union
from interactions.ext.paginator import Paginator
from thefuzz.fuzz import ratio
from interactions import Client, CommandContext, DictSerializerMixin, Embed, Extension
from .settings import AdvancedSettings, PaginatorSettings, TemplateEmbed, typer_dict
class RawHelpCommand(DictSerializerMixin):
    """Collects the bot's registered application commands into a flat list.

    Fetches global and guild commands over the Discord HTTP API and
    normalizes them into dicts with name/description/options/type/extension
    keys, caching the result in ``_commands`` unless ``sync_commands`` is set.
    """
    __slots__ = (
        "_json",
        "client",
        "scope",
        "sync_commands",
        "_commands",
    )
    _json: Dict[str, Any]
    client: Client
    sync_commands: bool
    _commands: List[dict]
    def __init__(
        self,
        client: Client,
        sync_commands: bool = False,
        **kwargs,
    ):
        super().__init__(client=client, sync_commands=sync_commands, **kwargs)
        # Cache of normalized command dicts; refilled when sync_commands is True.
        self._commands = []
    async def _get_all_commands(
        self,
        global_commands: bool = True,
        guild_commands: bool = True,
        guild: Optional[int] = None,
    ):
        """Fetch raw command payloads; guild commands are appended as a nested list."""
        result = []
        if global_commands:
            result = await self.client._http.get_application_commands(self.client.me.id)
            result = [] if result is None else result
        if guild_commands and guild:
            guild_result = await self.client._http.get_application_commands(
                self.client.me.id, guild
            )
            # NOTE: appends the guild payload as a nested LIST; the sorter
            # below detects it via isinstance(command, list).
            result.append(guild_result) if guild_result is not None else None
        result = list(filter(lambda x: x is not None, result))
        if not result:
            raise RuntimeError("No commands found")
        return result
    async def get_commands(
        self,
        global_commands: bool = True,
        guild_commands: bool = True,
        guild: Optional[int] = None,
    ):
        """Return (and cache) the normalized list of commands, subcommands and menus."""
        # Serve from cache unless the caller asked for resyncing.
        if self._commands and not self.sync_commands:
            return self._commands
        # get all commands
        all_commands = await self._get_all_commands(global_commands, guild_commands, guild)
        # separate by category
        commands, subcommands, menus = [], [], []
        guild_id_index = None
        await self.__sort_all_commands(all_commands, commands, subcommands, menus, guild_id_index)
        master: List[dict] = []
        for command in commands:
            command: dict
            # Find the Extension whose registered callbacks include this command.
            cmd_ext: Optional[Extension] = next(
                (
                    ext_name
                    for ext_name, ext in self.client._extensions.items()
                    if isinstance(ext, Extension)
                    and f'command_{command["name"]}' in ext._commands.keys()
                ),
                None,
            )
            master.append(
                {
                    "name": command["name"],
                    "description": command["description"],
                    "options": command["options"],
                    "type": "slash command",
                    "extension": cmd_ext,
                }
            )
        for subcommand in subcommands:
            for sub in subcommand["options"]:
                sub: dict
                # Option type 1 is a plain subcommand; otherwise treat it as
                # a subcommand group with a single nested subcommand.
                if sub["type"] == 1:
                    sub["options"] = sub.get("options", [])
                    cmd_ext: Optional[Extension] = next(
                        (
                            ext_name
                            for ext_name, ext in self.client._extensions.items()
                            if isinstance(ext, Extension)
                            and f'command_{subcommand["name"]}' in ext._commands.keys()
                        ),
                        None,
                    )
                    master.append(
                        {
                            "name": f'{subcommand["name"]} {sub["name"]}',
                            "description": sub["description"],
                            "options": sub["options"],
                            "type": "subcommand",
                            "extension": cmd_ext,
                        }
                    )
                else:
                    sub["options"][0]["options"] = sub["options"][0].get("options", [])
                    cmd_ext: Optional[Extension] = next(
                        (
                            ext_name
                            for ext_name, ext in self.client._extensions.items()
                            if isinstance(ext, Extension)
                            and f'command_{subcommand["name"]}' in ext._commands.keys()
                        ),
                        None,
                    )
                    master.append(
                        {
                            "name": f'{subcommand["name"]} {sub["name"]} {sub["options"][0]["name"]}',
                            "description": sub["options"][0]["description"],
                            "options": sub["options"][0]["options"],
                            "type": "subcommand group",
                            "extension": cmd_ext,
                        }
                    )
        for menu in menus:
            cmd_ext: Optional[Extension] = next(
                (
                    ext_name
                    for ext_name, ext in self.client._extensions.items()
                    if isinstance(ext, Extension)
                    and f'command_{menu["name"]}' in ext._commands.keys()
                ),
                None,
            )
            master.append(
                {
                    "name": menu["name"],
                    "description": None,
                    "type": ("user menu" if menu["type"] == 2 else "message menu"),
                    "extension": cmd_ext,
                }
            )
        # Normalize: every option dict gets an explicit 'required' flag.
        for interaction in master:
            interaction: dict
            if interaction.get("options", None) is not None:
                for option in interaction["options"]:
                    option: dict
                    option["required"] = option.get("required", False)
        self._commands = master
        return master
    async def __sort_all_commands(
        self,
        all_commands: List[Union[List[dict], dict]],
        commands: list,
        subcommands: list,
        menus: list,
        guild_ids_index: Optional[int],
    ):
        """Partition raw payloads into commands / subcommands / menus (mutates args)."""
        # first, sort all global commands
        for command in all_commands:
            # A nested list marks where guild commands start (see _get_all_commands).
            if isinstance(command, list):
                guild_ids_index = all_commands.index(command)
                break
            if command["type"] == 1:
                if "options" in command.keys() and command["options"][0]["type"] in (
                    1,
                    2,
                ):
                    subcommands.append(command)
                else:
                    if "options" not in command.keys():
                        command["options"] = []
                    commands.append(command)
            else:
                menus.append(command)
        # next, sort all guild commands if applicable
        if guild_ids_index is not None:
            for command in all_commands[guild_ids_index]:
                if command["type"] == 1:
                    if "options" in command.keys() and command["options"][0]["type"] in (
                        1,
                        2,
                    ):
                        subcommands.append(command)
                    else:
                        if "options" not in command.keys():
                            command["options"] = []
                        commands.append(command)
                else:
                    menus.append(command)
class HelpCommand(RawHelpCommand):
    """Paginated help command built on top of `RawHelpCommand`.

    Renders either a fuzzy-search result page (when ``search`` is given) or
    an overview page plus one page group per extension, then runs a Paginator.
    """
    __slots__ = (
        "_json",
        "client",
        "sync_commands",
        "template_embed",
        "paginator_settings",
        "advanced_settings",
    )
    client: Client
    sync_commands: bool
    template_embed: TemplateEmbed
    paginator_settings: PaginatorSettings
    advanced_settings: AdvancedSettings
    def __init__(
        self,
        client: Client,
        sync_commands: bool = False,
        template_embed: TemplateEmbed = TemplateEmbed(),
        paginator_settings: PaginatorSettings = PaginatorSettings(),
        advanced_settings: AdvancedSettings = AdvancedSettings(),
    ) -> None:
        super().__init__(
            client=client,
            sync_commands=sync_commands,
            template_embed=template_embed,
            paginator_settings=paginator_settings,
            advanced_settings=advanced_settings,
        )
    async def send_help(
        self,
        ctx: CommandContext,
        search: Optional[str] = None,
        guild_id: Optional[int] = None,
    ):
        """Build and send the help pages for ``ctx``.

        When ``search`` is given, ranks extensions and commands by fuzzy
        similarity and shows the top matches; otherwise shows the full
        command overview grouped by extension.
        """
        if guild_id is None:
            guild_id = ctx.guild_id
        await self.get_commands(guild=guild_id)
        data: List[dict] = self._commands.copy()
        if search is not None:
            search: str = search.lower()
            answers: dict = {}
            list_extensions: list = []
            list_commands: list = []
            # extensions: score each extension name against the query
            for interaction in data:
                if self.__ext_in_blacklist(interaction):
                    continue
                percent = ratio(search, interaction["extension"])
                if interaction["extension"] not in answers:
                    answers[interaction["extension"]] = percent
                    list_extensions.append(interaction["extension"])
            # commands: score each command name against the query
            for interaction in data:
                if self.__cmd_in_blacklist(interaction):
                    continue
                percent = ratio(search, interaction["name"])
                if interaction["name"] not in answers.keys():
                    answers[interaction["name"]] = percent
                    list_commands.append(interaction["name"])
            # Best matches first, capped at max_search_results.
            sorted_data: list = sorted(answers, key=answers.get, reverse=True)[
                : self.advanced_settings.max_search_results
            ]
            embeds: List[Embed] = []
            for i in range(0, len(sorted_data), self.template_embed.fields_per_embed):
                page = Embed(
                    title=f"Search results for `{search}`, {i + 1} - {i + self.template_embed.fields_per_embed}",
                    color=self.template_embed.color,
                )
                for match in sorted_data[i : (i + self.template_embed.fields_per_embed)]:
                    if match in list_extensions:
                        ext: str = None
                        cmds: List[Dict[str, dict]] = []
                        for interaction in data:
                            if match == interaction["extension"]:
                                ext = interaction["extension"]
                                cmds.append({interaction["name"]: interaction})
                        # NOTE(review): this branch builds `value` but never
                        # adds a field to the page -- extension matches are
                        # effectively not rendered. Left as-is; confirm intent.
                        if ext is not None:
                            value = "Category\nCommands:\n"
                            for cmd in cmds:
                                in_blacklist = False
                                if self.advanced_settings.blacklist:
                                    # Fixed: the original iterated `self.blacklist`,
                                    # which is never defined (AttributeError);
                                    # the blacklist lives on advanced_settings.
                                    for black in self.advanced_settings.blacklist:
                                        if black in list(cmd.keys())[0]:
                                            in_blacklist = True
                                            break
                                if in_blacklist:
                                    continue
                    elif match in list_commands:
                        # Locate the matching interaction (loop variable leaks on break).
                        for interaction in data:
                            if match == interaction["name"]:
                                break
                        options: str = ""
                        if interaction["type"] in {
                            "slash command",
                            "subcommand",
                            "subcommand group",
                        }:
                            for option in interaction["options"]:
                                the_type = typer_dict(
                                    option["type"],
                                    option["choices"] if "choices" in option.keys() else [],
                                )
                                options += f"[{option['name']}: {'' if option['required'] else 'optional '}{the_type}], "
                        elif "menu" not in interaction["type"]:
                            options += interaction["options"]
                        options = options[:-2] if options.endswith(", ") else options
                        how_to_use = f"\nHow to use:\n```\n{f'/' if interaction['type'] in {'slash command', 'subcommand', 'subcommand group'} else ('Right click on a ' + interaction['type'].replace(' menu', '')) if 'menu' in interaction['type'] else '/'}{'' if 'menu' in interaction['type'] else interaction['name']} {options}\n```"
                        page.add_field(
                            name=interaction["name"],
                            value=(
                                ""
                                if interaction["description"] is None
                                else interaction["description"]
                            )
                            + f"\n{interaction['type'].capitalize()}"
                            + how_to_use,
                            inline=False,
                        )
                if self.template_embed.footer is not None:
                    page.set_footer(text=self.template_embed.footer)
                embeds.append(page)
            return await Paginator(
                client=self.client,
                ctx=ctx,
                pages=embeds,
                timeout=self.paginator_settings.timeout,
                author_only=self.paginator_settings.author_only,
                use_select=self.paginator_settings.use_select,
                extended_buttons=self.paginator_settings.extended_buttons,
            ).run()
        else:
            first_page = (
                Embed(title="Help", color=self.template_embed.color)
                if self.template_embed.description is None
                else Embed(
                    title="Help",
                    description=self.template_embed.description,
                    color=self.template_embed.color,
                )
            )
            if self.template_embed.footer is not None:
                first_page.set_footer(text=self.template_embed.footer)
            embeds: List[Embed] = [first_page]
            exts: List[dict] = []
            # Collect one bucket per (non-blacklisted) extension.
            for interaction in data:
                if self.__ext_in_blacklist(interaction):
                    continue
                if {
                    "name": interaction["extension"],
                    "interactions": [],
                } not in exts:
                    exts.append(
                        {
                            "name": interaction["extension"],
                            "interactions": [],
                        }
                    )
            # Overview page: one field per extension listing its commands.
            for ext in exts:
                value = "\n"
                for interaction in data:
                    if self.__cmd_in_blacklist(interaction):
                        continue
                    if interaction["extension"] == ext["name"]:
                        ext["interactions"].append(interaction)
                        value += f"`{'/' if interaction['type'] in ['slash command', 'subcommand', 'subcommand group'] else '' if 'menu' in interaction['type'] else '/'}{interaction['name']}`, "
                value = value[:-2] if value.endswith(", ") else value
                first_page.add_field(
                    name=(
                        self.template_embed.no_category_name if ext["name"] is None else ext["name"]
                    ),
                    value=value,
                    inline=False,
                )
            # Detail pages: fields_per_embed commands per page, per extension.
            for ext in exts:
                for i in range(0, len(ext["interactions"]), self.template_embed.fields_per_embed):
                    next_page = Embed(
                        title=f"{self.template_embed.no_category_name if ext['name'] is None else ext['name']} {i + 1} - {i + self.template_embed.fields_per_embed}",
                        color=self.template_embed.color,
                    )
                    for cmd in ext["interactions"][i : (i + self.template_embed.fields_per_embed)]:
                        cmd: dict
                        cmd_name: str = cmd["name"]
                        cmd_desc: str = cmd["description"]
                        cmd_opts: list = cmd.get("options", [])
                        cmd_type: str = cmd["type"]
                        desc = (
                            "No description"
                            if cmd_desc is None or cmd_desc == [] or not cmd_desc
                            else cmd_desc
                        ) + "\nHow to use:"
                        how_to_use = f"\n```\n{f'/{cmd_name}' if 'menu' not in cmd_type else ('Right click on a ' + cmd['type'].replace(' menu', ''))} "
                        if isinstance(cmd_opts, list):
                            for opt in cmd_opts:
                                opt: dict
                                _type = typer_dict(opt["type"], opt.get("choices", []))
                                how_to_use += f"[{opt['name']}: {'optional ' if not opt['required'] else ''}{_type}], "
                        elif cmd_opts is not None:
                            how_to_use += cmd_opts
                        how_to_use = how_to_use[:-2] if how_to_use.endswith(", ") else how_to_use
                        how_to_use += "\n```"
                        next_page.add_field(name=cmd_name, value=desc + how_to_use, inline=False)
                    if self.template_embed.footer is not None:
                        next_page.set_footer(text=self.template_embed.footer)
                    embeds.append(next_page)
            return await Paginator(
                client=self.client,
                ctx=ctx,
                pages=embeds,
                timeout=self.paginator_settings.timeout,
                author_only=self.paginator_settings.author_only,
                use_select=self.paginator_settings.use_select,
                extended_buttons=self.paginator_settings.extended_buttons,
            ).run()
    def __cmd_in_blacklist(self, interaction: dict):
        """True when any blacklist entry appears in the command's name or extension."""
        return (
            any(
                (black in interaction["name"]) or (black in interaction["extension"])
                for black in self.advanced_settings.blacklist
            )
            if self.advanced_settings.blacklist is not None
            else False
        )
    def __ext_in_blacklist(self, interaction: dict):
        """True when the interaction's extension is set and blacklisted."""
        return (
            self.__cmd_in_blacklist(interaction) if interaction["extension"] is not None else False
        )
|
nilq/baby-python
|
python
|
import numpy as np
from sklearn.cluster import MeanShift
# FIX: sklearn.datasets.samples_generator was deprecated in 0.22 and removed
# in 0.24; make_blobs is importable directly from sklearn.datasets.
from sklearn.datasets import make_blobs
import matplotlib.pyplot as plt

# Two well-separated Gaussian blobs.
centers = [[1, 1], [5, 5]]
X, y = make_blobs(n_samples=10000, centers=centers, cluster_std=1)

# Show the raw data before clustering.
plt.scatter(X[:, 0], X[:, 1])
plt.show()

# Mean shift estimates the number of clusters itself.
ms = MeanShift()
ms.fit(X)
labels = ms.labels_
cluster_centers = ms.cluster_centers_

n_clusters_ = len(np.unique(labels))
print("Number of estimated clusters:", n_clusters_)

# Repeat the colour cycle so any reasonable number of clusters is covered.
colors = 10 * ['r.', 'g.', 'b.', 'c.', 'k.', 'y.', 'm.']
print(colors)
print(labels)
# Plot each sample in its cluster's colour, then overlay the centres.
for i in range(len(X)):
    plt.plot(X[i][0], X[i][1], colors[labels[i]], markersize=10)
plt.scatter(cluster_centers[:, 0], cluster_centers[:, 1],
            marker="x", s=150, linewidths=5, zorder=10)
plt.show()
|
nilq/baby-python
|
python
|
# Copyright 2020 Arthur Coqué, Valentine Aubard, Pôle OFB-INRAE ECLA, UR RECOVER
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module contains a reader for S2_THEIA products.
This reader is dedicated to extract data from S2_THEIA_L2A.
Example::
reader = S2THEIAReader(**config)
reader.extract_bands()
reader.create_ds()
extracted_dataset = reader.dataset
"""
import warnings
from collections import defaultdict, namedtuple
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional
from zipfile import ZipFile
import numpy as np
import rasterio
import xarray as xr
from lxml import etree
from pyproj import CRS
from rasterio.windows import Window
from tqdm import tqdm
from sisppeo.readers.reader import Reader, Inputs
from sisppeo.utils.exceptions import InputError, ProductError
from sisppeo.utils.readers import (get_ij_bbox, decode_data,
resample_band_array,
resize_and_resample_band_array)
# Silence rasterio's NotGeoreferencedWarning module-wide.
# NOTE(review): this filter is global for the process — confirm that callers
# do not rely on seeing this warning for other rasters.
warnings.filterwarnings('ignore', category=rasterio.errors.NotGeoreferencedWarning)
# Base Reader inputs extended with the S2-THEIA-specific options accepted
# by S2THEIAReader.__init__.
S2THEIAInputs = namedtuple('S2THEIAInputs', Inputs._fields
                           + ('out_resolution', 'theia_bands', 'theia_masks'))
def format_zippath(path: Path) -> str:
    """Turn *path* into a GDAL/rasterio ``zip://...!/`` URI prefix."""
    return 'zip://{}!/'.format(path.resolve())
class S2THEIAReader(Reader):
    """A reader dedicated to extract data from S2_THEIA_L2A products.
    For more information about THEIA S2 L2A products, please see:
    https://labo.obs-mip.fr/multitemp/sentinel-2/theias-sentinel-2-l2a-product-format/
    Attributes:
        dataset: A dataset containing extracted data.
    """
    def __init__(self,
                 input_product: Path,
                 requested_bands: List[str],
                 geom: Optional[dict] = None,
                 out_resolution: Optional[int] = None,
                 theia_bands: str = 'FRE',
                 theia_masks: Optional[Dict[str, Optional[List[int]]]] = None,
                 **_ignored) -> None:
        """See base class.
        Args:
            out_resolution: The wanted resolution of the output product. Used
                when performing resampling operations.
            theia_bands: The bands to be extracted. Must be either "SRE"
                (for Surface REflectance) or "FRE" (for Flat REflectance).
            theia_masks: A dict whose keys are the names of THEIA's masks to
                extract ("CLM", "MG2" or "SAT") and vals are lists of bits to
                use (e.g., [0, 1, 2] ; if None, all bits will be used for the
                corresponding mask).
        """
        super().__init__(input_product, requested_bands, geom)
        # 10 m and 20 m are the two native S2-THEIA grids (R1/R2).
        if out_resolution not in (None, 10, 20):
            raise InputError('"out_resolution" must be in (10, 20)')
        if theia_bands not in ('FRE', 'SRE'):
            raise InputError('"theia_bands" must be either "SRE" or "FRE"')
        self._inputs = S2THEIAInputs(*self._inputs, out_resolution,
                                     theia_bands, theia_masks)
    def extract_bands(self) -> None:
        """See base class."""
        # Check if data are compressed
        compressed = False
        if self._inputs.input_product.suffix == '.zip':
            compressed = True
        # Load metadata
        metadata = self._load_metadata_from_MTD(compressed)
        quantification_value, nodata = _get_product_coefs(metadata)
        # Filter bands: build a list of (path, band) pairs, one per requested
        # band, either inside the zip archive or on disk.
        if compressed:
            with ZipFile(self._inputs.input_product) as archive:
                root_path = format_zippath(self._inputs.input_product)
                try:
                    requested_bands = [
                        (root_path + [_ for _ in archive.namelist() if _.endswith(
                            f'_{self._inputs.theia_bands}_{band}.tif'
                        )][0], band) for band in self._inputs.requested_bands
                    ]
                except IndexError as no_band:
                    msg = ('One of the requested bands is not found in the '
                           'given product.')
                    raise ProductError(msg) from no_band
        else:
            try:
                requested_bands = [
                    (list(self._inputs.input_product.glob(
                        f'*_{self._inputs.theia_bands}_{band}.tif'
                    ))[0], band) for band in self._inputs.requested_bands
                ]
            except IndexError as no_band:
                msg = ('One of the requested bands is not found in the given '
                       'product.')
                raise ProductError(msg) from no_band
        # R1 = 10 m bands (B2/B3/B4/B8), R2 = 20 m bands.
        tmp = ['R1' if band in ('B2', 'B3', 'B4', 'B8') else 'R2'
               for band in self._inputs.requested_bands]
        if 'R1' in tmp and 'R2' in tmp:
            # NOTE(review): reverse sort puts R2 (20 m) bands first, so the
            # first extracted band — which fixes the bbox and CRS below — is
            # on the coarser grid; confirm this ordering is intentional.
            requested_bands = [rband for _, rband
                               in sorted(zip(tmp, requested_bands),
                                         reverse=True)]
        if 'R1' in tmp:
            min_res = 10
        else:
            min_res = 20
        # Set the default resolution
        if self._inputs.out_resolution is None:
            self._inputs = self._inputs._replace(out_resolution=min_res)
        # Extract data
        data = {}
        for path, band in tqdm(requested_bands, unit='bands'):
            with rasterio.open(path) as subdataset:
                if self._intermediate_data['x'] is None:  # 1st extracted_band
                    # Upsampling beyond the source resolution is refused.
                    if ((out_res := self._inputs.out_resolution)
                            > (in_res := subdataset.res[0])):
                        msg = (f'"out_resolution" must be <= {in_res} ; '
                               f'here, out_resolution={out_res}')
                        raise InputError(msg)
                    # Store the CRS
                    self._intermediate_data['crs'] = CRS.from_epsg(
                        subdataset.crs.to_epsg()
                    )
                    # xy_bbox is reused by every later band/mask read.
                    band_array, xy_bbox = self._extract_first_band(
                        subdataset, quantification_value, nodata
                    )
                else:
                    band_array = self._extract_nth_band(
                        subdataset, xy_bbox, quantification_value, nodata
                    )
                # Prepend a length-1 time axis.
                data[band] = band_array.reshape(1, *band_array.shape)
        print('')
        # Mask data
        if self._inputs.theia_masks is not None:
            for mask_name in self._inputs.theia_masks:
                if self._inputs.out_resolution == 10:
                    suffix = '_R1'
                else:
                    suffix = '_R2'
                if compressed:
                    # root_path was set in the compressed branch above.
                    with ZipFile(self._inputs.input_product) as archive:
                        mask_path = (root_path + [
                            _ for _ in archive.namelist()
                            if _.endswith(f'_{mask_name}{suffix}.tif')
                        ][0])
                else:
                    mask_path = list((self._inputs.input_product / 'MASKS'
                                      ).glob(f'*_{mask_name}{suffix}.tif'))[0]
                with rasterio.open(mask_path) as mask:
                    # Raw integer mask values (mask=True skips decoding).
                    mask_array = self._extract_nth_band(mask, xy_bbox, 1, 1,
                                                        mask=True)
                if self._inputs.theia_masks[mask_name] is None:
                    self._inputs.theia_masks[mask_name] = range(8)
                # Keep only pixels where at least one requested bit is set.
                bitmasks = [mask_array & (1 << b)
                            for b in self._inputs.theia_masks[mask_name]]
                mask_array *= np.any(bitmasks, axis=0)
                for band in data:
                    data[band] = np.where(mask_array, np.nan, data[band])
        # Store outputs
        self._intermediate_data['data'] = data
        self._intermediate_data['metadata'] = metadata
    def create_ds(self) -> None:
        """See base class."""
        # Create the dataset
        ds = xr.Dataset(
            {key: (['time', 'y', 'x'], val) for key, val
             in self._intermediate_data['data'].items()},
            coords={
                'x': ('x', self._intermediate_data['x']),
                'y': ('y', self._intermediate_data['y']),
                # Drop the fractional-second part of ACQUISITION_DATE.
                'time': [datetime.fromisoformat(self._intermediate_data[
                    'metadata']['ACQUISITION_DATE'].split('.')[0])]
            }
        )
        crs = self._intermediate_data['crs']
        # Set up coordinate variables (CF conventions)
        ds.x.attrs['axis'] = 'X'
        ds.x.attrs['long_name'] = f'x-coordinate ({crs.name})'
        ds.x.attrs['standard_name'] = "projection_x_coordinate"
        ds.x.attrs['units'] = 'm'
        ds.y.attrs['axis'] = 'Y'
        ds.y.attrs['long_name'] = f'y-coordinate ({crs.name})'
        ds.y.attrs['standard_name'] = "projection_y_coordinate"
        ds.y.attrs['units'] = 'm'
        ds.time.attrs['axis'] = 'T'
        ds.time.attrs['long_name'] = 'time'
        # Set up the 'grid mapping variable'
        ds['crs'] = xr.DataArray(name='crs', attrs=crs.to_cf())
        # Store metadata
        ds['product_metadata'] = xr.DataArray()
        for key, val in self._intermediate_data['metadata'].items():
            ds.product_metadata.attrs[key] = val
        ds.attrs['data_type'] = 'rho'
        ds.attrs['theia_bands'] = self._inputs.theia_bands
        if self._inputs.theia_masks is not None:
            ds.attrs['suppl_masks'] = ', '.join(
                f'THEIA_{key} ({"".join([str(b) for b in val])})'
                for key, val in self._inputs.theia_masks.items()
            )
        self.dataset = ds
    def _load_metadata_from_MTD(self, compressed):
        """Parse the product's MTD_ALL.xml into a flat metadata dict."""
        if compressed:
            with ZipFile(self._inputs.input_product) as archive:
                path = [_ for _ in archive.namelist() if _.endswith('MTD_ALL.xml')][0]
                with archive.open(path) as f:
                    tree = etree.parse(f)
        else:
            path = list(self._inputs.input_product.glob('*MTD_ALL.xml'))[0]
            with open(path) as f:
                tree = etree.parse(f)
        root = tree.getroot()
        metadata = defaultdict(dict)
        # Flatten the two top levels of the tree: element text plus attributes
        # (attributes stored under "tag:attribute" keys).
        for elem in root:
            for subelem in elem:
                if subelem.text.strip():
                    metadata[subelem.tag] = subelem.text
                for att in subelem.attrib:
                    metadata[':'.join([subelem.tag, att])] = subelem.attrib.get(att)
        for elem in root.iter('Horizontal_Coordinate_System'):
            for subelem in elem:
                metadata[subelem.tag] = subelem.text
        for elem in root.iter('SPECIAL_VALUE'):
            metadata[elem.get('name')] = elem.text
        for elem in root.iter('QUALITY_INDEX'):
            metadata[elem.get('name')] = elem.text
        for elem in root.iter('Processing_Information'):
            metadata[elem.find('NAME').text] = elem.find('VALUE').text
        return metadata
    def _compute_x_coords(self, x0, x1):
        """Build pixel-center x coordinates from the bbox edges [x0, x1]."""
        out_res = self._inputs.out_resolution
        x_start = x0 + out_res / 2
        x_stop = x1 - out_res / 2
        self._intermediate_data['x'] = np.arange(x_start, x_stop + 1, out_res)
    def _compute_y_coords(self, y0, y1):
        """Build pixel-center y coordinates (descending) from edges [y0, y1]."""
        out_res = self._inputs.out_resolution
        y_start = y0 - out_res / 2
        y_stop = y1 + out_res / 2
        self._intermediate_data['y'] = np.arange(y_start, y_stop - 1, -out_res)
    # pylint: disable=too-many-locals
    # False positive.
    def _extract_first_band(self, subdataset, quantification_value, nodata):
        """Read the first band (cropped to the ROI when one is set), decode it,
        compute the projected coordinate vectors, and return the band together
        with the xy bounding box reused for every subsequent band/mask."""
        if self._inputs.ROI is not None:
            self._reproject_geom()
            row_start, col_start, row_stop, col_stop = get_ij_bbox(
                subdataset,
                self._intermediate_data['geom']
            )
            arr = subdataset.read(
                1,
                window=Window.from_slices((row_start, row_stop + 1),
                                          (col_start, col_stop + 1))
            )
            # Update internal coords
            x0, y0 = subdataset.transform * (col_start, row_start)
            x1, y1 = subdataset.transform * (col_stop + 1, row_stop + 1)
        else:
            arr = subdataset.read(1)
            # Update internal coords
            x0, y0 = subdataset.transform * (0, 0)
            x1, y1 = subdataset.transform * (subdataset.width,
                                             subdataset.height)
        # Decode extracted data
        band_array = decode_data(arr, 1 / quantification_value, nodata)
        if (out_res := self._inputs.out_resolution) != subdataset.res[0]:
            band_array = resample_band_array(band_array, subdataset.res[0],
                                             out_res)
        # Compute projected coordinates
        self._compute_x_coords(x0, x1)
        self._compute_y_coords(y0, y1)
        # Update internal coords
        # NOTE(review): nudge the far corner 1 m inwards, presumably so that
        # subdataset.index() in _extract_nth_band lands on the last pixel
        # instead of one past it — confirm.
        x1 -= 1
        y1 += 1
        return band_array, [x0, y0, x1, y1]
    # pylint: disable=too-many-locals
    # More readable if coordinates are explicitly extracted from the bbox.
    def _extract_nth_band(self, subdataset, xy_bbox, quantification_value,
                          nodata, mask=False):
        """Read a subsequent band/mask over the same xy bbox as the first band.
        With mask=True the raw integer values are returned undecoded."""
        x0, y0, x1, y1 = xy_bbox
        row_start, col_start = subdataset.index(x0, y0)
        row_stop, col_stop = subdataset.index(x1, y1)
        arr = subdataset.read(
            1,
            window=Window.from_slices(
                (row_start, row_stop + 1),
                (col_start, col_stop + 1)
            )
        )
        if mask:
            band_array = arr
        else:
            # Decode extracted data
            band_array = decode_data(arr, 1 / quantification_value, nodata)
        ij_bbox = [row_start, col_start, row_stop, col_stop]
        if (out_res := self._inputs.out_resolution) != subdataset.res[0]:
            band_array = resize_and_resample_band_array(band_array, ij_bbox,
                                                        subdataset.res[0],
                                                        out_res)
        return band_array
def _get_product_coefs(metadata):
"""Gets both quantification and nodata values (to compute correct reflectances)"""
quantification_value = float(metadata['REFLECTANCE_QUANTIFICATION_VALUE'])
nodata = float(metadata['nodata'])
return quantification_value, nodata
|
nilq/baby-python
|
python
|
from tkinter import *

# Fixed-size main window.
root = Tk()
root.geometry('225x230')
root.resizable(False, False)
root.title('Learning English')

# Mode-selection buttons (created before the menu helpers run).
menu1 = Button(text='Can you translate?\nENG --> RUS', width=300, height=7)
menu2 = Button(text='Can you translate?\nRUS --> ENG', width=300, height=7)


def showMenu():
    """Show the two mode-selection buttons."""
    menu1.pack()
    menu2.pack()


def hideMenu():
    """Remove the mode-selection buttons from the window."""
    menu1.pack_forget()
    menu2.pack_forget()


showMenu()
root.mainloop()
|
nilq/baby-python
|
python
|
import numpy as np
from time import time
import xorshift
rng = xorshift.Xoroshiro()
rng2 = xorshift.Xorshift128plus()
def output(name, start, end):
elapsed = (end - start) * 1000
per_iter = elapsed / iters
per_rv = per_iter / count * 1e6
print '%s took %.2f ms/iter, %.2f ns per float' % (name, per_iter, per_rv)
def bench_binomial(iters, count, N, p):
print "Benchmarking generation of %d Bin(%d,%f) RVs, %d iterations" % (
count, N, p, iters)
print "------------------------------"
start = time()
for i in xrange(iters):
np.random.binomial(N, p, count)
end = time()
output('numpy', start, end)
start = time()
for i in xrange(iters):
rng.binomial(N, p, count)
end = time()
output('xoroshiro', start, end)
start = time()
for i in xrange(iters):
rng2.binomial(N, p, count)
end = time()
output('xoroshift128plus', start, end)
def bench_uniform(iters, count):
print "Benchmarking generation of %d Uniform(0,1) RVs, %d iterations" % (
count, iters)
print "------------------------------"
start = time()
for i in xrange(iters):
np.random.uniform(size=count)
end = time()
output('numpy', start, end)
start = time()
for i in xrange(iters):
rng.uniform(size=count)
end = time()
output('xoroshiro', start, end)
start = time()
for i in xrange(iters):
rng2.uniform(size=count)
end = time()
output('xoroshift128plus', start, end)
# Benchmark parameters: small iteration count, moderate batch size.
iters = 10
count = 131072  # variates per call (2**17)
N = 50  # binomial trials
p = 0.25  # binomial success probability
bench_binomial(iters, count, N, p)
print
bench_uniform(iters, count)
|
nilq/baby-python
|
python
|
from PySide2.QtCore import Qt
from PySide2.QtWidgets import QWidget, QHBoxLayout, QLabel, QSlider
from traitlets import HasTraits, Unicode, Int, observe
from regexport.views.utils import HasWidget
class LabelledSliderModel(HasTraits):
    """Observable state for a labelled slider: a caption plus an int value
    clamped to [min, max]."""

    label = Unicode()
    min = Int(default_value=0)
    max = Int()
    value = Int(default_value=1000)
    label2 = Unicode(allow_none=True)

    def __repr__(self):
        return f"{self.__class__.__name__}(label='{self.label}', min={self.min}, max={self.max}, value={self.value})"

    @observe('value')
    def _clamp_value_to_be_inside_bounded_range(self, change):
        """Clamp any newly assigned value into [min, max].

        FIX: the original reset out-of-range-low values to the literal 0
        instead of self.min, and left a debug print(self) in place.
        """
        value = change.new
        if value < self.min:
            self.value = self.min
        elif value > self.max:
            self.value = self.max
class LabelledSliderView(HasWidget):
    """Qt view for a LabelledSliderModel: caption, horizontal slider, readout."""

    def __init__(self, model: LabelledSliderModel):
        widget = QWidget()
        HasWidget.__init__(self, widget=widget)
        layout = QHBoxLayout()
        widget.setLayout(layout)

        # Caption on the left.
        self.label = QLabel()
        layout.addWidget(self.label)

        # Slider in the middle, seeded from the model's current state.
        self.slider = QSlider()
        self.slider.setMinimum(model.min)
        self.slider.setMaximum(model.max)
        self.slider.setValue(model.value)
        self.slider.setOrientation(Qt.Horizontal)
        layout.addWidget(self.slider)

        # Numeric readout on the right.
        self.value_label = QLabel()
        layout.addWidget(self.value_label)

        # Two-way binding: model changes re-render the view, slider moves
        # update the model.
        self.model = model
        self.model.observe(self.render)
        self.slider.valueChanged.connect(self._update_model_value)
        self.render()

    def render(self, change=None):
        """Refresh caption, slider bounds and the value readout from the model."""
        self.label.setText(self.model.label)
        self.slider.setMinimum(self.model.min)
        self.slider.setMaximum(self.model.max)
        self.value_label.setText(str(self.model.value))

    def _update_model_value(self, value: int):
        """Push the slider's new position into the model."""
        self.model.value = value
|
nilq/baby-python
|
python
|
'''
Configure web app settings. Updating or removing application settings will cause an app recycle.
'''
from .... pyaz_utils import _call_az
def list(name, resource_group, slot=None):
    '''
    Get the details of a web app's settings.

    Required Parameters:
    - name -- name of the web app. If left unspecified, a name will be randomly generated. You can configure the default using `az configure --defaults web=<name>`
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - slot -- the name of the slot. Default to the productions slot if not specified
    '''
    # NOTE: _call_az forwards locals() as CLI arguments, so this function must
    # not introduce any local variables besides its parameters.
    return _call_az("az webapp config appsettings list", locals())
def set(name, resource_group, settings=None, slot=None, slot_settings=None):
    '''
    Set a web app's settings.

    Required Parameters:
    - name -- name of the web app. If left unspecified, a name will be randomly generated. You can configure the default using `az configure --defaults web=<name>`
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`

    Optional Parameters:
    - settings -- space-separated appsettings in a format of `<name>=<value>`
    - slot -- the name of the slot. Default to the productions slot if not specified
    - slot_settings -- space-separated slot appsettings in a format of either `<name>=<value>` or `@<json_file>`
    '''
    # NOTE: _call_az forwards locals() as CLI arguments, so this function must
    # not introduce any local variables besides its parameters.
    return _call_az("az webapp config appsettings set", locals())
def delete(name, resource_group, setting_names, slot=None):
    '''
    Delete web app settings.

    Required Parameters:
    - name -- name of the web app. If left unspecified, a name will be randomly generated. You can configure the default using `az configure --defaults web=<name>`
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    - setting_names -- space-separated appsettings names

    Optional Parameters:
    - slot -- the name of the slot. Default to the productions slot if not specified
    '''
    # NOTE: _call_az forwards locals() as CLI arguments, so this function must
    # not introduce any local variables besides its parameters.
    return _call_az("az webapp config appsettings delete", locals())
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Bandit Algorithms
This script follows Chapter 2 of Sutton and Barto (2nd) and simply reproduces
figures 2.2 to 2.5.
Author: Gertjan van den Burg
License: MIT
Copyright: (c) 2020, The Alan Turing Institute
"""
import abc
import math
import matplotlib.pyplot as plt
import numpy as np
import random
import tqdm
from matplotlib import ticker
from scipy.special import logsumexp
class TestBed:
    """k-armed test bed with Gaussian arm rewards (Sutton & Barto, ch. 2)."""

    def __init__(self, k=10, baseline=0):
        self.k = k
        self.baseline = baseline  # mean of the distribution the arm means are drawn from
        self._opt_action = None

    @property
    def opt_action(self):
        """Index of the best arm; only valid after reset()."""
        if self._opt_action is None:
            raise ValueError("Not initialised properly!")
        return self._opt_action

    def step(self, action):
        """Sample a reward ~ N(q*(action), 1)."""
        return random.gauss(self._qstar[action], 1)

    def reset(self):
        """Draw fresh arm means q* ~ N(baseline, 1) and cache the optimal arm."""
        self._qstar = [random.gauss(self.baseline, 1) for _ in range(self.k)]
        self._opt_action = argmax(lambda a: self._qstar[a], range(self.k))
        return self
class Bandit(metaclass=abc.ABCMeta):
    """Base bandit: incremental action-value updates (algorithm on p. 32)."""

    def __init__(self, k=10, initial_value=0, stepsize="avg"):
        self.k = k
        self.initial_value = initial_value
        self.stepsize = stepsize  # "avg" -> sample average, otherwise constant alpha

    def reset(self):
        """Reset the action-value estimates Q and pull counts N."""
        self.Q = {action: self.initial_value for action in range(self.k)}
        self.N = {action: 0 for action in range(self.k)}
        if self.stepsize == "avg":
            # Sample-average stepsize 1/N, guarding the never-pulled case.
            self.alpha = lambda a: 1 if self.N[a] == 0 else 1.0 / self.N[a]
        else:
            self.alpha = lambda a: self.stepsize

    @abc.abstractmethod
    def get_action(self):
        """ Choose an action to take """

    def record(self, action, reward):
        """ Record the reward of the action taken """
        self.N[action] += 1
        self.Q[action] += self.alpha(action) * (reward - self.Q[action])
class EpsilonGreedy(Bandit):
    """Epsilon-greedy selection: explore with probability epsilon, else greedy."""

    def __init__(self, k=10, epsilon=0.1, initial_value=0, stepsize="avg"):
        super().__init__(k=k, initial_value=initial_value, stepsize=stepsize)
        self.epsilon = epsilon

    def get_action(self):
        explore = random.random() <= self.epsilon
        if explore:
            return random.randint(0, self.k - 1)
        return argmax(lambda a: self.Q[a], range(self.k))

    def label(self):
        params = (self.epsilon, self.initial_value, self.stepsize)
        return (
            r"$\varepsilon$-greedy ($\varepsilon = %g$, $Q_1 = %g$, $\alpha = %s$)"
            % params
        )
class UpperConfidence(Bandit):
    """UCB selection: value estimate plus a confidence-bound exploration bonus."""

    def __init__(self, k=10, c=2.0):
        super().__init__(k=k)
        self.c = c  # exploration strength

    def reset(self):
        super().reset()
        self.t = 0  # total number of pulls so far

    def get_action(self):
        self.t += 1
        # Pull every arm at least once first (the bonus divides by N[a]).
        for action in range(self.k):
            if self.N[action] == 0:
                return action
        ucb = lambda a: self.Q[a] + self.c * math.sqrt(
            math.log(self.t) / self.N[a]
        )
        return argmax(ucb, range(self.k))

    def label(self):
        return r"UCB ($c = %g$)" % self.c
class GradientBandit(Bandit):
    """Gradient bandit: softmax over learned action preferences (section 2.8)."""

    def __init__(self, k=10, stepsize="avg", use_baseline=True):
        super().__init__(k=k, stepsize=stepsize)
        self.use_baseline = use_baseline

    def reset(self):
        super().reset()
        self.H = {a: 0 for a in range(self.k)}  # action preferences
        self.probs, self.Rtbar, self.t = None, 0, 0

    def get_action(self):
        self.t += 1
        # Softmax over preferences, computed stably via logsumexp.
        lse = logsumexp(list(self.H.values()))
        self.probs = [math.exp(self.H[a] - lse) for a in range(self.k)]
        (choice,) = random.choices(list(range(self.k)), weights=self.probs, k=1)
        return choice

    def record(self, action, reward):
        At, Rt = action, reward
        for a in range(self.k):
            gradient = (Rt - self.Rtbar) * ((At == a) - self.probs[a])
            self.H[a] += self.alpha(a) * gradient
        # Note that the choice of baseline is somewhat arbitrary, but the
        # average reward works well in practice. See discussion on page 40 of
        # Sutton & Barto.
        if self.use_baseline:
            self.Rtbar += 1 / self.t * (Rt - self.Rtbar)

    def label(self):
        bsln = "with" if self.use_baseline else "without"
        return r"Gradient ($\alpha = %s$, %s baseline)" % (self.stepsize, bsln)
def argmax(func, args):
    """Return the element of *args* that maximises func(arg).

    Ties go to the first maximiser; an empty *args* yields None.
    """
    best_val = -float("inf")
    best_arg = None
    for candidate in args:
        val = func(candidate)
        if val > best_val:
            best_val, best_arg = val, candidate
    return best_arg
def plot_common(axis, data, bandits):
    """Plot one curve per bandit (rows of *data*) with legend and x-label."""
    axis.plot(data.T)
    axis.legend([bandit.label() for bandit in bandits])
    axis.set_xlabel("Steps")
def make_reward_plot(axis, avg_rewards, bandits):
    """Average-reward-per-step panel."""
    plot_common(axis, avg_rewards, bandits)
    axis.set_ylabel("Average\nreward", rotation="horizontal", ha="center")
def make_optact_plot(axis, avg_optact, bandits):
    """Percentage-of-optimal-action panel (y axis formatted as percent)."""
    plot_common(axis, avg_optact, bandits)
    axis.set_yticks([0, 0.2, 0.4, 0.6, 0.8, 1.0])
    axis.set_ylim(0, 1)
    axis.yaxis.set_major_formatter(ticker.PercentFormatter(1.0))
    axis.set_ylabel("%\nOptimal\naction", rotation="horizontal", ha="center")
def run_experiment(env, bandits, repeats, steps):
    """Run every bandit for `repeats` independent runs of `steps` pulls each.

    Returns (avg_rewards, avg_optact): arrays of shape (len(bandits), steps)
    averaged over the repeats.
    """
    n_bandits = len(bandits)
    rewards = np.zeros((n_bandits, repeats, steps))
    optact = np.zeros((n_bandits, repeats, steps))
    for rep in tqdm.trange(repeats):
        # reset the bandits and the environment
        for bandit in bandits:
            bandit.reset()
        env.reset()
        for step_i in range(steps):
            for b_i, bandit in enumerate(bandits):
                action = bandit.get_action()
                reward = env.step(action)
                bandit.record(action, reward)
                rewards[b_i, rep, step_i] = reward
                optact[b_i, rep, step_i] = action == env.opt_action
    return rewards.mean(axis=1), optact.mean(axis=1)
def figure_2_2(k=10, repeats=2000, steps=1000, epsilons=None):
    """Reproduce figure 2.2: epsilon-greedy for several epsilon values."""
    env = TestBed(k=k)
    bandits = [EpsilonGreedy(k=k, epsilon=eps)
               for eps in (epsilons or [0.1, 0.01, 0])]
    avg_rewards, avg_optact = run_experiment(env, bandits, repeats, steps)
    fig, (reward_ax, optact_ax) = plt.subplots(2, 1)
    make_reward_plot(reward_ax, avg_rewards, bandits)
    make_optact_plot(optact_ax, avg_optact, bandits)
    plt.show()
def figure_2_3(k=10, repeats=2000, steps=1000):
    """Reproduce figure 2.3: optimistic initial values vs. realistic ones."""
    env = TestBed(k=k)
    realistic = EpsilonGreedy(k=k, epsilon=0.1, initial_value=0, stepsize=0.1)
    optimistic = EpsilonGreedy(k=k, epsilon=0, initial_value=5, stepsize=0.1)
    bandits = [realistic, optimistic]
    _, avg_optact = run_experiment(env, bandits, repeats, steps)
    fig, axis = plt.subplots(1, 1)
    make_optact_plot(axis, avg_optact, bandits)
    plt.show()
def figure_2_4(k=10, repeats=2000, steps=1000, c=2):
    """Reproduce figure 2.4: UCB vs. epsilon-greedy average reward."""
    env = TestBed(k=k)
    bandits = [EpsilonGreedy(k=k, epsilon=0.1), UpperConfidence(k=k, c=c)]
    avg_rewards, _ = run_experiment(env, bandits, repeats, steps)
    fig, axis = plt.subplots(1, 1)
    make_reward_plot(axis, avg_rewards, bandits)
    plt.show()
def figure_2_5(k=10, repeats=1000, steps=1000):
    """Reproduce figure 2.5: gradient bandit with and without baseline."""
    env = TestBed(k=k, baseline=4)
    # Same legend order as the book: baseline variants first, alpha ascending.
    bandits = [
        GradientBandit(k=k, stepsize=alpha, use_baseline=with_baseline)
        for with_baseline in (True, False)
        for alpha in (0.1, 0.4)
    ]
    _, avg_optact = run_experiment(env, bandits, repeats, steps)
    fig, axis = plt.subplots(1, 1)
    make_optact_plot(axis, avg_optact, bandits)
    plt.show()
def playground(k=10, repeats=2000, steps=1000):
    """ Function for if you want to play around with bandits"""
    env = TestBed(k=k)
    bandits = [
        EpsilonGreedy(k=k, epsilon=0.01),
        EpsilonGreedy(k=k, initial_value=5, epsilon=0.1),
        UpperConfidence(k=k, c=2),
        GradientBandit(k=k, stepsize=0.1),
    ]
    avg_reward, avg_optact = run_experiment(env, bandits, repeats, steps)
    fig, (reward_ax, optact_ax) = plt.subplots(2, 1)
    make_reward_plot(reward_ax, avg_reward, bandits)
    make_optact_plot(optact_ax, avg_optact, bandits)
    plt.show()
def main():
    """Render each of the book's figures in turn."""
    # enable or disable plots you want to see
    figure_2_2()
    figure_2_3()
    figure_2_4()
    figure_2_5()
    # playground(repeats=1000, steps=5000)


if __name__ == "__main__":
    main()
|
nilq/baby-python
|
python
|
import unittest
import pandas as pd
from tests.test_utils import TestUtils
from enda.ml_backends.sklearn_estimator import EndaSklearnEstimator
try:
from sklearn.linear_model import LinearRegression, SGDRegressor
from sklearn.ensemble import AdaBoostRegressor, RandomForestRegressor
from sklearn.svm import SVR, LinearSVR
from sklearn.preprocessing import PolynomialFeatures, StandardScaler
from sklearn.neighbors import KNeighborsRegressor
from sklearn.pipeline import Pipeline
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.feature_selection import SelectFromModel
from sklearn.neural_network import MLPRegressor
except ImportError as e:
raise ImportError("scikit-learn is required is you want to test enda's EndaSklearnEstimator. "
"Try: pip install scikit-learn>=0.24.1", e)
class TestEndaSklearnEstimator(unittest.TestCase):
    """EndaSklearnEstimator should wrap a broad range of sklearn estimators."""

    def test_estimators(self):
        """Each wrapped estimator must train/predict and preserve the
        pandas.DatetimeIndex of the test set in its prediction."""
        train_set, test_set, target_name = TestUtils.read_example_a_train_test_sets()
        estimators = [
            LinearRegression(),
            AdaBoostRegressor(),
            SVR(),
            Pipeline([('poly', PolynomialFeatures(degree=3)),
                      ('linear', LinearRegression(fit_intercept=False))]),
            Pipeline([('standard_scaler', StandardScaler()),
                      ('sgd_regressor', SGDRegressor())]),
            KNeighborsRegressor(n_neighbors=10),
            GaussianProcessRegressor(),
            Pipeline([('feature_selection', SelectFromModel(LinearSVR())),
                      ('classification', RandomForestRegressor())]),
            Pipeline([('standard_scaler', StandardScaler()),
                      ('mlp_regressor', MLPRegressor(solver='lbfgs', alpha=1e-5,
                                                     hidden_layer_sizes=(5, 5),
                                                     random_state=1))]),
        ]
        for estimator in estimators:
            model = EndaSklearnEstimator(estimator)
            model.train(train_set, target_name)
            prediction = model.predict(test_set, target_name)
            # prediction must preserve the pandas.DatetimeIndex
            self.assertIsInstance(prediction.index, pd.DatetimeIndex)
            self.assertTrue((test_set.index == prediction.index).all())
|
nilq/baby-python
|
python
|
""" Generate new files from templates. """
import argparse
import sys
from typing import Optional
from contextlib import contextmanager
import os
import shlex
from itertools import chain
from cjrh_template import Template
import biodome
__version__ = '2017.10.3'
@contextmanager
def file_or_stdout(args, filename: Optional[str] = None):
    """Yield a writable handle: stdout when --stdout was given, else a file
    in args.outdir.

    When *filename* is None, the target name is args.template with its
    trailing ".templitz" extension stripped.
    """
    for path, hit in all_templates(args):
        if args.template in hit:
            break
    else:
        raise FileNotFoundError('Template not found!')
    if args.stdout:
        handle = sys.stdout
    else:
        target_name = filename or hit.rpartition('.')[0]  # drop ".templitz"
        handle = open(os.path.join(args.outdir, target_name), 'w+')
    try:
        yield handle
    finally:
        if handle is not sys.stdout:
            handle.close()
def all_templates(args):
    """Yield (directory, filename) pairs for every *.templitz file found on
    the search path: current dir, then TEMPLITZ_PATH entries, then the
    bundled library/ directory next to this module."""
    # NOTE: despite the old annotation, this is a list of path strings.
    pathstr: list = biodome.environ.get('TEMPLITZ_PATH', '').split(os.pathsep)
    # Current dir first, and /library of templitz.py dir as last resort
    paths = chain(
        [os.getcwd()],
        pathstr,
        [os.path.join(os.path.dirname(__file__), 'library')]
    )
    for p in paths:
        if not os.path.exists(p):
            continue
        for fname in os.listdir(p):
            if fname.endswith('.templitz'):
                yield p, fname
def load_template(args):
    """Locate args.template on the search path and return it as a Template.

    Raises:
        FileNotFoundError: if no template on the path matches args.template.
    """
    paths = biodome.environ.get('TEMPLITZ_PATH', '').split(os.pathsep)
    for path, hit in all_templates(args):
        if args.template in hit:
            break
    else:
        # FIX: the original joined the locations directly after the colon with
        # no separating newline; list them one per line.
        msg = (f'Error: template "{args.template}" not found in any of '
               f'the following locations:\n')
        msg += '\n'.join(paths)
        raise FileNotFoundError(msg)
    with open(os.path.join(path, hit)) as f:
        data = f.read()
    return Template(data)
def subs(args):
    """Render the template with the -p key=value params and write the result."""
    tmpl = load_template(args)
    params = {}
    for raw in args.params:
        key, _, value = raw.partition('=')
        params[key] = value
    output = tmpl.safe_substitute(params)
    # Strip out lines starting with "#templitz" and process settings in
    # them.
    settings = {}
    final_lines = []
    for line in output.splitlines(False):
        if not line.startswith('#templitz'):
            final_lines.append(line)
            continue
        data = line.partition('#templitz')[2]
        for item in shlex.split(data):
            key, _, value = item.partition('=')
            # Handle toggles/bools automatically
            settings[key] = value.strip('"') or True
    output = '\n'.join(final_lines)
    with file_or_stdout(args, filename=settings.get('filename')) as f:
        f.write(output)
def info(args):
    """Print the substitution variables available in the template."""
    tmpl = load_template(args)
    print('The template has the following vars: ')
    print()
    for placeholder in tmpl.placeholders():
        print(' ${%s}' % placeholder)
    print()
def main():
    """CLI entry point: parse arguments and dispatch to info/list/substitute."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--template')
    parser.add_argument('-i', '--info', action='store_true',
                        help='Information about the templit.')
    parser.add_argument('-l', '--list', action='store_true',
                        help='List all available templitz.')
    parser.add_argument('-s', '--stdout', action='store_true',
                        help='Write to stdout instead of file.')
    parser.add_argument('-o', '--outdir', default=os.getcwd(),
                        help='Output directory.')
    parser.add_argument('-p', '--params', nargs='+', default=[])
    args = parser.parse_args()
    try:
        if args.info:
            info(args)
        elif args.list:
            for path, fname in all_templates(args):
                print(path, fname)
        else:
            subs(args)
    except FileNotFoundError as e:
        print(f'Error: {e!s}')


if __name__ == '__main__':
    main()
|
nilq/baby-python
|
python
|
import os
import paramiko
def get_private_key():
    """Load the client's RSA key from the default ubuntu .ssh directory."""
    # or choose the location and the private key file on your client
    key_path = os.path.expanduser("/home/ubuntu/.ssh/id_rsa")
    return paramiko.RSAKey.from_private_key_file(key_path, password='')
def get_ssh(myusername, myhostname, myport):
    """Open an SSH connection using system host keys and auto-add policy."""
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    # Key-based variant:
    # client.connect(myhostname, username=myusername, port=myport, pkey=private_key)
    client.connect(myhostname, username=myusername, port=myport)
    return client
def block_exec(ssh, command):
    """Run *command* on the remote host and block until it finishes."""
    _stdin, stdout, _stderr = ssh.exec_command(command)
    stdout.channel.recv_exit_status()  # wait for command completion
    return
def clear_bw_config2(ssh, interface):
    """Remove any existing tc qdisc/class/filter configuration on *interface*."""
    for template in ("sudo tc qdisc del dev %s root",
                     "sudo tc qdisc del dev %s ingress",
                     "sudo tc class del dev %s root",
                     "sudo tc filter del dev %s root"):
        block_exec(ssh, template % interface)
def exec_bw_config2(ssh, interface, bandwidth, ip, subnetmasklength):
    """Install an HTB qdisc on *interface* capping traffic towards
    ip/subnetmasklength at *bandwidth*.
    NOTE(review): in tc, "mbps" means megaBYTES/s ("mbit" is megabits) —
    confirm the intended unit."""
    # Reset any previous shaping rules before installing new ones.
    clear_bw_config2(ssh, interface)
    # create a qdisc (queuing discipline), 12 is default class
    cmd1 = "sudo tc qdisc add dev %s root handle 1: htb default 12" % interface
    print cmd1
    block_exec(ssh, cmd1)
    # define the performance for default class
    cmd2 = "sudo tc class add dev %s parent 1: classid 1:1 htb rate %dmbps ceil %dmbps" % (interface, bandwidth, bandwidth )
    print cmd2
    block_exec(ssh, cmd2)
    # Route matching destination traffic into the capped class 1:1.
    filter_cmd = "sudo tc filter add dev %s protocol ip parent 1:0 prio 1 u32 match ip dst %s/%d flowid 1:1" % (interface, ip, subnetmasklength)
    print filter_cmd
    block_exec(ssh, filter_cmd)
def main():
    """Apply a 128 (tc-)mbps cap towards 10.0.0.0/8 on eth0 of each host."""
    hosts = ["10.0.1.193", "10.0.1.192", "10.0.1.191", "10.0.1.190"]
    username = "ubuntu"
    port = 22
    for host in hosts:
        # 1. create ssh connection, 2. run the bw config with params
        ssh = get_ssh(username, host, port)
        clear_bw_config2(ssh, "eth0")
        exec_bw_config2(ssh, "eth0", 128, "10.0.0.0", 8)
    return


if __name__ == '__main__':
    main()
|
nilq/baby-python
|
python
|
import json
from unittest.mock import patch
from ddt import ddt
from django.test import tag
from django.urls import reverse
from requests.exceptions import HTTPError
from rest_framework import status
from .test_setup import TestSetUp
@tag('unit')
@ddt
class ViewTests(TestSetUp):
    """Unit tests for the API views that proxy catalog/experience requests
    to the configured XIS service."""

    # Single source of truth for the message the views return when the XIS
    # call fails (previously copy-pasted into four tests).
    XIS_ERROR_MSG = "error reaching out to configured XIS API; " + \
        "please check the XIS logs"

    def _mock_xis_success(self, requests, payload, method='get'):
        """Wire the patched `api.views.requests` module so `method` returns
        a 200 response whose .json() yields `payload`."""
        http_resp = requests.return_value
        getattr(requests, method).return_value = http_resp
        http_resp.json.return_value = payload
        http_resp.status_code = 200

    def _assert_xis_error(self, response):
        """Assert `response` is a 500 carrying the standard XIS error message."""
        responseDict = json.loads(response.content)
        self.assertEqual(response.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR)
        self.assertEqual(responseDict['message'], self.XIS_ERROR_MSG)

    def test_get_catalogs(self):
        """Test that calling the endpoint /api/catalogs returns a list of
        catalogs"""
        url = reverse('api:catalogs')
        with patch('api.views.requests') as requests:
            self._mock_xis_success(requests, [{"test": "value"}])
            response = self.client.get(url)
            self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_catalogs_error(self):
        """Test that calling the endpoint /api/catalogs returns an
        http error if an exception is thrown while reaching out to XIS"""
        url = reverse('api:catalogs')
        with patch('api.views.requests.get') as get_request:
            get_request.side_effect = [HTTPError]
            response = self.client.get(url)
            self._assert_xis_error(response)

    def test_get_experiences(self):
        """Test that calling /api/experiences returns a list of
        experiences"""
        url = reverse('api:experiences')
        with patch('api.views.requests') as requests:
            self._mock_xis_success(requests, [{"test": "value"}])
            response = self.client.get(url)
            self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_experiences_error(self):
        """Test that calling /api/experiences returns an error if the call
        to the XIS throws an http error"""
        url = reverse('api:experiences')
        with patch('api.views.requests.get') as get_request:
            get_request.side_effect = [HTTPError]
            response = self.client.get(url)
            self._assert_xis_error(response)

    def test_get_experience(self):
        """Test that calling /api/experience/id returns an experience"""
        doc_id = '123456'
        url = reverse('api:experience', args=(doc_id,))
        with patch('api.views.requests') as requests:
            self._mock_xis_success(requests, {"test": "value"})
            response = self.client.get(url)
            self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_get_experience_error(self):
        """Test that calling /api/experience/id returns an error if the call
        to the XIS throws an http error"""
        doc_id = '123456'
        url = reverse('api:experience', args=(doc_id,))
        with patch('api.views.requests.get') as get_request:
            get_request.side_effect = [HTTPError]
            response = self.client.get(url)
            self._assert_xis_error(response)

    def test_patch_experience(self):
        """Test that calling /api/experience/id updates an experience"""
        doc_id = '123456'
        url = reverse('api:experience', args=(doc_id,))
        with patch('api.views.requests') as requests:
            self._mock_xis_success(requests, {"test": "value"}, method='patch')
            response = self.client.patch(url)
            self.assertEqual(response.status_code, status.HTTP_200_OK)

    def test_patch_experience_error(self):
        """Test that calling /api/experience/id returns an error if the call
        to the XIS throws an http error"""
        doc_id = '123456'
        url = reverse('api:experience', args=(doc_id,))
        with patch('api.views.requests.patch') as patch_request:
            patch_request.side_effect = [HTTPError]
            response = self.client.patch(url)
            self._assert_xis_error(response)
|
nilq/baby-python
|
python
|
import sys
stack = []
def recursion(stack, last):
    """Validate one doll of size `last` against the tokens left on `stack`.

    Tokens are consumed from the end of `stack` (which is mutated). Each
    token other than -last is treated as the closing size of a child doll
    and validated recursively; scanning stops at -last, this doll's opener.
    Returns `last` when the doll closes properly and its direct children's
    sizes sum to strictly less than `last`; returns -1 for any malformed
    sequence (including an exhausted stack).
    """
    if not stack:
        return -1
    current = stack.pop()
    inner_total = 0
    while current != -last:
        child_size = recursion(stack, current)
        if child_size == -1:
            return -1
        inner_total += child_size
        if not stack:
            break
        current = stack.pop()
    # The matching opener must have been found, and the contents must fit.
    if current != -last or inner_total >= last:
        return -1
    return last
# Each input line is one sequence of signed doll sizes; the final token is
# the outermost doll's closing size.
for line in sys.stdin:
    dolls = [int(token) for token in line.split()]
    outermost = dolls.pop()
    verdict = recursion(dolls, outermost)
    print(':-( Try again.' if verdict == -1 else ':-) Matrioshka!')
|
nilq/baby-python
|
python
|
from chaco.api import ArrayPlotData
from enable.component_editor import ComponentEditor
from traits.api import (
List, Instance, Either, Str, on_trait_change, Tuple, Any,
Property)
from traitsui.api import (
TabularEditor, View, UItem, VGroup, EnumEditor, HGroup, Item)
from traitsui.tabular_adapter import TabularAdapter
from pyfibre.gui.image_tab import ImageTab
class ImageMetricTab(ImageTab):
    """Image tab that pairs the image view with a metrics table and a
    scatter plot of two user-selected numeric columns.

    NOTE(review): `data` appears to be a pandas-like object (it provides
    `.to_records()`, `.dtypes` and `.columns`) — confirm against callers.
    `plot`, `plot_data`, `image_plot`, `component` and `selected_label` are
    presumably supplied by the ImageTab base class; not visible here.
    """
    # Tabular dataset backing both the table and the scatter plot.
    data = Any
    # Row tuples derived from `data`, consumed by the TabularEditor.
    _data = Property(List(Tuple), depends_on='data')
    # Column headers derived from `data` (leading '' for the record index).
    headers = Property(List(Str), depends_on='data')
    tabular_adapter = Instance(TabularAdapter, ())
    # Currently selected column names for the scatter plot axes.
    x_label = Str
    y_label = Str
    # Numeric columns only; these populate the axis-selection dropdowns.
    _display_cols = Property(List(Str), depends_on='data')
    #: Selected evaluation steps in the table
    _selected_rows = Either(List(Tuple), None)
    def default_traits_view(self):
        # Read-only, multi-select table bound to _selected_rows.
        editor = TabularEditor(
            adapter=self.tabular_adapter,
            show_titles=True,
            selected="_selected_rows",
            auto_update=False,
            multi_select=True,
            editable=False,
        )
        # Layout: image + label on the left, axis pickers + scatter plot on
        # the right, with the metrics table underneath.
        return View(
            VGroup(
                HGroup(
                    VGroup(
                        UItem('selected_label',
                              style='simple'),
                        UItem('image_plot',
                              editor=ComponentEditor(),
                              show_label=False),
                    ),
                    VGroup(
                        HGroup(
                            Item("x_label",
                                 editor=EnumEditor(name="_display_cols")),
                            Item("y_label",
                                 editor=EnumEditor(name="_display_cols")),
                        ),
                        UItem('component',
                              editor=ComponentEditor(),
                              show_label=False),
                    ),
                ),
                UItem("_data", editor=editor),
                layout="split"
            )
        )
    def _plot_data_default(self):
        # Start with empty x/y series so the plot renders before any column
        # has been selected.
        plot_data = ArrayPlotData()
        for data in ['x', 'y']:
            plot_data.set_data(data, [])
        return plot_data
    def _get__data(self):
        # Convert the dataset to a list of row tuples for the table.
        if self.data is None:
            return []
        fibre_data = self.data.to_records()
        return fibre_data.tolist()
    def _get__display_cols(self):
        # Only integer/float columns are plottable.
        if self.data is None:
            return []
        return [
            name for dtype, name in zip(
                self.data.dtypes, self.data.columns)
            if dtype in ["int64", "float64"]
        ]
    def _get_headers(self):
        # Leading '' labels the record-index column added by to_records().
        if self.data is None:
            return []
        return [''] + list(self.data.columns)
    def customise_plot(self, plot):
        # Hook used to style the chaco plot — presumably called by ImageTab.
        plot.plot(("x", "y"), type="scatter", color="blue")
    def _tabular_adapter_default(self):
        return TabularAdapter(columns=self.headers)
    @on_trait_change("headers")
    def _update_adapter(self):
        # Keep table columns in sync when the dataset changes.
        self.tabular_adapter.columns = self.headers
    @on_trait_change("data")
    def _update_data(self):
        # New dataset: refresh both scatter-plot series.
        self._update_plot_x_data()
        self._update_plot_y_data()
    @on_trait_change("x_label")
    def _update_plot_x_data(self):
        """ Update data points displayed by the x axis.
        This method is called when the `x` axis is changed.
        """
        if self.x_label == "":
            self.plot_data.set_data("x", [])
        else:
            self.plot.x_axis.title = self.x_label
            index = self.headers.index(self.x_label)
            x_data = [row[index] for row in self._data]
            self.plot_data.set_data("x", x_data)
    @on_trait_change("y_label")
    def _update_plot_y_data(self):
        """ Update data points displayed by the y axis.
        This method is called when the `y` axis is changed.
        """
        if self.y_label == "":
            self.plot_data.set_data("y", [])
        else:
            self.plot.y_axis.title = self.y_label
            index = self.headers.index(self.y_label)
            y_data = [row[index] for row in self._data]
            self.plot_data.set_data("y", y_data)
    def reset_tab(self):
        # Clear the dataset in addition to the base-class reset behaviour.
        super().reset_tab()
        self.data = None
|
nilq/baby-python
|
python
|
from .device import (ORTDeviceInfo, get_available_devices_info,
get_cpu_device_info)
from .InferenceSession import InferenceSession_with_device
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
import sys
import numpy as np
# Puzzle input path comes from argv[1]; each line is one row of '#'/'.' cells.
with open(sys.argv[1]) as infile:
    rows = [a.strip() for a in infile]
def do_round(cube):
    """Advance the 3D cellular automaton (AoC 2020 day 17 rules) one step.

    A cell's neighbourhood slice includes the cell itself, so an active cell
    survives with a count of 3-4 (i.e. 2-3 true neighbours) and an inactive
    cell activates with a count of exactly 3. Returns a new boolean array;
    `cube` is not modified.
    """
    size_x, size_y, size_z = cube.shape
    nxt = np.zeros_like(cube)
    for i in range(size_x):
        lo_i, hi_i = max(i - 1, 0), min(i + 2, size_x)
        for j in range(size_y):
            lo_j, hi_j = max(j - 1, 0), min(j + 2, size_y)
            for k in range(size_z):
                region = cube[lo_i:hi_i, lo_j:hi_j, max(k - 1, 0):min(k + 2, size_z)]
                active = np.count_nonzero(region)
                if cube[i, j, k]:
                    # one higher than specs since we're counting this too
                    nxt[i, j, k] = 3 <= active <= 4
                else:
                    nxt[i, j, k] = active == 3
    return nxt
# Pad the starting plane by 6 cells on every side: the active region can grow
# by at most one cell per round in each direction over the 6 rounds.
square_size = 12 + len(rows)
start_cube = np.zeros((square_size, square_size, 13), dtype=bool)
for row_idx, row in enumerate(rows):
    for col_idx, cell in enumerate(row):
        if cell == '#':
            start_cube[row_idx + 6, col_idx + 6, 6] = True
for round_idx in range(6):
    cube = do_round(start_cube)
    print("Round {}: {} active".format(round_idx + 1, np.count_nonzero(cube)))
    start_cube = cube
|
nilq/baby-python
|
python
|
"""bsw URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from django.conf.urls import url, include
from .views import RingTimeViewSet, DefaultRingTimeViewSet, CallForwarding, RemoveCallForwarding, TestingMDNViewSet, QueryCallForwarding, QueryRingTime
from rest_framework import routers
# One DefaultRouter per endpoint so each viewset gets its own URL set.
ring_time_router = routers.DefaultRouter()
ring_time_router.register(r'set-ring-time', RingTimeViewSet)
default_ring_time_router = routers.DefaultRouter()
default_ring_time_router.register(r'default-ring-time', DefaultRingTimeViewSet)
call_forwarding_router = routers.DefaultRouter()
call_forwarding_router.register(r'set-forwarding-number', CallForwarding)
remove_forwarding_router = routers.DefaultRouter()
remove_forwarding_router.register(r'remove-forwarding-number', RemoveCallForwarding)
add_testing_mdn = routers.DefaultRouter()
add_testing_mdn.register(r'add-mdn', TestingMDNViewSet)
query_forwarding_router = routers.DefaultRouter()
query_forwarding_router.register(r'query-forwarding-number', QueryCallForwarding)
query_ring_time_router = routers.DefaultRouter()
query_ring_time_router.register(r'query-ring-time', QueryRingTime)
# mock up documentation for SI from the raw data from bsw/callforwarding/api/set-ring-time
urlpatterns = [
    # Fix: the first pattern previously used a non-raw string ('^api/'),
    # inconsistent with the raw-string regexes used everywhere else.
    url(r'^api/', include(ring_time_router.urls)),
    url(r'^api/', include(default_ring_time_router.urls)),
    url(r'^api/', include(call_forwarding_router.urls)),
    url(r'^api/', include(remove_forwarding_router.urls)),
    url(r'^api/', include(add_testing_mdn.urls)),
    url(r'^api/', include(query_forwarding_router.urls)),
    url(r'^api/', include(query_ring_time_router.urls))
]
|
nilq/baby-python
|
python
|
from pandas import DataFrame
from typing import List, Tuple, Dict
from models.team_stats import TeamSkeletonStats
def get_opponents_in_given_fixture_list(team_id: int, fixtures: DataFrame) -> List[int]:
    """Collect the ids of every opponent `team_id` faces in `fixtures`.

    Fixtures where the team is listed as away ('team_a') contribute the home
    side, and fixtures where it is home ('team_h') contribute the away side.
    """
    played_away = fixtures['team_a'] == team_id
    played_home = fixtures['team_h'] == team_id
    opponents_when_away = fixtures.loc[played_away, 'team_h']
    opponents_when_home = fixtures.loc[played_home, 'team_a']
    return list(opponents_when_away.values) + list(opponents_when_home.values)
def get_prev_and_next_opponents(
    team_id: int,
    number_of_opponents_to_get: int,
    fixtures: DataFrame
) -> Tuple[List[int], List[int]]:
    """Return ([last N opponents], [next N opponents]) for `team_id`.

    'Previous' opponents come from the most recent finished fixtures,
    'next' from the earliest unfinished ones.
    """
    involves_team = (fixtures['team_a'] == team_id) | (fixtures['team_h'] == team_id)
    team_fixtures = fixtures[involves_team]
    # Elementwise comparison on the 'finished' column (pandas boolean mask).
    played = team_fixtures[team_fixtures['finished'] == True]
    remaining = team_fixtures[team_fixtures['finished'] == False]
    recent = played.tail(number_of_opponents_to_get)
    upcoming = remaining.head(number_of_opponents_to_get)
    return (
        get_opponents_in_given_fixture_list(team_id, recent),
        get_opponents_in_given_fixture_list(team_id, upcoming),
    )
def get_current_team_stats(team: str, team_id: int, teams_data: Dict[str, DataFrame]) -> TeamSkeletonStats:
    """Aggregate a team's season-to-date totals and per-game averages.

    NOTE(review): raises ZeroDivisionError when the team has no rows in
    `teams_data` — confirm callers only pass teams with played games.
    """
    team_df = teams_data[team]
    games_played = len(team_df.index)
    xg_total = team_df.xG.sum()
    xga_total = team_df.xGA.sum()
    npxg_total = team_df.npxG.sum()
    npxga_total = team_df.npxGA.sum()
    goals_scored_total = team_df.scored.sum()
    goals_conceded_total = team_df.missed.sum()
    return TeamSkeletonStats(
        xg_total=xg_total,
        xga_total=xga_total,
        npxg_total=npxg_total,
        npxga_total=npxga_total,
        g_total=goals_scored_total,
        ga_total=goals_conceded_total,
        games_played=games_played,
        xg_avg=xg_total / games_played,
        xga_avg=xga_total / games_played,
        goals_scored_avg=goals_scored_total / games_played,
        goals_conceded_avg=goals_conceded_total / games_played,
        name=team,
        team_id=team_id,
    )
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# coding:utf-8
from pickle import load
# NOTE(review): pickle.load is only acceptable because banner.p ships with
# the project; never unpickle data from untrusted sources.
with open("banner.p", "rb") as f:
    print(load(f))
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
import re
from setuptools import setup
def get_version(filename):
    """Extract the ``__version__`` string assigned in a Python source file.

    Raises AttributeError if no ``__version__`` assignment is found
    (re.search returns None). Fix: the file handle was previously opened
    without being closed; use a context manager.
    """
    with open(filename) as f:
        contents = f.read()
    return re.search("__version__ = ['\"]([^'\"]+)['\"]", contents).group(1)
version = get_version('flake8_assertive.py')
# PyPI long description: README followed by the changelog.
description = open('README.rst').read() + "\n\n" + open('CHANGELOG.rst').read()
github_url = 'https://github.com/jparise/flake8-assertive'
setup(
    name='flake8-assertive',
    version=version,
    description='Flake8 unittest assert method checker',
    long_description=description,
    author='Jon Parise',
    author_email='jon@indelible.org',
    keywords='flake8 testing unittest assert',
    url=github_url,
    download_url=github_url + '/tarball/' + version,
    license='MIT',
    py_modules=['flake8_assertive'],
    # Register the checker with flake8; 'A50' is this plugin's error-code prefix.
    entry_points={
        'flake8.extension': ['A50 = flake8_assertive:Checker'],
    },
    install_requires=['flake8'],
    tests_require=['flake8>=3.0.0'],
    test_suite='tests',
    zip_safe=True,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Framework :: Flake8',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: 3.8',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Software Development :: Quality Assurance',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Testing :: Unit',
    ],
)
|
nilq/baby-python
|
python
|
import modules.weapon as weapon
# Pre-built starter weapons shared by the game modules.
# Positional args follow modules.weapon.Weapon's constructor; from the values
# here they appear to be (name, description, damage, ?, ?, damage_type, ?,
# pickup_message, drop_message) — confirm against modules/weapon.py.
basic_sword = weapon.Weapon("Sword", "A sword you found somewhere.", 10, 0.5, 10, "slash", 1 , "You took the sword.", "You dropped the sword.")
big_axe = weapon.Weapon("Axe", "A big axe you found somewhere.", 20, 0.5, 10, "slash", 1 , "You took the axe.", "You dropped the axe.")
|
nilq/baby-python
|
python
|
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator
from django.shortcuts import get_object_or_404, redirect, render
from django.views.decorators.cache import cache_page
from .forms import CommentForm, PostForm
from .models import Follow, Group, Post, User
from .settings import CACHE_TIME, PAGINATOR_NUM_PAGES
def get_paginator_page(request, items):
    """Return the Paginator page of `items` selected by the request's ?page= parameter."""
    page_number = request.GET.get('page')
    return Paginator(items, PAGINATOR_NUM_PAGES).get_page(page_number)
@cache_page(CACHE_TIME)
def index(request):
    """Home page: all posts, paginated; the rendered page is cached."""
    context = {'page_obj': get_paginator_page(request, Post.objects.all())}
    return render(request, 'posts/index.html', context)
def group_posts(request, slug):
    """Paginated list of posts belonging to one group (404 if slug unknown)."""
    group = get_object_or_404(Group, slug=slug)
    context = {
        'group': group,
        'page_obj': get_paginator_page(request, group.posts.all()),
        'is_group': True,
    }
    return render(request, 'posts/group_list.html', context)
def profile(request, username):
    """An author's profile page with their posts and the viewer's follow state.

    Fix: use get_object_or_404 so an unknown username yields a 404 instead of
    an unhandled User.DoesNotExist (500); matches the lookup style used by the
    other views in this module.
    """
    author = get_object_or_404(User, username=username)
    # Only authenticated users viewing someone else can be "following".
    is_following = (
        request.user.is_authenticated
        and author != request.user
        and author.following.filter(user=request.user).exists()
    )
    return render(request, 'posts/profile.html', {
        'page_obj': get_paginator_page(request, author.posts.all()),
        'author': author,
        'following': is_following
    })
def post_detail(request, post_id):
    """Single post page with an (optional) comment form."""
    context = {
        'post': get_object_or_404(Post, pk=post_id),
        'form': CommentForm(request.POST or None),
        'is_post_detail': True
    }
    return render(request, 'posts/post_detail.html', context)
@login_required
def post_create(request):
    """Create a new post owned by the current user, then go to their profile."""
    form = PostForm(request.POST or None, files=request.FILES or None)
    if form.is_valid():
        post = form.save(commit=False)
        post.author = request.user
        post.save()
        return redirect('posts:profile', username=request.user)
    return render(request, 'posts/create_post.html', {'form': form})
@login_required
def post_edit(request, post_id):
    """Edit an existing post; only its author may edit.

    Fix: added @login_required for consistency with the other mutating views
    (post_create, add_comment); anonymous users are now sent to the login
    page instead of being silently redirected to the post detail page.
    """
    post = get_object_or_404(Post, pk=post_id)
    if post.author != request.user:
        return redirect('posts:post_detail', post_id)
    form = PostForm(
        request.POST or None,
        files=request.FILES or None,
        instance=post
    )
    if not form.is_valid():
        return render(request, 'posts/create_post.html', {
            'is_edit': True,
            'form': form
        })
    form.save()
    return redirect('posts:post_detail', post_id=post_id)
@login_required
def add_comment(request, post_id):
    """Attach a comment from the current user to the given post."""
    post = get_object_or_404(Post, pk=post_id)
    form = CommentForm(request.POST or None)
    if form.is_valid():
        new_comment = form.save(commit=False)
        new_comment.author = request.user
        new_comment.post = post
        new_comment.save()
    return redirect('posts:post_detail', post_id=post_id)
@login_required
def follow_index(request):
    """Feed of posts written by authors the current user follows."""
    feed = Post.objects.filter(author__following__user=request.user)
    return render(request, 'posts/follow.html', {
        'page_obj': get_paginator_page(request, feed)
    })
@login_required
def profile_follow(request, username):
    """Subscribe the current user to `username`'s posts.

    Fixes: unknown usernames now yield 404 via get_object_or_404 (previously
    an unhandled User.DoesNotExist -> 500), and get_or_create replaces the
    exists()-then-create pattern, making the duplicate guard race-free.
    Self-follow is still disallowed.
    """
    author = get_object_or_404(User, username=username)
    if author != request.user:
        Follow.objects.get_or_create(user=request.user, author=author)
    return redirect('posts:profile', username)
@login_required
def profile_unfollow(request, username):
    """Remove the current user's subscription to `username` (404 if none exists)."""
    follow = get_object_or_404(
        Follow, user=request.user,
        author__username=username
    )
    follow.delete()
    return redirect('posts:profile', username)
|
nilq/baby-python
|
python
|
from numpy import random
from impl.distribution import distribution
class triangular(distribution):
    """Triangular distribution sampler that yields integer draws."""

    def __init__(self, mini, mode, maxi):
        # Store the bounds as floats so numpy receives uniform numeric types.
        self.mini = float(mini)
        self.mode = float(mode)
        self.maxi = float(maxi)

    def generate(self):
        # Draw one float sample and truncate toward zero to an int.
        sample = random.triangular(self.mini, self.mode, self.maxi)
        return int(sample)
|
nilq/baby-python
|
python
|
from .multiagent_particle_env import RLlibMultiAgentParticleEnv as MultiAgentParticleEnv
__all__ = [
"MultiAgentParticleEnv"
]
|
nilq/baby-python
|
python
|
from flask import abort, Flask, jsonify, request
import os
import asyncio
import pyjuicenet
import aiohttp
from prettytable import PrettyTable
from pytz import timezone
import datetime
import requests
import database_helper
import html_renderer
app = Flask(__name__)
@app.route("/")
def show_all_chargers():
    """Refresh charger state from the backing store, then render every charger."""
    database_helper.update_chargers()
    return html_renderer.print_chargers()
@app.route("/sql")
def show_sql_chargers():
    """Refresh charger state, then render a filtered view.

    NOTE(review): ['980'] is presumably a charger/location filter understood
    by print_chargers — confirm against html_renderer.
    """
    database_helper.update_chargers()
    return html_renderer.print_chargers(['980'])
@app.route("/qry")
def show_qry_chargers():
    """Refresh charger state, then render only the named sites."""
    database_helper.update_chargers()
    return html_renderer.print_chargers(['Penta-Taj', 'Joby Heights'])
@app.route("/render-qr-codes")
def render_qr_codes():
    # will produce a QR code for all chargers in the database
    html_renderer.generate_and_save_qr_code(request.url_root)
    return html_renderer.qr_codes_to_html()
@app.route("/get-assign-charger-command/<charger_id>")
def get_assign_charger_command(charger_id):
    # will return the command to type into slack.
    # we should get here after scanning a QR code
    return html_renderer.generate_assign_charger_text(charger_id)
# Slack handler
@app.route("/slack/assign-charger", methods=['POST'])
def assign_charger():
    """Slash-command endpoint: assign the charger named in the command text
    to the Slack user who invoked it."""
    team_id = request.form.get("team_id")
    token = request.form.get("token")
    response = "Hmm...something went wrong."
    if validate_request(team_id, token):
        user = request.form.get("user_name")
        charger_id = request.form.get("text")
        if user and charger_id:
            success, charger_name = database_helper.assign_driver(user, charger_id)
            if success:
                response = f"@{user}, you have been assigned {charger_name}. Find more information about other Joby chargers here: {request.url_root}!"
    # 'ephemeral' -> only the invoking user sees the reply.
    return jsonify(
        response_type='ephemeral',
        text=response)
def validate_request(team_id, token):
    """Return True iff the Slack team id and verification token match the
    values configured in the environment; log rejected requests."""
    expected_team = os.environ['TEAM_ID']
    expected_token = os.environ['SLACK_VERIFICATION_TOKEN']
    if team_id == expected_team and token == expected_token:
        return True
    print(f"Request with Team ID: {team_id} and token {token} is not valid.")
    return False
if __name__ == "__main__":
    # Fix: app.run() previously executed at import time, which would start a
    # second (dev) server when the module is imported by a WSGI host.
    app.run()
|
nilq/baby-python
|
python
|
import pytest
import networkx as nx
from ..pyfastg import add_node_to_digraph
def test_basic():
    """End-to-end check of add_node_to_digraph(): adding nodes, implicit
    creation of referenced neighbor nodes, and later filling-in of an
    implicitly-created node's attributes."""
    def check_asdf(g):
        # "asdf": cov 5.2, seq ATCGCC -> GC fraction 4/6, length 6.
        assert "asdf" in g.nodes
        assert g.nodes["asdf"]["cov"] == 5.2
        assert g.nodes["asdf"]["gc"] == 4 / 6.0
        assert g.nodes["asdf"]["length"] == 6
    def check_ghjasdf(g):
        # NOTE(review): helper is named check_ghjasdf but the node id it
        # checks is "ghjasd" — apparently intentional shorthand.
        assert "ghjasd" in g.nodes
        assert g.nodes["ghjasd"]["cov"] == 100
        assert g.nodes["ghjasd"]["gc"] == 1 / 3.0
        assert g.nodes["ghjasd"]["length"] == 3
    g = nx.DiGraph()
    # 1. Add node "asdf" to g
    add_node_to_digraph(
        g, {"name": "asdf", "cov": 5.2, "seq": "ATCGCC", "length": 6}
    )
    check_asdf(g)
    # 2. Add node "ghjasd" to g
    add_node_to_digraph(
        g,
        {
            "name": "ghjasd",
            "cov": 100,
            "seq": "CAT",
            "length": 3,
            "outgoing_node_names": ["asdf", "qwerty", "hamborgar"],
        },
    )
    # This should have added three new nodes (ghjasdf, qwerty, hamborgar)
    # qwerty and hamborgar, however, don't have any attributes (yet)
    # Double-check that asdf's attributes were not somehow lost
    check_asdf(g)
    check_ghjasdf(g)
    assert "qwerty" in g.nodes
    assert "hamborgar" in g.nodes
    assert ("ghjasd", "asdf") in g.edges
    assert ("ghjasd", "qwerty") in g.edges
    assert ("ghjasd", "hamborgar") in g.edges
    # 3. Add node "hamborgar" to g (it's already in there but is "empty")
    add_node_to_digraph(
        g, {"name": "hamborgar", "cov": 33.3, "seq": "AAAA", "length": 4}
    )
    # Again, check that prior nodes' attributes are ok
    check_asdf(g)
    check_ghjasdf(g)
    assert "qwerty" in g.nodes
    assert "hamborgar" in g.nodes
    assert ("ghjasd", "asdf") in g.edges
    assert ("ghjasd", "qwerty") in g.edges
    assert ("ghjasd", "hamborgar") in g.edges
    # seq AAAA contains no G/C, hence gc == 0.
    assert g.nodes["hamborgar"]["cov"] == 33.3
    assert g.nodes["hamborgar"]["gc"] == 0
    assert g.nodes["hamborgar"]["length"] == 4
def test_insufficient_attrs():
    """Each missing required attribute must be reported, checked in the
    order the implementation validates them: name, length, cov, seq."""
    g = nx.DiGraph()
    cases = [
        ({}, "name not present for all nodes"),
        ({"name": "123"}, "length not present for all nodes"),
        ({"name": "123", "length": 2}, "cov not present for all nodes"),
        ({"name": "123", "length": 2, "cov": 6.3},
         "seq not present for all nodes"),
    ]
    for node_attrs, expected_msg in cases:
        with pytest.raises(ValueError) as exc_info:
            add_node_to_digraph(g, node_attrs)
        assert expected_msg in str(exc_info.value)
    # Finally, this should work
    add_node_to_digraph(
        g, {"name": "123", "length": 2, "cov": 6.3, "seq": "AG"}
    )
    assert "123" in g.nodes
def test_length_mismatch():
    """A declared length that disagrees with the actual sequence length
    must be rejected with an informative error."""
    digraph = nx.DiGraph()
    with pytest.raises(ValueError) as exc_info:
        add_node_to_digraph(
            digraph, {"name": "asdf", "cov": 5.2, "seq": "A", "length": 6}
        )
    expected = "Length given vs. actual seq. length differs for node asdf"
    assert expected in str(exc_info.value)
|
nilq/baby-python
|
python
|
from rest_framework import serializers
from care.facility.api.serializers import TIMESTAMP_FIELDS
from care.facility.api.serializers.facility import FacilityBasicInfoSerializer
from care.facility.models import PatientConsultation, PatientRegistration, Facility
from care.facility.models.prescription_supplier import PrescriptionSupplier
from care.utils.serializer.external_id_field import ExternalIdSerializerField
from config.serializers import ChoiceField
class MinimalPatientSerializer(serializers.ModelSerializer):
    """Slim patient representation: identity and contact fields only."""
    # Expose the external UUID as the public "id", never the DB pk.
    id = serializers.CharField(source="external_id")
    class Meta:
        model = PatientRegistration
        fields = ("id", "name", "phone_number", "address")
class PrescriptionSupplierConsultationSerializer(serializers.ModelSerializer):
    """Read-only consultation summary embedded in prescription-supplier payloads."""
    # Expose the external UUID as the public "id", never the DB pk.
    id = serializers.CharField(source="external_id", read_only=True)
    patient = MinimalPatientSerializer(read_only=True)
    class Meta:
        model = PatientConsultation
        fields = ("id", "prescriptions", "discharge_advice", "patient")
class PrescriptionSupplierSerializer(serializers.ModelSerializer):
    """Serializer for PrescriptionSupplier that stamps `updated_user` on writes."""
    id = serializers.CharField(source="external_id", read_only=True)
    scheme = ChoiceField(choices=PrescriptionSupplier.SchemeChoices)
    status = ChoiceField(choices=PrescriptionSupplier.StatusChoices)
    # Read-only nested representations for API consumers.
    consultation_object = PrescriptionSupplierConsultationSerializer(source="consultation", read_only=True)
    facility_object = FacilityBasicInfoSerializer(source="facility", read_only=True)
    # Writable relations addressed by external id.
    consultation = ExternalIdSerializerField(required=True, queryset=PatientConsultation.objects.all())
    facility = ExternalIdSerializerField(required=True, queryset=Facility.objects.all())
    class Meta:
        model = PrescriptionSupplier
        exclude = ("deleted", "external_id")
        read_only_fields = TIMESTAMP_FIELDS
    def create(self, validated_data):
        instance = super().create(validated_data)
        instance.updated_user = self.context["request"].user
        instance.save()
        # Fix: DRF requires create() to return the instance; it previously
        # returned None, breaking the POST response body.
        return instance
    def update(self, instance, validated_data):
        instance = super().update(instance, validated_data)
        instance.updated_user = self.context["request"].user
        instance.save()
        return instance
|
nilq/baby-python
|
python
|
# ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from azure_devtools.perfstress_tests import PerfStressTest
from azure.identity import ClientSecretCredential, TokenCachePersistenceOptions
from azure.identity.aio import ClientSecretCredential as AsyncClientSecretCredential
try:
from dotenv import load_dotenv
load_dotenv()
except ImportError:
pass
class PersistentCacheRead(PerfStressTest):
    """Perf test measuring token retrieval from a pre-warmed persistent
    token cache, for both sync and async credentials.

    NOTE(review): both credentials share the same persistence options, so
    the timed runs presumably read the token cached during global_setup —
    confirm against azure-identity's cache semantics.
    """
    def __init__(self, arguments):
        super().__init__(arguments)
        client_id = self.get_from_env("AZURE_CLIENT_ID")
        tenant_id = self.get_from_env("AZURE_TENANT_ID")
        secret = self.get_from_env("AZURE_CLIENT_SECRET")
        # Unencrypted storage allowed so the test also runs on hosts
        # without an OS keyring/encryption facility.
        cache_options = TokenCachePersistenceOptions(allow_unencrypted_storage=True)
        self.credential = ClientSecretCredential(tenant_id, client_id, secret, cache_persistence_options=cache_options)
        self.async_credential = AsyncClientSecretCredential(
            tenant_id, client_id, secret, cache_persistence_options=cache_options
        )
        self.scope = "https://vault.azure.net/.default"
    async def global_setup(self):
        """Cache an access token"""
        await super().global_setup()
        # Prime the cache for both credentials so the timed runs below only
        # measure cache reads, not network token acquisition.
        self.credential.get_token(self.scope)
        await self.async_credential.get_token(self.scope)
    def run_sync(self):
        # Timed operation (sync path).
        self.credential.get_token(self.scope)
    async def run_async(self):
        # Timed operation (async path).
        await self.async_credential.get_token(self.scope)
    async def close(self):
        await self.async_credential.close()
        await super().close()
|
nilq/baby-python
|
python
|
import re, hashlib, random, json, csv, sys
from datetime import datetime, timedelta, tzinfo
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.core.cache import caches
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.core.files.uploadhandler import MemoryFileUploadHandler
from django.core.validators import validate_email
from django.db.models import ProtectedError
from django.forms import ValidationError
from django.forms.models import modelformset_factory, inlineformset_factory
from django.http import HttpResponseRedirect, HttpResponse, JsonResponse
from django.shortcuts import render_to_response, get_object_or_404, redirect, render
from django.template import RequestContext
from django.utils.datastructures import MultiValueDictKeyError
from django.views.defaults import page_not_found, permission_denied, bad_request
from itertools import chain
from polls import models
from polls.includes import forms, email_messages
from pprint import pprint
#################################################
### CACHE-BASED MESSAGE AND PARAMETER PASSING ###
#################################################
# Creates a one-shot message to be displayed on the next page
def set_cache_message(user, msg_type, msg):
    """Stash a one-shot flash message for `user` in the default cache.

    msg_type may be 'error', 'warning' or 'success'; any other value is
    stored as an info message. No-op for anonymous users.
    """
    if not user.is_authenticated():
        return
    cache = caches['default']
    suffixes = {
        'error': 'error_msg',
        'warning': 'warning_msg',
        'success': 'success_msg',
    }
    name = suffixes.get(msg_type, 'info_msg')
    key = hashlib.sha256(('%d_%s' % (user.pk, name)).encode('utf-8')).hexdigest()
    cache.set(key, msg)
# Reads the contents of the message variables, if they exist.
def caches_messages(user):
    """Pop the user's one-shot messages from the default cache.

    Returns (error_msg, warning_msg, success_msg, info_msg), each None when
    absent; entries are cleared so each message is shown at most once.
    Returns None for anonymous users.

    Fix: the key-build / get / clear sequence was quadruplicated (with stray
    semicolons); collapsed into a single loop over the message kinds.
    """
    if not user.is_authenticated():
        return
    cache = caches['default']
    messages = []
    for kind in ('error', 'warning', 'success', 'info'):
        # Same key derivation as set_cache_message: sha256("<pk>_<kind>_msg").
        key = hashlib.sha256(
            ('%d_%s_msg' % (user.pk, kind)).encode('utf-8')).hexdigest()
        messages.append(cache.get(key, None))
        cache.set(key, None)  # consume
    return tuple(messages)
def set_cache_param(user, name, value):
    """Store a per-user handoff parameter in the default cache (no-op for anonymous users)."""
    if not user.is_authenticated():
        return
    hashed_key = hashlib.sha256(('%d_%s' % (user.pk, name)).encode('utf-8')).hexdigest()
    caches['default'].set(hashed_key, value)
def caches_param(user, name):
    """Fetch and consume a per-user handoff parameter (None if absent or anonymous)."""
    if not user.is_authenticated():
        return
    cache = caches['default']
    hashed_key = hashlib.sha256(('%d_%s' % (user.pk, name)).encode('utf-8')).hexdigest()
    value = cache.get(hashed_key, None)
    cache.set(hashed_key, None)  # consume so the value is read at most once
    return value
#################################################
#################################################
def login_view(request):
    """Combined login / registration page.

    GET renders the login form (registration lives in a modal on the same
    page). On POST, the hidden 'wichform' field selects registration vs.
    login handling. Already-active authenticated users are redirected
    straight to ``?next=`` (or the home page).
    """
    login_active = "active"
    login_form = forms.LoginForm()
    reg_form = forms.RegisterForm()
    # Javascript run on page load; by default keep the register modal closed.
    js_actions = "$('#registerForm').modal('hide')"
    error_msg = ''
    register_error = ''
    info_msg = ''
    # Already-authenticated active users skip the form entirely.
    if request.user is not None and request.user.is_active:
        try:
            redir = request.GET['next'];
        except MultiValueDictKeyError:
            redir = '/polls/home/';
        return HttpResponseRedirect(redir)
    if (request.method == 'POST'):
        if (request.POST['wichform'] == 'registration'):
            reg_form = forms.RegisterForm(request.POST)
            if reg_form.is_valid():
                password = request.POST['password']
                first_name = request.POST['first_name']
                last_name = request.POST['last_name']
                email = request.POST['email']
                # The email doubles as the username. The account stays
                # inactive until the emailed activation link is used.
                new_user = User.objects.create_user(username=email, password=password, first_name=first_name, last_name=last_name, email=email)
                new_user.is_active = False
                new_user.save()
                # Send activation email
                salt = hashlib.sha256(str(random.getrandbits(256)).encode('utf-8')).hexdigest()[:5]
                activation_key = hashlib.sha256((salt+email).encode('utf-8')).hexdigest()
                # Activation links are valid for 2 days (48 hours).
                key_expires = datetime.now() + timedelta(2)
                new_user_profile = models.UserProfile(user=new_user, activation_key=activation_key, key_expires=key_expires)
                new_user_profile.save()
                new_user_profile.send_activation_email()
                reg_form = forms.RegisterForm()
                info_msg = "Thank you for your registration. You will now receive an activation email. Please activate your account within the next 48 hours."
            else:
                # Re-open the modal so the user sees the validation errors.
                js_actions = "$('#registerForm').modal('show')"
        else:
            login_form = forms.LoginForm(request.POST)
            email = request.POST['email']
            password = request.POST['password']
            user = authenticate(username=email, password=password)
            if user is not None:
                if user.is_active:
                    login(request, user)
                    try:
                        redir = request.GET['next'];
                    except MultiValueDictKeyError:
                        redir = '/polls/home/';
                    return HttpResponseRedirect(redir)
                else:
                    info_msg = 'Your user has not been activated yet. If the problem persist, please contact us.'
            else:
                error_msg = 'Wrong username or password. Please try again.'
    return render(
        request,
        'polls/login.html',
        context={
            'login_form': login_form,
            'reg_form': reg_form,
            'error_msg': error_msg,
            'info_msg': info_msg,
            'js_actions': js_actions,
            'login_active': login_active,
            'register_error': register_error
        }
    )
def logout_view(request):
    """Terminate the current session and send the user back to the login page."""
    logout(request)
    login_url = '/polls/login/'
    return HttpResponseRedirect(login_url)
def activate_account(request, activation_key):
    """Activate the user account matching *activation_key*.

    Renders the activation result page with:
    - ``status``: True when activation succeeded, False otherwise
      (expired key, unknown key, or already-activated account);
    - ``msg``: a human-readable explanation for the user.
    """
    msg = ''
    user_profile = None
    status = False
    try:
        user_profile = models.UserProfile.objects.get(activation_key=activation_key)
        status = user_profile.activate_account(activation_key)
        if not status:
            msg = 'Sorry, your activation link has expired. Please register again.'
        else:
            # Fixed user-facing typos: "Congratulatins" -> "Congratulations",
            # "succesfully" -> "successfully".
            msg = 'Congratulations! You have activated your account successfully. You can now login into BBPolls.'
    except ObjectDoesNotExist:
        msg = "Sorry, your account could not be found or you have already activated your account."
    return render(request, 'polls/activate_account.html',
        {'user_profile':user_profile, 'msg':msg, 'status':status});
@login_required(login_url='/polls/login')
def polls_index(request):
    """List the requesting pollster's polls grouped by status.

    Published, draft and archived polls are rendered in separate sections.
    Users outside the "sys_pollsters" group are bounced to the home page
    with an error message.
    """
    mypolls_active = 'active'
    js_file = "polls_index.js"
    # Only members of the "sys_pollsters" group may manage polls.
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    published_polls = models.Poll.objects.filter(user=request.user, poll_status=models.Poll.ST_PUBLISHED).order_by("publish_date")
    draft_polls = models.Poll.objects.filter(user=request.user, poll_status=models.Poll.ST_DRAFT).order_by("-last_modified")
    archived_polls = models.Poll.objects.filter(user=request.user, poll_status=models.Poll.ST_ARCHIVED).order_by("-archive_date")
    # send_poll_form = forms.SendPollForm()
    # Pull (and clear) any one-shot messages queued by a previous view.
    error_msg, warning_msg, success_msg, info_msg = caches_messages(request.user)
    return render(request, 'polls/polls_index.html',
        {'published_polls':published_polls,
        'username':request.user.username,
        'draft_polls':draft_polls,
        'archived_polls':archived_polls,
        'error_msg':error_msg,
        'warning_msg':warning_msg,
        'success_msg':success_msg,
        'info_msg':info_msg,
        'js_file':js_file,
        'send_poll_form':forms.SendPollForm(),
        'is_pollster':is_pollster,
        'mypolls_active':mypolls_active});
def _extract_emails(emails_text):
    """Parse a block of text into a deduplicated list of valid email addresses.

    Addresses may be separated by newlines, commas, semicolons or spaces —
    in any combination on the same line. Tokens that fail Django's email
    validation are silently skipped, matching the previous behaviour.
    """
    emails = set()
    for line in emails_text.splitlines():
        # Split on any run of separators. The previous implementation only
        # honoured ONE separator kind per line (first of ',', ';', ' '), so
        # a line like "a@x.com, b@y.com; c@z.com" silently dropped tokens
        # that still contained the other separator.
        for token in re.split(r'[,;\s]+', line.strip()):
            if not token:
                continue
            try:
                validate_email(token)
                emails.add(token)
            except ValidationError:
                continue
    return list(emails)

@login_required(login_url='/polls/login')
def send_poll(request, poll_id):
    """Send poll invitations to the email addresses POSTed in 'emails'.

    Pollster-only view. The textarea content is parsed by
    ``_extract_emails``; a warning is queued when no valid address is
    found, a success message otherwise. Always redirects to "my polls".
    """
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        poll = models.Poll.objects.get(pk=poll_id)
    except ObjectDoesNotExist:
        set_cache_message(request.user, "error", "Sorry! Poll not found")
        return HttpResponseRedirect("/polls/my-polls/")
    if (request.method == 'POST'):
        emails = _extract_emails(request.POST["emails"])
        if not emails:
            set_cache_message(request.user, "warning", "No emails were found")
        else:
            poll.send_poll(emails)
            set_cache_message(request.user, "success", "Invitations sent!")
    return HttpResponseRedirect("/polls/my-polls/")
@login_required(login_url='/polls/login')
def publish_poll(request, poll_id):
    """Move a poll into the published state and stamp its publish date.

    Pollster-only. Archived polls and polls that are not answerable are
    rejected with an error message; always redirects to "my polls".
    """
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area")
        return HttpResponseRedirect('/polls/home/')
    try:
        # NOTE(review): the poll is fetched without user=request.user, unlike
        # manage_poll and others — any pollster can publish any poll. Confirm
        # whether ownership should be enforced here.
        poll = models.Poll.objects.get(pk=poll_id)
        if (poll.poll_status == models.Poll.ST_ARCHIVED):
            set_cache_message(request.user, "error", "Sorry! An archived poll cannot be unarchived")
            return HttpResponseRedirect("/polls/my-polls/")
        elif (not poll.is_doable):
            # NOTE(review): is_doable is read as an attribute/property; if it
            # is a plain method on Poll this branch can never fire — confirm.
            set_cache_message(request.user, "error", "Sorry! Is not possible to publish this poll. At least one question in this poll that cannot be proeprly answered")
            return HttpResponseRedirect("/polls/my-polls/")
    except ObjectDoesNotExist:
        set_cache_message(request.user, "error", "Sorry! Poll not found")
        return HttpResponseRedirect("/polls/my-polls/")
    pprint("PUBLISH Current status: %s" % poll.poll_status, sys.stdout)  # debug trace
    poll.poll_status = models.Poll.ST_PUBLISHED
    poll.publish_date = datetime.now()
    poll.save()
    pprint("PUBLISH Current status: %s" % poll.poll_status, sys.stdout)  # debug trace
    return HttpResponseRedirect('/polls/my-polls/')
@login_required(login_url='/polls/login')
def archive_poll(request, poll_id):
    """Archive a published poll, discarding its incomplete responses.

    Pollster-only. Draft polls cannot be archived. Stamps archive_date and
    always redirects to "my polls".
    """
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        # NOTE(review): fetched without user=request.user — any pollster can
        # archive any poll; confirm whether ownership should be enforced.
        poll = models.Poll.objects.get(pk=poll_id)
        if (poll.poll_status == models.Poll.ST_DRAFT):
            set_cache_message(request.user, "error", "Sorry! Only published polls may be archived")
            return HttpResponseRedirect("/polls/my-polls/")
    except ObjectDoesNotExist:
        set_cache_message(request.user, "error", "Sorry! Poll not found")
        return HttpResponseRedirect("/polls/my-polls/")
    # Incomplete responses are dropped; completed ones are kept for results.
    models.Response.objects.filter(poll=poll, is_completed=False).delete()
    pprint("ARCHIVE Current status: %s" % poll.poll_status, sys.stdout)  # debug trace
    poll.poll_status = models.Poll.ST_ARCHIVED
    poll.archive_date = datetime.now()
    poll.save()
    pprint("ARCHIVE Current status: %s" % poll.poll_status, sys.stdout)  # debug trace
    return HttpResponseRedirect('/polls/my-polls/')
@login_required(login_url='/polls/login')
def unpublish_poll(request, poll_id):
    """Return a published poll to draft state, if nobody has answered it.

    Pollster-only. Archived polls and polls with at least one completed
    response are rejected; incomplete responses are discarded. Always
    redirects to "my polls".
    """
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        # NOTE(review): fetched without user=request.user — any pollster can
        # unpublish any poll; confirm whether ownership should be enforced.
        poll = models.Poll.objects.get(pk=poll_id)
        if (poll.poll_status == models.Poll.ST_ARCHIVED):
            set_cache_message(request.user, "error", "Sorry! An archived poll cannot be unarchived")
            return HttpResponseRedirect("/polls/my-polls/")
    except ObjectDoesNotExist:
        set_cache_message(request.user, "error", "Sorry! Poll not found")
        return HttpResponseRedirect("/polls/my-polls/")
    # A poll with any completed response is immutable.
    if (models.Response.objects.filter(poll=poll, is_completed=True)):
        set_cache_message(request.user, "error", "Sorry! This poll has already been answered and cannot be unpublish.")
        return HttpResponseRedirect("/polls/my-polls/")
    models.Response.objects.filter(poll=poll, is_completed=False).delete()
    pprint("UNPUBLISH Current status: %s" % poll.poll_status, sys.stdout)  # debug trace
    poll.poll_status = models.Poll.ST_DRAFT
    poll.save()
    pprint("UNPUBLISH New status: %s" % poll.poll_status, sys.stdout)  # debug trace
    return HttpResponseRedirect('/polls/my-polls/')
@login_required(login_url='/polls/login')
def create_poll(request):
    """Create a new poll, either from scratch or by importing a JSON file.

    Pollster-only. The page hosts two forms distinguished by which submit
    key is present in the POST ('create' vs 'import'). On success the user
    is redirected to the new poll's management page; on validation failure
    the page re-renders with the bound forms.
    """
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    mypolls_active = 'active'
    js_actions = "$('[data-toggle=\"tooltip\"]').tooltip({html: true})"
    create_form = forms.PollCreateForm(request.POST or None, prefix="create");
    import_form = forms.PollImportForm(request.POST or None, request.FILES or None, prefix="import")
    if(request.method == 'POST'): # Create
        if ('create' in request.POST):
            if create_form.is_valid():
                poll_name = request.POST['create-name'];
                p = models.Poll(name=poll_name, user=request.user)
                p.save();
                return HttpResponseRedirect('/polls/manage-poll/%d/' % p.pk)
        elif ('import' in request.POST): # Import
            if (import_form.is_valid()):
                # Check size
                # NOTE(review): despite the comment above, no size limit is
                # actually enforced on the uploaded file — confirm intended.
                data = b''
                for chunk in request.FILES['import-import_file'].chunks():
                    data+=chunk
                json_data = json.loads(data)
                try:
                    poll = models.Poll.import_poll(json_data, request.user)
                    return HttpResponseRedirect('/polls/manage-poll/%d/' % poll.pk)
                except ValidationError as ve:
                    # Surface the import failure on the file field.
                    import_form.errors["import_file"] = [ve.messages[0]]
    return render(request, 'polls/create-poll.html',
        {'create_form':create_form,
        'username':request.user.username,
        'import_form':import_form,
        'js_actions':js_actions,
        'is_pollster':is_pollster,
        'mypolls_active':mypolls_active});
@login_required(login_url='/polls/login')
def manage_poll(request, poll_id):
    """Edit a poll's settings and list its questions.

    Pollster-only; only the poll's owner can access it. Editing is allowed
    only while the poll is a draft — otherwise the form is rendered
    disabled. A 'scroll' parameter saved by the reorder views is replayed
    via JS so the page reopens at the same position.
    """
    mypolls_active = 'active'
    js_file = "manage-poll.js"
    js_actions = "$('[data-toggle=\"tooltip\"]').tooltip({html: true});"
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    # Restore the scroll offset queued by increase/decrease_question_order.
    scroll = caches_param(request.user, "scroll")
    pprint(scroll, sys.stderr)  # debug trace
    if scroll:
        js_actions += "$('body').scrollTop(%s)" % scroll
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        poll_form = forms.PollForm(instance = poll)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "Sorry! The poll you are trying to access does not exist anymore.");
        return HttpResponseRedirect("/polls/my-polls/")
    can_edit = poll.poll_status == models.Poll.ST_DRAFT;
    if not can_edit:
        poll_form.disable()
    question_queryset = models.Question.objects.filter(poll=poll).order_by('order');
    if (request.method == 'POST' and can_edit):
        poll_form = forms.PollForm(request.POST, instance=poll)
        if poll_form.is_valid():
            poll_form.save()
            return HttpResponseRedirect('/polls/my-polls/')
    error_msg, warning_msg, success_msg, info_msg = caches_messages(request.user)
    return render(request, 'polls/manage-poll.html',
        {'poll_form':poll_form,
        'username':request.user.username,
        'question_queryset':question_queryset,
        'poll':poll,
        'js_file':js_file,
        'js_actions':js_actions,
        'error_msg':error_msg,
        'warning_msg':warning_msg,
        'success_msg':success_msg,
        'info_msg':info_msg,
        'mypolls_active':mypolls_active,
        'is_pollster':is_pollster,
        'can_edit':can_edit});
@login_required(login_url='/polls/login')
def add_question(request, poll_id):
    """Create a new question (with inline choices) inside one of the
    user's polls.

    Three POST actions, selected by the 'submit' value:
    - 'Save': create the question and return to the poll page;
    - 'Save and add new': create the question and reload this page;
    - anything else: re-render with extra blank choice rows
      ('number-choices').
    """
    mypolls_active = 'active'
    js_actions = "$('[data-toggle=\"tooltip\"]').tooltip({html: true})"
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "The poll you are trying to create a question within, does not exist anymore.");
        return HttpResponseRedirect("/polls/my-polls/")
    # A poll with any completed response is frozen and cannot gain questions.
    try:
        response = models.Response.objects.get(poll=poll, is_completed=True)
        set_cache_message(request.user, 'error', "Sorry! The poll has been already answered and cannot be edited.");
        return HttpResponseRedirect("/polls/my-polls/")
    except ObjectDoesNotExist:
        pass
    question_form = forms.AddQuestionForm(request.POST or None)
    # Start with three blank inline choice rows.
    BaseChoiceFormset = inlineformset_factory(models.Question, models.Choice, form=forms.ChoiceForm, extra=3, can_delete=False)
    choice_formset = BaseChoiceFormset()
    if (request.method == 'POST'):
        if (request.POST['submit'] == 'Save'):
            if question_form.is_valid():
                new_question = question_form.save(commit=False)
                new_question.poll = poll
                new_question.save()
                choice_formset = BaseChoiceFormset(request.POST, instance=new_question)
                if choice_formset.is_valid():
                    choice_formset.save()
                    set_cache_message(request.user, 'success', 'New question created')
                    return HttpResponseRedirect('/polls/manage-poll/%s/' % poll_id)
        elif(request.POST['submit'] == 'Save and add new'):
            if question_form.is_valid():
                new_question = question_form.save(commit=False)
                new_question.poll = poll
                new_question.save()
                choice_formset = BaseChoiceFormset(request.POST, instance=new_question)
                if choice_formset.is_valid():
                    choice_formset.save()
                    # NOTE(review): this assignment is dead — the redirect on
                    # the next lines discards it (and AddQuestionForm is used
                    # elsewhere, not QuestionForm). Confirm intent.
                    question_form = forms.QuestionForm(None)
                    set_cache_message(request.user, 'success', 'New question created')
                    return HttpResponseRedirect('/polls/manage-poll/%s/add-question/' % poll_id)
        else:
            # "Add more choices": rebuild the formset with N extra blank rows
            # on top of the default three; bad input falls back to three.
            more_choices = request.POST['number-choices']
            if not more_choices:
                more_choices=0
            try:
                more_choices = int(more_choices)
                if more_choices < 0:
                    more_choices = 0
                BaseChoiceFormset = inlineformset_factory(models.Question, models.Choice, form=forms.ChoiceForm, extra=more_choices+3, can_delete=False)
            except ValueError:
                BaseChoiceFormset = inlineformset_factory(models.Question, models.Choice, form=forms.ChoiceForm, extra=3, can_delete=False)
            choice_formset = BaseChoiceFormset()
    error_msg, warning_msg, success_msg, info_msg = caches_messages(request.user)
    return render(request, 'polls/manage-question.html',
        {'question_form':question_form,
        'username':request.user.username,
        'poll':poll, 'choice_formset':choice_formset,
        'question_index':poll.number_questions+1,
        'create_question':True,
        'js_actions':js_actions,
        'error_msg':error_msg,
        'warning_msg':warning_msg,
        'success_msg':success_msg,
        'info_msg':info_msg,
        'is_pollster':is_pollster,
        'mypolls_active':mypolls_active});
@login_required(login_url='/polls/login')
def increase_question_order(request, poll_id, question_id, scroll):
    """Move a question one position later in its poll's ordering, then
    return to the poll management page (preserving the scroll offset)."""
    # Pollster-only area.
    try:
        request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    mypolls_active = 'active'
    try:
        owned_poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        target = models.Question.objects.get(pk=question_id, poll=owned_poll)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "Sorry! The question you are trying to increase order to does not exist anymore");
        return HttpResponseRedirect("/polls/manage-poll/%s/" % poll_id)
    pprint(scroll, sys.stderr)
    if scroll:
        # Remember the scroll position so the page reopens where it was.
        set_cache_param(request.user, "scroll", scroll)
    target.increase_order()
    return HttpResponseRedirect("/polls/manage-poll/%s/" % poll_id)
@login_required(login_url='/polls/login')
def decrease_question_order(request, poll_id, question_id, scroll):
    """Move a question one position earlier in its poll's ordering, then
    return to the poll management page (preserving the scroll offset)."""
    mypolls_active = 'active'
    # Pollster-only area.
    try:
        request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        owned_poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        target = models.Question.objects.get(pk=question_id, poll=owned_poll)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "Sorry! The question you are trying to decrease order to does not exist anymore");
        return HttpResponseRedirect("/polls/manage-poll/%s/" % poll_id)
    pprint(scroll, sys.stderr)
    if scroll:
        # Remember the scroll position so the page reopens where it was.
        set_cache_param(request.user, "scroll", scroll)
    target.decrease_order()
    return HttpResponseRedirect('/polls/manage-poll/%s/'% poll_id)
@login_required(login_url='/polls/login')
def manage_question(request, poll_id, question_id):
    """Edit an existing question and its inline choices.

    Editing is only possible while the poll is a draft; otherwise all
    forms are rendered disabled. POST with submit == 'Save' persists the
    question and choices; any other POST re-renders the page with extra
    blank choice rows ('number-choices'). Also builds the alert-box
    summaries for the question's multimedia attachments.
    """
    mypolls_active = 'active'
    manage_only = 'manage-only'
    js_file = "manage-question.js"
    js_actions = "$('[data-toggle=\"tooltip\"]').tooltip({html: true})"
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        question = models.Question.objects.get(pk=question_id, poll=poll)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "The question you are trying to delete does not exist anymore.")
        return HttpResponseRedirect("/polls/manage-poll/%s/" % poll_id)
    can_edit = poll.poll_status == models.Poll.ST_DRAFT;
    # Work out this question's 1-based position within the poll.
    i = 0;
    for q in models.Question.objects.filter(poll=poll):
        i +=1;
        if (q.pk == question.pk):
            break;
    BaseChoiceFormset = inlineformset_factory(models.Question, models.Choice, form=forms.ChoiceForm, extra=0)
    multimedia_sources = models.MultimediaSource.objects.filter(question=question).order_by('media_type')
    choice_formset = BaseChoiceFormset(request.POST or None, instance=question)
    if (request.method == 'POST' and can_edit):
        if (request.POST['submit'] == 'Save'):
            question_form = forms.QuestionForm(request.POST, instance=question)
            if question_form.is_valid():
                question = question_form.save()
                if choice_formset.is_valid():
                    choice_formset.save();
                    return HttpResponseRedirect('/polls/manage-poll/%s/' % poll_id)
        else:
            # Any other submit value means "add N blank choice rows".
            more_choices = request.POST['number-choices']
            if not more_choices:
                more_choices=0
            try:
                BaseChoiceFormset = inlineformset_factory(models.Question, models.Choice, form=forms.ChoiceForm, extra=int(more_choices))
            except ValueError:
                BaseChoiceFormset = inlineformset_factory(models.Question, models.Choice, form=forms.ChoiceForm, extra=0)
    # NOTE(review): the forms are rebound UNBOUND here on every request, so
    # validation errors from an invalid 'Save' POST above are discarded
    # before rendering — confirm whether that is intended.
    question_form = forms.QuestionForm(instance=question)
    choice_formset = BaseChoiceFormset(instance=question)
    if not can_edit:
        # Published/archived polls are read-only: disable every widget.
        question_form.disable()
        for choice_form in choice_formset:
            choice_form.disable()
    # Per-type multimedia summaries drive the coloured alert boxes.
    video_message = "You have %d video sources available" % question.number_video_srcs
    if (question.number_video_srcs > 0):
        video_class = "alert-success"
    else:
        video_class = "alert-danger"
    audio_message = "You have %d audio sources available" % question.number_audio_srcs
    if (question.number_audio_srcs > 0):
        audio_class = "alert-success"
    else:
        audio_class = "alert-danger"
    image_message = "You have %d image sources available" % question.number_image_srcs
    if (question.number_image_srcs > 0):
        image_class = "alert-success"
    else:
        image_class = "alert-danger"
    iframe_message = "You have %d iframe sources available" % question.number_iframe_srcs
    if (question.number_iframe_srcs > 0):
        iframe_class = "alert-success"
    else:
        iframe_class = "alert-danger"
    error_msg, warning_msg, success_msg, info_msg = caches_messages(request.user)
    return render(request, 'polls/manage-question.html',
        {'question_form':question_form,
        'username':request.user.username,
        'poll':poll,
        'question_index':i,
        'question_pk':question_id,
        'choice_formset':choice_formset,
        'multimedia_sources':multimedia_sources,
        'manage_only':manage_only,
        'mypolls_active':mypolls_active,
        'create_question':False,
        'error_msg':error_msg,
        'warning_msg':warning_msg,
        'success_msg':success_msg,
        'info_msg':info_msg,
        'image_message': image_message,
        'image_class': image_class,
        'audio_message': audio_message,
        'audio_class': audio_class,
        'video_message': video_message,
        'video_class': video_class,
        'iframe_message': iframe_message,
        'iframe_class': iframe_class,
        'js_file': js_file,
        'js_actions' : js_actions,
        'is_pollster':is_pollster,
        'can_edit':can_edit});
@login_required(login_url='/polls/login')
def clone_poll(request, poll_id):
    """Duplicate one of the requesting user's polls.

    Pollster-only. Always redirects back to the "my polls" page.
    Fix: the success path previously fell off the end of the function and
    returned None, which makes Django raise "The view didn't return an
    HttpResponse object" after every successful clone.
    """
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        poll.clone()
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "Sorry! The poll you are trying to clone does not exist anymore.")
    # Single exit point covers both the success and the not-found path.
    return HttpResponseRedirect('/polls/my-polls/')
@login_required(login_url='/polls/login')
def remove_poll(request, poll_id):
    """Delete one of the requesting user's polls.

    Pollster-only. Polls protected by completed responses (ProtectedError)
    cannot be removed. Always redirects back to the "my polls" page.
    """
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        poll.delete()
    except ObjectDoesNotExist:
        # Fixed message copy-pasted from clone_poll that said "clone".
        set_cache_message(request.user, 'error', "Sorry! The poll you are trying to remove does not exist anymore.")
    except ProtectedError:
        set_cache_message(request.user, 'error', "Sorry! The poll has been already answered and cannot be removed.")
    return HttpResponseRedirect('/polls/my-polls/')
@login_required(login_url='/polls/login')
def remove_question(request, poll_id, question_id):
    """Remove a question from one of the requesting user's polls.

    Pollster-only. Fix: the original deleted the question FIRST and only
    then checked whether the poll already had a completed response, so the
    "cannot be edited" guard never protected anything — the check now runs
    before the deletion.
    """
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        question = models.Question.objects.get(pk=question_id, poll=poll)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "The question you are trying to delete does not exist anymore.")
        return HttpResponseRedirect('/polls/manage-poll/%s/' % poll_id)
    # Refuse to edit an already-answered poll BEFORE deleting anything.
    try:
        response = models.Response.objects.get(poll=poll, is_completed=True)
        set_cache_message(request.user, 'error', "Sorry! The poll have been already answered and cannot be edited.")
        return HttpResponseRedirect('/polls/my-polls/')
    except ObjectDoesNotExist:
        pass
    question.delete()
    set_cache_message(request.user, 'success', "Question successfully removed")
    return HttpResponseRedirect('/polls/manage-poll/%s/' % poll.pk)
@login_required(login_url='/polls/login')
def add_multimedia_source(request, poll_id, question_id, source='url'):
    """Attach a multimedia source (currently URL-based only) to a question.

    Pollster-only; the poll must belong to the requesting user and must
    not have any completed response. The URL's MIME type is validated via
    ``validate_mime_type`` before saving.
    """
    mypolls_active = "active"
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        question = models.Question.objects.get(pk=question_id, poll=poll)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "The question you are trying to delete does not exist anymore.")
        return HttpResponseRedirect('/polls/manage-poll/%s/' % poll_id)
    # A poll with any completed response is frozen.
    try:
        response = models.Response.objects.get(poll=poll, is_completed=True)
        set_cache_message(request.user, 'error', "Sorry! The poll has been already answered and cannot be edited.");
        return HttpResponseRedirect("/polls/my-polls/")
    except ObjectDoesNotExist:
        pass
    # Work out this question's 1-based position within the poll.
    i = 0;
    for q in models.Question.objects.filter(poll=poll):
        i +=1;
        if (q.pk == question.pk):
            break;
    if (source == 'url'):
        if (request.method == 'POST'):
            multimedia_form = forms.MultimediaSourceFormURL(request.POST)
            if multimedia_form.is_valid():
                try:
                    mmsrc = multimedia_form.save(commit=False)
                    mmsrc.question = question
                    mmsrc.validate_mime_type()
                    mmsrc.save()
                    set_cache_message(request.user, 'success', "Multimedia source successfully created")
                    return HttpResponseRedirect('/polls/manage-poll/%s/manage-question/%s/' % (poll.pk, question.pk))
                except ValidationError as ve:
                    # Surface the MIME validation failure on the URL field.
                    multimedia_form = forms.MultimediaSourceFormURL(request.POST)
                    multimedia_form.errors["url_source"] = [ve.messages[0]]
        else:
            multimedia_form = forms.MultimediaSourceFormURL()
    elif (source == 'file'):
        # NOTE(review): file uploads are unimplemented; reaching this branch
        # (or the one below) leaves multimedia_form unbound and the render
        # call below raises NameError — confirm these paths are unreachable.
        pass
    else:
        pass
    return render(request, 'polls/add-multimedia-source.html',
        {'multimedia_form':multimedia_form,
        'username':request.user.username,
        'poll':poll,
        'question':question,
        'question_index':i,
        'mypolls_active':mypolls_active})
@login_required(login_url='/polls/login')
def remove_multimedia_source(request, poll_id, question_id, mmsrc_id):
    """Delete a multimedia source attached to a question.

    Pollster-only. Fix: the not-found handler previously built the
    redirect from ``poll.pk``/``question.pk``, which are unbound local
    variables when the poll or question lookup itself raised — causing an
    UnboundLocalError instead of the intended redirect. The URL parameters
    are used there instead (same resulting URL).
    """
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        question = models.Question.objects.get(pk=question_id, poll=poll)
        mmsrc = models.MultimediaSource.objects.get(pk=mmsrc_id, question=question)
        mmsrc.delete()
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "The source you are trying to delete does not exist anymore.")
        # Use the URL parameters: the local poll/question may be unbound here.
        return HttpResponseRedirect("/polls/manage-poll/%s/manage-question/%s/" % (poll_id, question_id))
    set_cache_message(request.user, 'success', "Multimedia source successfully removed")
    return HttpResponseRedirect("/polls/manage-poll/%s/manage-question/%s/" % (poll.pk, question.pk))
def do_survey(request, poll_id, try_poll=False, invitation_key=None):
    """Render and process a poll for answering.

    Three access modes:
    - anonymous, via an ``invitation_key`` (AnonymousInvitation);
    - authenticated users allowed by the poll's access type;
    - the owner previewing with ``try_poll=True`` (POST is ignored).

    POST replaces any previous (partial) response; if every question ends
    up with at least one selected choice the response is marked complete.
    """
    # Checking pollster permission (only used to render navigation).
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
    try:
        poll = models.Poll.objects.get(pk=poll_id)
    except ObjectDoesNotExist:
        if try_poll:
            set_cache_message(request.user, 'error', "Poll not found.")
            return HttpResponseRedirect('/polls/my-polls/')
        else:
            set_cache_message(request.user, 'error', "Poll not found.")
            return HttpResponseRedirect('/polls/home/')
    # Check that the user has permission to access this poll.
    if (invitation_key is not None):
        try:
            poll_invitation = models.AnonymousInvitation.objects.get(poll=poll, key=invitation_key)
            anonymous_poll = True
            # A completed invitation cannot be reused.
            if (poll_invitation.response is not None and poll_invitation.response.is_completed):
                return HttpResponseRedirect('/polls/login/')
        except ObjectDoesNotExist:
            return HttpResponseRedirect('/polls/login/?next=/polls/do-poll/%d/' % poll.pk)
    elif (request.user.is_authenticated()):
        print("auth user")
        if (poll.access_type != models.Poll.AT_PUBLIC):
            # NOTE(review): ``request.user.groups`` is a related manager, so
            # the ``not in poll.allowed_groups.all()`` test below looks like
            # it is always True — group-based access may never grant entry.
            # Confirm against the data model.
            if ((request.user not in poll.allowed_users.all()
            and request.user.groups not in poll.allowed_groups.all())
            and (request.user != poll.user and not try_poll)):
                print("not allowed user")
                set_cache_message(request.user, 'error', "Sorry! You don't have permission to access this poll.")
                return HttpResponseRedirect('/polls/home/')
        anonymous_poll = False;
    else:
        print("neither invitation_key, neither allowed_user")
        return HttpResponseRedirect('/polls/login/?next=%s' % request.path)
    if (poll.randomize_questions):
        questions = models.Question.objects.filter(poll=poll).order_by('?')
    else:
        questions = models.Question.objects.filter(poll=poll).order_by('order')
    choices = models.Choice.objects.filter(question__in=questions)
    # Resume a previous (partial) response if one exists.
    if (not anonymous_poll):
        try:
            response = models.Response.objects.get(poll=poll, user=request.user)
        except ObjectDoesNotExist:
            response = None
    else:
        response = poll_invitation.response
    error_msg = None;
    if (request.method == 'POST') and not try_poll:
        # Re-submitting replaces any previously stored answers.
        if response is not None:
            response.choices.clear()
            models.Verbatim.objects.filter(response=response).delete()
        else:
            try:
                response = models.Response(poll=poll, user=request.user)
            except ValueError:
                # An AnonymousUser cannot be assigned to the user FK; fall
                # back to a user-less response tied to the invitation.
                if anonymous_poll:
                    response = models.Response(poll=poll, user=None)
                    poll_invitation.response = response
                    poll_invitation.save()
                else:
                    set_cache_message(request.user, 'error', "Unexpected error occurred when attempting to save your response. Please contact the administrator.")
                    return HttpResponseRedirect(request.path)
        response.save()
        # Answers arrive as fields named "q<question>_choice<n>" whose value
        # is the selected Choice pk.
        for field, value in request.POST.items():
            if re.match('^q\d*_choice\d*$', field) == None:
                continue
            try:
                choice = models.Choice.objects.get(pk=int(value), question__poll=poll)
                response.choices.add(choice)
                if not choice.is_fixed:
                    # Free-text choices carry an extra "<choice_pk>_verbatim" field.
                    v = models.Verbatim(response=response, choice=choice, verbatim=request.POST['%s_verbatim' % choice.pk])
                    v.save()
            except (ObjectDoesNotExist, ValueError):
                error_msg = "Corrupted data, please try again."
                break
        if error_msg:
            # Bad submission: discard everything stored for this attempt.
            set_cache_message(request.user, "error", error_msg)
            response.delete();
        else:
            if request.user.is_authenticated():
                error_msg, warning_msg, success_msg, info_msg = caches_messages(request.user)
            # The response is complete when every question has at least one
            # selected choice (progressively exclude each answered question).
            cs = response.choices.all()
            completed = True
            for q in models.Question.objects.filter(poll=poll):
                if not cs.exists():
                    completed = False
                    break
                cs = cs.exclude(question=q)
            if completed: # Complete also saves the Response
                set_cache_message(request.user, "success", "You have successfully completed the poll. Thank you!")
                response.set_complete()
            else:
                set_cache_message(request.user, "info", "The poll has not been completed. You may finish it in the \"Ongoing Polls\" section at the home page.")
            return HttpResponseRedirect('/polls/home/')
    elif request.user.is_authenticated():
        error_msg, warning_msg, success_msg, info_msg = caches_messages(request.user)
    # Anonymous respondents get a template without the authenticated chrome.
    if anonymous_poll:
        template = "non-auth-do_survey.html"
        username = None
    else:
        template = "do_survey.html"
        username = request.user.username
    return render(request, 'polls/%s' % template,
        {'poll':poll,
        'username':username,
        'questions':questions,
        'choices':choices,
        'response':response,
        'try_poll':try_poll,
        'error_msg':error_msg,
        'anonymous_poll':anonymous_poll,
        'is_pollster':is_pollster});
@login_required(login_url='/polls/login')
def review_survey(request, poll_id):
    """Show a read-only review of the user's completed response to a poll.

    Redirects home with an error message when the poll does not exist, the
    user never answered it, or the response is not yet complete.
    """
    # Pollster status only affects navigation rendering.
    try:
        request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
    try:
        poll = models.Poll.objects.get(pk=poll_id)
        response = models.Response.objects.get(poll=poll, user=request.user)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "You have not completed this poll yet.")
        return HttpResponseRedirect('/polls/home/')
    if not response.is_completed:
        set_cache_message(request.user, 'error', "You have not completed this poll yet.")
        return HttpResponseRedirect('/polls/home/')
    questions = models.Question.objects.filter(poll=poll)
    choices = models.Choice.objects.filter(question__in=questions)
    error_msg, warning_msg, success_msg, info_msg = caches_messages(request.user)
    context = {
        'response': response,
        'username': request.user.username,
        'choices': choices,
        'questions': questions,
        'poll': poll,
        'error_msg': error_msg,
        'is_pollster': is_pollster,
    }
    return render(request, 'polls/review_survey.html', context)
@login_required(login_url='/polls/login')
def remove_response(request, poll_id):
    """Delete the requesting user's response to the given poll, then go home."""
    try:
        models.Response.objects.get(poll__pk=poll_id, user=request.user).delete()
    except ObjectDoesNotExist:
        # Nothing to delete: the user never answered this poll.
        set_cache_message(request.user, 'error', "You have not completed this poll yet.")
    return HttpResponseRedirect('/polls/home/')
@login_required(login_url='/polls/login')
def home(request):
    """Render the home page: polls the user may take, plus the user's
    completed and ongoing responses.

    FIX: removed leftover ``pprint(...)`` debugging output to stderr.
    """
    home_active = 'active'
    # Checking pollster permission (controls pollster-only UI in the template).
    try:
        request.user.groups.get(name='sys_pollsters')
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
    # Published, unfinished polls visible to this user, excluding their own.
    public_polls = models.Poll.objects.filter(poll_status=models.Poll.ST_PUBLISHED, access_type=models.Poll.AT_PUBLIC, is_finished=False).exclude(user=request.user)
    restricted_polls = models.Poll.objects.filter(poll_status=models.Poll.ST_PUBLISHED, access_type=models.Poll.AT_RESTRICTED, allowed_groups__in=request.user.groups.all(), is_finished=False).exclude(user=request.user)
    private_polls = models.Poll.objects.filter(poll_status=models.Poll.ST_PUBLISHED, access_type=models.Poll.AT_PRIVATE, allowed_users=request.user, is_finished=False).exclude(user=request.user)
    if public_polls or restricted_polls or private_polls:
        available_polls = list(chain(public_polls, private_polls, restricted_polls))
    else:
        # Template distinguishes "no polls" (None) from an empty list.
        available_polls = None
    responses = models.Response.objects.filter(user=request.user)
    completed_polls = responses.exclude(is_completed=False)
    # NOTE(review): exclude(is_completed=True, poll__is_finished=False) removes
    # only responses that are BOTH completed AND on an unfinished poll -- confirm
    # this matches the intended definition of "ongoing".
    ongoing_polls = responses.exclude(is_completed=True, poll__is_finished=False)
    # Hide polls the user has already started or completed.
    if responses.exists() and available_polls is not None:
        for response in responses:
            if response.poll in available_polls:
                available_polls.remove(response.poll)
    error_msg, warning_msg, success_msg, info_msg = caches_messages(request.user)
    return render(request, 'polls/home.html',
                  {'available_polls': available_polls,
                   'username': request.user.username,
                   'completed_polls': completed_polls,
                   'ongoing_polls': ongoing_polls,
                   'error_msg': error_msg,
                   'warning_msg': warning_msg,
                   'success_msg': success_msg,
                   'info_msg': info_msg,
                   'home_active': home_active,
                   'is_pollster': is_pollster})
@login_required(login_url='/polls/login')
def view_stats(request, poll_id):
    """Show response statistics for one of the requesting pollster's polls.

    FIX: removed leftover debugging ``print()`` statements ("Preguntas: ...").
    """
    mypolls_active = "active"
    css_file = "view_stats.css"
    js_file = "view_stats.js"
    # Only members of "sys_pollsters" may view statistics.
    try:
        request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    # Restrict the lookup to the requesting user's own polls.
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "Sorry! The poll you are trying to see the statistics from, does not exists anymore.")
        return HttpResponseRedirect("/polls/manage-poll/%s/" % poll_id)
    questions = models.Question.objects.filter(poll=poll)
    choices = models.Choice.objects.filter(question__in=questions)
    verbatims = models.Verbatim.objects.filter(choice__in=choices)
    return render(request, 'polls/view_stats.html',
                  {'poll': poll,
                   'username': request.user.username,
                   'questions': questions,
                   'choices': choices,
                   'verbatims': verbatims,
                   'css_file': css_file,
                   'mypolls_active': mypolls_active,
                   'is_pollster': is_pollster,
                   'js_file': js_file})
@login_required(login_url='/polls/login')
def account(request):
    """Account page: edit profile details or change the password.

    The page hosts two forms; the 'submit' POST value selects which one was
    sent ('Save' -> profile form, anything else -> password form).
    """
    account_active = 'active'
    password_error = None
    user_error = None
    # Unbound defaults, re-bound below when the matching form is submitted.
    user_form = forms.UserProfileForm(instance=request.user)
    password_form = forms.PasswordChangeForm()
    # Checking pollster permission
    try:
        g = request.user.groups.get(name="sys_pollsters")
        is_pollster = True
    except ObjectDoesNotExist:
        is_pollster = False
    if (request.method == 'POST'):
        # NOTE(review): request.POST['submit'] raises KeyError if the POST has
        # no 'submit' key -- confirm every form template always sends it.
        if (request.POST['submit'] == 'Save'):
            user_form = forms.UserProfileForm(request.POST, instance=request.user)
            if user_form.is_valid():
                user_form.save()
        else:
            password_form = forms.PasswordChangeForm(request.POST)
            if password_form.is_valid():
                old_password = password_form.cleaned_data['old_password']
                password = password_form.cleaned_data['password']
                cpassword = password_form.cleaned_data['confirm_password']
                # Old password must verify and both new entries must agree.
                if not (request.user.check_password(old_password)):
                    password_error = 'Wrong password. Please try again.'
                elif (password != cpassword):
                    password_error = "New passwords don't match. Please try again."
                else:
                    # NOTE(review): set_password invalidates the session hash in
                    # modern Django unless update_session_auth_hash() is called;
                    # verify the user is not logged out after a password change.
                    request.user.set_password(password)
                    request.user.save()
    return render(request, 'polls/account.html',
                  {'user_form':user_form,
                   'password_form': password_form,
                   'account_active':account_active,
                   'username':request.user.username,
                   'password_error':password_error,
                   'user_error':user_error,
                   'is_pollster':is_pollster});
def about(request):
    """Render the About page, choosing the auth or non-auth template."""
    is_pollster = False
    if request.user.is_authenticated():
        template = 'polls/about.html'
        username = request.user.username
        # Pollster flag toggles pollster-only navigation in the template.
        try:
            request.user.groups.get(name="sys_pollsters")
            is_pollster = True
        except ObjectDoesNotExist:
            is_pollster = False
    else:
        template = 'polls/non-auth-about.html'
        username = ''
    return render(request, template,
                  {'about_active': 'active', 'is_pollster': is_pollster, 'username': username})
def contact(request):
    """Render the Contact page, choosing the auth or non-auth template."""
    is_pollster = False
    if request.user.is_authenticated():
        template = 'polls/contact.html'
        username = request.user.username
        # Pollster flag toggles pollster-only navigation in the template.
        try:
            request.user.groups.get(name="sys_pollsters")
            is_pollster = True
        except ObjectDoesNotExist:
            pass
    else:
        template = 'polls/non-auth-contact.html'
        username = ''
    return render(request, template,
                  {'contact_active': 'active', 'is_pollster': is_pollster, 'username': username})
@login_required(login_url='/polls/login')
def export_poll(request, poll_id):
    """Download one of the requesting pollster's polls as a JSON attachment."""
    # Pollster-only area.
    try:
        request.user.groups.get(name="sys_pollsters")
    except ObjectDoesNotExist:
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    # The poll must exist and belong to the requesting user.
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
    except ObjectDoesNotExist:
        set_cache_message(request.user, 'error', "Sorry! The poll you are trying to export does not exist anymore.")
        return HttpResponseRedirect("/polls/my-polls/%s/" % poll_id)
    json_response = JsonResponse(poll.get_json(), safe=False)
    json_response['Content-Disposition'] = 'attachment; filename=%s.json' % poll.name
    return json_response
@login_required(login_url='/polls/login')
def get_csv_stats(request, poll_id, delimiter=','):
    """Download response statistics for one of the pollster's polls as CSV."""
    # Pollster-only area.
    try:
        request.user.groups.get(name="sys_pollsters")
    except ObjectDoesNotExist:
        set_cache_message(request.user, "error", "Sorry, you don't have permission to access this area. Redirecting to home page...")
        return HttpResponseRedirect('/polls/home/')
    try:
        poll = models.Poll.objects.get(pk=poll_id, user=request.user)
        rows = poll.get_responses_csv()
        csv_response = HttpResponse(content_type='text/csv')
        csv_response['Content-Disposition'] = 'attachment; filename=%s_stats.csv' % poll.name
        csv.writer(csv_response, delimiter=delimiter).writerows(rows)
        return csv_response
    except ObjectDoesNotExist:
        # Unknown poll (or not owned by this user) -> standard 404 page.
        return page_not_found(request)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-05-21 08:19
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration for the photos app: reshapes the
    # Followers model by dropping the `insta` and `user_id` fields and
    # storing follower/user as plain 20-char CharFields instead.

    dependencies = [
        ('photos', '0005_photolikes'),
    ]

    operations = [
        # Drop the old fields.
        migrations.RemoveField(
            model_name='followers',
            name='insta',
        ),
        migrations.RemoveField(
            model_name='followers',
            name='user_id',
        ),
        # New free-text follower column; empty default backfills existing rows.
        migrations.AddField(
            model_name='followers',
            name='follower',
            field=models.CharField(default='', max_length=20),
        ),
        # `user` becomes a plain CharField as well.
        migrations.AlterField(
            model_name='followers',
            name='user',
            field=models.CharField(default='', max_length=20),
        ),
    ]
|
nilq/baby-python
|
python
|
class Record:
    """Fixed-width record file reader/writer.

    The field layout is loaded from "field.def", which maps a record name to a
    definition string of the form ``<width> AS <name>, ...``.  Each record is
    one line in the data file; fields are addressed by character position.

    Fixes over the original implementation:
    - per-instance state is bound in ``__init__`` (the class-level dicts were
      previously shared between all instances);
    - ``openfile`` honours its ``encoding`` parameter instead of a hard-coded
      "Latin-1";
    - ``getline`` no longer returns from a ``finally`` block (which swallowed
      exceptions and raised NameError when the read failed);
    - ``recordDef`` closes "field.def" via a context manager.
    """

    # Class-level defaults only; __init__ binds fresh per-instance values.
    vdict = dict()
    count = 0
    record = dict()
    reclen = 0
    fd = None

    # The constructor opens the data file and loads the record definition.
    def __init__(self, recName, fileName, mode="r", encoding="Latin-1"):
        # Fresh mutable state per instance (avoids cross-instance sharing).
        self.count = 0
        self.reclen = 0
        self.record = dict()
        defstr = self.recordDef(recName)
        self.vdict = self.vardict(defstr)
        self.fd = self.openfile(fileName, mode, encoding)

    def getreclen(self, add=0):
        """Return the record length in characters, measuring it on first call.

        *add* is an adjustment (e.g. +1 for a trailing newline) applied when
        the length is derived from the first line of the file.
        """
        if self.reclen == 0:
            try:
                reclen = len(self.fd.readline()) + add
            except Exception:
                reclen = 0
            self.reclen = reclen
            self.rewind()
            return reclen
        return self.reclen

    def Change_fielddef(self, recName):
        """Switch to the definition named *recName* and re-measure reclen."""
        defstr = self.recordDef(recName)
        self.vdict = self.vardict(defstr)
        try:
            # +1 accounts for the line terminator.
            self.reclen = len(self.fd.readline()) + 1
        except Exception:
            self.reclen = 0
        return self.vdict

    def openfile(self, fileName, mode, encoding):
        """Open *fileName*; print the error and exit the program on failure."""
        try:
            # FIX: use the caller-supplied encoding (was hard-coded "Latin-1").
            return open(fileName, mode, encoding=encoding)
        except Exception as e:
            print(e)
            quit()

    def recordDef(self, recName):
        """Look up *recName* in "field.def" and return its definition string.

        Lines may carry '#' comments; entries look like ``name = <definition>``.
        Exits the program if the name is not found.
        """
        with open("field.def", "r", encoding="Latin-1") as fx:
            for line in fx:
                line = line.split("#", 1)[0]
                # (The original called line.lstrip() and discarded the result;
                # the length test below intentionally uses the raw line.)
                if len(line) < 3:
                    continue
                name, defstr = line.split("=")
                if name.strip() == recName.strip():
                    return defstr
        print(recName, ": Record Definition not found")
        quit()

    def vardict(self, defstr):
        """Build ``{field_name: [start, end]}`` from a definition string.

        NOTE: the last character of each name token is deliberately dropped,
        matching the historical field.def format in which names carry a
        trailing delimiter character.
        """
        col = 0
        recdict = dict()
        for item in (part.split(" AS ") for part in defstr.split(",")):
            num = int(item[0].strip())
            recdict[item[1].strip()[0:-1]] = [col, col + num]
            col = col + num
        return recdict

    def getline(self):
        """Read and return the next raw line; returns '' on a read error.

        FIX: the original returned from ``finally``, which both swallowed the
        exception and raised NameError when ``line`` was never bound.
        """
        try:
            return self.fd.readline()
        except Exception:
            print("Error Reading Line", self.count)
            return ""

    def parseline(self, line):
        """Slice *line* into a dict of field values using the field layout."""
        recdef = self.vdict
        return {name: line[recdef[name][0]:recdef[name][1]] for name in recdef}

    def readrec(self):
        """Read the next record; return the parsed dict, or False at EOF /
        on a blank line."""
        line = self.getline().rstrip("\r\n")
        if not line:
            return False
        rec = self.parseline(line)
        self.count = self.count + 1
        self.setrec(rec)
        return rec

    def setrec(self, rec):
        """Copy the fields of *rec* into this object's current record."""
        for item in rec:
            self.record[item] = rec[item]

    def writerec(self, rec):
        """Format *rec* according to the field layout and write it as one line."""
        self.setrec(rec)
        parts = []
        for field in self.vdict:
            # Fields may be longer or shorter than the layout; normalise first.
            self.sizeadjust(field, self.record)
            parts.append(self.record[field])
        # Records are terminated with a bare carriage return (historical format).
        self.fd.write("".join(parts) + "\r")

    def sizeadjust(self, field, rec):
        """Pad (with spaces) or truncate ``rec[field]`` to its defined width,
        in place."""
        start, end = self.vdict[field]
        length = max(end - start, 0)
        value = str(rec[field])
        if len(value) < length:
            rec[field] = value.ljust(length)
        elif len(value) > length:
            rec[field] = rec[field][:length]

    def rewind(self):
        """Seek back to the beginning of the file."""
        self.fd.seek(0)

    def getrec(self, rec_no):
        """Seek to record *rec_no* (0-based, reclen must be set) and read it."""
        self.fd.seek(rec_no * self.reclen)
        return self.readrec()

    def putrec(self, rec, rec_no):
        """Seek to record *rec_no* and overwrite it with *rec*."""
        self.fd.seek(rec_no * self.reclen)
        self.writerec(rec)

    def getmaxrecs(self):
        """Return the number of records in the open file.

        NOTE: returns a float (true division), as the original did; callers
        that need an index should truncate it themselves.
        """
        here = self.fd.tell()
        self.fd.seek(0, 2)  # jump to end of file
        nos = self.fd.tell() / self.reclen
        self.fd.seek(here, 0)
        return nos
|
nilq/baby-python
|
python
|
import os
import posixpath
import sys
import docker
import json
from unittest import TestCase, skipUnless
from unittest.mock import Mock, call, patch, ANY
from pathlib import Path, WindowsPath
from parameterized import parameterized
from samcli.lib.build.build_graph import FunctionBuildDefinition, LayerBuildDefinition
from samcli.lib.providers.provider import ResourcesToBuildCollector
from samcli.lib.build.app_builder import (
ApplicationBuilder,
UnsupportedBuilderLibraryVersionError,
BuildError,
LambdaBuilderError,
ContainerBuildNotSupported,
BuildInsideContainerError,
DockerfileOutSideOfContext,
DockerBuildFailed,
DockerConnectionError,
)
from samcli.lib.utils.packagetype import IMAGE, ZIP
from samcli.lib.utils import osutils
from tests.unit.lib.build_module.test_build_graph import generate_function
class TestApplicationBuilder_build(TestCase):
    """Unit tests for ApplicationBuilder.build(): iteration over functions and
    layers, artifact-dir resolution, build-graph generation, de-duplication of
    identical builds, and build-strategy selection."""

    def setUp(self):
        self.build_dir = "builddir"
        # Two ZIP-packaged functions, one IMAGE-packaged function, two layers.
        self.func1 = Mock()
        self.func1.packagetype = ZIP
        self.func1.name = "function_name1"
        self.func1.full_path = posixpath.join("StackJ", "function_name1")
        self.func1.get_build_dir = Mock()
        self.func1.inlinecode = None
        self.func2 = Mock()
        self.func2.packagetype = ZIP
        self.func2.name = "function_name2"
        self.func2.full_path = posixpath.join("StackJ", "function_name2")
        self.func2.get_build_dir = Mock()
        self.func2.inlinecode = None
        self.imageFunc1 = Mock()
        self.imageFunc1.name = "function_name3"
        self.imageFunc1.full_path = posixpath.join("StackJ", "function_name3")
        self.imageFunc1.get_build_dir = Mock()
        self.imageFunc1.inlinecode = None
        self.layer1 = Mock()
        self.layer2 = Mock()
        self.imageFunc1.packagetype = IMAGE
        self.layer1.build_method = "build_method"
        self.layer1.full_path = os.path.join("StackJ", "layer_name1")
        self.layer1.get_build_dir = Mock()
        self.layer2.build_method = "build_method"
        self.layer2.full_path = os.path.join("StackJ", "layer_name2")
        self.layer2.get_build_dir = Mock()
        resources_to_build_collector = ResourcesToBuildCollector()
        resources_to_build_collector.add_functions([self.func1, self.func2, self.imageFunc1])
        resources_to_build_collector.add_layers([self.layer1, self.layer2])
        self.builder = ApplicationBuilder(resources_to_build_collector, "builddir", "basedir", "cachedir")

    @patch("samcli.lib.build.build_graph.BuildGraph._write")
    def test_must_iterate_on_functions_and_layers(self, persist_mock):
        # build() must dispatch each function/layer to the matching private
        # build method and return a {full_path: artifact_location} mapping.
        build_function_mock = Mock()
        build_image_function_mock = Mock()
        build_image_function_mock_return = Mock()
        build_layer_mock = Mock()

        # Layer builds report "<layer_name>_location" as their artifact.
        def build_layer_return(
            layer_name, layer_codeuri, layer_build_method, layer_compatible_runtimes, artifact_dir, layer_env_vars
        ):
            return f"{layer_name}_location"

        build_layer_mock.side_effect = build_layer_return
        self.builder._build_function = build_function_mock
        self.builder._build_lambda_image = build_image_function_mock
        self.builder._build_layer = build_layer_mock
        build_function_mock.side_effect = [
            os.path.join(self.build_dir, "StackJ", "function_name1"),
            os.path.join(self.build_dir, "StackJ", "function_name2"),
            build_image_function_mock_return,
        ]
        result = self.builder.build()
        self.maxDiff = None
        self.assertEqual(
            result,
            {
                self.func1.full_path: os.path.join("builddir", "StackJ", "function_name1"),
                self.func2.full_path: os.path.join("builddir", "StackJ", "function_name2"),
                self.imageFunc1.full_path: build_image_function_mock_return,
                self.layer1.full_path: f"{self.layer1.name}_location",
                self.layer2.full_path: f"{self.layer2.name}_location",
            },
        )
        # Functions must be built in declaration order with full metadata.
        build_function_mock.assert_has_calls(
            [
                call(
                    self.func1.name,
                    self.func1.codeuri,
                    ZIP,
                    self.func1.runtime,
                    self.func1.handler,
                    ANY,
                    self.func1.metadata,
                    ANY,
                ),
                call(
                    self.func2.name,
                    self.func2.codeuri,
                    ZIP,
                    self.func2.runtime,
                    self.func2.handler,
                    ANY,
                    self.func2.metadata,
                    ANY,
                ),
                call(
                    self.imageFunc1.name,
                    self.imageFunc1.codeuri,
                    IMAGE,
                    self.imageFunc1.runtime,
                    self.imageFunc1.handler,
                    ANY,
                    self.imageFunc1.metadata,
                    ANY,
                ),
            ],
            any_order=False,
        )
        build_layer_mock.assert_has_calls(
            [
                call(
                    self.layer1.name,
                    self.layer1.codeuri,
                    self.layer1.build_method,
                    self.layer1.compatible_runtimes,
                    ANY,
                    ANY,
                ),
                call(
                    self.layer2.name,
                    self.layer2.codeuri,
                    self.layer2.build_method,
                    self.layer2.compatible_runtimes,
                    ANY,
                    ANY,
                ),
            ]
        )

    @patch("samcli.lib.build.build_graph.BuildGraph._write")
    def test_should_use_function_or_layer_get_build_dir_to_determine_artifact_dir(self, persist_mock):
        # The artifact dir passed to the build methods must come from each
        # resource's own get_build_dir(build_dir).
        def get_func_call_with_artifact_dir(artifact_dir):
            return call(ANY, ANY, ANY, ANY, ANY, artifact_dir, ANY, ANY)

        def get_layer_call_with_artifact_dir(artifact_dir):
            return call(ANY, ANY, ANY, ANY, artifact_dir, ANY)

        build_function_mock = Mock()
        build_layer_mock = Mock()
        self.builder._build_function = build_function_mock
        self.builder._build_layer = build_layer_mock
        self.builder.build()
        # make sure function/layer's get_build_dir() is called with correct directory
        self.func1.get_build_dir.assert_called_with(self.build_dir)
        self.func2.get_build_dir.assert_called_with(self.build_dir)
        self.imageFunc1.get_build_dir.assert_called_with(self.build_dir)
        self.layer1.get_build_dir.assert_called_with(self.build_dir)
        self.layer2.get_build_dir.assert_called_with(self.build_dir)
        # make sure whatever is returned from .get_build_dir() is used for build function/layer
        build_function_mock.assert_has_calls(
            [
                get_func_call_with_artifact_dir(self.func1.get_build_dir()),
                get_func_call_with_artifact_dir(self.func2.get_build_dir()),
                get_func_call_with_artifact_dir(self.imageFunc1.get_build_dir()),
            ]
        )
        build_layer_mock.assert_has_calls(
            [
                get_layer_call_with_artifact_dir(self.layer1.get_build_dir()),
                get_layer_call_with_artifact_dir(self.layer2.get_build_dir()),
            ]
        )

    @patch("samcli.lib.build.build_graph.BuildGraph._write")
    def test_should_generate_build_graph(self, persist_mock):
        # The build graph must contain every ZIP function from the collector.
        build_graph = self.builder._get_build_graph()
        self.assertTrue(len(build_graph.get_function_build_definitions()), 2)
        all_functions_in_build_graph = []
        for build_definition in build_graph.get_function_build_definitions():
            for function in build_definition.functions:
                all_functions_in_build_graph.append(function)
        self.assertTrue(self.func1 in all_functions_in_build_graph)
        self.assertTrue(self.func2 in all_functions_in_build_graph)

    # NOTE(review): @patch decorators apply bottom-up, so the first parameter
    # receives the osutils patch and the last the _write patch; the parameter
    # names here appear to be in the reverse order -- harmless since none of
    # the mocks are asserted on, but worth confirming.
    @patch("samcli.lib.build.build_graph.BuildGraph._write")
    @patch("samcli.lib.build.build_graph.BuildGraph._read")
    @patch("samcli.lib.build.build_strategy.osutils")
    def test_should_run_build_for_only_unique_builds(self, persist_mock, read_mock, osutils_mock):
        build_function_mock = Mock()
        # create 3 function resources where 2 of them would have same codeuri, runtime and metadata
        function1_1 = generate_function("function1_1")
        function1_2 = generate_function("function1_2")
        function2 = generate_function("function2", runtime="different_runtime")
        resources_to_build_collector = ResourcesToBuildCollector()
        resources_to_build_collector.add_functions([function1_1, function1_2, function2])
        build_dir = "builddir"
        # instantiate the builder and run build method
        builder = ApplicationBuilder(resources_to_build_collector, "builddir", "basedir", "cachedir")
        builder._build_function = build_function_mock
        build_function_mock.side_effect = [
            function1_1.get_build_dir(build_dir),
            function1_2.get_build_dir(build_dir),
            function1_2.get_build_dir(build_dir),
        ]
        result = builder.build()
        # result should contain all 3 functions as expected
        self.assertEqual(
            result,
            {
                function1_1.full_path: function1_1.get_build_dir(build_dir),
                function1_2.full_path: function1_2.get_build_dir(build_dir),
                function2.full_path: function1_2.get_build_dir(build_dir),
            },
        )
        # actual build should only be called twice since only 2 of the functions have unique build
        build_function_mock.assert_has_calls(
            [
                call(
                    function1_1.name,
                    function1_1.codeuri,
                    ZIP,
                    function1_1.runtime,
                    function1_1.handler,
                    ANY,
                    function1_1.metadata,
                    ANY,
                ),
                call(
                    function2.name,
                    function2.codeuri,
                    ZIP,
                    function2.runtime,
                    function2.handler,
                    ANY,
                    function2.metadata,
                    ANY,
                ),
            ],
            any_order=True,
        )

    @patch("samcli.lib.build.app_builder.DefaultBuildStrategy")
    def test_default_run_should_pick_default_strategy(self, mock_default_build_strategy_class):
        # With no cached/parallel flags, build() must delegate to DefaultBuildStrategy.
        mock_default_build_strategy = Mock()
        mock_default_build_strategy_class.return_value = mock_default_build_strategy
        build_graph_mock = Mock()
        get_build_graph_mock = Mock(return_value=build_graph_mock)
        builder = ApplicationBuilder(Mock(), "builddir", "basedir", "cachedir")
        builder._get_build_graph = get_build_graph_mock
        result = builder.build()
        mock_default_build_strategy.build.assert_called_once()
        self.assertEqual(result, mock_default_build_strategy.build())

    @patch("samcli.lib.build.app_builder.CachedBuildStrategy")
    def test_cached_run_should_pick_cached_strategy(self, mock_cached_build_strategy_class):
        # cached=True must delegate to CachedBuildStrategy.
        mock_cached_build_strategy = Mock()
        mock_cached_build_strategy_class.return_value = mock_cached_build_strategy
        build_graph_mock = Mock()
        get_build_graph_mock = Mock(return_value=build_graph_mock)
        builder = ApplicationBuilder(Mock(), "builddir", "basedir", "cachedir", cached=True)
        builder._get_build_graph = get_build_graph_mock
        result = builder.build()
        mock_cached_build_strategy.build.assert_called_once()
        self.assertEqual(result, mock_cached_build_strategy.build())

    @patch("samcli.lib.build.app_builder.ParallelBuildStrategy")
    def test_parallel_run_should_pick_parallel_strategy(self, mock_parallel_build_strategy_class):
        # parallel=True must delegate to ParallelBuildStrategy.
        mock_parallel_build_strategy = Mock()
        mock_parallel_build_strategy_class.return_value = mock_parallel_build_strategy
        build_graph_mock = Mock()
        get_build_graph_mock = Mock(return_value=build_graph_mock)
        builder = ApplicationBuilder(Mock(), "builddir", "basedir", "cachedir", parallel=True)
        builder._get_build_graph = get_build_graph_mock
        result = builder.build()
        mock_parallel_build_strategy.build.assert_called_once()
        self.assertEqual(result, mock_parallel_build_strategy.build())

    @patch("samcli.lib.build.app_builder.ParallelBuildStrategy")
    @patch("samcli.lib.build.app_builder.CachedBuildStrategy")
    def test_parallel_and_cached_run_should_pick_parallel_with_cached_strategy(
        self, mock_cached_build_strategy_class, mock_parallel_build_strategy_class
    ):
        # NOTE(review): only parallel=True is passed here (no cached=True),
        # despite the test name -- only the parallel strategy is asserted on.
        mock_parallel_build_strategy = Mock()
        mock_parallel_build_strategy_class.return_value = mock_parallel_build_strategy
        mock_cached_build_strategy = Mock()
        mock_cached_build_strategy_class.return_value = mock_cached_build_strategy
        build_graph_mock = Mock()
        get_build_graph_mock = Mock(return_value=build_graph_mock)
        builder = ApplicationBuilder(Mock(), "builddir", "basedir", "cachedir", parallel=True)
        builder._get_build_graph = get_build_graph_mock
        result = builder.build()
        mock_parallel_build_strategy.build.assert_called_once()
        self.assertEqual(result, mock_parallel_build_strategy.build())
class PathValidator:
    """Equality helper for mock-call assertions.

    An instance compares equal to any string ending with the stored path
    suffix; comparing against None is equal only when the stored path is
    itself None.
    """

    def __init__(self, path):
        self._path = path

    def __eq__(self, other):
        if other is None:
            return self._path is None
        return other.endswith(self._path)
class TestApplicationBuilderForLayerBuild(TestCase):
    """Unit tests for ApplicationBuilder._build_layer(): in-process vs
    in-container builds, and build-image selection (global vs per-layer)."""

    def setUp(self):
        self.layer1 = Mock()
        self.layer2 = Mock()
        self.container_manager = Mock()
        resources_to_build_collector = ResourcesToBuildCollector()
        resources_to_build_collector.add_layers([self.layer1, self.layer2])
        self.builder = ApplicationBuilder(resources_to_build_collector, "builddir", "basedir", "cachedir")

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    @patch("samcli.lib.build.app_builder.get_layer_subfolder")
    def test_must_build_layer_in_process(self, get_layer_subfolder_mock, osutils_mock, get_workflow_config_mock):
        # Without a container manager, the layer must be built in-process.
        get_layer_subfolder_mock.return_value = "python"
        config_mock = Mock()
        config_mock.manifest_name = "manifest_name"
        scratch_dir = "scratch"
        # mkdir_temp is used as a context manager; stub __enter__/__exit__.
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        get_workflow_config_mock.return_value = config_mock
        build_function_in_process_mock = Mock()
        self.builder._build_function_in_process = build_function_in_process_mock
        self.builder._build_layer("layer_name", "code_uri", "python3.8", ["python3.8"], "full_path")
        build_function_in_process_mock.assert_called_once_with(
            config_mock,
            PathValidator("code_uri"),
            PathValidator("python"),
            "scratch",
            PathValidator("manifest_name"),
            "python3.8",
            None,
        )

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    @patch("samcli.lib.build.app_builder.get_layer_subfolder")
    def test_must_build_layer_in_container(self, get_layer_subfolder_mock, osutils_mock, get_workflow_config_mock):
        # With a container manager set, the build must run in a container.
        self.builder._container_manager = self.container_manager
        get_layer_subfolder_mock.return_value = "python"
        config_mock = Mock()
        config_mock.manifest_name = "manifest_name"
        scratch_dir = "scratch"
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        get_workflow_config_mock.return_value = config_mock
        build_function_on_container_mock = Mock()
        self.builder._build_function_on_container = build_function_on_container_mock
        self.builder._build_layer("layer_name", "code_uri", "python3.8", ["python3.8"], "full_path")
        build_function_on_container_mock.assert_called_once_with(
            config_mock,
            PathValidator("code_uri"),
            PathValidator("python"),
            PathValidator("manifest_name"),
            "python3.8",
            None,
            None,
            None,  # no build image configured -> None is passed through
        )

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    @patch("samcli.lib.build.app_builder.get_layer_subfolder")
    def test_must_build_layer_in_container_with_global_build_image(
        self, get_layer_subfolder_mock, osutils_mock, get_workflow_config_mock
    ):
        # A build image keyed by None acts as the global default image.
        self.builder._container_manager = self.container_manager
        get_layer_subfolder_mock.return_value = "python"
        config_mock = Mock()
        config_mock.manifest_name = "manifest_name"
        scratch_dir = "scratch"
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        get_workflow_config_mock.return_value = config_mock
        build_function_on_container_mock = Mock()
        build_images = {None: "test_image"}
        self.builder._build_images = build_images
        self.builder._build_function_on_container = build_function_on_container_mock
        self.builder._build_layer("layer_name", "code_uri", "python3.8", ["python3.8"], "full_path")
        build_function_on_container_mock.assert_called_once_with(
            config_mock,
            PathValidator("code_uri"),
            PathValidator("python"),
            PathValidator("manifest_name"),
            "python3.8",
            None,
            None,
            "test_image",
        )

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    @patch("samcli.lib.build.app_builder.get_layer_subfolder")
    def test_must_build_layer_in_container_with_specific_build_image(
        self, get_layer_subfolder_mock, osutils_mock, get_workflow_config_mock
    ):
        # A build image keyed by the layer name overrides the default.
        self.builder._container_manager = self.container_manager
        get_layer_subfolder_mock.return_value = "python"
        config_mock = Mock()
        config_mock.manifest_name = "manifest_name"
        scratch_dir = "scratch"
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        get_workflow_config_mock.return_value = config_mock
        build_function_on_container_mock = Mock()
        build_images = {"layer_name": "test_image"}
        self.builder._build_images = build_images
        self.builder._build_function_on_container = build_function_on_container_mock
        self.builder._build_layer("layer_name", "code_uri", "python3.8", ["python3.8"], "full_path")
        build_function_on_container_mock.assert_called_once_with(
            config_mock,
            PathValidator("code_uri"),
            PathValidator("python"),
            PathValidator("manifest_name"),
            "python3.8",
            None,
            None,
            "test_image",
        )
class TestApplicationBuilder_update_template(TestCase):
def make_root_template(self, resource_type, location_property_name):
return {
"Resources": {
"MyFunction1": {"Type": "AWS::Serverless::Function", "Properties": {"CodeUri": "oldvalue"}},
"ChildStackXXX": {"Type": resource_type, "Properties": {location_property_name: "./child.yaml"}},
}
}
def setUp(self):
self.builder = ApplicationBuilder(Mock(), "builddir", "basedir", "cachedir")
self.template_dict = {
"Resources": {
"MyFunction1": {"Type": "AWS::Serverless::Function", "Properties": {"CodeUri": "oldvalue"}},
"MyFunction2": {"Type": "AWS::Lambda::Function", "Properties": {"Code": "oldvalue"}},
"GlueResource": {"Type": "AWS::Glue::Job", "Properties": {"Command": {"ScriptLocation": "something"}}},
"OtherResource": {"Type": "AWS::Lambda::Version", "Properties": {"CodeUri": "something"}},
"MyImageFunction1": {
"Type": "AWS::Lambda::Function",
"Properties": {"PackageType": "Image"},
"Metadata": {"Dockerfile": "Dockerfile", "DockerContext": "DockerContext", "DockerTag": "Tag"},
},
}
}
def test_must_update_resources_with_build_artifacts(self):
self.maxDiff = None
original_template_path = "/path/to/tempate.txt"
built_artifacts = {
"MyFunction1": "/path/to/build/MyFunction1",
"MyFunction2": "/path/to/build/MyFunction2",
"MyImageFunction1": "myimagefunction1:Tag",
}
expected_result = {
"Resources": {
"MyFunction1": {
"Type": "AWS::Serverless::Function",
"Properties": {"CodeUri": os.path.join("build", "MyFunction1")},
},
"MyFunction2": {
"Type": "AWS::Lambda::Function",
"Properties": {"Code": os.path.join("build", "MyFunction2")},
},
"GlueResource": {"Type": "AWS::Glue::Job", "Properties": {"Command": {"ScriptLocation": "something"}}},
"OtherResource": {"Type": "AWS::Lambda::Version", "Properties": {"CodeUri": "something"}},
"MyImageFunction1": {
"Type": "AWS::Lambda::Function",
"Properties": {"Code": "myimagefunction1:Tag", "PackageType": IMAGE},
"Metadata": {"Dockerfile": "Dockerfile", "DockerContext": "DockerContext", "DockerTag": "Tag"},
},
}
}
stack = Mock(stack_path="", template_dict=self.template_dict, location=original_template_path)
actual = self.builder.update_template(stack, built_artifacts, {})
self.assertEqual(actual, expected_result)
    @parameterized.expand([("AWS::Serverless::Application", "Location"), ("AWS::CloudFormation::Stack", "TemplateURL")])
    def test_must_update_resources_with_build_artifacts_and_template_paths_in_multi_stack(
        self, resource_type, location_property_name
    ):
        """Root and child templates must both be rewritten: function code URIs
        point at the built artifacts (relative to each template's directory),
        and the nested-stack resource in the root (Serverless::Application or
        CloudFormation::Stack, parameterized) points at the child's built
        template.
        """
        self.maxDiff = None
        original_child_template_path = "/path/to/child.yaml"
        original_root_template_path = "/path/to/template.yaml"
        # Built artifacts are keyed by stack-path-qualified resource ids
        # ("" = root stack, "ChildStackXXX/..." = nested stack members).
        built_artifacts = {
            "MyFunction1": "/path/to/build/MyFunction1",
            "ChildStackXXX/MyFunction1": "/path/to/build/ChildStackXXX/MyFunction1",
            "ChildStackXXX/MyFunction2": "/path/to/build/ChildStackXXX/MyFunction2",
            "ChildStackXXX/MyImageFunction1": "myimagefunction1:Tag",
        }
        stack_output_paths = {
            "": "/path/to/build/template.yaml",
            "ChildStackXXX": "/path/to/build/ChildStackXXX/template.yaml",
        }
        # Expected URIs are relative to each template's own directory, hence
        # the os.path.join expectations (platform-correct separators).
        expected_child = {
            "Resources": {
                "MyFunction1": {
                    "Type": "AWS::Serverless::Function",
                    "Properties": {"CodeUri": os.path.join("build", "ChildStackXXX", "MyFunction1")},
                },
                "MyFunction2": {
                    "Type": "AWS::Lambda::Function",
                    "Properties": {"Code": os.path.join("build", "ChildStackXXX", "MyFunction2")},
                },
                "GlueResource": {"Type": "AWS::Glue::Job", "Properties": {"Command": {"ScriptLocation": "something"}}},
                "OtherResource": {"Type": "AWS::Lambda::Version", "Properties": {"CodeUri": "something"}},
                "MyImageFunction1": {
                    "Type": "AWS::Lambda::Function",
                    # IMAGE package type: "Code" is the image tag, not a path.
                    "Properties": {"Code": "myimagefunction1:Tag", "PackageType": IMAGE},
                    "Metadata": {"Dockerfile": "Dockerfile", "DockerContext": "DockerContext", "DockerTag": "Tag"},
                },
            }
        }
        expected_root = {
            "Resources": {
                "MyFunction1": {
                    "Type": "AWS::Serverless::Function",
                    "Properties": {"CodeUri": os.path.join("build", "MyFunction1")},
                },
                "ChildStackXXX": {
                    "Type": resource_type,
                    "Properties": {
                        location_property_name: os.path.join("build", "ChildStackXXX", "template.yaml"),
                    },
                },
            }
        }
        stack_root = Mock(
            stack_path="",
            template_dict=self.make_root_template(resource_type, location_property_name),
            location=original_root_template_path,
        )
        actual_root = self.builder.update_template(stack_root, built_artifacts, stack_output_paths)
        stack_child = Mock(
            stack_path="ChildStackXXX",
            template_dict=self.template_dict,
            location=original_child_template_path,
        )
        actual_child = self.builder.update_template(stack_child, built_artifacts, stack_output_paths)
        self.assertEqual(expected_root, actual_root)
        self.assertEqual(expected_child, actual_child)
    def test_must_skip_if_no_artifacts(self):
        """With no built artifacts, update_template must return the template unchanged."""
        built_artifacts = {}
        stack = Mock(stack_path="", template_dict=self.template_dict, location="/foo/bar/template.txt")
        actual = self.builder.update_template(stack, built_artifacts, {})
        self.assertEqual(actual, self.template_dict)
class TestApplicationBuilder_update_template_windows(TestCase):
    """update_template path handling under Windows path semantics (ntpath),
    exercised on any host by swapping the os.path module in setUp/tearDown.
    """

    def setUp(self):
        self.builder = ApplicationBuilder(Mock(), "builddir", "basedir", "cachedir")
        self.template_dict = {
            "Resources": {
                "MyFunction1": {"Type": "AWS::Serverless::Function", "Properties": {"CodeUri": "oldvalue"}},
                "MyFunction2": {"Type": "AWS::Lambda::Function", "Properties": {"Code": "oldvalue"}},
                "GlueResource": {"Type": "AWS::Glue::Job", "Properties": {"Command": {"ScriptLocation": "something"}}},
                "OtherResource": {"Type": "AWS::Lambda::Version", "Properties": {"CodeUri": "something"}},
                "ChildStack1": {"Type": "AWS::Serverless::Application", "Properties": {"Location": "oldvalue"}},
                "ChildStack2": {"Type": "AWS::CloudFormation::Stack", "Properties": {"TemplateURL": "oldvalue"}},
            }
        }

        # Force os.path to be ntpath instead of posixpath on unix systems
        import ntpath

        self.saved_os_path_module = sys.modules["os.path"]
        os.path = sys.modules["ntpath"]

    def test_must_write_absolute_path_for_different_drives(self):
        """Artifacts on a different drive than the template must keep absolute
        paths (no relative path exists across drives); same-drive artifacts
        become relative paths.
        """

        def mock_new(cls, *args, **kwargs):
            # Force pathlib to build WindowsPath objects even on POSIX hosts.
            # NOTE(review): relies on pathlib internals (_from_parts/_init)
            # that changed in newer CPython versions -- verify against the
            # supported interpreter range.
            cls = WindowsPath
            self = cls._from_parts(args, init=False)
            self._init()
            return self

        def mock_resolve(self):
            # Skip filesystem resolution; these paths do not exist on the host.
            return self

        with patch("pathlib.Path.__new__", new=mock_new):
            with patch("pathlib.Path.resolve", new=mock_resolve):
                original_template_path = "C:\\path\\to\\template.txt"
                function_1_path = "D:\\path\\to\\build\\MyFunction1"
                function_2_path = "C:\\path2\\to\\build\\MyFunction2"
                built_artifacts = {"MyFunction1": function_1_path, "MyFunction2": function_2_path}
                child_1_path = "D:\\path\\to\\build\\ChildStack1\\template.yaml"
                child_2_path = "C:\\path2\\to\\build\\ChildStack2\\template.yaml"
                output_template_paths = {"ChildStack1": child_1_path, "ChildStack2": child_2_path}
                expected_result = {
                    "Resources": {
                        # D: drive, template on C: -> absolute path preserved.
                        "MyFunction1": {
                            "Type": "AWS::Serverless::Function",
                            "Properties": {"CodeUri": function_1_path},
                        },
                        # Same C: drive -> rewritten relative to the template.
                        "MyFunction2": {
                            "Type": "AWS::Lambda::Function",
                            "Properties": {"Code": "..\\..\\path2\\to\\build\\MyFunction2"},
                        },
                        "GlueResource": {
                            "Type": "AWS::Glue::Job",
                            "Properties": {"Command": {"ScriptLocation": "something"}},
                        },
                        "OtherResource": {"Type": "AWS::Lambda::Version", "Properties": {"CodeUri": "something"}},
                        "ChildStack1": {
                            "Type": "AWS::Serverless::Application",
                            "Properties": {"Location": child_1_path},
                        },
                        "ChildStack2": {
                            "Type": "AWS::CloudFormation::Stack",
                            "Properties": {"TemplateURL": "..\\..\\path2\\to\\build\\ChildStack2\\template.yaml"},
                        },
                    }
                }
                stack = Mock()
                stack.stack_path = ""
                stack.template_dict = self.template_dict
                stack.location = original_template_path
                actual = self.builder.update_template(stack, built_artifacts, output_template_paths)
                self.assertEqual(actual, expected_result)

    def tearDown(self):
        # Restore the real os.path so later tests see the host's path module.
        os.path = self.saved_os_path_module
class TestApplicationBuilder_build_lambda_image_function(TestCase):
    """Tests for ApplicationBuilder._build_lambda_image (docker image builds)."""

    def setUp(self):
        self.stream_mock = Mock()
        self.docker_client_mock = Mock()
        self.builder = ApplicationBuilder(
            Mock(),
            "/build/dir",
            "/base/dir",
            "/cached/dir",
            stream_writer=self.stream_mock,
            docker_client=self.docker_client_mock,
        )

    def test_docker_build_raises_docker_unavailable(self):
        """A failing docker ping must surface as DockerConnectionError."""
        with self.assertRaises(DockerConnectionError):
            metadata = {
                "Dockerfile": "Dockerfile",
                "DockerContext": "context",
                "DockerTag": "Tag",
                "DockerBuildArgs": {"a": "b"},
            }
            self.docker_client_mock.ping.side_effect = docker.errors.APIError(message="Mock Error")
            self.builder._build_lambda_image("Name", metadata)

    def test_docker_build_raises_DockerBuildFailed_when_error_in_buildlog_stream(self):
        """An 'error' entry in the docker build log stream must raise DockerBuildFailed."""
        with self.assertRaises(DockerBuildFailed):
            metadata = {
                "Dockerfile": "Dockerfile",
                "DockerContext": "context",
                "DockerTag": "Tag",
                "DockerBuildArgs": {"a": "b"},
            }
            self.docker_client_mock.api.build.return_value = [{"error": "Function building failed"}]
            self.builder._build_lambda_image("Name", metadata)

    def test_dockerfile_not_in_dockercontext(self):
        """A 500 APIError explaining the Dockerfile cannot be located must map
        to DockerfileOutSideOfContext."""
        with self.assertRaises(DockerfileOutSideOfContext):
            metadata = {
                "Dockerfile": "Dockerfile",
                "DockerContext": "context",
                "DockerTag": "Tag",
                "DockerBuildArgs": {"a": "b"},
            }
            response_mock = Mock()
            response_mock.status_code = 500
            error_mock = Mock()
            error_mock.side_effect = docker.errors.APIError(
                "Bad Request", response=response_mock, explanation="Cannot locate specified Dockerfile"
            )
            # The error is raised while streaming build logs, not by the build call.
            self.builder._stream_lambda_image_build_logs = error_mock
            self.docker_client_mock.api.build.return_value = []
            self.builder._build_lambda_image("Name", metadata)

    def test_error_rerasises(self):
        """Any other APIError from log streaming must be re-raised unchanged."""
        with self.assertRaises(docker.errors.APIError):
            metadata = {
                "Dockerfile": "Dockerfile",
                "DockerContext": "context",
                "DockerTag": "Tag",
                "DockerBuildArgs": {"a": "b"},
            }
            error_mock = Mock()
            error_mock.side_effect = docker.errors.APIError("Bad Request", explanation="Some explanation")
            self.builder._stream_lambda_image_build_logs = error_mock
            self.docker_client_mock.api.build.return_value = []
            self.builder._build_lambda_image("Name", metadata)

    def test_can_build_image_function(self):
        """Success returns the lowercased function name tagged with DockerTag."""
        metadata = {
            "Dockerfile": "Dockerfile",
            "DockerContext": "context",
            "DockerTag": "Tag",
            "DockerBuildArgs": {"a": "b"},
        }
        self.docker_client_mock.api.build.return_value = []
        result = self.builder._build_lambda_image("Name", metadata)
        self.assertEqual(result, "name:Tag")

    def test_can_build_image_function_without_tag(self):
        """Without DockerTag metadata the image defaults to the ':latest' tag."""
        metadata = {"Dockerfile": "Dockerfile", "DockerContext": "context", "DockerBuildArgs": {"a": "b"}}
        self.docker_client_mock.api.build.return_value = []
        result = self.builder._build_lambda_image("Name", metadata)
        self.assertEqual(result, "name:latest")

    @patch("samcli.lib.build.app_builder.os")
    def test_can_build_image_function_under_debug(self, mock_os):
        """Debug mode appends '-debug' to the tag and passes SAM_BUILD_MODE as a build arg."""
        mock_os.environ.get.return_value = "debug"
        metadata = {
            "Dockerfile": "Dockerfile",
            "DockerContext": "context",
            "DockerTag": "Tag",
            "DockerBuildArgs": {"a": "b"},
        }
        self.docker_client_mock.api.build.return_value = []
        result = self.builder._build_lambda_image("Name", metadata)
        self.assertEqual(result, "name:Tag-debug")
        self.assertEqual(
            self.docker_client_mock.api.build.call_args,
            # NOTE (sriram-mv): path set to ANY to handle platform differences.
            call(
                path=ANY,
                dockerfile="Dockerfile",
                tag="name:Tag-debug",
                buildargs={"a": "b", "SAM_BUILD_MODE": "debug"},
                decode=True,
            ),
        )

    @patch("samcli.lib.build.app_builder.os")
    def test_can_build_image_function_under_debug_with_target(self, mock_os):
        """DockerBuildTarget metadata must be forwarded as the 'target' build kwarg."""
        mock_os.environ.get.return_value = "debug"
        metadata = {
            "Dockerfile": "Dockerfile",
            "DockerContext": "context",
            "DockerTag": "Tag",
            "DockerBuildArgs": {"a": "b"},
            "DockerBuildTarget": "stage",
        }
        self.docker_client_mock.api.build.return_value = []
        result = self.builder._build_lambda_image("Name", metadata)
        self.assertEqual(result, "name:Tag-debug")
        self.assertEqual(
            self.docker_client_mock.api.build.call_args,
            call(
                path=ANY,
                dockerfile="Dockerfile",
                tag="name:Tag-debug",
                buildargs={"a": "b", "SAM_BUILD_MODE": "debug"},
                decode=True,
                target="stage",
            ),
        )
class TestApplicationBuilder_build_function(TestCase):
    """Tests for ApplicationBuilder._build_function routing between in-process
    and in-container builds, including custom build image selection.
    """

    def setUp(self):
        self.builder = ApplicationBuilder(Mock(), "/build/dir", "/base/dir", "cachedir")

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    def test_must_build_in_process(self, osutils_mock, get_workflow_config_mock):
        """Without a container manager, builds run in-process with resolved
        code/manifest paths."""
        function_name = "function_name"
        codeuri = "path/to/source"
        packagetype = ZIP
        runtime = "runtime"
        scratch_dir = "scratch"
        handler = "handler.handle"
        config_mock = get_workflow_config_mock.return_value = Mock()
        config_mock.manifest_name = "manifest_name"
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        self.builder._build_function_in_process = Mock()
        # codeuri is resolved against the builder's base dir.
        code_dir = str(Path("/base/dir/path/to/source").resolve())
        artifacts_dir = str(Path("/build/dir/function_full_path"))
        manifest_path = str(Path(os.path.join(code_dir, config_mock.manifest_name)).resolve())
        self.builder._build_function(function_name, codeuri, ZIP, runtime, handler, artifacts_dir)
        self.builder._build_function_in_process.assert_called_with(
            config_mock, code_dir, artifacts_dir, scratch_dir, manifest_path, runtime, None
        )

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    def test_must_build_in_process_with_metadata(self, osutils_mock, get_workflow_config_mock):
        """A BuildMethod metadata entry selects the specified workflow."""
        function_name = "function_name"
        codeuri = "path/to/source"
        runtime = "runtime"
        packagetype = ZIP
        scratch_dir = "scratch"
        handler = "handler.handle"
        config_mock = get_workflow_config_mock.return_value = Mock()
        config_mock.manifest_name = "manifest_name"
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        self.builder._build_function_in_process = Mock()
        code_dir = str(Path("/base/dir/path/to/source").resolve())
        artifacts_dir = str(Path("/build/dir/function_full_path"))
        manifest_path = str(Path(os.path.join(code_dir, config_mock.manifest_name)).resolve())
        self.builder._build_function(
            function_name, codeuri, packagetype, runtime, handler, artifacts_dir, metadata={"BuildMethod": "Workflow"}
        )
        get_workflow_config_mock.assert_called_with(
            runtime, code_dir, self.builder._base_dir, specified_workflow="Workflow"
        )
        self.builder._build_function_in_process.assert_called_with(
            config_mock, code_dir, artifacts_dir, scratch_dir, manifest_path, runtime, None
        )

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    def test_must_build_in_container(self, osutils_mock, get_workflow_config_mock):
        """A configured container manager routes the build into a container."""
        function_name = "function_name"
        codeuri = "path/to/source"
        runtime = "runtime"
        packagetype = ZIP
        scratch_dir = "scratch"
        handler = "handler.handle"
        config_mock = get_workflow_config_mock.return_value = Mock()
        config_mock.manifest_name = "manifest_name"
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        self.builder._build_function_on_container = Mock()
        code_dir = str(Path("/base/dir/path/to/source").resolve())
        artifacts_dir = str(Path("/build/dir/function_full_path"))
        manifest_path = str(Path(os.path.join(code_dir, config_mock.manifest_name)).resolve())
        # Setting the container manager will make us use the container
        self.builder._container_manager = Mock()
        self.builder._build_function(function_name, codeuri, packagetype, runtime, handler, artifacts_dir)
        self.builder._build_function_on_container.assert_called_with(
            config_mock, code_dir, artifacts_dir, manifest_path, runtime, None, None, None
        )

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    def test_must_build_in_container_with_env_vars(self, osutils_mock, get_workflow_config_mock):
        """Container env vars must be forwarded to the container build."""
        function_name = "function_name"
        codeuri = "path/to/source"
        runtime = "runtime"
        packagetype = ZIP
        scratch_dir = "scratch"
        handler = "handler.handle"
        config_mock = get_workflow_config_mock.return_value = Mock()
        config_mock.manifest_name = "manifest_name"
        env_vars = {"TEST": "test"}
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        self.builder._build_function_on_container = Mock()
        code_dir = str(Path("/base/dir/path/to/source").resolve())
        artifacts_dir = str(Path("/build/dir/function_name"))
        manifest_path = str(Path(os.path.join(code_dir, config_mock.manifest_name)).resolve())
        # Setting the container manager will make us use the container
        self.builder._container_manager = Mock()
        self.builder._build_function(
            function_name, codeuri, packagetype, runtime, handler, artifacts_dir, container_env_vars=env_vars
        )
        self.builder._build_function_on_container.assert_called_with(
            config_mock, code_dir, artifacts_dir, manifest_path, runtime, None, {"TEST": "test"}, None
        )

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    def test_must_build_in_container_with_custom_specified_build_image(self, osutils_mock, get_workflow_config_mock):
        """A build image mapped to this function's name must be used for the build."""
        function_name = "function_name"
        codeuri = "path/to/source"
        runtime = "runtime"
        packagetype = ZIP
        scratch_dir = "scratch"
        handler = "handler.handle"
        image_uri = "image uri"
        build_images = {function_name: image_uri}
        config_mock = get_workflow_config_mock.return_value = Mock()
        config_mock.manifest_name = "manifest_name"
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        self.builder._build_function_on_container = Mock()
        code_dir = str(Path("/base/dir/path/to/source").resolve())
        artifacts_dir = str(Path("/build/dir/function_name"))
        manifest_path = str(Path(os.path.join(code_dir, config_mock.manifest_name)).resolve())
        # Setting the container manager will make us use the container
        self.builder._container_manager = Mock()
        self.builder._build_images = build_images
        self.builder._build_function(
            function_name, codeuri, packagetype, runtime, handler, artifacts_dir, container_env_vars=None
        )
        self.builder._build_function_on_container.assert_called_with(
            config_mock, code_dir, artifacts_dir, manifest_path, runtime, None, None, image_uri
        )

    @patch("samcli.lib.build.app_builder.get_workflow_config")
    @patch("samcli.lib.build.app_builder.osutils")
    def test_must_build_in_container_with_custom_default_build_image(self, osutils_mock, get_workflow_config_mock):
        """The None-keyed (default) build image applies to functions with no
        specific mapping."""
        function_name = "function_name"
        codeuri = "path/to/source"
        runtime = "runtime"
        packagetype = ZIP
        scratch_dir = "scratch"
        handler = "handler.handle"
        image_uri = "image uri"
        build_images = {"abc": "efg", None: image_uri}
        config_mock = get_workflow_config_mock.return_value = Mock()
        config_mock.manifest_name = "manifest_name"
        osutils_mock.mkdir_temp.return_value.__enter__ = Mock(return_value=scratch_dir)
        osutils_mock.mkdir_temp.return_value.__exit__ = Mock()
        self.builder._build_function_on_container = Mock()
        code_dir = str(Path("/base/dir/path/to/source").resolve())
        artifacts_dir = str(Path("/build/dir/function_name"))
        manifest_path = str(Path(os.path.join(code_dir, config_mock.manifest_name)).resolve())
        # Setting the container manager will make us use the container
        self.builder._container_manager = Mock()
        self.builder._build_images = build_images
        self.builder._build_function(
            function_name, codeuri, packagetype, runtime, handler, artifacts_dir, container_env_vars=None
        )
        self.builder._build_function_on_container.assert_called_with(
            config_mock, code_dir, artifacts_dir, manifest_path, runtime, None, None, image_uri
        )
class TestApplicationBuilder_build_function_in_process(TestCase):
    """Tests for ApplicationBuilder._build_function_in_process delegating to
    aws_lambda_builders' LambdaBuilder.
    """

    def setUp(self):
        self.builder = ApplicationBuilder(Mock(), "/build/dir", "/base/dir", "/cache/dir", mode="mode")

    @patch("samcli.lib.build.app_builder.LambdaBuilder")
    def test_must_use_lambda_builder(self, lambda_builder_mock):
        """LambdaBuilder must be constructed from the workflow config and
        invoked with the dirs/manifest/runtime, returning the artifacts dir."""
        config_mock = Mock()
        builder_instance_mock = lambda_builder_mock.return_value = Mock()
        result = self.builder._build_function_in_process(
            config_mock, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime", None
        )
        self.assertEqual(result, "artifacts_dir")
        lambda_builder_mock.assert_called_with(
            language=config_mock.language,
            dependency_manager=config_mock.dependency_manager,
            application_framework=config_mock.application_framework,
        )
        builder_instance_mock.build.assert_called_with(
            "source_dir",
            "artifacts_dir",
            "scratch_dir",
            "manifest_path",
            runtime="runtime",
            executable_search_paths=config_mock.executable_search_paths,
            mode="mode",
            options=None,
        )

    @patch("samcli.lib.build.app_builder.LambdaBuilder")
    def test_must_raise_on_error(self, lambda_builder_mock):
        """A LambdaBuilderError from the builder must be wrapped in BuildError."""
        config_mock = Mock()
        builder_instance_mock = lambda_builder_mock.return_value = Mock()
        builder_instance_mock.build.side_effect = LambdaBuilderError()
        self.builder._get_build_options = Mock(return_value=None)
        with self.assertRaises(BuildError):
            self.builder._build_function_in_process(
                config_mock, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime", None
            )
class TestApplicationBuilder_build_function_on_container(TestCase):
    """Tests for ApplicationBuilder._build_function_on_container: container
    lifecycle, log parsing, artifact copy-back, and error mapping.
    """

    def setUp(self):
        self.container_manager = Mock()
        self.builder = ApplicationBuilder(
            Mock(), "/build/dir", "/base/dir", "/cache/dir", container_manager=self.container_manager, mode="mode"
        )
        self.builder._parse_builder_response = Mock()

    @patch("samcli.lib.build.app_builder.LambdaBuildContainer")
    @patch("samcli.lib.build.app_builder.lambda_builders_protocol_version")
    @patch("samcli.lib.build.app_builder.LOG")
    @patch("samcli.lib.build.app_builder.osutils")
    def test_must_build_in_container(self, osutils_mock, LOGMock, protocol_version_mock, LambdaBuildContainerMock):
        """Happy path: container is run, stdout is parsed, artifacts are copied
        back, and the container is stopped."""
        config = Mock()
        log_level = LOGMock.getEffectiveLevel.return_value = "foo"
        stdout_data = "container stdout response data"
        response = {"result": {"artifacts_dir": "/some/dir"}}

        def mock_wait_for_logs(stdout, stderr):
            # Simulate the container writing its JSON-RPC response to stdout.
            stdout.write(stdout_data.encode("utf-8"))

        # Wire all mocks correctly
        container_mock = LambdaBuildContainerMock.return_value = Mock()
        container_mock.wait_for_logs = mock_wait_for_logs
        self.builder._parse_builder_response.return_value = response
        result = self.builder._build_function_on_container(
            config, "source_dir", "artifacts_dir", "manifest_path", "runtime", None
        )
        self.assertEqual(result, "artifacts_dir")
        LambdaBuildContainerMock.assert_called_once_with(
            protocol_version_mock,
            config.language,
            config.dependency_manager,
            config.application_framework,
            "source_dir",
            "manifest_path",
            "runtime",
            image=None,
            log_level=log_level,
            optimizations=None,
            options=None,
            executable_search_paths=config.executable_search_paths,
            mode="mode",
            env_vars={},
        )
        self.container_manager.run.assert_called_with(container_mock)
        self.builder._parse_builder_response.assert_called_once_with(stdout_data, container_mock.image)
        # "/." copies the directory's contents rather than the directory itself.
        container_mock.copy.assert_called_with(response["result"]["artifacts_dir"] + "/.", "artifacts_dir")
        self.container_manager.stop.assert_called_with(container_mock)

    @patch("samcli.lib.build.app_builder.LambdaBuildContainer")
    def test_must_raise_on_unsupported_container(self, LambdaBuildContainerMock):
        """A missing lambda-builders executable inside the image must map to
        UnsupportedBuilderLibraryVersionError, and the container still stops.

        NOTE(review): this call passes "scratch_dir" as an extra positional
        argument compared to the happy-path test above -- confirm against the
        real _build_function_on_container signature.
        """
        config = Mock()
        container_mock = LambdaBuildContainerMock.return_value = Mock()
        container_mock.image = "image name"
        container_mock.executable_name = "myexecutable"
        self.container_manager.run.side_effect = docker.errors.APIError(
            "Bad Request: 'lambda-builders' " "executable file not found in $PATH"
        )
        with self.assertRaises(UnsupportedBuilderLibraryVersionError) as ctx:
            self.builder._build_function_on_container(
                config, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime", {}
            )
        msg = (
            "You are running an outdated version of Docker container 'image name' that is not compatible with"
            "this version of SAM CLI. Please upgrade to continue to continue with build. "
            "Reason: 'myexecutable executable not found in container'"
        )
        self.assertEqual(str(ctx.exception), msg)
        self.container_manager.stop.assert_called_with(container_mock)

    def test_must_raise_on_docker_not_running(self):
        """An unreachable docker daemon must raise BuildInsideContainerError."""
        config = Mock()
        self.container_manager.is_docker_reachable = False
        with self.assertRaises(BuildInsideContainerError) as ctx:
            self.builder._build_function_on_container(
                config, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime", {}
            )
        self.assertEqual(
            str(ctx.exception), "Docker is unreachable. Docker needs to be running to build inside a container."
        )

    @patch("samcli.lib.build.app_builder.supports_build_in_container")
    def test_must_raise_on_unsupported_container_build(self, supports_build_in_container_mock):
        """If the workflow doesn't support container builds, the reported
        reason must surface as ContainerBuildNotSupported."""
        config = Mock()
        reason = "my reason"
        supports_build_in_container_mock.return_value = (False, reason)
        with self.assertRaises(ContainerBuildNotSupported) as ctx:
            self.builder._build_function_on_container(
                config, "source_dir", "artifacts_dir", "scratch_dir", "manifest_path", "runtime", {}
            )
        self.assertEqual(str(ctx.exception), reason)
class TestApplicationBuilder_parse_builder_response(TestCase):
    """Tests for ApplicationBuilder._parse_builder_response mapping the
    JSON-RPC error codes returned by lambda-builders to SAM CLI exceptions.
    """

    def setUp(self):
        self.image_name = "name"
        self.builder = ApplicationBuilder(Mock(), "/build/dir", "/base/dir", "/cache/dir")

    def test_must_parse_json(self):
        """Valid JSON is returned as the parsed dict."""
        data = {"valid": "json"}
        result = self.builder._parse_builder_response(json.dumps(data), self.image_name)
        self.assertEqual(result, data)

    def test_must_fail_on_invalid_json(self):
        """Malformed JSON must raise ValueError."""
        data = "{invalid: json}"
        with self.assertRaises(ValueError):
            self.builder._parse_builder_response(data, self.image_name)

    def test_must_raise_on_user_error(self):
        """Error code 488 (user error) maps to BuildInsideContainerError."""
        msg = "invalid params"
        data = {"error": {"code": 488, "message": msg}}
        with self.assertRaises(BuildInsideContainerError) as ctx:
            self.builder._parse_builder_response(json.dumps(data), self.image_name)
        self.assertEqual(str(ctx.exception), msg)

    def test_must_raise_on_version_mismatch(self):
        """Error code 505 maps to UnsupportedBuilderLibraryVersionError."""
        msg = "invalid params"
        data = {"error": {"code": 505, "message": msg}}
        with self.assertRaises(UnsupportedBuilderLibraryVersionError) as ctx:
            self.builder._parse_builder_response(json.dumps(data), self.image_name)
        expected = str(UnsupportedBuilderLibraryVersionError(self.image_name, msg))
        self.assertEqual(str(ctx.exception), expected)

    def test_must_raise_on_method_not_found(self):
        """JSON-RPC code -32601 (method not found) also maps to
        UnsupportedBuilderLibraryVersionError."""
        msg = "invalid method"
        data = {"error": {"code": -32601, "message": msg}}
        with self.assertRaises(UnsupportedBuilderLibraryVersionError) as ctx:
            self.builder._parse_builder_response(json.dumps(data), self.image_name)
        expected = str(UnsupportedBuilderLibraryVersionError(self.image_name, msg))
        self.assertEqual(str(ctx.exception), expected)

    def test_must_raise_on_all_other_codes(self):
        """Any unrecognized error code falls through to ValueError."""
        msg = "builder crashed"
        data = {"error": {"code": 1, "message": msg}}
        with self.assertRaises(ValueError) as ctx:
            self.builder._parse_builder_response(json.dumps(data), self.image_name)
        self.assertEqual(str(ctx.exception), msg)
class TestApplicationBuilder_make_env_vars(TestCase):
    """Tests for ApplicationBuilder._make_env_vars merge semantics: global
    'Parameters' entries merge with per-function entries; function-specific
    values win over globals; inline vars win over file vars.
    """

    def test_make_env_vars_with_env_file(self):
        """File vars: globals merge with this function's entries; other
        functions' entries are ignored."""
        function1 = generate_function("Function1")
        file_env_vars = {
            "Parameters": {"ENV_VAR1": "1"},
            "Function1": {"ENV_VAR2": "2"},
            "Function2": {"ENV_VAR3": "3"},
        }
        result = ApplicationBuilder._make_env_vars(function1, file_env_vars, {})
        self.assertEqual(result, {"ENV_VAR1": "1", "ENV_VAR2": "2"})

    def test_make_env_vars_with_function_precedence(self):
        """A function-specific value overrides the same key in 'Parameters'."""
        function1 = generate_function("Function1")
        file_env_vars = {
            "Parameters": {"ENV_VAR1": "1"},
            "Function1": {"ENV_VAR1": "2"},
            "Function2": {"ENV_VAR3": "3"},
        }
        result = ApplicationBuilder._make_env_vars(function1, file_env_vars, {})
        self.assertEqual(result, {"ENV_VAR1": "2"})

    def test_make_env_vars_with_inline_env(self):
        """Inline vars alone behave like file vars: globals + this function."""
        function1 = generate_function("Function1")
        inline_env_vars = {
            "Parameters": {"ENV_VAR1": "1"},
            "Function1": {"ENV_VAR2": "2"},
            "Function2": {"ENV_VAR3": "3"},
        }
        result = ApplicationBuilder._make_env_vars(function1, {}, inline_env_vars)
        self.assertEqual(result, {"ENV_VAR1": "1", "ENV_VAR2": "2"})

    def test_make_env_vars_with_both(self):
        """When both sources are given, inline values override file values."""
        function1 = generate_function("Function1")
        file_env_vars = {
            "Parameters": {"ENV_VAR1": "1"},
            "Function1": {"ENV_VAR2": "2"},
            "Function2": {"ENV_VAR3": "3"},
        }
        inline_env_vars = {
            "Parameters": {"ENV_VAR1": "2"},
            "Function1": {"ENV_VAR2": "3"},
            "Function2": {"ENV_VAR3": "3"},
        }
        result = ApplicationBuilder._make_env_vars(function1, file_env_vars, inline_env_vars)
        self.assertEqual(result, {"ENV_VAR1": "2", "ENV_VAR2": "3"})
|
nilq/baby-python
|
python
|
import requests
import json
# Fetch the top coins (by market cap, descending) from the public CoinGecko
# API -- one page of up to 100 entries, priced in USD.
coin_market_cap = requests.get(
    "https://api.coingecko.com/api/v3/coins/markets?vs_currency=usd&order=market_cap_desc&per_page=100&page=1&sparkline=false",
    headers={"accept": "application/json"})

print("Enter the number of top cryptocurrencies by market capitalization: ")
n = int(input())

# Parse the response body ONCE; the original called .json() three times per
# loop iteration, re-deserializing the whole payload each time. Slicing also
# avoids an IndexError when n exceeds the number of coins returned.
coins = coin_market_cap.json()
for rank, coin in enumerate(coins[:n], start=1):
    print(str(rank) + '. "Cryptocurrency": "' + str(coin['name']) + '", "market cap": ' +
          str(coin['market_cap']) + ', "current price": ' + str(coin['current_price']))
|
nilq/baby-python
|
python
|
"""
"""
from __future__ import print_function
from abc import ABCMeta, abstractmethod
class BaseAgent:
    """Abstract base class defining the agent interface for an RL experiment.

    Subclasses implement the episode lifecycle hooks below; each hook is
    declared abstract and has no default behavior here.

    NOTE(review): ``__metaclass__`` is the Python 2 metaclass mechanism and is
    ignored under Python 3, where @abstractmethod would not be enforced --
    confirm the target interpreter (the ``from __future__`` import suggests
    Python 2 compatibility was intended).
    """

    __metaclass__ = ABCMeta

    def __init__(self):
        pass

    @abstractmethod
    def agent_init(self, agent_info= {}):
        """One-time setup before the experiment starts; agent_info carries configuration."""

    @abstractmethod
    def agent_start(self, observation):
        """Called at the start of an episode with the first observation."""

    @abstractmethod
    def agent_step(self, reward, observation):
        """Called on each step with the last reward and the new observation."""

    @abstractmethod
    def agent_end(self, reward):
        """Called when the episode terminates, with the final reward."""

    @abstractmethod
    def agent_cleanup(self):
        """Called after the experiment to release any resources."""

    @abstractmethod
    def agent_message(self, message):
        """Handle an out-of-band message from the experiment harness."""
|
nilq/baby-python
|
python
|
import torch
import gpytorch
from torch.nn.functional import softplus
from gpytorch.priors import NormalPrior, MultivariateNormalPrior
class LogRBFMean(gpytorch.means.Mean):
    """
    Log of an RBF Kernel's spectral density, used as a GP mean function:
    forward computes constant - t^2 / (2 * lengthscale) with the lengthscale
    kept positive via softplus.
    """
    def __init__(self, hypers = None):
        super(LogRBFMean, self).__init__()
        if hypers is not None:
            # Warm-start from an existing hyperparameter vector:
            # constant <- hypers[-5] + log(softplus(hypers[-3])),
            # raw lengthscale <- hypers[-4].
            self.register_parameter(name="constant", parameter=torch.nn.Parameter(hypers[-5] + softplus(hypers[-3]).log()))
            self.register_parameter(name="lengthscale", parameter=torch.nn.Parameter(hypers[-4]))
        else:
            self.register_parameter(name="constant", parameter=torch.nn.Parameter(0. * torch.ones(1)))
            self.register_parameter(name="lengthscale", parameter=torch.nn.Parameter(-0.3*torch.ones(1)))
        # register prior: broad N(0, 100) priors; the lengthscale prior acts
        # through a softplus transform to match the positive parameterization.
        self.register_prior(name='constant_prior', prior=NormalPrior(torch.zeros(1), 100.*torch.ones(1), transform=None),
                            param_or_closure='constant')
        self.register_prior(name='lengthscale_prior', prior=NormalPrior(torch.zeros(1), 100.*torch.ones(1), transform=torch.nn.functional.softplus),
                            param_or_closure='lengthscale')

    def set_pars(self, hypers):
        # Overwrite parameters in place from the tail of a hyperparameter vector.
        self.constant.data = hypers[-2]
        self.lengthscale.data = hypers[-1]

    def forward(self, input):
        # log RBF spectral density up to constants: c - t^2 / (2 l);
        # softplus keeps the lengthscale positive, +1e-7 avoids division by zero.
        out = self.constant - input.pow(2).squeeze(-1) / (2 * (softplus(self.lengthscale.view(-1)) + 1e-7) )
        return out
class LogRBFMean2D(gpytorch.means.Mean):
    """
    Log of an RBF Kernel's spectral density, two-input variant: forward
    combines the two input columns (see note in forward) instead of a single
    squared frequency.
    """
    def __init__(self, hypers = None):
        super(LogRBFMean2D, self).__init__()
        if hypers is not None:
            # Warm-start from an existing hyperparameter vector (same layout
            # as LogRBFMean: hypers[-5], hypers[-3] -> constant; hypers[-4] -> lengthscale).
            self.register_parameter(name="constant", parameter=torch.nn.Parameter(hypers[-5] + softplus(hypers[-3]).log()))
            self.register_parameter(name="lengthscale", parameter=torch.nn.Parameter(hypers[-4]))
        else:
            self.register_parameter(name="constant", parameter=torch.nn.Parameter(0. * torch.ones(1)))
            self.register_parameter(name="lengthscale", parameter=torch.nn.Parameter(-0.3*torch.ones(1)))
        # register prior: broad N(0, 100); lengthscale prior applied through softplus.
        self.register_prior(name='constant_prior', prior=NormalPrior(torch.zeros(1), 100.*torch.ones(1), transform=None),
                            param_or_closure='constant')
        self.register_prior(name='lengthscale_prior', prior=NormalPrior(torch.zeros(1), 100.*torch.ones(1), transform=torch.nn.functional.softplus),
                            param_or_closure='lengthscale')

    def set_pars(self, hypers):
        # Overwrite parameters in place from the tail of a hyperparameter vector.
        self.constant.data = hypers[-2]
        self.lengthscale.data = hypers[-1]

    def forward(self, input):
        # Computes c - x0 * x1 * (x0 - x1)^2 / (2 l) over the two input
        # columns. NOTE(review): this differs from the commented-out isotropic
        # 2D form below (c - |x|^2 / 2l) -- confirm this asymmetric expression
        # is intentional.
        out = self.constant - input[:,0]*input[:,1]*(input[:,0]-input[:,1]).pow(2).squeeze(-1) / (2 * (softplus(self.lengthscale.view(-1)) + 1e-7) )
        return out
#class LogRBFMean2D(gpytorch.means.Mean):
# """
# Log of 2D RBF Kernel's spectral density, with diagonal length-scale matrix.
# """
# def __init__(self, hypers = None):
# super(LogRBFMean2D, self).__init__()
# if hypers is not None:
# self.register_parameter(name="constant", parameter=torch.nn.Parameter(hypers[-5] + softplus(hypers[-3]).log()))
# self.register_parameter(name="lengthscale", parameter=torch.nn.Parameter(hypers[-4]))
# else:
# self.register_parameter(name="constant", parameter=torch.nn.Parameter(0. * torch.ones(2)))
# self.register_parameter(name="lengthscale", parameter=torch.nn.Parameter(-0.3*torch.ones(2)))
#
# # register prior
# self.register_prior(name='constant_prior',
# prior = MultivariateNormalPrior(torch.zeros(2), covariance_matrix=100.*torch.eye(2), transform=None),
# param_or_closure='constant')
# self.register_prior(name='lengthscale_prior',
# prior = MultivariateNormalPrior(torch.zeros(2), covariance_matrix=100.*torch.eye(2), transform=torch.nn.functional.softplus),
# param_or_closure='lengthscale')
#
# def set_pars(self, hypers):
# self.constant.data = hypers[-2]
# self.lengthscale.data = hypers[-1]
#
# def forward(self, input):
# # logrbf up to constants is: c - t^2 / 2l
# out = self.constant - input.pow(2).sum(dim=1).squeeze(-1) / (2 * (softplus(self.lengthscale.view(-1)) + 1e-7) )
# return out
|
nilq/baby-python
|
python
|
# python
import lx, lxifc, lxu, modo
import tagger
from os.path import basename, splitext
CMD_NAME = tagger.CMD_SET_PTAG
def material_tags_list():
    """Collect every material polygon tag in the scene, both from polytags
    and from material-type mask items, de-duplicated, as a list."""
    tags = set(tagger.scene.all_tags_by_type(lx.symbol.i_POLYTAG_MATERIAL))
    tags.update(
        tag
        for tag_type, tag in tagger.items.get_all_masked_tags()
        if tag_type == "material"
    )
    return list(tags)
class CommandClass(tagger.CommanderClass):
    """Modo command: assign a material polygon tag (with optional preset) to
    the selected/connected polygons, then create or reuse a shader-tree mask
    for that tag according to the 'with existing' policy.
    """
    # _commander_default_values = []

    def commander_arguments(self):
        """Declare the command's arguments for the commander framework."""
        return [
            {
                # The pTag to assign; autocompleted from existing material tags.
                'name': tagger.TAG,
                'label': tagger.LABEL_TAG,
                'datatype': 'string',
                'default': "",
                'flags': [],
                'values_list_type': 'sPresetText',
                'values_list': material_tags_list
            }, {
                # Preset to apply when building the material mask.
                'name': tagger.PRESET,
                'label': tagger.LABEL_PRESET,
                'datatype': 'string',
                'default': tagger.RANDOM,
                'values_list_type': 'popup',
                'values_list': tagger.presets.presets_popup,
                'flags': ['optional', 'query']
            }, {
                # Which polygons to affect (selected / connected / flood).
                'name': tagger.SCOPE,
                'label': tagger.LABEL_SCOPE,
                'datatype': 'string',
                'default': tagger.SCOPE_SELECTED,
                'values_list_type': 'popup',
                'values_list': tagger.POPUPS_SCOPE,
                'flags': ['optional']
            }, {
                # Tag type (material / part / pick).
                'name': tagger.TAGTYPE,
                'label': tagger.LABEL_TAGTYPE,
                'datatype': 'string',
                'default': tagger.MATERIAL,
                'values_list_type': 'popup',
                'values_list': tagger.POPUPS_TAGTYPES,
                'flags': ['optional']
            }, {
                # Policy for pre-existing masks with the same tag:
                # use / keep / remove / consolidate.
                'name': tagger.WITH_EXISTING,
                'label': tagger.LABEL_WITH_EXISTING,
                'datatype': 'string',
                'default': tagger.USE,
                'values_list_type': 'popup',
                'values_list': tagger.POPUPS_WITH_EXISTING,
                'flags': ['optional']
            }
        ]

    def commander_execute(self, msg, flags):
        """Tag the polygons and reconcile the shader tree."""
        pTag = self.commander_arg_value(0)
        preset = self.commander_arg_value(1, tagger.RANDOM)
        connected = self.commander_arg_value(2, tagger.SCOPE_FLOOD)
        tagType = self.commander_arg_value(3, tagger.MATERIAL)
        withExisting = self.commander_arg_value(4)

        # RANDOM is the "no preset chosen" sentinel.
        if preset == tagger.RANDOM:
            preset = None

        i_POLYTAG = tagger.convert_to_iPOLYTAG(tagType)

        # Derive a tag name when none was given: fall back to the default
        # material name, or use the preset's basename for .lxp presets.
        if not pTag:
            if not preset:
                pTag = tagger.DEFAULT_MATERIAL_NAME
            elif not preset.endswith(".lxp"):
                pTag = tagger.DEFAULT_MATERIAL_NAME
            elif preset.endswith(".lxp"):
                pTag = splitext(basename(preset))[0]

        # find any existing masks for this pTag
        existing_masks = tagger.shadertree.get_masks( pTags = { pTag: i_POLYTAG })

        # tag the polys
        args = tagger.build_arg_string({
            tagger.TAGTYPE: tagType,
            tagger.TAG: pTag,
            tagger.SCOPE: connected
        })
        lx.eval("!" + tagger.CMD_PTAG_SET + args)

        # build a new mask if we need one
        if not existing_masks:
            new_mask = tagger.shadertree.build_material(i_POLYTAG = i_POLYTAG, pTag = pTag, preset = preset)
            tagger.shadertree.move_to_base_shader(new_mask)

        elif existing_masks and withExisting == tagger.USE:
            # Reuse what's there; nothing to build.
            pass

        elif existing_masks and withExisting == tagger.KEEP:
            # Keep old masks and add a fresh one on top.
            new_mask = tagger.shadertree.build_material(i_POLYTAG = i_POLYTAG, pTag = pTag, preset = preset)
            tagger.shadertree.move_to_base_shader(new_mask)

        elif existing_masks and withExisting == tagger.REMOVE:
            # Replace old masks with the fresh one.
            new_mask = tagger.shadertree.build_material(i_POLYTAG = i_POLYTAG, pTag = pTag, preset = preset)
            tagger.shadertree.move_to_base_shader(new_mask)
            tagger.safe_removeItems(existing_masks, True)

        elif existing_masks and withExisting == tagger.CONSOLIDATE:
            # Group old and new masks under a single consolidated parent.
            new_mask = tagger.shadertree.build_material(i_POLYTAG = i_POLYTAG, pTag = pTag, preset = preset)
            tagger.shadertree.move_to_base_shader(new_mask)
            consolidation_masks = tagger.shadertree.consolidate(pTags = { pTag: i_POLYTAG })
            new_mask.setParent(consolidation_masks[pTag])
            tagger.shadertree.move_to_top(new_mask)

        # Tell the UI that datatypes changed so lists refresh.
        notifier = tagger.Notifier()
        notifier.Notify(lx.symbol.fCMDNOTIFY_DATATYPE)


# Register the command with modo under its tagger command name.
lx.bless(CommandClass, CMD_NAME)
|
nilq/baby-python
|
python
|
import turtle
'''this makes a circle by building many squares'''
def draw_square(tom):
    """Draw a 100-unit square with turtle *tom*, ending at its starting pose."""
    sides_remaining = 4
    while sides_remaining:
        tom.forward(100)
        tom.right(90)
        sides_remaining -= 1
def draw_flower():
    """Render a flower of 72 rotated squares plus a stem; close on click."""
    screen = turtle.Screen()
    screen.bgcolor("red")
    artist = turtle.Turtle()
    artist.speed(0)
    artist.color("blue")
    # 72 squares, each rotated 5 degrees, sweep a full 360-degree circle.
    for _ in range(72):
        draw_square(artist)
        artist.right(5)
    # Stem: turn downward, then draw three segments (down, across, down).
    artist.color("green")
    for segment in (250, 3, 250):
        artist.right(90)
        artist.forward(segment)
    screen.exitonclick()


draw_flower()
|
nilq/baby-python
|
python
|
class Solution:
    """Best Time to Buy and Sell Stock II (unlimited transactions)."""

    def maxProfit(self, prices):
        """Return the maximum total profit over any number of buy/sell pairs.

        Because a position can be closed and reopened on the same day, the
        optimum equals the sum of every positive day-to-day price increase.
        This replaces the original flag/index run-tracking scan with the
        equivalent one-liner; results are identical, including 0 for empty,
        single-element, or monotonically falling price lists.

        :param prices: sequence of daily prices (may be empty)
        :returns: total achievable profit
        """
        return sum(
            max(prices[day] - prices[day - 1], 0)
            for day in range(1, len(prices))
        )
if __name__ == '__main__':
    # Smoke test: expected profit is 7 (buy at 1 / sell at 5, buy at 3 / sell at 6).
    sample_prices = [7, 1, 5, 3, 6, 4]
    print(Solution().maxProfit(sample_prices))
|
nilq/baby-python
|
python
|
from bs4 import BeautifulSoup
from requests.exceptions import RequestException
from lxml import etree
import requests
import re
def get_links(who_sells=0):
    """Fetch a 58.com tablet listing page and print the detail-page links.

    NOTE(review): relies on the module-level ``headers`` dict defined further
    down in the file; it is resolved at call time, so call order matters.

    :param who_sells: listing category segment inserted into the URL
    """
    listing_url = 'http://bj.58.com/pbdn/{}/'.format(str(who_sells))
    print(listing_url)
    response = requests.get(listing_url, headers=headers)
    # Pull hrefs out of the click-tracked anchors on the listing page.
    link_pattern = r'<a\sonClick="clickLog\(\'from=zzpc_infoclick\'\);"\shref="(.*?)"\starget="_blank">'
    detail_links = re.findall(link_pattern, response.text)
    print(1, detail_links)
def get_info(url):
    """Print title/price/date/area scraped from a 58.com detail page.

    NOTE(review): the ``url`` parameter is unused — this reads the
    module-level ``web_data`` and ``soup`` globals built at import time, so
    it always reports on that single page. The ``RequestException`` handler
    presumably never fires here (no request is issued inside the ``try``);
    confirm before relying on it.
    """
    try:
        if web_data.status_code == 200:
            title = soup.title.text
            price = soup.select('#content span.price')
            date = soup.select('li.time')
            area = soup.select('span.c_25d')
            data = {
                'title': title,
                'price': price[0].text,
                'date': date[0].text,
                'area': list(area[0].stripped_strings),
                'cate': None,  # category not extracted yet
                'views': None  # view count not extracted yet
            }
            print(11, data)
    except RequestException:
        pass
# Module-level setup: one known detail page is fetched at import time and the
# resulting response/soup are the globals get_info() reads.
headers = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.94 Safari/537.36'
}
url = 'http://bj.58.com/pingbandiannao/30223879694911x.shtml'
web_data = requests.get(url, headers=headers)
soup = BeautifulSoup(web_data.text, 'lxml')
get_info(url)
get_links(0)
# CSS selector kept for reference (no-op string expression).
'''
#infolist > div.left > a.title.t
'''
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
import os
import json
from logging import getLogger
from six import string_types, text_type
from collections import OrderedDict
from ckan import logic
from ckan import model
import ckan.plugins as p
from ckan.lib.plugins import DefaultDatasetForm
try:
    from ckan.lib.plugins import DefaultTranslation
except ImportError:
    # CKAN versions without DefaultTranslation: provide an empty stand-in so
    # the Harvest class definition below still has a valid base class.
    class DefaultTranslation():
        pass
import ckanext.harvest
from ckanext.harvest.model import setup as model_setup
from ckanext.harvest.model import HarvestSource, HarvestJob, HarvestObject
from ckanext.harvest.log import DBLogHandler
from ckanext.harvest.utils import (
DATASET_TYPE_NAME
)
if p.toolkit.check_ckan_version(min_version='2.9.0'):
from ckanext.harvest.plugin.flask_plugin import MixinPlugin
else:
from ckanext.harvest.plugin.pylons_plugin import MixinPlugin
log = getLogger(__name__)
assert not log.disabled
class Harvest(MixinPlugin, p.SingletonPlugin, DefaultDatasetForm, DefaultTranslation):
    """CKAN plugin exposing harvest sources as a custom dataset type.

    Keeps HarvestSource model objects in sync with their dataset
    representation via the IPackageController hooks, and registers the
    harvest actions, auth functions, templates, helpers and facets.
    """

    p.implements(p.IConfigurable)
    p.implements(p.IConfigurer, inherit=True)
    p.implements(p.IActions)
    p.implements(p.IAuthFunctions)
    p.implements(p.IDatasetForm)
    p.implements(p.IPackageController, inherit=True)
    p.implements(p.ITemplateHelpers)
    p.implements(p.IFacets, inherit=True)

    if p.toolkit.check_ckan_version(min_version='2.5.0'):
        p.implements(p.ITranslation, inherit=True)

    # True only while configure() runs; lets after_create() and the schemas
    # behave differently during plugin startup.
    startup = False

    # ITranslation

    def i18n_directory(self):
        u'''Change the directory of the .mo translation files'''
        return os.path.join(
            os.path.dirname(ckanext.harvest.__file__),
            'i18n'
        )

    # IPackageController

    def after_create(self, context, data_dict):
        # Mirror newly created harvest-source datasets into the model,
        # except during startup (sources are pre-seeded then).
        if 'type' in data_dict and data_dict['type'] == DATASET_TYPE_NAME and not self.startup:
            # Create an actual HarvestSource object
            _create_harvest_source_object(context, data_dict)

    def after_update(self, context, data_dict):
        if 'type' in data_dict and data_dict['type'] == DATASET_TYPE_NAME:
            # Edit the actual HarvestSource object
            _update_harvest_source_object(context, data_dict)

    def after_delete(self, context, data_dict):
        # data_dict only carries the id here; fetch the full dataset to check
        # its type before touching the model.
        package_dict = p.toolkit.get_action('package_show')(context, {'id': data_dict['id']})
        if 'type' in package_dict and package_dict['type'] == DATASET_TYPE_NAME:
            # Delete the actual HarvestSource object
            _delete_harvest_source_object(context, package_dict)

    def before_view(self, data_dict):
        # check_ckan_version should be more clever than this
        if p.toolkit.check_ckan_version(max_version='2.1.99') and (
                'type' not in data_dict or data_dict['type'] != DATASET_TYPE_NAME):
            # This is a normal dataset, check if it was harvested and if so, add
            # info about the HarvestObject and HarvestSource
            harvest_object = model.Session.query(HarvestObject) \
                .filter(HarvestObject.package_id == data_dict['id']) \
                .filter(HarvestObject.current==True).first()  # noqa
            if harvest_object:
                for key, value in [
                    ('harvest_object_id', harvest_object.id),
                    ('harvest_source_id', harvest_object.source.id),
                    ('harvest_source_title', harvest_object.source.title),
                ]:
                    _add_extra(data_dict, key, value)
        return data_dict

    def before_search(self, search_params):
        '''Prevents the harvesters being shown in dataset search results.'''
        fq = search_params.get('fq', '')
        if 'dataset_type:harvest' not in fq:
            # Exclude harvest-source datasets from the Solr filter query.
            fq = u"{0} -dataset_type:harvest".format(search_params.get('fq', ''))
            search_params.update({'fq': fq})
        return search_params

    def after_show(self, context, data_dict):
        if 'type' in data_dict and data_dict['type'] == DATASET_TYPE_NAME:
            # This is a harvest source dataset, add extra info from the
            # HarvestSource object
            source = HarvestSource.get(data_dict['id'])
            if not source:
                log.error('Harvest source not found for dataset {0}'.format(data_dict['id']))
                return data_dict
            st_action_name = 'harvest_source_show_status'
            try:
                status_action = p.toolkit.get_action(st_action_name)
            except KeyError:
                # The actions cache may be stale; clear it and retry once.
                logic.clear_actions_cache()
                status_action = p.toolkit.get_action(st_action_name)
            data_dict['status'] = status_action(context, {'id': source.id})
        elif 'type' not in data_dict or data_dict['type'] != DATASET_TYPE_NAME:
            # This is a normal dataset, check if it was harvested and if so, add
            # info about the HarvestObject and HarvestSource
            harvest_object = model.Session.query(HarvestObject) \
                .filter(HarvestObject.package_id == data_dict['id']) \
                .filter(HarvestObject.current == True).first()  # noqa

            # If the harvest extras are there, remove them. This can happen eg
            # when calling package_update or resource_update, which call
            # package_show
            if data_dict.get('extras'):
                data_dict['extras'][:] = [e for e in data_dict.get('extras', [])
                                          if not e['key']
                                          in ('harvest_object_id', 'harvest_source_id', 'harvest_source_title',)]

            # We only want to add these extras at index time so they are part
            # of the cached data_dict used to display, search results etc. We
            # don't want them added when editing the dataset, otherwise we get
            # duplicated key errors.
            # The only way to detect indexing right now is checking that
            # validate is set to False.
            if harvest_object and not context.get('validate', True):
                for key, value in [
                    ('harvest_object_id', harvest_object.id),
                    ('harvest_source_id', harvest_object.source.id),
                    ('harvest_source_title', harvest_object.source.title),
                ]:
                    _add_extra(data_dict, key, value)
        return data_dict

    # IDatasetForm

    def is_fallback(self):
        # Only handle the harvest dataset type, never the default one.
        return False

    def package_types(self):
        return [DATASET_TYPE_NAME]

    def package_form(self):
        return 'source/new_source_form.html'

    def search_template(self):
        return 'source/search.html'

    def read_template(self):
        return 'source/read.html'

    def new_template(self):
        return 'source/new.html'

    def edit_template(self):
        return 'source/edit.html'

    def setup_template_variables(self, context, data_dict):
        p.toolkit.c.dataset_type = DATASET_TYPE_NAME

    def create_package_schema(self):
        '''
        Returns the schema for mapping package data from a form to a format
        suitable for the database.
        '''
        from ckanext.harvest.logic.schema import harvest_source_create_package_schema
        schema = harvest_source_create_package_schema()
        if self.startup:
            # During startup, sources are created with a predefined id.
            schema['id'] = [text_type]
        return schema

    def update_package_schema(self):
        '''
        Returns the schema for mapping package data from a form to a format
        suitable for the database.
        '''
        from ckanext.harvest.logic.schema import harvest_source_update_package_schema
        schema = harvest_source_update_package_schema()
        return schema

    def show_package_schema(self):
        '''
        Returns the schema for mapping package data from the database into a
        format suitable for the form
        '''
        from ckanext.harvest.logic.schema import harvest_source_show_package_schema
        return harvest_source_show_package_schema()

    # IConfigurable

    def configure(self, config):
        self.startup = True

        # Setup harvest model
        model_setup()

        # Configure database logger
        _configure_db_logger(config)

        self.startup = False

    # IConfigurer

    def update_config(self, config):
        if not p.toolkit.check_ckan_version(min_version='2.0'):
            # NOTE(review): assert is stripped under ``python -O``; a raise
            # would be more robust here.
            assert 0, 'CKAN before 2.0 not supported by ckanext-harvest - '\
                'genshi templates not supported any more'
        if p.toolkit.asbool(config.get('ckan.legacy_templates', False)):
            log.warn('Old genshi templates not supported any more by '
                     'ckanext-harvest so you should set ckan.legacy_templates '
                     'option to True any more.')
        p.toolkit.add_template_directory(config, '../templates')
        p.toolkit.add_public_directory(config, '../public')
        p.toolkit.add_resource('../fanstatic_library', 'ckanext-harvest')
        p.toolkit.add_resource('../public/ckanext/harvest/javascript', 'harvest-extra-field')

        if p.toolkit.check_ckan_version(min_version='2.9.0'):
            # Map the old named routes to the new Flask blueprint endpoints.
            mappings = config.get('ckan.legacy_route_mappings', {})
            if isinstance(mappings, string_types):
                mappings = json.loads(mappings)
            mappings.update({
                'harvest_read': 'harvest.read',
                'harvest_edit': 'harvest.edit',
            })
            bp_routes = [
                "delete", "refresh", "admin", "about",
                "clear", "job_list", "job_show_last", "job_show",
                "job_abort", "object_show"
            ]
            mappings.update({
                'harvest_' + route: 'harvester.' + route
                for route in bp_routes
            })
            # https://github.com/ckan/ckan/pull/4521
            config['ckan.legacy_route_mappings'] = json.dumps(mappings)

    # IActions

    def get_actions(self):
        module_root = 'ckanext.harvest.logic.action'
        action_functions = _get_logic_functions(module_root)
        return action_functions

    # IAuthFunctions

    def get_auth_functions(self):
        module_root = 'ckanext.harvest.logic.auth'
        auth_functions = _get_logic_functions(module_root)
        return auth_functions

    # ITemplateHelpers

    def get_helpers(self):
        from ckanext.harvest import helpers as harvest_helpers
        return {
            'package_list_for_source': harvest_helpers.package_list_for_source,
            'package_count_for_source': harvest_helpers.package_count_for_source,
            'harvesters_info': harvest_helpers.harvesters_info,
            'harvester_types': harvest_helpers.harvester_types,
            'harvest_frequencies': harvest_helpers.harvest_frequencies,
            'harvest_times': harvest_helpers.harvest_times,
            'harvest_default_time': harvest_helpers.harvest_default_time,
            'link_for_harvest_object': harvest_helpers.link_for_harvest_object,
            'harvest_source_extra_fields': harvest_helpers.harvest_source_extra_fields,
            'bootstrap_version': harvest_helpers.bootstrap_version,
            'get_harvest_source': harvest_helpers.get_harvest_source,
            'get_latest_job': harvest_helpers.get_latest_job,
        }

    # IFacets

    def dataset_facets(self, facets_dict, package_type):
        # Replace the default facets only for harvest-source datasets.
        if package_type != 'harvest':
            return facets_dict
        return OrderedDict([('frequency', 'Frequency'),
                            ('source_type', 'Type'),
                            ('organization', 'Organization'),
                            ])

    def organization_facets(self, facets_dict, organization_type, package_type):
        if package_type != 'harvest':
            return facets_dict
        return OrderedDict([('frequency', 'Frequency'),
                            ('source_type', 'Type'),
                            ('organization', 'Organization'),
                            ])
def _add_extra(data_dict, key, value):
if 'extras' not in data_dict:
data_dict['extras'] = []
data_dict['extras'].append({
'key': key, 'value': value, 'state': u'active'
})
def _get_logic_functions(module_root, logic_functions={}):
for module_name in ['get', 'create', 'update', 'patch', 'delete']:
module_path = '%s.%s' % (module_root, module_name,)
module = __import__(module_path)
for part in module_path.split('.')[1:]:
module = getattr(module, part)
for key, value in module.__dict__.items():
if not key.startswith('_') and (hasattr(value, '__call__')
and (value.__module__ == module_path)):
logic_functions[key] = value
return logic_functions
def _create_harvest_source_object(context, data_dict):
    '''
    Create a HarvestSource model object mirroring a harvest_source dataset.

    Validation and authorization are assumed to have happened already, so do
    not call this directly to create harvest sources. The created source
    shares the dataset's id.

    :param data_dict: A standard package data_dict
    :returns: The created HarvestSource object
    :rtype: HarvestSource object
    '''
    log.info('Creating harvest source: %r', data_dict)

    source = HarvestSource()
    source.id = data_dict['id']
    source.url = data_dict['url'].strip()
    # Avoids clashes with the dataset type
    source.type = data_dict['source_type']

    for attr in ('active', 'title', 'description', 'user_id',
                 'publisher_id', 'config', 'frequency', 'time'):
        if data_dict.get(attr) is not None:
            setattr(source, attr, data_dict[attr])

    source.active = not data_dict.get('state', None) == 'deleted'

    # Don't commit yet, let package_create do it
    source.add()
    log.info('Harvest source created: %s', source.id)
    return source
def _update_harvest_source_object(context, data_dict):
    '''
    Update the HarvestSource model object backing a harvest_source dataset.

    Validation and authorization are assumed to have happened already, so do
    not call this directly to update harvest sources.

    :param data_dict: A standard package data_dict
    :returns: The updated HarvestSource object
    :rtype: HarvestSource object
    '''
    source_id = data_dict.get('id')

    log.info('Harvest source %s update: %r', source_id, data_dict)
    source = HarvestSource.get(source_id)
    if not source:
        log.error('Harvest source %s does not exist', source_id)
        raise logic.NotFound('Harvest source %s does not exist' % source_id)

    for field in ('url', 'title', 'description', 'user_id',
                  'publisher_id', 'frequency', 'time'):
        if data_dict.get(field) is not None:
            value = data_dict[field]
            if field == 'url':
                # Normalise the URL in the data_dict as well (side effect
                # preserved from the original implementation).
                value = value.strip()
                data_dict[field] = value
            setattr(source, field, value)

    # Avoids clashes with the dataset type
    if 'source_type' in data_dict:
        source.type = data_dict['source_type']
    if 'config' in data_dict:
        source.config = data_dict['config']

    # Don't change state unless explicitly set in the dict
    if 'state' in data_dict:
        source.active = data_dict.get('state') == 'active'

    # Don't commit yet, let package_create do it
    source.add()

    # Abort any pending jobs
    if not source.active:
        pending = HarvestJob.filter(source=source, status=u'New')
        log.info('Harvest source %s not active, so aborting %i outstanding jobs', source_id, pending.count())
        if pending:
            for job in pending:
                job.status = u'Aborted'
                job.add()

    return source
def _delete_harvest_source_object(context, data_dict):
    '''
    Soft-delete the HarvestSource model object whose id is in the given
    data_dict. Like datasets, the source row is not removed — it is only
    flagged inactive. Validation and authorization are assumed to have
    happened already, so do not call this directly to delete harvest sources.

    :param data_dict: A standard package data_dict
    :returns: The deleted HarvestSource object
    :rtype: HarvestSource object
    '''
    source_id = data_dict.get('id')
    log.info('Deleting harvest source: %s', source_id)

    src = HarvestSource.get(source_id)
    if not src:
        log.warn('Harvest source %s does not exist', source_id)
        raise p.toolkit.ObjectNotFound('Harvest source %s does not exist' % source_id)

    # Don't actually delete the record, just flag it as inactive
    src.active = False
    src.save()

    # Abort any pending jobs
    pending = HarvestJob.filter(source=src, status=u'New')
    if pending:
        log.info('Aborting %i jobs due to deleted harvest source', pending.count())
        for pending_job in pending:
            pending_job.status = u'Aborted'
            pending_job.save()

    log.debug('Harvest source %s deleted', source_id)
    return src
def _configure_db_logger(config):
    """Attach DBLogHandler instances to the ckanext.harvest loggers.

    ``ckan.harvest.log_scope`` selects which loggers log to the database:

      -1 - do not log to the database (default)
       0 - log everything
       1 - model, logic.action, logic.validators, harvesters
       2 - model, logic.action, logic.validators
       3 - model, logic.action
       4 - logic.action
       5 - model
       6 - plugin
       7 - harvesters

    ``ckan.harvest.log_level`` sets the handler level (default DEBUG).

    Fixed: a scope value outside -1..7 used to make ``children_.get(scope)``
    return None and crash the iteration below with a TypeError; it is now
    logged and ignored.
    """
    # Log scope
    scope = p.toolkit.asint(config.get('ckan.harvest.log_scope', -1))
    if scope == -1:
        return

    parent_logger = 'ckanext.harvest'
    children = ['plugin', 'model', 'logic.action.create', 'logic.action.delete',
                'logic.action.get', 'logic.action.patch', 'logic.action.update',
                'logic.validators', 'harvesters.base', 'harvesters.ckanharvester']

    children_ = {0: children, 1: children[1:], 2: children[1:-2],
                 3: children[1:-3], 4: children[2:-3], 5: children[1:2],
                 6: children[:1], 7: children[-2:]}

    loggers = children_.get(scope)
    if loggers is None:
        log.warning('Invalid ckan.harvest.log_scope value: %s; '
                    'database logging disabled', scope)
        return

    # Get log level from config param - default: DEBUG
    from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL
    level_names = {'DEBUG': DEBUG, 'INFO': INFO, 'WARNING': WARNING,
                   'ERROR': ERROR, 'CRITICAL': CRITICAL}
    level = level_names.get(
        config.get('ckan.harvest.log_level', 'debug').upper(), DEBUG)

    # Get root logger and set db handler
    logger = getLogger(parent_logger)
    if scope < 1:
        logger.addHandler(DBLogHandler(level=level))

    # Set db handler to all child loggers
    for child_name in loggers:
        child_logger = logger.getChild(child_name)
        child_logger.addHandler(DBLogHandler(level=level))
|
nilq/baby-python
|
python
|
# coding=utf-8
# IP地址取自国内髙匿代理IP网站:http://www.xicidaili.com/nn/
# 仅仅爬取首页IP地址就足够一般使用
import telnetlib
from bs4 import BeautifulSoup
import requests
import random
URL = 'http://www.xicidaili.com/nn/'
HEADERS = {
'User-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.143 Safari/537.36'
}
def get_ip_list(url=URL, headers=HEADERS):
    """Scrape the first page of xicidaili.com and return 'ip:port' strings."""
    response = requests.get(url, headers=headers)
    page = BeautifulSoup(response.text, 'lxml')
    rows = page.find_all('tr')
    proxies = []
    # Row 0 is the table header; in each data row, cells 1 and 2 hold IP and port.
    for row in rows[1:]:
        cells = row.find_all('td')
        proxies.append(cells[1].text + ':' + cells[2].text)
    return proxies
def get_random_ip():
    """Pick one scraped proxy at random, as a requests-style proxy dict."""
    candidates = ['http://' + ip for ip in get_ip_list()]
    return {'http': random.choice(candidates)}
def test_ip():
    """Probe a randomly chosen proxy by opening a Telnet connection to it.

    Prints the host/port and whether the connection succeeded; returns the
    proxy dict either way. NOTE(review): the caller cannot distinguish
    success from failure — consider returning None on failure.
    """
    ip_dict = get_random_ip()
    proxy_url = ip_dict['http']
    http, ip_port = proxy_url.split('://')
    ip, port = ip_port.split(':')
    print(ip)
    print(port)
    try:
        telnetlib.Telnet(ip, port=port, timeout=5)
    except OSError:
        # Was a bare ``except:``, which also swallowed KeyboardInterrupt and
        # SystemExit; connection/timeout failures raise OSError subclasses.
        print('失败')
    else:
        print('成功')
    return ip_dict
if __name__ == '__main__':
    # Manual smoke test: scrape the proxy list, pick one at random and probe it.
    # print(get_ip_list())
    # proxy_dict = get_random_ip()
    # print(proxy_dict)
    ip_dict = test_ip()
    # ip, port = ("http://110.73.2.182", "8123")
    # proxy_url = "{0}:{1}".format(ip, port)
    # print(proxy_url)
|
nilq/baby-python
|
python
|
import struct
__all__ = ['AbstractEnumValue', 'IntValue', 'KnobModeEnum', 'PadModeEnum', 'SusModeEnum']
class AbstractEnumValue (object):
    """Base class for one-byte enum values.

    Subclasses populate ``_VALUES``, a name -> int mapping. Instances may be
    constructed from either the symbolic name or the numeric value, and
    serialize to a single unsigned byte.
    """

    _VALUES = {}

    def __init__(self, val):
        """Store the symbolic name for *val*.

        :param val: int value or string name present in ``_VALUES``
        :raises ValueError: unknown value/name, or unsupported type
        """
        if isinstance(val, int):
            # Reverse-lookup: find the (first) name mapped to this number.
            matches = [name for name, number in self._VALUES.items() if number == val]
            if not matches:
                raise ValueError("Invalid value '%d' for enum '%s'"
                                 % (val, self.__class__.__name__))
            self._value = matches[0]
        elif isinstance(val, str):
            if val not in self._VALUES:
                raise ValueError("Invalid value '%s' for enum '%s'"
                                 % (val, self.__class__.__name__))
            self._value = val
        else:
            raise ValueError("Enum must be instantiated with int or string.")

    def as_string(self):
        """Return the symbolic name."""
        return self._value

    def as_int(self):
        """Return the numeric value."""
        return self._VALUES[self._value]

    def enum_vals(self):
        """Return the (name, value) pairs of this enum."""
        return self._VALUES.items()

    def serialize(self):
        """Pack the numeric value into one unsigned byte."""
        return struct.pack('B', self.as_int())

    @classmethod
    def num_bytes(cls):
        """Serialized size in bytes (always 1)."""
        return 1

    @classmethod
    def deserialize(cls, b):
        """Build an instance from the first byte of *b*."""
        return cls(int(b[0]))
class IntValue (object):
    """A plain integer setting serialized as one unsigned byte."""

    def __init__(self, val):
        """Store *val*, rejecting non-int types.

        :raises ValueError: if *val* is not an int
        """
        if not isinstance(val, int):
            raise ValueError("Invalid type '%s', expected int."
                             % (val.__class__.__name__))
        self._value = val

    def as_int(self):
        """Return the stored integer."""
        return self._value

    def serialize(self):
        """Pack the value into one unsigned byte."""
        return struct.pack('B', self._value)

    @classmethod
    def num_bytes(cls):
        """Serialized size in bytes (always 1)."""
        return 1

    @classmethod
    def deserialize(cls, b):
        """Build an instance from the first byte of *b*."""
        return cls(int(b[0]))
class KnobModeEnum (AbstractEnumValue):
    # Knob behaviour: emit CC messages or channel aftertouch.
    _VALUES = {
        'CC': 0x00,
        'Aftertouch': 0x01
    }
class PadModeEnum (AbstractEnumValue):
    # Pad behaviour: plain notes, or CC in toggle/momentary mode.
    _VALUES = {
        'Note': 0x00,
        'Toggle CC': 0x01,
        'Momentary CC': 0x02
    }
#class ButtonModeEnum (AbstractEnumValue):
#
# _VALUES = {
# 'Toggle CC': 0x00,
# 'Momentary CC': 0x01
# }
class SusModeEnum (AbstractEnumValue):
    # Sustain pedal behaviour: latching switch or momentary.
    _VALUES = {
        'Switch': 0x00,
        'Momentary': 0x01
    }
|
nilq/baby-python
|
python
|
# Count occurrences of 'a' in the first n characters of s repeated infinitely:
# full repetitions of s contribute count('a') each, plus the leftover prefix.
s = input().strip()
n = int(input().strip())
full_repeats, remainder = divmod(n, len(s))
total = s.count('a') * full_repeats + s[:remainder].count('a')
print(total)
|
nilq/baby-python
|
python
|
import asyncio
import math
import networkx as nx
from ccxt import async_support as ccxt
import warnings
__all__ = [
'create_multi_exchange_graph',
'create_weighted_multi_exchange_digraph',
'multi_graph_to_log_graph',
]
def create_multi_exchange_graph(exchanges: list, digraph=False):
    """
    Build a MultiGraph (or MultiDiGraph when ``digraph``) whose edges are the
    markets of the given exchanges. Edge weights are not set here (no
    ask/bid prices are attached).

    exchange.load_markets() must have been called for each exchange in
    exchanges. Will throw a ccxt error if it has not.
    todo: check which error.
    """
    graph = nx.MultiDiGraph() if digraph else nx.MultiGraph()

    for exchange in exchanges:
        for market_name in exchange.symbols:
            try:
                base_currency, quote_currency = market_name.split('/')
            except ValueError:
                # ccxt occasionally reports markets in a non-'BASE/QUOTE'
                # format (e.g. FX_BTC_JPY on BitFlyer); skip those.
                continue

            edge_attrs = dict(market_name=market_name,
                              exchange_name=exchange.name.lower())
            graph.add_edge(base_currency, quote_currency, **edge_attrs)
            if digraph:
                graph.add_edge(quote_currency, base_currency, **edge_attrs)

    return graph
def create_weighted_multi_exchange_digraph(exchanges: list, name=True, log=False, fees=False, suppress=None):
    """
    Build a weighted MultiDiGraph containing every market on every exchange.

    Not optimized (in favor of readability). There is multiple iterations over exchanges.

    :param exchanges: exchange id strings (when ``name``) or ccxt exchange objects
    :param name: treat *exchanges* as ccxt exchange-id strings and instantiate them
    :param log: store edge weights as negative logs (for negative-cycle search)
    :param fees: apply each exchange's maker fee to the edge weights
    :param suppress: warning categories to silence; defaults to ['markets']
    """
    if suppress is None:
        suppress = ['markets']
    if name:
        exchanges = [{'object': getattr(ccxt, exchange)()} for exchange in exchanges]
    else:
        exchanges = [{'object': exchange} for exchange in exchanges]

    try:
        loop = asyncio.get_event_loop()
    except RuntimeError:
        # Fixed: the previous fallback called asyncio.get_running_loop(),
        # which raises the same RuntimeError when no loop exists in this
        # thread (and if a loop WERE running, run_until_complete below could
        # never work). Create and install a fresh loop instead.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

    futures = [asyncio.ensure_future(exchange_dict['object'].load_markets()) for exchange_dict in exchanges]
    loop.run_until_complete(asyncio.gather(*futures))

    if fees:
        for exchange_dict in exchanges:
            if 'maker' in exchange_dict['object'].fees['trading']:
                # we always take the maker side because arbitrage depends on filling orders
                exchange_dict['fee'] = exchange_dict['object'].fees['trading']['maker']
            else:
                if 'fees' not in suppress:
                    warnings.warn("The fees for {} have not yet been implemented into the library. "
                                  "Values will be calculated using a 0.2% maker fee.".format(exchange_dict['object'].id))
                exchange_dict['fee'] = 0.002
    else:
        # todo: is there a way to do this with list/ dict comprehension?
        for exchange_dict in exchanges:
            exchange_dict['fee'] = 0

    graph = nx.MultiDiGraph()
    futures = [_add_exchange_to_multi_digraph(graph, exchange, log=log, suppress=suppress) for exchange in exchanges]
    loop.run_until_complete(asyncio.gather(*futures))
    return graph
async def _add_exchange_to_multi_digraph(graph: nx.MultiDiGraph, exchange, log=True, suppress=None):
    """Add every market of *exchange* to *graph* concurrently, then close the
    exchange's network session.

    :param exchange: dict with an 'object' (ccxt exchange) and 'fee' entry
    """
    tasks = [_add_market_to_multi_digraph(exchange, symbol, graph, log=log, suppress=suppress)
             for symbol in exchange['object'].symbols]
    # Fixed: asyncio.wait() with bare coroutines was deprecated in 3.8 and
    # removed in 3.11. gather() schedules them; return_exceptions=True keeps
    # wait()'s behaviour of not propagating per-task exceptions here.
    await asyncio.gather(*tasks, return_exceptions=True)
    await exchange['object'].close()
# todo: refactor. there is a lot of code repetition here with single_exchange.py's _add_weighted_edge_to_graph
# todo: write tests which prove market_name is always a ticker on exchange and exchange's load_markets has been called.
# this will validate that all exceptions thrown by await exchange.fetch_ticker(market_name) are solely because of
# ccxt's fetch_ticker
async def _add_market_to_multi_digraph(exchange, market_name: str, graph: nx.DiGraph, log=True, suppress=None):
    """Fetch the ticker for *market_name* and add bid/ask edges to *graph*.

    :param exchange: dict with an 'object' (ccxt exchange) and 'fee' entry
    :param suppress: required list of warning categories to silence
    :raises ValueError: if *suppress* is None (must be passed explicitly)
    """
    if suppress is None:
        raise ValueError("suppress cannot be None. Must be a list with possible values listed in docstring of"
                         "create_weighted_multi_exchange_digraph. If this error shows, something likely went awry "
                         "during execution.")
    try:
        ticker = await exchange['object'].fetch_ticker(market_name)
    except Exception:
        # Fixed: was a bare ``except:``, which also swallowed
        # KeyboardInterrupt/SystemExit. ccxt raises many error types here,
        # so Exception is the narrowest practical clause.
        if 'markets' not in suppress:
            warning = 'Market {} is unavailable at this time.'.format(market_name)
            warnings.warn(warning)
        return

    try:
        ticker_ask = ticker['ask']
        ticker_bid = ticker['bid']
    # ticker is None if this market does not exist -> subscripting raises TypeError
    except TypeError:
        return

    # prevent math error when Bittrex (GEO/BTC) or other API gives 0 as ticker price
    if ticker_ask == 0:
        return
    try:
        base_currency, quote_currency = market_name.split('/')
    # if ccxt returns a market in incorrect format (e.g FX_BTC_JPY on BitFlyer)
    except ValueError:
        return

    fee_scalar = 1 - exchange['fee']

    if log:
        # NOTE(review): log1p(x) computes log(1 + x), NOT log(x). These
        # weights are therefore -log(1 + rate) rather than the conventional
        # -log(rate) used for negative-cycle arbitrage search. That avoids a
        # domain error at rate == 0 but changes the math — confirm this is
        # intentional before "fixing" it.
        graph.add_edge(base_currency, quote_currency,
                       market_name=market_name,
                       exchange_name=exchange['object'].id,
                       weight=-math.log1p(fee_scalar * ticker_bid))
        graph.add_edge(quote_currency, base_currency,
                       market_name=market_name,
                       exchange_name=exchange['object'].id,
                       weight=-math.log1p(fee_scalar * 1 / ticker_ask))
    else:
        graph.add_edge(base_currency, quote_currency,
                       market_name=market_name,
                       exchange_name=exchange['object'].id,
                       weight=fee_scalar * ticker_bid)
        graph.add_edge(quote_currency, base_currency,
                       market_name=market_name,
                       exchange_name=exchange['object'].id,
                       weight=fee_scalar * 1 / ticker_ask)
def multi_graph_to_log_graph(digraph: nx.MultiDiGraph):
    """
    This does not work with the default version of Networkx, but with the fork available at wardbradt/Networkx

    Given weighted MultiDigraph m1, returns a MultiDigraph m2 where for each edge e1 in each edge bunch eb1 of m1, the
    weight w1 of e1 is replaced with log(w1) and the weight w2 of each edge e2 in the opposite edge bunch of eb is
    log(1/w2)

    This function is not optimized.
    todo: allow this function to be used with Networkx DiGraph objects. Should not be that hard, simply return seen
    from self._report in the iterator for digraph's edges() in reportviews.py as it is done for multidigraph's
    edge_bunches()
    """
    result_graph = nx.MultiDiGraph()
    # edge_bunches() exists only in the wardbradt/Networkx fork; from the
    # usage below each bunch is (u, v, data_dicts, seen).
    for bunch in digraph.edge_bunches(data=True, seen=True):
        for data_dict in bunch[2]:
            weight = data_dict.pop('weight')
            # if not seen
            if not bunch[3]:
                # NOTE(review): in stock networkx the third positional argument
                # of MultiDiGraph.add_edge is the edge *key*, not a weight —
                # this presumably relies on the fork's signature; confirm.
                result_graph.add_edge(bunch[0], bunch[1], -math.log(weight), **data_dict)
            else:
                result_graph.add_edge(bunch[0], bunch[1], -math.log(1/weight), **data_dict)
|
nilq/baby-python
|
python
|
"""Methods for creating, manipulating, and storing Teradata row objects."""
import csv
from claims_to_quality.lib.qpp_logging import logging_config
from claims_to_quality.lib.teradata_methods import deidentification
import teradata
logger = logging_config.get_logger(__name__)
def csv_to_query_output(csv_path):
    """
    Use csv input to mock SQL query results.

    This is used to allow claim_reader to read from csv.

    :param csv_path: path to a csv file whose first row is the header
    :returns: (columns, rows) where columns maps column name -> index and
        rows is a list of Teradata row objects
    """
    with open(csv_path) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',', quotechar='"')
        header = next(csv_reader)
        # Rows must be materialised while the file is still open.
        rows = convert_list_of_lists_to_teradata_rows(csv_reader, header)
    columns = {column_name: idx for idx, column_name in enumerate(header)}
    return (columns, rows)
def convert_list_of_lists_to_teradata_rows(data, columns):
    """
    Given a list of iterables, convert to Teradata row objects with the specified columns.

    :param data: List of iterables to convert to Teradata row objects.
    :param columns: List of column names for the returned rows.
    """
    column_map = {name: position for position, name in enumerate(columns)}
    rows = []
    for row_num, values in enumerate(data):
        rows.append(teradata.util.Row(columns=column_map, values=values, rowNum=row_num))
    return rows
def convert_dicts_to_teradata_rows(data):
    """
    Convert a list of dictionaries to a list of Teradata row objects.

    All dictionaries in the list should have the same keys (column order is
    taken from the first dictionary).
    """
    if not data:
        return []
    columns = {key: index for index, key in enumerate(data[0].keys())}
    # Convert rows to the list format expected by the Teradata library:
    # values ordered by column index.
    rows_as_lists = [
        [row[column_name] for column_name in columns]
        for row in data
    ]
    return [
        teradata.util.Row(columns=columns, values=values, rowNum=idx)
        for idx, values in enumerate(rows_as_lists)
    ]
def to_csv(rows, csv_path, anonymize=True):
    """
    Given a list of Teradata rows, output to csv with the given columns.

    TODO: Specify specific columns to write to csv.

    :param rows: List of Teradata row objects to be written to csv.
    :param csv_path: Path of csv file to create and write to.
    :param anonymize: When True, pass rows through the AnonymizationFilter first.
    """
    if not rows:
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning('No data to save.')
        return
    if anonymize:
        anonymization_filter = deidentification.AnonymizationFilter()
        rows = list(anonymization_filter.anonymize_rows(rows))
    # newline='' is required by the csv module; without it, '\r\n' row
    # terminators are translated again on Windows, producing blank lines.
    with open(csv_path, 'w', newline='') as f:
        fieldnames = [column for column in rows[0].columns]
        writer = csv.writer(f)
        writer.writerow(fieldnames)
        writer.writerows([row.values for row in rows])
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
"""\
=====================
General 3D Object
=====================
TODO
"""
import Axon
import pygame
from pygame.locals import *
from OpenGL.GL import *
from OpenGL.GLU import *
from Display3D import Display3D
from Util3D import *
from Object3D import *
class Button(Object3D):
def __init__(self, **argd):
    """Initialise the 3D button from keyword arguments.

    Recognised keys (all optional): caption, bgcolour, fgcolour, sidecolour,
    margin, key, fontsize, pixelscaling, thickness, msg. Remaining keys are
    forwarded to Object3D.
    """
    super(Button, self).__init__(**argd)

    self.grabbed = 0  # mouse button currently holding the button, 0 = none

    # Button initialisation. (Fixed: a redundant unused local
    # ``caption = argd.get("caption", "Button")`` was fetched here before
    # the identical self.caption assignment below; it has been removed.)
    self.backgroundColour = argd.get("bgcolour", (244,244,244))
    self.foregroundColour = argd.get("fgcolour", (0,0,0))
    self.sideColour = argd.get("sidecolour", (200,200,244))
    self.margin = argd.get("margin", 8)
    self.key = argd.get("key", None)
    self.caption = argd.get("caption", "Button")
    self.fontsize = argd.get("fontsize", 50)
    self.pixelscaling = argd.get("pixelscaling", 100)
    self.thickness = argd.get("thickness", 0.2)
    self.eventMsg = argd.get("msg", "CLICK")

    self.activated = False  # set when a click is completed over the button
    self.actrot = 0
def setup(self):
    """Pre-render the caption texture and subscribe to the mouse events this button reacts to."""
    self.buildCaption()
    self.addListenEvents( [pygame.MOUSEMOTION, pygame.MOUSEBUTTONDOWN, pygame.MOUSEBUTTONUP ])
def draw(self):
hs = self.size/2.0
# draw faces
glBegin(GL_QUADS)
glColor4f(self.sideColour[0]/256.0, self.sideColour[1]/256.0, self.sideColour[2]/256.0, 0.5)
glVertex3f(hs.x,hs.y,hs.z)
glVertex3f(hs.x,-hs.y,hs.z)
glVertex3f(hs.x,-hs.y,-hs.z)
glVertex3f(hs.x,hs.y,-hs.z)
glVertex3f(-hs.x,hs.y,hs.z)
glVertex3f(-hs.x,-hs.y,hs.z)
glVertex3f(-hs.x,-hs.y,-hs.z)
glVertex3f(-hs.x,hs.y,-hs.z)
glVertex3f(hs.x,hs.y,hs.z)
glVertex3f(-hs.x,hs.y,hs.z)
glVertex3f(-hs.x,hs.y,-hs.z)
glVertex3f(hs.x,hs.y,-hs.z)
glVertex3f(hs.x,-hs.y,hs.z)
glVertex3f(-hs.x,-hs.y,hs.z)
glVertex3f(-hs.x,-hs.y,-hs.z)
glVertex3f(hs.x,-hs.y,-hs.z)
glEnd()
glEnable(GL_TEXTURE_2D)
glBindTexture(GL_TEXTURE_2D, self.texID)
glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_REPLACE)
glBegin(GL_QUADS)
# back plane
glTexCoord2f(self.tex_w, 1.0-self.tex_h)
glVertex3f(hs.x,hs.y,-hs.z)
glTexCoord2f(0.0, 1.0-self.tex_h)
glVertex3f(-hs.x,hs.y,-hs.z)
glTexCoord2f(0.0, 1.0)
glVertex3f(-hs.x,-hs.y,-hs.z)
glTexCoord2f(self.tex_w, 1.0)
glVertex3f(hs.x,-hs.y,-hs.z)
# front plane
glTexCoord2f(0.0, 1.0-self.tex_h)
glVertex3f(-hs.x,-hs.y,hs.z)
glTexCoord2f(self.tex_w, 1.0-self.tex_h)
glVertex3f(hs.x,-hs.y,hs.z)
glTexCoord2f(self.tex_w, 1.0)
glVertex3f(hs.x,hs.y,hs.z)
glTexCoord2f(0.0, 1.0)
glVertex3f(-hs.x,hs.y,hs.z)
glEnd()
glDisable(GL_TEXTURE_2D)
def handleEvents(self):
while self.dataReady("inbox"):
event = self.recv("inbox")
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1 and self.ogl_name in event.hitobjects:
self.grabbed = event.button
self.scaling = Vector(0.9,0.9,0.9)
if event.type == pygame.MOUSEBUTTONUP:
if event.button == 1:
self.grabbed = 0
self.scaling = Vector(1,1,1)
#activate
if self.ogl_name in event.hitobjects:
self.send( self.eventMsg, "outbox" )
self.activated = True
def buildCaption(self):
"""Pre-render the text to go on the button label."""
# Text is rendered to self.image
pygame.font.init()
font = pygame.font.Font(None, self.fontsize)
self.image = font.render(self.caption,True, self.foregroundColour, )
# create power of 2 dimensioned surface
pow2size = (int(2**(ceil(log(self.image.get_width(), 2)))), int(2**(ceil(log(self.image.get_height(), 2)))))
textureSurface = pygame.Surface(pow2size)
textureSurface.fill( self.backgroundColour )
# determine texture coordinates
self.tex_w = float(self.image.get_width()+2*self.margin)/pow2size[0]
self.tex_h = float(self.image.get_height()+2*self.margin)/pow2size[1]
# copy image data to pow2surface
textureSurface.blit(self.image, (self.margin,self.margin))
# textureSurface.set_alpha(128)
# textureSurface = textureSurface.convert_alpha()
# read pixel data
textureData = pygame.image.tostring(textureSurface, "RGBX", 1)
self.texID = glGenTextures(1)
# create texture
glEnable(GL_TEXTURE_2D)
glBindTexture(GL_TEXTURE_2D, self.texID)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST)
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST)
glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA, textureSurface.get_width(), textureSurface.get_height(), 0,
GL_RGBA, GL_UNSIGNED_BYTE, textureData );
glDisable(GL_TEXTURE_2D)
if self.size is None:
self.size=Vector(self.image.get_width()/float(self.pixelscaling), self.image.get_height()/float(self.pixelscaling), self.thickness)
def steadyMovement(self):
# self.rot += self.wiggle
# if self.wiggle.x >= 0.1 or self.wiggle.x <=-0.1:
# self.wiggleadd *= -1
# self.wiggle += self.wiggleadd
if self.activated:
self.rot += Vector(3,0,0)%360
self.actrot += 3
if self.actrot >= 360:
self.actrot = 0
self.activated = False
def frame(self):
self.steadyMovement()
from SkyGrassBackground import *
if __name__=='__main__':
    # Demo scene: four transport-control buttons placed in front of the
    # camera and a large sky/grass backdrop, run under the Axon scheduler.
    BUTTON1 = Button(caption="<<", msg="Previous", pos=Vector(-3,0,-10)).activate()
    BUTTON2 = Button(caption=">>", msg="Next", pos=Vector(3,0,-10)).activate()
    BUTTON3 = Button(caption="Play", msg="Play", pos=Vector(-1,0,-10)).activate()
    BUTTON4 = Button(caption="Stop", msg="Stop", pos=Vector(1,0,-10)).activate()
    bg = SkyGrassBackground(size=Vector(5000,5000,0), pos = Vector(0, 0, -100)).activate()
    # Blocks until the scheduler finishes running all components.
    Axon.Scheduler.scheduler.run.runThreads()
|
nilq/baby-python
|
python
|
# Copyright 2021 AIPlan4EU project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import importlib
import upf
from upf.model import ProblemKind
from typing import Dict, Tuple, Optional, List, Union, Type
# Built-in engine registry: solver name -> (module to import, class name).
# Optional backends whose modules are not installed are skipped at Factory
# construction time.
DEFAULT_SOLVERS = {'tamer' : ('upf_tamer', 'SolverImpl'),
                   'pyperplan' : ('upf_pyperplan', 'SolverImpl'),
                   'sequential_plan_validator' : ('upf.solvers.plan_validator', 'SequentialPlanValidator'),
                   'grounder' : ('upf.solvers.grounder', 'Grounder')}
class Factory:
    """Registry and factory for upf solver engines.

    Maps solver names to solver classes (imported lazily at construction)
    and builds configured solver instances, either a single engine or a
    Parallel combination of several.
    """
    def __init__(self, solvers: Dict[str, Tuple[str, str]] = DEFAULT_SOLVERS):
        # name -> solver class; optional backends whose modules are not
        # installed are silently left out of the registry.
        self.solvers: Dict[str, Type['upf.solvers.solver.Solver']] = {}
        for name, (module_name, class_name) in solvers.items():
            try:
                self.add_solver(name, module_name, class_name)
            except ImportError:
                pass

    def add_solver(self, name: str, module_name: str, class_name: str):
        """Import `module_name` and register its `class_name` under `name`.

        Raises ImportError if the module is not installed, AttributeError
        if the class is missing from it.
        """
        module = importlib.import_module(module_name)
        SolverImpl = getattr(module, class_name)
        self.solvers[name] = SolverImpl

    def _get_solver_class(self, solver_kind: str, name: Optional[str] = None,
                          problem_kind: ProblemKind = ProblemKind()) -> Optional[Type['upf.solvers.solver.Solver']]:
        """Return the solver class registered under `name`, or the first
        registered class that is of `solver_kind` and supports
        `problem_kind`; None when nothing matches."""
        if name is not None:
            # Direct lookup; an unknown name raises KeyError, as before.
            return self.solvers[name]
        for SolverClass in self.solvers.values():
            if getattr(SolverClass, 'is_'+solver_kind)() and SolverClass.supports(problem_kind):
                return SolverClass
        return None

    def _get_solver(self, solver_kind: str, name: Optional[str] = None,
                    names: Optional[List[str]] = None,
                    params: Optional[Union[Dict[str, str], List[Dict[str, str]]]] = None,
                    problem_kind: ProblemKind = ProblemKind()) -> Optional['upf.solvers.solver.Solver']:
        """Instantiate a solver of `solver_kind`.

        With `names`, builds a Parallel solver over the listed engines
        (params, when given, is a parallel list of option dicts).
        Otherwise builds a single engine selected by `name` or by
        `problem_kind`. Raises RuntimeError when no suitable solver class
        is found.
        """
        if names is not None:
            assert name is None
            if params is None:
                params = [{} for _ in names]
            assert isinstance(params, List) and len(names) == len(params)
            solvers = []
            for name, param in zip(names, params):
                SolverClass = self._get_solver_class(solver_kind, name)
                if SolverClass is None:
                    # Was a bare `raise` (which outside an except clause is a
                    # bare RuntimeError); keep the type but say what failed.
                    raise RuntimeError('No %s available for name %r' % (solver_kind, name))
                solvers.append((SolverClass, param))
            return upf.solvers.parallel.Parallel(solvers)
        else:
            if params is None:
                params = {}
            assert isinstance(params, Dict)
            SolverClass = self._get_solver_class(solver_kind, name, problem_kind)
            if SolverClass is None:
                # Was a bare `raise`; keep the RuntimeError type with a message.
                raise RuntimeError('No %s supports the given problem kind' % solver_kind)
            return SolverClass(**params)

    def OneshotPlanner(self, *, name: Optional[str] = None,
                       names: Optional[List[str]] = None,
                       params: Optional[Union[Dict[str, str], List[Dict[str, str]]]] = None,
                       problem_kind: ProblemKind = ProblemKind()) -> Optional['upf.solvers.solver.Solver']:
        """
        Returns a oneshot planner. There are three ways to call this method:
        - using 'name' (the name of a specific planner) and 'params' (planner dependent options).
          e.g. OneshotPlanner(name='tamer', params={'heuristic': 'hadd'})
        - using 'names' (list of specific planners name) and 'params' (list of
          planners dependent options) to get a Parallel solver.
          e.g. OneshotPlanner(names=['tamer', 'tamer'],
                              params=[{'heuristic': 'hadd'}, {'heuristic': 'hmax'}])
        - using 'problem_kind' parameter.
          e.g. OneshotPlanner(problem_kind=problem.kind())
        """
        return self._get_solver('oneshot_planner', name, names, params, problem_kind)

    def PlanValidator(self, *, name: Optional[str] = None,
                      names: Optional[List[str]] = None,
                      params: Optional[Union[Dict[str, str], List[Dict[str, str]]]] = None,
                      problem_kind: ProblemKind = ProblemKind()) -> Optional['upf.solvers.solver.Solver']:
        """
        Returns a plan validator. There are three ways to call this method:
        - using 'name' (the name of a specific plan validator) and 'params'
          (plan validator dependent options).
          e.g. PlanValidator(name='tamer', params={'opt': 'val'})
        - using 'names' (list of specific plan validators name) and 'params' (list of
          plan validators dependent options) to get a Parallel solver.
          e.g. PlanValidator(names=['tamer', 'tamer'],
                             params=[{'opt1': 'val1'}, {'opt2': 'val2'}])
        - using 'problem_kind' parameter.
          e.g. PlanValidator(problem_kind=problem.kind())
        """
        return self._get_solver('plan_validator', name, names, params, problem_kind)

    def Grounder(self, *, name: Optional[str] = None,
                 params: Optional[Union[Dict[str, str], List[Dict[str, str]]]] = None,
                 problem_kind: ProblemKind = ProblemKind()) -> Optional['upf.solvers.solver.Solver']:
        """
        Returns a Grounder. There are two ways to call this method:
        - using 'name' (the name of a specific grounder) and 'params'
          (grounder dependent options).
          e.g. Grounder(name='tamer', params={'opt': 'val'})
        - using 'problem_kind' parameter.
          e.g. Grounder(problem_kind=problem.kind())
        """
        return self._get_solver('grounder', name, None, params, problem_kind)
|
nilq/baby-python
|
python
|
# Read the original price and a discount percentage from the console, then
# report the discounted price rounded to two decimal places.
valor = float(input('Qual o valor do produto ? R$ '))
percentual = float(input('Qual a porcentagem ? '))
valor_final = valor - (valor * percentual / 100)
print(f'O produto que custava R${valor}, na promoção com desconto de {percentual}% vai custar {valor_final:.2f}')
|
nilq/baby-python
|
python
|
import json
import logging
from unittest import mock
from django.test import TestCase
from djenga.logging.formatters import JsonFormatter, JsonTaskFormatter
__all__ = [ 'JsonFormatterTest', ]
log = logging.getLogger(__name__)
class JsonFormatterTest(TestCase):
    """Exercises djenga's JSON logging formatters via assertLogs."""

    def test_json_formatter(self):
        """JsonFormatter emits one JSON object per record, merging `extra`
        fields and exception details into the payload."""
        formatter = JsonFormatter()
        with self.assertLogs(log) as log_context:
            for handler in log.handlers:
                handler.setFormatter(formatter)
            log.info('Hello, Gwenna!', extra={'favorite': 'Olive'})
            data = log_context.output[-1]
            data = json.loads(data)
            self.assertIn('timestamp', data)
            self.assertEqual(data['message'], 'Hello, Gwenna!')
            self.assertEqual(data['logger'],
                             'djenga_tests.tests.json_formatters')
            self.assertEqual(data['favorite'], 'Olive')
            try:
                raise ValueError('test exception')
            except ValueError as ex:
                log.exception('%s', ex)
            data = log_context.output[-1]
            data = json.loads(data)
            self.assertEqual(data['exception_type'], 'builtins.ValueError')
            self.assertIn('test exception', data['message'])
            # Fix: assertEquals is a deprecated alias (removed in Python 3.12).
            self.assertEqual('test exception', data['exception_args'][0])

    class MockTask:
        """Minimal stand-in for a Celery task: just request.id and name."""
        class MockRequest:
            id = 'olive'
        request = MockRequest()
        name = 'gwenna'

    @mock.patch('celery._state.get_current_task',
                return_value=MockTask())
    def test_task_formatter(self, mock_current_task):
        """JsonTaskFormatter adds task_id/task_name from the current task."""
        formatter = JsonTaskFormatter()
        with self.assertLogs(log) as log_context:
            for handler in log.handlers:
                handler.setFormatter(formatter)
            log.info('Hello, Olive!')
            data = log_context.output[-1]
            data = json.loads(data)
            self.assertEqual(data['task_id'], 'olive')
            self.assertEqual(data['task_name'], 'gwenna')
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
'''
booksdatasource.py
Jeff Ondich, 21 September 2021
For use in the "books" assignment at the beginning of Carleton's
CS 257 Software Design class, Fall 2021.
'''
#Revised by Thea Traw
import csv
class Author:
    """A book author identified (for this assignment) by name alone.

    birth_year/death_year are kept as parsed strings; death_year is None
    for living authors.
    """
    def __init__(self, surname='', given_name='', birth_year=None, death_year=None):
        self.surname = surname
        self.given_name = given_name
        self.birth_year = birth_year
        self.death_year = death_year

    def __eq__(self, other):
        ''' For simplicity, we're going to assume that no two authors have the same name. '''
        # Guard against comparison with non-Author objects (the old code
        # raised AttributeError); NotImplemented lets Python fall back.
        if not isinstance(other, Author):
            return NotImplemented
        return self.surname == other.surname and self.given_name == other.given_name

    def __hash__(self):
        # Defining __eq__ alone made Author unhashable; hash on exactly the
        # fields that equality uses so equal authors hash alike.
        return hash((self.surname, self.given_name))

    def __repr__(self):
        return 'Author(surname={0!r}, given_name={1!r})'.format(
            self.surname, self.given_name)
class Book:
    """A book with a title, publication year, and a list of Author objects."""
    def __init__(self, title='', publication_year=None, authors=None):
        ''' Note that the self.authors instance variable is a list of
            references to Author objects. '''
        self.title = title
        self.publication_year = publication_year
        # Fix: the old default `authors=[]` was a shared mutable default —
        # every Book created without authors shared one list.
        self.authors = authors if authors is not None else []

    def __eq__(self, other):
        ''' We're going to make the excessively simplifying assumption that
            no two books have the same title, so "same title" is the same
            thing as "same book". '''
        if not isinstance(other, Book):
            return NotImplemented
        return self.title == other.title

    def __hash__(self):
        # Keep Book hashable (consistent with __eq__ on title).
        return hash(self.title)
class BooksDataSource:
    """Parses a books CSV file and answers author/book queries over it."""

    def __init__(self, books_csv_file_name):
        ''' The books CSV file format looks like this:
                title,publication_year,author_description
            For example:
                All Clear,2010,Connie Willis (1945-)
                "Right Ho, Jeeves",1934,Pelham Grenville Wodehouse (1881-1975)
            This __init__ method parses the specified CSV file and creates
            suitable instance variables for the BooksDataSource object containing
            a collection of Author objects and a collection of Book objects.
        '''
        self.list_of_authors = []
        self.list_of_books = []
        # Use a context manager so the file is closed even if parsing raises
        # (the old code opened/closed by hand and kept an unused line count).
        with open(books_csv_file_name, "r") as books_file:
            lines_in_books_file = books_file.readlines()
        for line in lines_in_books_file:
            parsed_information = BooksDataSource.parse_line(line)
            title = parsed_information[0]
            publication_year = parsed_information[1]
            author_info = parsed_information[2]
            authors_list = BooksDataSource.process_list_of_authors(author_info)
            # Record each distinct author once, globally; each book keeps
            # its own full author list.
            for author in authors_list:
                if author not in self.list_of_authors:
                    self.list_of_authors.append(author)
            self.list_of_books.append(Book(title, publication_year, authors_list))

    @staticmethod
    def parse_line(line):
        """Split a raw CSV line into [title, publication_year, author_info],
        handling a double-quoted title that may itself contain commas.

        Note: any trailing newline stays attached to author_info;
        process_author tolerates it.
        """
        parsed_information = []
        # check if there are quotation marks around title
        if line.find('"') == -1:
            parsed_information = line.split(",")
        else:
            # just in case there's also '"' in the book title
            index_first_quote = line.find('"')
            index_last_quote = line.rfind('"')
            # everything between quotes
            title = line[(index_first_quote + 1):index_last_quote]
            # everything following the comma after the last quote
            publication_and_author_info = line[(index_last_quote + 2):].split(",")
            parsed_information.append(title)
            parsed_information.append(publication_and_author_info[0])
            parsed_information.append(publication_and_author_info[1])
        return parsed_information

    @staticmethod
    def process_author(author_info):
        """Build an Author from a string like 'Connie Willis (1945-)'.
        death_year stays None when the author is still alive."""
        # separate names and birth & death year
        split_index = author_info.rfind(" ")
        names = author_info[:split_index]
        # birth and death year follow the "(" after the last space
        birth_and_death_year = author_info[(split_index + 2):]
        # just count any "middle" names as part of first name
        name_split = names.rfind(" ")
        given_name = names[:name_split]
        surname = names[(name_split + 1):]
        # for birth year and death year
        birth_and_death_year_split = birth_and_death_year.split("-")
        birth_year = birth_and_death_year_split[0].split("(")[0]
        # assume no death year
        death_year = None
        if len(birth_and_death_year_split[1]) > 1:  # meaning not just a ")"
            death_year = birth_and_death_year_split[1].split(")")[0]
        return Author(surname, given_name, birth_year, death_year)

    @staticmethod
    def process_list_of_authors(author_info):
        """Return a list of Author objects parsed from author_info, which
        may contain several authors joined by ' and '."""
        authors = []
        if author_info.find(' and ') == -1:
            authors.append(BooksDataSource.process_author(author_info))
        else:
            for author_string in author_info.split(" and "):
                authors.append(BooksDataSource.process_author(author_string))
        return authors

    def authors(self, search_text=None):
        ''' Returns a list of all the Author objects in this data source whose names contain
            (case-insensitively) the search text. If search_text is None, then this method
            returns all of the Author objects. In either case, the returned list is sorted
            by surname, breaking ties using given name (e.g. Ann Brontë comes before Charlotte Brontë).
        '''
        if search_text is None:
            selected_authors = self.list_of_authors
        else:
            search = search_text.lower()
            selected_authors = [
                author for author in self.list_of_authors
                if search in author.surname.lower() or search in author.given_name.lower()
            ]
        return sorted(selected_authors, key=lambda x: (x.surname, x.given_name))

    def books(self, search_text=None, sort_by='title'):
        ''' Returns a list of all the Book objects in this data source whose
            titles contain (case-insensitively) search_text. If search_text is None,
            then this method returns all of the books objects.
            The list of books is sorted in an order depending on the sort_by parameter:
                'year' -- sorts by publication_year, breaking ties with (case-insenstive) title
                'title' -- sorts by (case-insensitive) title, breaking ties with publication_year
                default -- same as 'title' (that is, if sort_by is anything other than 'year'
                           or 'title', just do the same thing you would do for 'title')
        '''
        if search_text is None:
            selected_books = self.list_of_books
        else:
            search = search_text.lower()
            selected_books = [book for book in self.list_of_books
                              if search in book.title.lower()]
        # Fix: the old condition (sort_by != 'year' or sort_by == 'title')
        # only worked by accident, and the sort keys ignored the documented
        # case-insensitivity and tie-breaking.
        if sort_by == 'year':
            return sorted(selected_books,
                          key=lambda book: (book.publication_year, book.title.lower()))
        return sorted(selected_books,
                      key=lambda book: (book.title.lower(), book.publication_year))

    def books_between_years(self, start_year=None, end_year=None):
        ''' Returns a list of all the Book objects in this data source whose publication
            years are between start_year and end_year, inclusive. The list is sorted
            by publication year, breaking ties by title (e.g. Neverwhere 1996 should
            come before Thief of Time 1996).
            If start_year is None, then any book published before or during end_year
            should be included. If end_year is None, then any book published after or
            during start_year should be included. If both are None, then all books
            should be included.
        '''
        start_year_none = start_year is None or start_year == 'None'
        end_year_none = end_year is None or end_year == 'None'
        # Validate that any provided bound is integer-like before filtering.
        # (The old code had an unreachable quit() after this raise.)
        try:
            if not start_year_none:
                int(start_year)
            if not end_year_none:
                int(end_year)
        except ValueError:
            raise ValueError('sorry, invalid input') from None
        if start_year_none and end_year_none:
            selected_books = self.list_of_books
        elif start_year_none:
            selected_books = [book for book in self.list_of_books
                              if int(book.publication_year) <= int(end_year)]
        elif end_year_none:
            selected_books = [book for book in self.list_of_books
                              if int(book.publication_year) >= int(start_year)]
        else:
            selected_books = [book for book in self.list_of_books
                              if int(start_year) <= int(book.publication_year) <= int(end_year)]
        return sorted(selected_books, key=lambda book: (book.publication_year, book.title))

    def books_by_author(self, author):
        """Return (unsorted) every book whose author list contains `author`."""
        return [book for book in self.list_of_books if author in book.authors]
|
nilq/baby-python
|
python
|
/home/wai/anaconda3/lib/python3.6/copy.py
|
nilq/baby-python
|
python
|
import pytest
from ergaster import add

# (x, y, expected) cases for add().
ADDITION_CASES = [
    (1, 2, 3),
    (2, 2, 4),
    (3, 2, 5),
]


@pytest.mark.parametrize("x, y, res", ADDITION_CASES)
def test_add(x, y, res):
    """add() returns the arithmetic sum of its two arguments."""
    assert add(x, y) == res
|
nilq/baby-python
|
python
|
def grafoSimples(matriz):
    """Check whether the adjacency matrix `matriz` describes a simple graph.

    Appends one message line for every self-loop (diagonal entry equal to 2)
    and for every multiple edge (off-diagonal entry greater than 1), followed
    by a final verdict line. Returns the accumulated report text.
    """
    mensagens = []
    tem_laco = False
    tem_multipla = False
    for i, linha_atual in enumerate(matriz):
        for j, valor in enumerate(linha_atual):
            if i == j and valor == 2:
                mensagens.append("Há laço no vertice %s\n" % (i + 1))
                tem_laco = True
            if i != j and valor > 1:
                # Reported twice for undirected graphs: once per (i, j) order.
                mensagens.append("Há aresta multiplas nos vertices %s %s\n" % ((i + 1), (j + 1)))
                tem_multipla = True
    if not tem_laco and not tem_multipla:
        mensagens.append("É um grafos simples, pois não possui arestas multiplas e laços\n")
    else:
        mensagens.append("Não é um grafos simples, pois possui arestas multiplas e laços\n")
    return "".join(mensagens)
|
nilq/baby-python
|
python
|
# coding=UTF-8
from django.db import models
from django.utils.translation import ugettext, ugettext_lazy as _
from django.utils.encoding import python_2_unicode_compatible
from product.models import TaxClass
from l10n.models import AdminArea, Country
#from satchmo_store.shop.models import Order
#from satchmo_store.shop.signals import order_success
#from tax import Processor
from datetime import date as _date
# Prefer the stdlib Decimal; fall back to Django's bundled copy on very old
# Python/Django combinations where it is missing.
try:
    from decimal import Decimal
except ImportError:
    # Fix: was a bare `except:`, which would also swallow unrelated errors
    # (KeyboardInterrupt, SystemExit, etc.).
    from django.utils._decimal import Decimal
@python_2_unicode_compatible
class Taxable(models.Model):
    """
    Map that says what items are taxable in a jurisdiction.
    To use properly, assign products to a meaningful TaxClass, such as 'Shipping',
    'Food', 'Default'.  Then create rules for the jurisdictions where you are
    required to collect tax.  If for example, you are taxing objects in two states
    and 'Food' is taxable in one and not the other, but shipping is the other
    way around, you would need to create the following entries:
    food = TaxClass(...)
    default = TaxClass(...)
    shipping = TaxClass(...)
    one_state = AdminArea(...)
    two_state = AdminArea(...)
    usa = Country(...)
    Taxable(taxClass=default, isTaxable=True, taxZone=one_state, taxCountry=usa)
    Taxable(taxClass=food, isTaxable=False, useFood=True, taxZone=one_state, taxCountry=usa)
    Taxable(taxClass=shipping, isTaxable=True, taxZone=one_state, taxCountry=usa)
    Taxable(taxClass=default, isTaxable=True, taxZone=two_state, taxCountry=usa)
    Taxable(taxClass=food, isTaxable=True, useFood=True, taxZone=two_state, taxCountry=usa)
    Taxable(taxClass=shipping, isTaxable=False, taxZone=two_state, taxCountry=usa)
    Laws vary drastically form state to state, so please make sure to make needed
    TaxClasses for all objects that vary in taxing jurisdictions to which you
    must submit.
    If you do not at least create a 'Default' entry for a state, then you will
    not be collecting any taxes for that state.  Only create entires for states
    where you are obligated to collect and report taxes.
    SST defines food rates and interstate vs. intrastate rates.  You may override
    these, otherwise taxes will be charged at the non-food, intrastate rate by default.
    WARNING: If a product is taxable in ANY jurisdiction, it must be set taxable
    in the product.  You disable it per-jurisdiction by disabling it here.  You
    cannot enable it here if it is disabled on the product itself.
    """
    # Product tax class this rule applies to (e.g. 'Default', 'Food', 'Shipping').
    taxClass = models.ForeignKey(TaxClass, verbose_name=_('Tax Class'), on_delete=models.CASCADE)
    # State/province the rule covers; when set, it also determines the country.
    taxZone = models.ForeignKey(AdminArea, blank=True, null=True,
        verbose_name=_('Tax Zone'), on_delete=models.SET_NULL)
    # Country-level rule, used when no zone is given.
    taxCountry = models.ForeignKey(Country, blank=True, null=True,
        verbose_name=_('Tax Country'), on_delete=models.SET_NULL)
    isTaxable = models.BooleanField(verbose_name=_('Taxable?'), default=True, )
    # SST rate selection overrides (see class docstring).
    useIntrastate = models.BooleanField(verbose_name=_('Use Intrastate rate instead of Interstate?'),
        default=True)
    useFood = models.BooleanField(verbose_name=_('Use food/drug rate instead of general?'),
        default=False)
    def _country(self):
        """Name of the governing country: the zone's country when a zone is
        set, otherwise the explicit tax country."""
        if self.taxZone:
            return self.taxZone.country.name
        else:
            return self.taxCountry.name
    country = property(_country)
    #def _display_percentage(self):
    #    return "%#2.2f%%" % (100*self.percentage)
    #_display_percentage.short_description = _('Percentage')
    #display_percentage = property(_display_percentage)
    def __str__(self):
        return "%s - %s = %s" % (self.taxClass,
            self.taxZone and self.taxZone or self.taxCountry,
            self.isTaxable)
    class Meta:
        verbose_name = _("Taxable Class")
        verbose_name_plural = _("Taxable Classes")
# SST jurisdiction-type codes (see the Rates and Boundaries spec referenced
# below); used by TaxRate.jurisdictionType and TaxBoundry's FIPS type fields.
JURISDICTION_CHOICES = (
    (0, 'County'),
    (1, 'City'),
    (2, 'Town'),
    (3, 'Village'),
    (4, 'Borough'),
    (5, 'Township'),
    (9, 'Other Municipality'),
    (10, 'School District'),
    (11, 'Junior Colleges'),
    (19, 'Other Schools'),
    (20, 'Water Control'),
    (21, 'Utility District'),
    (22, 'Sanitation'),
    (23, 'Water or Sewer District'),
    (24, 'Reclamation District'),
    (25, 'Fire or Police'),
    (26, 'Roads or Bridges'),
    (27, 'Hospitals'),
    (29, 'Other Municipal Services'),
    (40, 'Township and County'),
    (41, 'City and School'),
    (42, 'County collected by Other Taxing Authority'),
    (43, 'State and County'),
    (44, 'Central Collection Taxing Authority'),
    (45, 'State Taxing Authority'),
    (49, 'Other Combination Collection'),
    (50, 'Bond Authority'),
    (51, 'Annual County Bond Authority'),
    (52, 'Semi-annual County Bond Authority'),
    (53, 'Annual City Bond Authority'),
    (54, 'Semi-annual City Bond Authority'),
    (59, 'Other Bond Authority'),
    (61, 'Assessment District'),
    (62, 'Homeowner’s Association'),
    (63, 'Special District'),
    (69, 'Other Special Districts'),
    (70, 'Central Appraisal Taxing Authority'),
    (71, 'Unsecured County Taxes'),
    (72, 'Mobile Home Authority'),
    (79, 'Other Special Applications'),
)
@python_2_unicode_compatible
class TaxRate(models.Model):
    """
    Records for tax rates in the default SST format as defined at:
    http://www.streamlinedsalestax.org/Technology/RatesandBoundariesClean082605.pdf
    """
    # FIPS numeric code of the state this rate belongs to.
    state = models.IntegerField(verbose_name=_('FIPS State Code'), db_index=True)
    jurisdictionType = models.IntegerField(choices=JURISDICTION_CHOICES, verbose_name=_('Type'))
    jurisdictionFipsCode = models.CharField(max_length=5,
        verbose_name=_('FIPS Code'), db_index=True)
    # The four SST rates: general vs food/drug, crossed with intrastate vs
    # interstate; rate() selects among them.
    generalRateIntrastate = models.DecimalField(max_digits=8, decimal_places=7,
        verbose_name=_('General Tax Rate - Intrastate'))
    generalRateInterstate = models.DecimalField(max_digits=8, decimal_places=7,
        verbose_name=_('General Tax Rate - Interstate'))
    foodRateIntrastate = models.DecimalField(max_digits=8, decimal_places=7,
        verbose_name=_('Food/Drug Tax Rate - Intrastate'))
    foodRateInterstate = models.DecimalField(max_digits=8, decimal_places=7,
        verbose_name=_('Food/Drug Tax Rate - Interstate'))
    # Validity window during which this rate record is effective.
    startDate = models.DateField(verbose_name=_('Effective Start Date'))
    endDate = models.DateField(verbose_name=_('Effective End Date'))
    class Meta:
        verbose_name = _("Tax Rate")
        verbose_name_plural = _("Tax Rates")
    def __str__(self):
        return 'State %d: Jurisdiction: %s(%s)' % (
            self.state,
            self.jurisdictionFipsCode,
            self.get_jurisdictionType_display(),
        )
    def rate(self, intrastate=False, food=False):
        """Return the Decimal rate for the requested combination of
        intrastate/interstate and food-drug/general."""
        if intrastate:
            if food:
                return self.foodRateIntrastate
            else:
                return self.generalRateIntrastate
        else:
            if food:
                return self.foodRateInterstate
            else:
                return self.generalRateInterstate
# SST boundary record types: Z = 5-digit zip, 4 = zip+4, A = street address.
TAX_BOUNDRY_CHOICES = (
    ('Z', 'Zip-5 Record'),
    ('4', 'Zip+4 Record'),
    ('A', 'Address Record'),
)
# Which side(s) of a street an address range covers.
ODD_EVEN_CHOICES = (
    ('O', 'Odd'),
    ('E', 'Even'),
    ('B', 'Both'),
)
@python_2_unicode_compatible
class TaxBoundry(models.Model):
"""
Records for tax boundries in the default SST format as defined at:
http://www.streamlinedsalestax.org/Technology/RatesandBoundariesClean082605.pdf
"""
recordType = models.CharField(max_length=1, choices=TAX_BOUNDRY_CHOICES,
verbose_name=_('Boundry Type'))
startDate = models.DateField(verbose_name=_('Effective Start Date'))
endDate = models.DateField(verbose_name=_('Effective End Date'))
lowAddress = models.IntegerField(blank=True, null=True,
verbose_name=_('Low Address Range'))
highAddress = models.IntegerField(blank=True, null=True,
verbose_name=_('High Address Range'))
oddEven = models.CharField(max_length=1, blank=True, null=True, choices=ODD_EVEN_CHOICES,
verbose_name=_('Odd / Even Range Indicator'))
streetPreDirection = models.CharField(max_length=2, blank=True, null=True,
verbose_name=_('State Pre-Directional Abbr.'))
streetName = models.CharField(max_length=20, blank=True, null=True,
verbose_name=_('Street Name'))
streetSuffix = models.CharField(max_length=4, blank=True, null=True,
verbose_name=_('Street Suffix Abbr.'))
streetPostDirection = models.CharField(max_length=2, blank=True, null=True,
verbose_name=_('Street Post Directional'))
addressSecondaryAbbr = models.CharField(max_length=4, blank=True, null=True,
verbose_name=_('Address Secondary - Abbr.'))
addressSecondaryLow = models.IntegerField(blank=True, null=True,
verbose_name=_('Address Secondary - Low'))
addressSecondaryHigh = models.IntegerField(blank=True, null=True,
verbose_name=_('Address Secondary - High'))
addressSecondaryOddEven = models.CharField(max_length=1, blank=True, null=True,
choices=ODD_EVEN_CHOICES, verbose_name=_('Address Secondary - Odd/Even'))
cityName = models.CharField(max_length=28, blank=True, null=True,
verbose_name=_('City Name'))
zipCode = models.IntegerField(blank=True, null=True,
verbose_name=_('Zip Code'))
plus4 = models.IntegerField(blank=True, null=True,
verbose_name=_('Zip Code - Plus 4'))
zipCodeLow = models.IntegerField(blank=True, null=True,
verbose_name=_('Zip Code - Low'), db_index=True)
zipExtensionLow = models.IntegerField(blank=True, null=True,
verbose_name=_('Zip Code Extension - Low'), db_index=True)
zipCodeHigh = models.IntegerField(blank=True, null=True,
verbose_name=_('Zip Code - High'), db_index=True)
zipExtensionHigh = models.IntegerField(blank=True, null=True,
verbose_name=_('Zip Code Extension - High'), db_index=True)
serCode = models.CharField(max_length=5, verbose_name=_('Composite SER Code'), blank=True, null=True)
fipsStateCode = models.CharField(max_length=2, blank=True, null=True,
verbose_name=_('FIPS State Code'))
fipsStateIndicator = models.CharField(max_length=2, blank=True, null=True,
verbose_name=_('FIPS State Indicator'))
fipsCountyCode = models.CharField(max_length=3, blank=True, null=True,
verbose_name=_('FIPS County Code'))
fipsPlaceCode = models.CharField(max_length=5, blank=True, null=True,
verbose_name=_('FIPS Place Code'))
fipsPlaceType = models.CharField(max_length=2, blank=True, null=True,
verbose_name=_('FIPS Place Type'), choices=JURISDICTION_CHOICES)
special_1_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 1 code'), blank=True, null=True)
special_1_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 1 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_2_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 2 code'), blank=True, null=True)
special_2_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 2 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_3_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 3 code'), blank=True, null=True)
special_3_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 3 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_4_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 4 code'), blank=True, null=True)
special_4_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 4 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_5_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 5 code'), blank=True, null=True)
special_5_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 5 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_6_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 6 code'), blank=True, null=True)
special_6_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 6 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_7_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 7 code'), blank=True, null=True)
special_7_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 7 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_8_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 8 code'), blank=True, null=True)
special_8_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 8 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_9_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 9 code'), blank=True, null=True)
special_9_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 9 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_10_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 10 code'), blank=True, null=True)
special_10_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 10 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_11_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 11 code'), blank=True, null=True)
special_11_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 11 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_12_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 12 code'), blank=True, null=True)
special_12_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 12 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_13_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 13 code'), blank=True, null=True)
special_13_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 13 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_14_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 14 code'), blank=True, null=True)
special_14_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 14 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_15_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 15 code'), blank=True, null=True)
special_15_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 15 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_16_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 16 code'), blank=True, null=True)
special_16_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 16 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_17_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 17 code'), blank=True, null=True)
special_17_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 17 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_18_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 18 code'), blank=True, null=True)
special_18_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 18 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_19_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 19 code'), blank=True, null=True)
special_19_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 19 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
special_20_code = models.CharField(max_length=5, verbose_name=_('FIPS Special 20 code'), blank=True, null=True)
special_20_type = models.CharField(max_length=2, verbose_name=_('FIPS Special 20 type'), blank=True, null=True, choices=JURISDICTION_CHOICES)
# Fill in this property to use not-today for looking up the tax rates.
date = None
# Set these and we'll use non-default rate.
useIntrastate = None
useFood = None
def get_zip_range(self):
    """Return a human-readable "low -> high" ZIP range for this boundary.

    Includes the +4 extensions when ``zipExtensionLow`` is set.
    """
    if not self.zipExtensionLow:
        return '%05d -> %05d' % (self.zipCodeLow, self.zipCodeHigh)
    return '%05d-%04d -> %05d-%04d' % (
        self.zipCodeLow, self.zipExtensionLow, self.zipCodeHigh, self.zipExtensionHigh
    )
zip_range = property(get_zip_range)
def rates(self, date=None):
    """Collect the TaxRate rows active on *date* for every FIPS code set
    on this boundary.

    *date* defaults to today. Empty/None FIPS slots are skipped.
    """
    if not date:
        date = _date.today()
    state = self.fipsStateCode
    # Every jurisdiction code that may carry its own tax rate.
    fips_codes = (
        self.fipsStateIndicator, self.fipsCountyCode, self.fipsPlaceCode,
        self.special_1_code, self.special_2_code, self.special_3_code,
        self.special_4_code, self.special_5_code, self.special_6_code,
        self.special_7_code, self.special_8_code, self.special_9_code,
        self.special_10_code, self.special_11_code, self.special_12_code,
        self.special_13_code, self.special_14_code, self.special_15_code,
        self.special_16_code, self.special_17_code, self.special_18_code,
        self.special_19_code, self.special_20_code,
    )
    found = []
    for fips in fips_codes:
        if not fips:
            continue
        found.append(TaxRate.objects.get(
            state=state,
            jurisdictionFipsCode=fips,
            startDate__lte=date,
            endDate__gte=date,
        ))
    return found
def get_percentage(self, date=None):
    """
    Emulate being a tax rate by returning the total percentage to charge:
    the sum of every applicable jurisdiction's rate on *date*.
    """
    total = Decimal('0.00')
    for tax_rate in self.rates(date):
        total += tax_rate.rate(intrastate=self.useIntrastate, food=self.useFood)
    return total
percentage = property(get_percentage)
def __str__(self):
    """Debug representation keyed off recordType ('Z', '4', or other)."""
    record_type = self.recordType
    if record_type == '4':
        return 'TaxBoundry(4): %i-%i -- %i-%i' % (
            self.zipCodeLow, self.zipExtensionLow,
            self.zipCodeHigh, self.zipExtensionHigh,
        )
    if record_type == 'Z':
        return 'TaxBoundry(Z): %i -- %i' % (
            self.zipCodeLow, self.zipCodeHigh
        )
    return 'TaxBoundry(A)'
@classmethod
def lookup(cls, zip, ext=None, date=None):
    """Handy function to take a zip code and return the appropriate rates
    for it.

    Tries a ZIP+4 boundary ('4' record) first when *ext* is given, then
    falls back to a plain 5-digit ZIP boundary ('Z' record). Returns the
    matching boundary instance or None. *date* defaults to today and must
    fall within the boundary's start/end dates.
    """
    if not date:
        date = _date.today()
    # Try for a ZIP+4 lookup first if we can.
    if ext:
        try:
            return cls.objects.get(
                recordType='4',
                zipCodeLow__lte=zip,
                zipCodeHigh__gte=zip,
                zipExtensionLow__lte=ext,
                zipExtensionHigh__gte=ext,
                startDate__lte=date,
                endDate__gte=date,
            )
        except cls.DoesNotExist:
            # Not all zip+4 have entries. That's OK; fall back to plain ZIP.
            pass
    # Try for just the ZIP then.
    try:
        return cls.objects.get(
            recordType='Z',
            zipCodeLow__lte=zip,
            zipCodeHigh__gte=zip,
            startDate__lte=date,
            endDate__gte=date,
        )
    except cls.DoesNotExist:
        return None
class Meta:
    # Admin-facing display names.
    # BUG FIX: corrected the misspellings "Boundry"/"Boundries" ->
    # "Boundary"/"Boundaries" (display strings only; no schema impact).
    verbose_name = _("Tax Boundary")
    verbose_name_plural = _("Tax Boundaries")
#class TaxCollected(models.Model):
# order = models.ForeignKey(Order, verbose_name=_("Order"))
# taxRate = models.ForeignKey(TaxRate, verbose_name=_('Tax Rate'))
# useIntrastate = models.BooleanField(verbose_name=_('Use Intrastate rate instead of Interstate?'),
# default=True)
# useFood = models.BooleanField(verbose_name=_('Use food/drug rate instead of general?'),
# default=False)
#
#def save_taxes_collected(order, **kwargs):
# processor = Processor(order=order)
# tb = processor.get_boundry()
#
#order_success.connect(save_taxes_collected)
from . import config
|
nilq/baby-python
|
python
|
import os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import colorcet as cc
import datashader as ds
import datashader.utils as utils
import datashader.transfer_functions as tf
sns.set(context="paper", style="white")

# --- configuration -----------------------------------------------------
data_dir = os.path.abspath("./data")
data_fname = os.path.join(data_dir, "cellgraph_embedding.csv")
save_dir = os.path.abspath("./plots")
save = True
fmt = "png"
dpi = 300

# 10-color spectral-style palette used as the categorical color key.
pal = [
    "#9e0142",
    "#d8434e",
    "#f67a49",
    "#fdbf6f",
    "#feeda1",
    "#f1f9a9",
    "#bfe5a0",
    "#74c7a5",
    "#378ebb",
    "#5e4fa2",
]
color_key = pal

print(f"Reading data from {data_fname}")
df = pd.read_csv(data_fname)
# NOTE(review): n_components is filled with random placeholder labels (1-4)
# rather than values from the CSV — confirm this is intentional.
df["n_components"] = np.random.randint(1, 5, size=df.shape[0])
df["n_components"] = df["n_components"].astype("category")

print("Plotting with Datashader")
cvs = ds.Canvas(plot_width=400, plot_height=400)
agg = cvs.points(df, "umap_x", "umap_y", ds.count_cat("n_components"))
img = tf.shade(agg, color_key=color_key, how="linear")

if save:
    # Save UMAP as image (export_image appends the .png extension itself).
    imname = "cellgraph_embedding_image"
    impath = os.path.join(save_dir, imname)
    utils.export_image(img, filename=impath, background="white")
    # Make plot from image
    fname = "cellgraph_embedding_plot"
    fpath = os.path.join(save_dir, fname + "." + fmt)
    image = plt.imread(impath + ".png")
    fig, ax = plt.subplots(figsize=(6, 6))
    plt.imshow(image)
    plt.setp(ax, xticks=[], yticks=[])
    # BUG FIX: raw string avoids the invalid "\m" escape-sequence warning
    # (the rendered bytes are unchanged, since "\m" is not a recognized escape).
    plt.title(r"UMAP of tissue topologies ($5\mathrm{x}5$)", fontsize=12)
    print(f"Saving figure to {fpath}")
    plt.savefig(fpath, format=fmt, dpi=dpi)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
from .base_settings import *
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
# Development settings: accept requests from any host.
ALLOWED_HOSTS = ['*']
# Local SQLite database next to the project; BASE_DIR and os come from the
# star import of base_settings above.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# NOTE(review): machine-specific Windows path to the Apache FOP launcher —
# must be adjusted per deployment.
FOP_EXECUTABLE = "C:/Users/ria/Downloads/fop-2.3/fop/fop.cmd"
# Dashboard class django-grappelli uses for the admin index page.
GRAPPELLI_INDEX_DASHBOARD = 'projectsettings.dashboard.CustomIndexDashboard'
# Require authentication for the koalixcrm REST API.
KOALIXCRM_REST_API_AUTH = True
|
nilq/baby-python
|
python
|
import json
from dagster import ModeDefinition, execute_solid, solid
from dagster_slack import slack_resource
from mock import patch
@patch("slack.web.base_client.BaseClient._perform_urllib_http_request")
def test_slack_resource(mock_urllib_http_request):
    """Verify the slack resource wires a working client into a solid."""

    @solid(required_resource_keys={"slack"})
    def slack_solid(context):
        # The resource must be attached to the solid's context.
        assert context.resources.slack
        response = {"ok": True}
        # Stub the HTTP layer so no real Slack call is made.
        mock_urllib_http_request.return_value = {
            "status": 200,
            "body": json.dumps(response),
            "headers": "",
        }
        context.resources.slack.chat_postMessage(channel="#random", text=":wave: hey there!")
        assert mock_urllib_http_request.called

    run_config = {
        "resources": {"slack": {"config": {"token": "xoxp-1234123412341234-12341234-1234"}}}
    }
    result = execute_solid(
        slack_solid,
        run_config=run_config,
        mode_def=ModeDefinition(resource_defs={"slack": slack_resource}),
    )
    assert result.success
|
nilq/baby-python
|
python
|
# Copyright (c) 2011-2020 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Settings UI functionality related to the remote app."""
from __future__ import annotations
import ba
class RemoteAppSettingsWindow(ba.Window):
    """Window showing info/settings related to the remote app."""

    def __init__(self) -> None:
        from ba.internal import get_remote_app_name

        # Resource prefix for this window's translated strings.
        self._r = 'connectMobileDevicesWindow'
        width = 700
        height = 390
        spacing = 40
        super().__init__(root_widget=ba.containerwidget(
            size=(width, height),
            transition='in_right',
            # Scale the whole window up on smaller interface modes.
            scale=(1.85 if ba.app.small_ui else 1.3 if ba.app.med_ui else 1.0),
            stack_offset=(-10, 0) if ba.app.small_ui else (0, 0)))
        # Back button (restyled to the compact arrow glyph below).
        btn = ba.buttonwidget(parent=self._root_widget,
                              position=(40, height - 67),
                              size=(140, 65),
                              scale=0.8,
                              label=ba.Lstr(resource='backText'),
                              button_type='back',
                              text_scale=1.1,
                              autoselect=True,
                              on_activate_call=self._back)
        ba.containerwidget(edit=self._root_widget, cancel_button=btn)
        # Window title.
        ba.textwidget(parent=self._root_widget,
                      position=(width * 0.5, height - 42),
                      size=(0, 0),
                      text=ba.Lstr(resource=self._r + '.titleText'),
                      maxwidth=370,
                      color=ba.app.title_color,
                      scale=0.8,
                      h_align='center',
                      v_align='center')
        # Swap the wide back button for the small arrow variant.
        ba.buttonwidget(edit=btn,
                        button_type='backSmall',
                        size=(60, 60),
                        label=ba.charstr(ba.SpecialChar.BACK))
        # v tracks the current vertical layout position, top to bottom.
        v = height - 70.0
        v -= spacing * 1.2
        # Explanation text, substituting the game and remote-app names.
        ba.textwidget(parent=self._root_widget,
                      position=(15, v - 26),
                      size=(width - 30, 30),
                      maxwidth=width * 0.95,
                      color=(0.7, 0.9, 0.7, 1.0),
                      scale=0.8,
                      text=ba.Lstr(resource=self._r + '.explanationText',
                                   subs=[('${APP_NAME}',
                                          ba.Lstr(resource='titleText')),
                                         ('${REMOTE_APP_NAME}',
                                          get_remote_app_name())]),
                      max_height=100,
                      h_align='center',
                      v_align='center')
        v -= 90
        # hmm the itms:// version doesnt bounce through safari but is kinda
        # apple-specific-ish
        # Update: now we just show link to the remote webpage.
        ba.textwidget(parent=self._root_widget,
                      position=(width * 0.5, v + 5),
                      size=(0, 0),
                      color=(0.7, 0.9, 0.7, 1.0),
                      scale=1.4,
                      text='bombsquadgame.com/remote',
                      maxwidth=width * 0.95,
                      max_height=60,
                      h_align='center',
                      v_align='center')
        v -= 30
        # "Best results" tip text.
        ba.textwidget(parent=self._root_widget,
                      position=(width * 0.5, v - 35),
                      size=(0, 0),
                      color=(0.7, 0.9, 0.7, 0.8),
                      scale=0.65,
                      text=ba.Lstr(resource=self._r + '.bestResultsText'),
                      maxwidth=width * 0.95,
                      max_height=height * 0.19,
                      h_align='center',
                      v_align='center')
        # Checkbox is presented inverted: checked means connections disabled,
        # while the config stores the positive 'Enable Remote App' flag.
        ba.checkboxwidget(
            parent=self._root_widget,
            position=(width * 0.5 - 150, v - 116),
            size=(300, 30),
            maxwidth=300,
            scale=0.8,
            value=not ba.app.config.resolve('Enable Remote App'),
            autoselect=True,
            text=ba.Lstr(resource='disableRemoteAppConnectionsText'),
            on_value_change_call=self._on_check_changed)

    def _on_check_changed(self, value: bool) -> None:
        """Persist the (inverted) checkbox state into the app config."""
        cfg = ba.app.config
        # The checkbox reads "disable", so store the negation.
        cfg['Enable Remote App'] = not value
        cfg.apply_and_commit()

    def _back(self) -> None:
        """Transition back to the controls settings window."""
        from bastd.ui.settings import controls
        ba.containerwidget(edit=self._root_widget, transition='out_right')
        ba.app.main_menu_window = (controls.ControlsSettingsWindow(
            transition='in_left').get_root_widget())
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
from avro.io import BinaryEncoder, BinaryDecoder
from avro.io import DatumWriter, DatumReader
import avro.schema
from io import BytesIO
import argo_ams_library
from argo_ams_library import ArgoMessagingService
import argparse
import base64
import logging
import logging.handlers
import sys
import json
import time
# set up logging
LOGGER = logging.getLogger("AMS republish script")
def extract_messages(ams, ingest_sub, bulk_size, schema, verify):
    """Pull up to *bulk_size* messages from *ingest_sub* and avro-decode them.

    Returns a tuple ``(decoded_msgs, last_msg_id)`` where decoded_msgs is a
    list of (ack_id, avro_record) pairs and last_msg_id is the ack id of the
    last consumed message, or "-1" when nothing was consumed. Messages that
    fail to decode (or lack a "tags" field) are logged and skipped.
    """
    # consume metric data messages
    consumed_msgs = ams.pull_sub(ingest_sub, num=bulk_size, return_immediately=True, verify=verify)
    # initialise the avro reader
    avro_reader = DatumReader(writers_schema=schema)
    # all the decoded messages that will be returned
    decoded_msgs = []
    for msg in consumed_msgs:
        try:
            # decode the data field again using the provided avro schema
            msg_bytes = BytesIO(msg[1].get_data())
            msg_decoder = BinaryDecoder(msg_bytes)
            avro_msg = avro_reader.read(msg_decoder)
            # check that the tags field is present
            if avro_msg["tags"] is None:
                raise KeyError("tags field is empty")
            decoded_msgs.append((msg[0], avro_msg))
        except Exception as e:
            # BUG FIX: Exception.message does not exist in Python 3; format
            # the exception itself instead of the removed .message attribute.
            LOGGER.warning("Could not extract data from ams message {}, {}".format(msg[0], e))
    last_msg_id = "-1"
    if len(consumed_msgs) > 0:
        # Ack id of the last consumed message (no need to mutate the list).
        last_msg_id = consumed_msgs[-1][0]
    return decoded_msgs, last_msg_id
def filter_messages(consumed_msgs, sites):
    """Keep only messages whose endpoint_group tag is one of *sites*.

    Messages with no endpoint_group tag are logged and dropped.
    """
    kept = []
    for msg in consumed_msgs:
        tags = msg[1]["tags"]
        if "endpoint_group" not in tags:
            LOGGER.warning("Message {} has no endpoint_group".format(msg[0]))
        elif tags["endpoint_group"] in sites:
            kept.append(msg)
    return kept
def republish_messages(filtered_msgs, ams, verify):
    """Publish each filtered message to the AMS topic named after its
    endpoint_group tag, wrapping selected fields into a header dict."""
    # Fields copied from the avro record into the outgoing message header.
    fields = ["status", "service", "timestamp", "metric", "hostname", "monitoring_host"]
    for msg in filtered_msgs:
        msg_id, record = msg
        topic = record["tags"]["endpoint_group"]
        header = dict()
        for fl in fields:
            if record[fl] is None:
                LOGGER.warning("Message {} contains empty field {}".format(msg_id, fl))
                header[fl] = ""
            else:
                header[fl] = record[fl]
        data = dict()
        if record["summary"] is None:
            LOGGER.warning("Message {} contains no summary field".format(msg_id))
            data["body"] = ""
        else:
            data["body"] = record["summary"]
        data["header"] = header
        data["text"] = "true"
        ams_msg = argo_ams_library.AmsMessage(data=json.dumps(data))
        ams.publish(topic, ams_msg, verify=verify)
def main(args):
    """Run the republish loop: consume, filter by site, republish, ack.

    Loops forever, sleeping config["interval"] seconds between idle or
    failed iterations. *args* is the parsed argparse namespace
    (ConfigPath, verify, debug).
    """
    # Default values, overridden (not replaced) by the config file.
    config = dict()
    config["bulk_size"] = 100
    config["interval"] = 10
    with open(args.ConfigPath, 'r') as f:
        # BUG FIX: merge the file into the defaults; the old code rebound
        # `config` to the parsed JSON, silently discarding the defaults.
        config.update(json.load(f))
    # stream(console) handler
    console_handler = logging.StreamHandler()
    console_handler.setFormatter(logging.Formatter('%(asctime)s %(name)s[%(process)d]: %(levelname)s %(message)s'))
    LOGGER.addHandler(console_handler)
    LOGGER.setLevel(logging.DEBUG if args.debug else logging.INFO)
    # sys log handler
    syslog_handler = logging.handlers.SysLogHandler(config["syslog_socket"])
    syslog_handler.setFormatter(logging.Formatter('%(asctime)s %(name)s[%(process)d]: %(levelname)s %(message)s'))
    # BUG FIX: a stray second setLevel(INFO) call used to override the
    # DEBUG level set when --debug was requested.
    syslog_handler.setLevel(logging.DEBUG if args.debug else logging.INFO)
    LOGGER.addHandler(syslog_handler)
    # start the process of republishing messages
    ams_endpoint = "{}:{}".format(config["ams_host"], config["ams_port"])
    ams = ArgoMessagingService(endpoint=ams_endpoint, token=config["ams_token"], project=config["ams_project"])
    # BUG FIX: close the schema file instead of leaking the handle.
    with open(config["avro_schema"], "rb") as schema_file:
        schema = avro.schema.parse(schema_file.read())
    while True:
        start_time = time.time()
        try:
            consumed_msgs, last_msg_id = extract_messages(ams, config["ingest_subscription"], config["bulk_size"], schema, args.verify)
            if last_msg_id == "-1":
                LOGGER.info("No new messages")
                time.sleep(config["interval"])
                continue
            LOGGER.debug("Consumed messages \n {}".format(consumed_msgs))
            filtered_msgs = filter_messages(consumed_msgs, config["sites"])
            LOGGER.debug("Filtered messages \n {}".format(filtered_msgs))
            republish_messages(filtered_msgs, ams, args.verify)
            # make sure that the acknowledgment happens
            try:
                ams.ack_sub(config["ingest_subscription"], [last_msg_id], verify=args.verify)
            except Exception as e:
                # BUG FIX (here and below): e.message was removed in
                # Python 3; format the exception object itself.
                LOGGER.critical("Retrying to acknowledge message {} after error {}".format(last_msg_id, e))
                while True:
                    try:
                        # consume again in order to refresh the TTL
                        ams.pull_sub(config["ingest_subscription"], config["bulk_size"], True, verify=args.verify)
                        # try to ack again using the msg_id from the first consumption
                        ams.ack_sub(config["ingest_subscription"], [last_msg_id], verify=args.verify)
                        break
                    except Exception as e:
                        LOGGER.critical(
                            "Retrying to acknowledge message {} after error {}".format(last_msg_id, e))
                        time.sleep(config["interval"])
            end_time = time.time()
            LOGGER.info("Consumed {} and Republished {} messages. in {}".format(
                len(consumed_msgs),
                len(filtered_msgs),
                end_time - start_time))
        except Exception as e:
            LOGGER.critical("Could not republish, {}".format(e))
            time.sleep(config["interval"])
if __name__ == "__main__":
    # CLI entry point. Note main() loops forever, so sys.exit() is only
    # reached if main raises or the process is interrupted.
    parser = argparse.ArgumentParser(description="Republish messages for specific SITES")
    parser.add_argument(
        "-c", "--ConfigPath", type=str, help="Path for the config file", default="/etc/argo-messaging/republisher.json")
    parser.add_argument(
        "--verify", help="SSL verification for requests", dest="verify", action="store_true")
    parser.add_argument(
        "--debug", help="DEBUG mode", dest="debug", action="store_true")
    sys.exit(main(parser.parse_args()))
|
nilq/baby-python
|
python
|
from datetime import datetime
from itertools import chain
from random import randint
from django.contrib.auth.decorators import login_required
from django.contrib.formtools.wizard.views import SessionWizardView
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django.utils.decorators import method_decorator
from django.views.decorators.cache import cache_control
from django.views.generic import ListView, DetailView, RedirectView, TemplateView
from django.views.generic.edit import UpdateView, DeleteView
from registration.backends.simple.views import RegistrationView
from django.template.loader import get_template
from django.template import Context
from sysrev.forms import *
class SRRegistrationView(RegistrationView):
    """Registration flow that redirects to the home page after signup."""

    def get_success_url(self, user=None):
        # django-registration passes the newly created user; it is not
        # needed — everyone lands on the home page.
        return "/"
class ProfileView(UpdateView):
    """Let the logged-in user edit their own profile."""

    template_name = "sysrev/profile_form.html"
    form_class = ProfileForm
    model = User
    # "#" redirects back to the same page after a successful save.
    success_url = "#"

    def get_object(self, queryset=None):
        """Always edit the requesting user, ignoring any pk in the URL."""
        return self.request.user

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ProfileView, self).dispatch(*args, **kwargs)
class AboutView(TemplateView):
    """Static 'about' page."""
    template_name = "sysrev/about.html"
class ReviewListView(ListView):
    """List the user's reviews: in-progress first, then completed."""

    model = Review

    def get_context_data(self, **kwargs):
        """Attach a 'reviews' list of dicts with per-review pool stats."""
        context = super(ReviewListView, self).get_context_data(**kwargs)
        in_progress_reviews = Review.objects.order_by('-last_modified').filter(participants=self.request.user, completed=False)
        completed_reviews = Review.objects.order_by('-last_modified').filter(participants=self.request.user, completed=True)
        # Idiom fix: build the dict list directly instead of index-looping
        # over a list and replacing its elements in place.
        context["reviews"] = [
            {"review": review,
             "count": review.paper_pool_counts(),
             "percent": review.paper_pool_percentages()}
            for review in chain(in_progress_reviews, completed_reviews)
        ]
        return context

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ReviewListView, self).dispatch(*args, **kwargs)
class ReviewDetailView(DetailView):
    """Detail page for a review, restricted to its participants."""

    model = Review

    def get_context_data(self, object=None):
        """Add pool counts/percentages and the papers grouped by pool.

        Raises Http404 when the requesting user is not a participant.
        """
        # BUG FIX: forward the received object instead of hard-coding
        # object=None, which overwrote context["object"] with None.
        context = super(ReviewDetailView, self).get_context_data(object=object)
        try:
            if self.request.user in object.participants.all():
                context["count"] = object.paper_pool_counts()
                context["percent"] = object.paper_pool_percentages()
                # Papers split by pool: Abstract, Document, Final, Rejected.
                context["abstract_papers"] = Paper.objects.filter(review=object, pool="A")
                context["document_papers"] = Paper.objects.filter(review=object, pool="D")
                context["final_papers"] = Paper.objects.filter(review=object, pool="F")
                context["rejected_papers"] = Paper.objects.filter(review=object, pool="R")
            else:
                # Hide the review's existence from non-participants.
                raise Http404("Review not found")
        except Review.DoesNotExist:
            raise Http404("Review not found")
        return context

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ReviewDetailView, self).dispatch(*args, **kwargs)
class ReviewDownloadView(ReviewDetailView):
    """Serve the review detail context rendered as a plain-text download."""

    model = Review
    template_name = "sysrev/review_download.txt"

    def render_to_response(self, context, **response_kwargs):
        """Render the text template and attach it as '<slug>.txt'."""
        t = get_template(self.template_name)
        resp = HttpResponse(t.render(Context(context)), content_type="text/plain")
        resp["Content-Disposition"] = 'attachment; filename="' + context['review'].slug + '.txt' + '"'
        return resp
class ReviewUpdateView(UpdateView):
    """Edit a review's details and re-run its paper query after saving."""

    model = Review
    form_class = ReviewUpdate

    def get_success_url(self):
        """Return to the review's detail page after a successful save."""
        return Review.objects.get(pk=self.kwargs['pk']).get_absolute_url()

    def post(self, request, *args, **kwargs):
        """Save via super(), then refresh the review's papers.

        NOTE(review): perform_query() runs whenever super().post() returns a
        truthy response, which includes re-rendered invalid forms — confirm
        this is intended.
        """
        result = super(ReviewUpdateView, self).post(request, *args, **kwargs)
        if result:
            Review.objects.get(pk=kwargs['pk']).perform_query()
        return result

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ReviewUpdateView, self).dispatch(*args, **kwargs)
class ReviewDeleteView(DeleteView):
    """Confirm-and-delete a review, then return to the home/list page."""

    model = Review
    success_url = "/"

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ReviewDeleteView, self).dispatch(*args, **kwargs)
class ReviewWorkView(RedirectView):
    """Redirect to a random paper that still needs screening.

    Abstract-pool papers are served before document-pool papers; when both
    pools are empty the review is marked completed and the user is sent back
    to the review page.
    """

    permanent = False

    def get(self, request, *args, **kwargs):
        try:
            review = Review.objects.get(pk=self.kwargs['pk'])
            papers = Paper.objects.filter(review=review)
            counts = review.paper_pool_counts()
            if counts["abstract"] == 0 and counts["document"] == 0:
                # Nothing left to screen: mark the review done.
                # NOTE(review): naive datetime.now(); confirm timezone
                # handling matches the project's USE_TZ setting.
                review.completed = True
                review.date_completed = datetime.now()
                review.save()
                self.url = review.get_absolute_url()
                # BUG FIX: unpack *args instead of passing the tuple as a
                # single positional argument to RedirectView.get().
                return super(ReviewWorkView, self).get(request, *args, **kwargs)
            elif counts["abstract"] > 0:
                papers = papers.filter(pool="A")
            elif counts["document"] > 0:
                papers = papers.filter(pool="D")
            # Pick a random paper from the selected pool.
            paper = papers.all()[randint(0, papers.count()-1)]
            self.url = paper.get_absolute_url()
            return super(ReviewWorkView, self).get(request, *args, **kwargs)
        except Review.DoesNotExist:
            # BUG FIX: this branch concerns the review, not a paper.
            raise Http404("Review not found")
        except Paper.DoesNotExist:
            raise Http404("Paper not found")

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ReviewWorkView, self).dispatch(*args, **kwargs)
class PaperDetailView(DetailView):
    """Screening page for one paper inside a review.

    URL kwargs: 'pk' is the Review primary key, 'pk2' the Paper primary key.
    """

    model = Paper

    def get_context_data(self, object=None):
        """Hand-build the template context (super() is intentionally not
        called). Raises Http404 unless the user participates in the review
        and the paper exists."""
        context = {}
        try:
            review = Review.objects.get(pk=self.kwargs['pk'])
            if self.request.user in review.participants.all():
                paper = Paper.objects.get(pk=self.kwargs['pk2'])
                context["paper"] = paper
                context["review"] = review
                # Human-readable heading per paper pool code.
                titles = {'A': 'Abstract screening', 'D': 'Document screening', 'F': 'Final document', 'R': 'Rejected document'}
                context["title"] = titles[paper.pool]
                # Pools for which judge buttons / full-document embed render.
                context["to_judge"] = ('A', 'D')
                context["to_embed_full"] = ('D', 'F')
                context["count"] = review.paper_pool_counts()
                context["percent"] = review.paper_pool_percentages()
            else:
                # Hide existence from non-participants.
                raise Http404("Paper not found")
        except Review.DoesNotExist:
            raise Http404("Paper not found")
        except Paper.DoesNotExist:
            raise Http404("Paper not found")
        return context

    @method_decorator(login_required)
    @cache_control(no_cache=True, must_revalidate=True, no_store=True)
    def dispatch(self, *args, **kwargs):
        # No caching, so pool changes are visible immediately on back/refresh.
        return super(PaperDetailView, self).dispatch(*args, **kwargs)
class PaperChoiceView(RedirectView):
    """Move a paper to the chosen pool, then redirect to the next work item.

    URL kwargs: 'pk' (review), 'pk2' (paper), and 'choice' — one of
    'document', 'final', or 'rejected'.
    """

    permanent = False

    def get(self, request, *args, **kwargs):
        try:
            review = Review.objects.get(pk=self.kwargs['pk'])
            paper = Paper.objects.get(pk=self.kwargs['pk2'], review=review)
            choice = self.kwargs['choice']
            # Map the URL choice onto the paper's pool code.
            if choice == "document":
                paper.pool = "D"
            elif choice == "final":
                paper.pool = "F"
            elif choice == "rejected":
                paper.pool = "R"
            else:
                raise Http404("Invalid choice")
            paper.save()
            self.url = review.get_absolute_url() + "/work/"
            # BUG FIX: unpack *args instead of passing the tuple as a single
            # positional argument to RedirectView.get().
            return super(PaperChoiceView, self).get(request, *args, **kwargs)
        except Review.DoesNotExist:
            raise Http404("Review not found")
        except Paper.DoesNotExist:
            raise Http404("Paper not found")

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(PaperChoiceView, self).dispatch(*args, **kwargs)
class ReviewCreateWizard(SessionWizardView):
    """Two-step wizard for creating a review (details, then query)."""

    form_list = [ReviewCreateStep1, ReviewCreateStep2]
    template_name = "sysrev/review_create_wizard.html"

    def done(self, form_list, **kwargs):
        """Create the review, invite participants, run the initial query."""
        s1 = form_list[0].cleaned_data
        s2 = form_list[1].cleaned_data
        review = Review()
        review.title = s1["title"]
        review.description = s1["description"]
        review.query = s2["query"]
        review.save()
        review.participants.add(self.request.user)
        # One invitee per line; blanks dropped.
        # BUG FIX: the old py2-style filter(map(...)) chain produced a lazy
        # iterator on Python 3 — materialize a real list for invite().
        invited = [entry for entry in
                   (str(line).strip() for line in s1["invited"].splitlines())
                   if entry]
        review.invite(invited)
        review.perform_query()
        review.save()
        return HttpResponseRedirect(review.get_absolute_url())

    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        return super(ReviewCreateWizard, self).dispatch(*args, **kwargs)
|
nilq/baby-python
|
python
|
'''
visualize VIL-100 datasets in points form or curves form.
datasets name:vil-100
paper link: https://arxiv.org/abs/2108.08482
reference: https://github.com/yujun0-0/MMA-Net/tree/main/dataset
datasets structure:
VIL-100
|----Annotations
|----data
|----JPEGImages
|----Json
|----train.json
*********** A sample of one json-file ***********
{
"camera_id": 8272,
"info": {
"height": 1080 ,
"width": 1920,
"date": "2020-11-24",
"image_path": "0_Road014_Trim005_frames/XXXXXX.jpg"
},
"annotations": {
"lane": [{
"id": 1,
"lane_id": 1,
"attribute": 1,
"occlusion": 0,
"points": [[412.6, 720],[423.7, 709.9], ...]
}, {...}, {...}, {...}]
}
}
'''
import os
import cv2
import numpy as np
import json
# Per-lane drawing colors (OpenCV BGR tuples); the lane's enumeration index
# selects the color, so at most len(color) lanes can be drawn per image.
color = [(218,112,214), (255, 0, 0), (0, 255, 0), (0, 0, 255), (255, 255, 0), (255, 0, 255), (0, 255, 255), (255, 255, 255),
(100, 255, 0), (100, 0, 255), (255, 100, 0), (0, 100, 255), (255, 0, 100), (0, 255, 100)]
def get_points(mask, label):
    """Draw every annotated lane from JSON file *label* onto *mask* as dots.

    Each lane's points are drawn in place as filled circles using the
    per-lane ``color`` palette. Returns the annotated image (*mask*).
    """
    # read label; close the file instead of leaking the handle
    with open(label) as label_content:
        label_info = json.load(label_content)['annotations']
    for index, line in enumerate(label_info['lane']):
        points = [(int(float(point[0])), int(float(point[1])))
                  for point in line['points']]
        # BUG FIX: sorted() returned a new list that was discarded, so the
        # points were never actually ordered; sort in place along y then x.
        points.sort(key=lambda k: (k[1], k[0]))
        for pt in points:
            cv2.circle(mask, pt, 5, color[index], -1)
    # BUG FIX: return mask directly — the old code returned a loop variable
    # that was unbound when the file contained no points.
    return mask
def get_curves(mask, label):
    """Draw every annotated lane from JSON file *label* onto *mask* as
    connected line segments, one palette color per lane. Returns *mask*."""
    # read label; close the file instead of leaking the handle
    with open(label) as label_content:
        label_info = json.load(label_content)['annotations']
    for index, line in enumerate(label_info['lane']):
        points = [(int(float(point[0])), int(float(point[1])))
                  for point in line['points']]
        # BUG FIX: sorted() built a new list that was thrown away; sort in
        # place along y then x so consecutive points really are adjacent.
        points.sort(key=lambda k: (k[1], k[0]))
        # connect consecutive points with thick 8-connected segments
        for start, end in zip(points, points[1:]):
            mask = cv2.line(mask, start, end, color[index], 4, lineType=8)
    return mask
if __name__ == '__main__':
    # choose datasets category from: 'train', 'test'
    datasets_category = 'test'
    # choose vis_mode between 'points' and 'curves'
    vis_mod = 'curves'
    # datasets dir
    dataset_dir = '../dataset/VIL-100'
    # output directory for the rendered annotation overlays
    save_mask_dir = '{}/{}_{}'.format(dataset_dir, "vis_datasets", vis_mod)
    if not os.path.exists(save_mask_dir):
        os.makedirs(save_mask_dir)
    # read the image list for the chosen split
    txt_file = dataset_dir + '/data/{}.txt'.format(datasets_category)
    # BUG FIX: open the list file via a context manager so it gets closed
    with open(txt_file) as file_list:
        for file in file_list:
            file = file.strip()
            full_img_path = dataset_dir + file
            if not os.path.exists(full_img_path):
                continue
            print("Now dealing with:", file)
            file_name = os.path.splitext(file.split('/')[-1])[0]  # image name xxx
            # annotation lives next to the image, under Json/, with .json added
            json_file = dataset_dir + file.replace('JPEGImages', 'Json') + '.json'
            img = cv2.imread(full_img_path)
            # Images vary in height/width; the drawing helpers work directly
            # on the loaded image, so no resizing is needed here.
            if vis_mod == 'points':
                label_mask = get_points(img, json_file)
            else:
                # visualize curves
                label_mask = get_curves(img, json_file)
            # imencode + tofile handles non-ASCII paths (e.g. on Windows)
            cv2.imencode('.png', label_mask)[1].tofile('{}/{}.png'.format(save_mask_dir, file_name))
    print("finished~~")
|
nilq/baby-python
|
python
|
from __future__ import print_function
import os
import re
import sqlite3
import sys
import traceback
import simpy
from vcd import VCDWriter
from . import probe
from .util import partial_format
from .timescale import parse_time, scale_time
from .queue import Queue
from .pool import Pool
class Tracer(object):
    """Base class for simulation trace back-ends.

    A tracer is configured from keys in the 'sim.<name>.' config namespace
    and, when enabled, hands out callbacks via activate_probe() /
    activate_trace() that record values as the simulation runs.
    """

    # Subclasses set this; it selects the 'sim.<name>.*' config namespace.
    name = ''

    def __init__(self, env):
        self.env = env
        cfg_scope = 'sim.' + self.name + '.'
        self.enabled = env.config.setdefault(cfg_scope + 'enable', False)
        self.persist = env.config.setdefault(cfg_scope + 'persist', True)
        if self.enabled:
            self.open()
            # A scope is traced when it matches at least one include
            # pattern and no exclude pattern.
            include_pat = env.config.setdefault(cfg_scope + 'include_pat',
                                                ['.*'])
            exclude_pat = env.config.setdefault(cfg_scope + 'exclude_pat', [])
            self._include_re = [re.compile(pat) for pat in include_pat]
            self._exclude_re = [re.compile(pat) for pat in exclude_pat]

    def is_scope_enabled(self, scope):
        """Return True if *scope* should be traced per include/exclude config."""
        return (self.enabled and
                any(r.match(scope) for r in self._include_re) and
                not any(r.match(scope) for r in self._exclude_re))

    def open(self):
        """Open the tracer's output resource; called only when enabled."""
        raise NotImplementedError()  # pragma: no cover

    def close(self):
        """Close the output resource if the tracer was enabled."""
        if self.enabled:
            self._close()

    def _close(self):
        raise NotImplementedError()  # pragma: no cover

    def remove_files(self):
        """Delete any files this tracer produced."""
        raise NotImplementedError()

    def flush(self):
        """Flush buffered output; default is a no-op."""
        pass

    def activate_probe(self, scope, target, **hints):
        """Return a callback recording probed values for *scope*, or None."""
        raise NotImplementedError()  # pragma: no cover

    def activate_trace(self, scope, **hints):
        """Return a callback recording explicit trace events for *scope*."""
        raise NotImplementedError()  # pragma: no cover

    def trace_exception(self):
        """Record the currently-handled exception; default is a no-op."""
        pass
class LogTracer(Tracer):
    """Tracer that writes human-readable log lines to a file or stderr."""

    name = 'log'
    default_format = '{level:7} {ts:.3f} {ts_unit}: {scope}:'
    # Numeric severity ranking; lower is more severe.
    levels = {
        'ERROR': 1,
        'WARNING': 2,
        'INFO': 3,
        'PROBE': 4,
        'DEBUG': 5,
    }

    def open(self):
        """Open the log file (or fall back to stderr) per 'sim.log.*' config."""
        self.filename = self.env.config.setdefault('sim.log.file', 'sim.log')
        buffering = self.env.config.setdefault('sim.log.buffering', -1)
        level = self.env.config.setdefault('sim.log.level', 'INFO')
        self.max_level = self.levels[level]
        self.format_str = self.env.config.setdefault('sim.log.format',
                                                     self.default_format)
        ts_n, ts_unit = self.env.timescale
        # Render "(10ns)"-style units when the timescale multiplier is not 1.
        if ts_n == 1:
            self.ts_unit = ts_unit
        else:
            self.ts_unit = '({}{})'.format(ts_n, ts_unit)
        if self.filename:
            self.file = open(self.filename, 'w', buffering)
            self.should_close = True
        else:
            # Empty filename means log to stderr (which we must not close).
            self.file = sys.stderr
            self.should_close = False

    def flush(self):
        self.file.flush()

    def _close(self):
        if self.should_close:
            self.file.close()

    def remove_files(self):
        if os.path.isfile(self.filename):
            os.remove(self.filename)

    def is_scope_enabled(self, scope, level=None):
        """A scope is enabled when *level* passes the configured threshold
        and the base-class include/exclude patterns match."""
        return ((level is None or self.levels[level] <= self.max_level) and
                super(LogTracer, self).is_scope_enabled(scope))

    def activate_probe(self, scope, target, **hints):
        """Return a print-callback for probed values, or None if filtered."""
        level = hints.get('level', 'PROBE')
        if not self.is_scope_enabled(scope, level):
            return None
        # Pre-fill the static fields; only the timestamp varies per event.
        format_str = partial_format(self.format_str,
                                    level=level,
                                    ts_unit=self.ts_unit,
                                    scope=scope)

        def probe_callback(value):
            print(format_str.format(ts=self.env.now), value, file=self.file)

        return probe_callback

    def activate_trace(self, scope, **hints):
        """Return a print-callback for trace events, or None if filtered."""
        level = hints.get('level', 'DEBUG')
        if not self.is_scope_enabled(scope, level):
            return None
        format_str = partial_format(self.format_str,
                                    level=level,
                                    ts_unit=self.ts_unit,
                                    scope=scope)

        def trace_callback(*value):
            print(format_str.format(ts=self.env.now), *value, file=self.file)

        return trace_callback

    def trace_exception(self):
        """Write the currently-handled exception's traceback to the log."""
        tb_lines = traceback.format_exception(*sys.exc_info())
        print(self.format_str.format(level='ERROR',
                                     ts=self.env.now,
                                     ts_unit=self.ts_unit,
                                     scope='Exception'),
              tb_lines[-1], '\n',
              *tb_lines,
              file=self.file)
class VCDTracer(Tracer):
name = 'vcd'
def open(self):
dump_filename = self.env.config.setdefault('sim.vcd.dump_file',
'sim.vcd')
if 'sim.vcd.timescale' in self.env.config:
vcd_ts_str = self.env.config.setdefault(
'sim.vcd.timescale',
self.env.config['sim.timescale'])
vcd_timescale = parse_time(vcd_ts_str)
else:
vcd_timescale = self.env.timescale
self.scale_factor = scale_time(self.env.timescale, vcd_timescale)
check_values = self.env.config.setdefault('sim.vcd.check_values', True)
self.dump_file = open(dump_filename, 'w')
self.vcd = VCDWriter(self.dump_file,
timescale=vcd_timescale,
check_values=check_values)
self.save_filename = self.env.config.setdefault('sim.gtkw.file',
'sim.gtkw')
if self.env.config.setdefault('sim.gtkw.live'):
from vcd.gtkw import spawn_gtkwave_interactive
quiet = self.env.config.setdefault('sim.gtkw.quiet', True)
spawn_gtkwave_interactive(dump_filename, self.save_filename,
quiet=quiet)
start_time = self.env.config.setdefault('sim.vcd.start_time', '')
stop_time = self.env.config.setdefault('sim.vcd.stop_time', '')
t_start = (scale_time(parse_time(start_time), self.env.timescale)
if start_time else None)
t_stop = (scale_time(parse_time(stop_time), self.env.timescale)
if stop_time else None)
self.env.process(self._start_stop(t_start, t_stop))
def vcd_now(self):
return self.env.now * self.scale_factor
def flush(self):
self.dump_file.flush()
def _close(self):
self.vcd.close(self.vcd_now())
self.dump_file.close()
def remove_files(self):
if os.path.isfile(self.dump_file.name):
os.remove(self.dump_file.name)
if os.path.isfile(self.save_filename):
os.remove(self.save_filename)
    def activate_probe(self, scope, target, **hints):
        """Register a VCD variable for *target* and return a change callback.

        Infers the VCD ``var_type`` from the target's type when the hints do
        not specify one, and seeds an initial value for container-like
        targets when the hints do not provide ``init``.

        :param scope: dotted hierarchical name; the last component becomes
            the VCD variable name, the leading components its scope
        :param target: object being probed (simpy Container/Resource/Store,
            or the local Pool/Queue types)
        :param hints: optional registration overrides: 'var_type', 'size',
            'init', 'ident'
        :return: callable(value) that records a value change in the VCD dump
        :raises ValueError: if no var_type is given and none can be inferred
        """
        assert self.enabled
        var_type = hints.get('var_type')
        if var_type is None:
            # Infer the VCD variable type from the probed object.
            if isinstance(target, (simpy.Container, Pool)):
                # Containers/pools expose a numeric level; mirror its type.
                if isinstance(target.level, float):
                    var_type = 'real'
                else:
                    var_type = 'integer'
            elif isinstance(target, (simpy.Resource, simpy.Store, Queue)):
                var_type = 'integer'
            else:
                raise ValueError(
                    'Could not infer VCD var_type for {}'.format(scope))
        kwargs = {k: hints[k]
                  for k in ['size', 'init', 'ident']
                  if k in hints}
        if 'init' not in kwargs:
            # Seed a sensible initial value when the caller did not give one.
            if isinstance(target, (simpy.Container, Pool)):
                kwargs['init'] = target.level
            elif isinstance(target, simpy.Resource):
                # 'z' (VCD high-impedance) marks a resource with no users yet.
                kwargs['init'] = len(target.users) if target.users else 'z'
            elif isinstance(target, (simpy.Store, Queue)):
                kwargs['init'] = len(target.items)
        parent_scope, name = scope.rsplit('.', 1)
        var = self.vcd.register_var(parent_scope, name, var_type, **kwargs)
        def probe_callback(value):
            self.vcd.change(var, self.vcd_now(), value)
        return probe_callback
def activate_trace(self, scope, **hints):
assert self.enabled
var_type = hints['var_type']
kwargs = {k: hints[k]
for k in ['size', 'init', 'ident']
if k in hints}
parent_scope, name = scope.rsplit('.', 1)
var = self.vcd.register_var(parent_scope, name, var_type, **kwargs)
if isinstance(var.size, tuple):
def trace_callback(*value):
self.vcd.change(var, self.vcd_now(), value)
else:
def trace_callback(value):
self.vcd.change(var, self.vcd_now(), value)
return trace_callback
    def _start_stop(self, t_start, t_stop):
        """Simulation process that toggles VCD dumping per start/stop times.

        The five cases below are illustrated with a timeline where 'v' means
        dumping is enabled.  A start later than the stop means "dump outside
        the window" (case 5).
        """
        # Wait for simulation to start to ensure all variable registration is
        # complete before doing and dump_on()/dump_off() calls.
        yield self.env.timeout(0)
        if t_start is None and t_stop is None:
            # |vvvvvvvvvvvvvv|  dump for the whole run (default)
            pass
        elif t_start is None:
            # |vvvvvv--------|  dump until t_stop
            yield self.env.timeout(t_stop)
            self.vcd.dump_off(self.vcd_now())
        elif t_stop is None:
            # |--------vvvvvv|  dump from t_start on
            self.vcd.dump_off(self.vcd_now())
            yield self.env.timeout(t_start)
            self.vcd.dump_on(self.vcd_now())
        elif t_start <= t_stop:
            # |---vvvvvv-----|  dump only inside [t_start, t_stop]
            self.vcd.dump_off(self.vcd_now())
            yield self.env.timeout(t_start)
            self.vcd.dump_on(self.vcd_now())
            yield self.env.timeout(t_stop - t_start)
            self.vcd.dump_off(self.vcd_now())
        else:
            # |vvv-------vvvv|  dump outside [t_stop, t_start]
            yield self.env.timeout(t_stop)
            self.vcd.dump_off(self.vcd_now())
            yield self.env.timeout(t_start - t_stop)
            self.vcd.dump_on(self.vcd_now())
class SQLiteTracer(Tracer):
    """Tracer backend that appends (timestamp, scope, value) rows to SQLite."""
    name = 'db'
    def open(self):
        """Create/connect the database; the trace table is created lazily."""
        config = self.env.config
        self.filename = config.setdefault('sim.db.file', 'sim.sqlite')
        self.trace_table = config.setdefault('sim.db.trace_table',
                                             'trace')
        self.remove_files()
        self.db = sqlite3.connect(self.filename)
        self._is_trace_table_created = False
    def _create_trace_table(self):
        """Create the trace table on first use (no-op afterwards)."""
        if self._is_trace_table_created:
            return
        # NOTE: the table name comes from config and is interpolated into the
        # SQL (identifiers cannot be bound as parameters).
        self.db.execute('CREATE TABLE {} ('
                        'timestamp FLOAT, '
                        'scope TEXT, '
                        'value)'.format(self.trace_table))
        self._is_trace_table_created = True
    def flush(self):
        """Commit pending rows."""
        self.db.commit()
    def _close(self):
        self.db.commit()
        self.db.close()
    def remove_files(self):
        """Delete the database (and its journal) unless it is in-memory."""
        if self.filename == ':memory:':
            return
        for path in (self.filename, self.filename + '-journal'):
            if os.path.exists(path):
                os.remove(path)
    def activate_probe(self, scope, target, **hints):
        # Probes and traces are recorded identically in the database.
        return self.activate_trace(scope, **hints)
    def activate_trace(self, scope, **hints):
        """Return a callback that inserts one row per traced value change."""
        assert self.enabled
        self._create_trace_table()
        sql = (
            'INSERT INTO {} (timestamp, scope, value) VALUES (?, ?, ?)'
            .format(self.trace_table))
        def trace_callback(value):
            self.db.execute(sql, (self.env.now, scope, value))
        return trace_callback
class TraceManager(object):
    """Owns the log/VCD/SQLite tracers and fans trace events out to them."""
    def __init__(self, env):
        self.tracers = []
        try:
            # Construct each tracer; keep a named attribute and the list in
            # sync so partially-constructed state can still be closed.
            for attr_name, tracer_cls in (('log_tracer', LogTracer),
                                          ('vcd_tracer', VCDTracer),
                                          ('sqlite_tracer', SQLiteTracer)):
                tracer = tracer_cls(env)
                setattr(self, attr_name, tracer)
                self.tracers.append(tracer)
        except BaseException:
            # Release whatever was opened before re-raising.
            self.close()
            raise
    def flush(self):
        """Flush all managed tracers instances.
        The effect of flushing is tracer-dependent.
        """
        for tracer in self.tracers:
            if not tracer.enabled:
                continue
            tracer.flush()
    def close(self):
        """Close every tracer; drop non-persistent tracers' files."""
        for tracer in self.tracers:
            tracer.close()
            if tracer.enabled and not tracer.persist:
                tracer.remove_files()
    def auto_probe(self, scope, target, **hints):
        """Attach probe callbacks from every tracer hinted for *scope*."""
        callbacks = []
        for tracer in self.tracers:
            if tracer.name not in hints or not tracer.is_scope_enabled(scope):
                continue
            callback = tracer.activate_probe(scope, target,
                                             **hints[tracer.name])
            if callback:
                callbacks.append(callback)
        if callbacks:
            probe.attach(scope, target, callbacks, **hints)
    def get_trace_function(self, scope, **hints):
        """Return one function that forwards a value to every hinted tracer."""
        callbacks = []
        for tracer in self.tracers:
            if tracer.name not in hints or not tracer.is_scope_enabled(scope):
                continue
            callback = tracer.activate_trace(scope, **hints[tracer.name])
            if callback:
                callbacks.append(callback)
        def trace_function(*value):
            for callback in callbacks:
                callback(*value)
        return trace_function
    def trace_exception(self):
        """Ask each enabled tracer to record the current exception."""
        for tracer in self.tracers:
            if not tracer.enabled:
                continue
            tracer.trace_exception()
|
nilq/baby-python
|
python
|
from pyhafas.profile import ProfileInterface
from pyhafas.profile.interfaces.helper.parse_lid import ParseLidHelperInterface
from pyhafas.types.fptf import Station
class BaseParseLidHelper(ParseLidHelperInterface):
    """Helpers for decoding HaFAS location identifiers (LIDs)."""
    def parse_lid(self: ProfileInterface, lid: str) -> dict:
        """
        Converts the LID given by HaFAS into a dict.
        Splits the LID (e.g. A=1@O=Siegburg/Bonn) in multiple elements (e.g. A=1 and O=Siegburg/Bonn).
        These are converted into a dict where the part before the equal sign is the key and the part after the value.
        Elements without an equal sign are skipped; values may themselves
        contain "=" (only the first one is treated as the separator).
        :param lid: Location identifier (given by HaFAS)
        :return: Dict of the elements of the LID
        """
        parsed_lid = {}
        for element_group in lid.split("@"):
            if not element_group:
                continue
            # maxsplit=1: keep "=" characters inside the value intact
            # (the original split("=")[1] truncated such values) and avoid
            # an IndexError on separator-less elements.
            parts = element_group.split("=", 1)
            if len(parts) == 2:
                parsed_lid[parts[0]] = parts[1]
        return parsed_lid
    def parse_lid_to_station(
            self: ProfileInterface,
            lid: str,
            name: str = "",
            latitude: float = 0,
            longitude: float = 0) -> Station:
        """
        Parses the LID given by HaFAS to a station object
        :param lid: Location identifier (given by HaFAS)
        :param name: Station name (optional, if not given, LID is used)
        :param latitude: Latitude of the station (optional, if not given, LID is used)
        :param longitude: Longitude of the station (optional, if not given, LID is used)
        :return: Parsed LID as station object
        """
        parsed_lid = self.parse_lid(lid)
        # Only fall back to LID coordinates when the caller gave none.
        # .get avoids a KeyError on LIDs without X/Y elements.
        if latitude == 0 and longitude == 0 and parsed_lid.get('X') and parsed_lid.get('Y'):
            # HaFAS encodes WGS84 coordinates as integer micro-degrees.
            latitude = float(parsed_lid['Y']) / 1000000
            longitude = float(parsed_lid['X']) / 1000000
        return Station(
            id=parsed_lid['L'],
            name=name or parsed_lid['O'],
            latitude=latitude,
            longitude=longitude
        )
|
nilq/baby-python
|
python
|
from __future__ import print_function, absolute_import
import torch
from torch.optim.lr_scheduler import _LRScheduler
from bisect import bisect_right
AVAI_SCH = ['single_step', 'multi_step', 'cosine', 'multi_step_warmup']
def build_lr_scheduler(optimizer,
                       lr_scheduler='single_step',
                       stepsize=1,
                       gamma=0.1,
                       lr_scales=None,
                       max_epoch=1,
                       frozen=20,
                       warmup=10,
                       warmup_factor_base=0.1,
                       frozen_factor_base=0.1):
    """A function wrapper for building a learning rate scheduler.
    Args:
        optimizer (Optimizer): an Optimizer.
        lr_scheduler (str, optional): learning rate scheduler method, one of
            ``AVAI_SCH``. Default is "single_step".
        stepsize (int or list, optional): step size to decay learning rate. When ``lr_scheduler``
            is "single_step", ``stepsize`` should be an integer. When ``lr_scheduler`` is
            "multi_step" or "multi_step_warmup", ``stepsize`` is a list. Default is 1.
        gamma (float, optional): decay rate. Default is 0.1.
        lr_scales (list, optional): explicit per-interval LR scales for
            "multi_step_warmup"; overrides ``gamma`` decay when given.
        max_epoch (int, optional): maximum epoch (for cosine annealing). Default is 1.
        frozen (int, optional): number of initial frozen iterations
            ("multi_step_warmup" only). Default is 20.
        warmup (int, optional): number of warm-up iterations
            ("multi_step_warmup" only). Default is 10.
        warmup_factor_base (float, optional): LR factor at the start of
            warm-up. Default is 0.1.
        frozen_factor_base (float, optional): LR factor during the frozen
            phase. Default is 0.1.
    Returns:
        the configured torch LR scheduler.
    Raises:
        ValueError: if ``lr_scheduler`` is not in ``AVAI_SCH``.
        TypeError: if ``stepsize`` has the wrong type for the chosen method.
    Examples::
        >>> # Decay learning rate by every 20 epochs.
        >>> scheduler = torchreid.optim.build_lr_scheduler(
        >>>     optimizer, lr_scheduler='single_step', stepsize=20
        >>> )
        >>> # Decay learning rate at 30, 50 and 55 epochs.
        >>> scheduler = torchreid.optim.build_lr_scheduler(
        >>>     optimizer, lr_scheduler='multi_step', stepsize=[30, 50, 55]
        >>> )
    """
    if lr_scheduler not in AVAI_SCH:
        raise ValueError('Unsupported scheduler: {}. Must be one of {}'.format(lr_scheduler, AVAI_SCH))
    if lr_scheduler == 'single_step':
        if isinstance(stepsize, list):
            # Tolerate a milestone list by using its last entry.
            stepsize = stepsize[-1]
        if not isinstance(stepsize, int):
            raise TypeError(
                'For single_step lr_scheduler, stepsize must '
                'be an integer, but got {}'.format(type(stepsize))
            )
        scheduler = torch.optim.lr_scheduler.StepLR(
            optimizer, step_size=stepsize, gamma=gamma
        )
    elif lr_scheduler == 'multi_step':
        if not isinstance(stepsize, list):
            raise TypeError(
                'For multi_step lr_scheduler, stepsize must '
                'be a list, but got {}'.format(type(stepsize))
            )
        scheduler = torch.optim.lr_scheduler.MultiStepLR(
            optimizer, milestones=stepsize, gamma=gamma
        )
    elif lr_scheduler == 'cosine':
        scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
            optimizer, float(max_epoch)
        )
    else:
        # 'multi_step_warmup' -- the only remaining member of AVAI_SCH, so
        # the former trailing `raise ValueError` branch was unreachable.
        if not isinstance(stepsize, list):
            raise TypeError(
                'For multi_step_warmup lr_scheduler, stepsize must '
                'be a list, but got {}'.format(type(stepsize))
            )
        scheduler = MultiStepLRWithWarmUp(
            optimizer, milestones=stepsize, frozen_iters=frozen, gamma=gamma, lr_scales=lr_scales,
            warmup_factor_base=warmup_factor_base, frozen_factor_base=frozen_factor_base, warmup_iters=warmup
        )
    return scheduler
class MultiStepLRWithWarmUp(_LRScheduler):
    """MultiStepLR with an initial frozen phase and a warm-up ramp.

    Phases, measured in scheduler steps (``last_epoch``):
      1. frozen (first ``frozen_iters`` steps): LR held at
         ``frozen_factor_base * base_lr``;
      2. warm-up (next ``warmup_iters`` steps): LR ramps from
         ``warmup_factor_base * base_lr`` toward ``base_lr`` (linearly, or
         held constant for ``warmup_method='constant'``);
      3. multi-step: LR scaled per milestone, by ``gamma ** k`` or by the
         explicit ``lr_scales`` entries when provided.
    """
    def __init__(self,
                 optimizer,
                 milestones,
                 warmup_iters,
                 frozen_iters,
                 lr_scales=None,
                 warmup_method='linear',
                 warmup_factor_base=0.1,
                 frozen_factor_base=1.0,
                 gamma=0.1,
                 last_epoch=-1):
        """
        :param optimizer: wrapped torch optimizer
        :param milestones: iterable of step indices at which LR changes
        :param warmup_iters: length of the warm-up phase, in steps
        :param frozen_iters: length of the initial frozen phase, in steps
        :param lr_scales: optional explicit scale per milestone interval;
            must have len(milestones) + 1 entries when non-empty
        :param warmup_method: 'linear' or 'constant'
        :param warmup_factor_base: LR factor at the start of warm-up
        :param frozen_factor_base: LR factor during the frozen phase
        :param gamma: decay factor used when lr_scales is not given
        :param last_epoch: index of the last step when resuming (-1 = fresh)
        """
        if warmup_method not in {'constant', 'linear'}:
            raise KeyError('Unknown warm up method: {}'.format(warmup_method))
        self.milestones = sorted(milestones)
        self.gamma = gamma
        self.lr_scales = lr_scales
        self.warmup_iters = warmup_iters
        self.frozen_iters = frozen_iters
        self.warmup_method = warmup_method
        self.warmup_factor_base = warmup_factor_base
        self.frozen_factor_base = frozen_factor_base
        # When scales are given there must be one per interval, including the
        # interval before the first milestone.
        self.uses_lr_scales = self.lr_scales is not None and len(self.lr_scales) > 0
        if self.uses_lr_scales:
            assert len(self.lr_scales) == len(self.milestones) + 1
        # Base class calls method `step` which increases `last_epoch` by 1 and then calls
        # method `get_lr` with this value. If `last_epoch` is not equal to -1, we drop
        # the first step, so to avoid this dropping do small fix by subtracting 1
        if last_epoch > -1:
            last_epoch = last_epoch - 1
        elif last_epoch < -1:
            raise ValueError('Learning rate scheduler got incorrect parameter last_epoch = {}'.format(last_epoch))
        super(MultiStepLRWithWarmUp, self).__init__(optimizer, last_epoch)
    def get_lr(self):
        """Return the LR for every param group at the current ``last_epoch``."""
        # Frozen phase: hold LR at a fixed fraction of the base LR.
        if self.last_epoch < self.frozen_iters:
            return [self.frozen_factor_base * base_lr for base_lr in self.base_lrs]
        # During warm up change learning rate on every step according to warmup_factor
        if self.last_epoch < self.frozen_iters + self.warmup_iters:
            if self.warmup_method == 'constant':
                warmup_factor = self.warmup_factor_base
            elif self.warmup_method == 'linear':
                # alpha goes 0 -> 1 across the warm-up window.
                alpha = (self.last_epoch - self.frozen_iters) / self.warmup_iters
                warmup_factor = self.warmup_factor_base * (1 - alpha) + alpha
            return [base_lr * warmup_factor for base_lr in self.base_lrs]
        # On the last step of warm up set learning rate equal to base LR
        elif self.last_epoch == self.frozen_iters + self.warmup_iters:
            return [base_lr for base_lr in self.base_lrs]
        # After warm up, scale LR at each milestone (gamma decay, or the
        # explicit lr_scales entry for the current interval).
        else:
            if self.uses_lr_scales:
                lr_scale = self.lr_scales[bisect_right(self.milestones, self.last_epoch)]
            else:
                lr_scale = self.gamma ** bisect_right(self.milestones, self.last_epoch)
            return [base_lr * lr_scale for base_lr in self.base_lrs]
    def __repr__(self):
        format_string = self.__class__.__name__ + \
            '[warmup_method = {}, warmup_factor_base = {}, warmup_iters = {},' \
            ' milestones = {}, gamma = {}]'.format(self.warmup_method, self.warmup_factor_base,
                                                   self.warmup_iters, str(list(self.milestones)),
                                                   self.gamma)
        return format_string
|
nilq/baby-python
|
python
|
"""
File: rocket.py
Name:Claire Lin
-----------------------
This program should implement a console program
that draws ASCII art - a rocket.
The size of rocket is determined by a constant
defined as SIZE at top of the file.
Output format should match what is shown in the sample
run in the Assignment 2 Handout.
"""
# This constant determines rocket size: each section (nose cone, upper body,
# lower body) is drawn with SIZE rows, and the belt is 2 * SIZE chars wide.
SIZE = 3
def main():
    """
    Print the complete ASCII-art rocket, assembled top to bottom from six
    sections: nose cone, belt, upper body, lower body, belt, and a tail
    drawn with the same triangle as the nose.
    """
    head()
    belt()
    upper()
    lower()
    belt()
    head()
def head():
    """Print the SIZE-row triangular nose/tail cone, centered over the body."""
    for row in range(SIZE):
        half_width = row + 1
        # SIZE - row leading spaces keep each row centered.
        print(' ' * (SIZE - row) + '/' * half_width + '\\' * half_width)
def belt():
    """Print the horizontal belt: '+', 2 * SIZE '=' signs, '+'."""
    print('+' + '=' * (SIZE * 2) + '+')
def upper():
    """Print the upper body: growing rows of '/\\' pairs padded with dots."""
    for row in range(SIZE):
        dots = '.' * (SIZE - 1 - row)
        print('|' + dots + '/\\' * (row + 1) + dots + '|')
def lower():
    """Print the lower body: shrinking rows of '\\/' pairs padded with dots."""
    for row in range(SIZE):
        dots = '.' * row
        print('|' + dots + '\\/' * (SIZE - row) + dots + '|')
###### DO NOT EDIT CODE BELOW THIS LINE ######
if __name__ == "__main__":
main()
|
nilq/baby-python
|
python
|
from enum import Enum
class RecipeStyle(Enum):
    """
    Class for allrecipes.com style labels.

    Values are the literal label slugs used on the site; the members fall
    into two groups: dietary restrictions and regional cuisines.
    """
    # Dietary styles.
    diabetic = 'diabetic'
    dairy_free = 'dairy_free'
    # NOTE(review): this value uses a hyphen while the others use
    # underscores -- presumably matching the site's label; confirm before
    # normalizing.
    sugar_free = 'sugar-free'
    gluten_free = 'gluten_free'
    low_cholesterol = 'low_cholesterol'
    mediterranean = 'mediterranean'
    # Regional cuisines.
    chinese = 'chinese'
    indian = 'indian'
    japanese = 'japanese'
    korean = 'korean'
    thai = 'thai'
    european = 'european'
    italian = 'italian'
    american = 'american'
    mexican = 'mexican'
    eastern = 'eastern'
|
nilq/baby-python
|
python
|
from typing import Union, List, Optional
from pyspark.sql.types import StructType, StructField, StringType, ArrayType, DataType
# This file is auto-generated by generate_schema so do not edit manually
# noinspection PyPep8Naming
class ImplementationGuide_PageSchema:
    """
    A set of rules of how FHIR is used to solve a particular problem. This
    resource is used to gather all the parts of an implementation guide into a
    logical whole and to publish a computable definition of all the parts.
    """
    # noinspection PyDefaultArgument
    # NOTE(review): the list defaults below are shared mutable defaults; this
    # is safe only because the method never mutates them (nesting_list is
    # extended via `+`, which copies). Generated code -- fix in the generator
    # if at all.
    @staticmethod
    def get_schema(
        max_nesting_depth: Optional[int] = 6,
        nesting_depth: int = 0,
        nesting_list: List[str] = [],
        max_recursion_limit: Optional[int] = 2,
        include_extension: Optional[bool] = False,
        extension_fields: Optional[List[str]] = [
            "valueBoolean",
            "valueCode",
            "valueDate",
            "valueDateTime",
            "valueDecimal",
            "valueId",
            "valueInteger",
            "valuePositiveInt",
            "valueString",
            "valueTime",
            "valueUnsignedInt",
            "valueUri",
            "valueQuantity",
        ],
        extension_depth: int = 0,
        max_extension_depth: Optional[int] = 2,
    ) -> Union[StructType, DataType]:
        """
        A set of rules of how FHIR is used to solve a particular problem. This
        resource is used to gather all the parts of an implementation guide into a
        logical whole and to publish a computable definition of all the parts.
        id: unique id for the element within a resource (for internal references). This
        may be any string value that does not contain spaces.
        extension: May be used to represent additional information that is not part of the basic
        definition of the element. In order to make the use of extensions safe and
        manageable, there is a strict set of governance applied to the definition and
        use of extensions. Though any implementer is allowed to define an extension,
        there is a set of requirements that SHALL be met as part of the definition of
        the extension.
        source: The source address for the page.
        title: A short title used to represent this page in navigational structures such as
        table of contents, bread crumbs, etc.
        kind: The kind of page that this is. Some pages are autogenerated (list, example),
        and other kinds are of interest so that tools can navigate the user to the
        page of interest.
        type: For constructed pages, what kind of resources to include in the list.
        package: For constructed pages, a list of packages to include in the page (or else
        empty for everything).
        format: The format of the page.
        page: Nested Pages/Sections under this page.
        """
        from spark_fhir_schemas.stu3.complex_types.extension import ExtensionSchema
        # Cut recursion off: once this element type has been generated
        # max_recursion_limit times, or the nesting budget is spent, degrade
        # to an id-only struct instead of recursing further.
        if (
            max_recursion_limit
            and nesting_list.count("ImplementationGuide_Page") >= max_recursion_limit
        ) or (max_nesting_depth and nesting_depth >= max_nesting_depth):
            return StructType([StructField("id", StringType(), True)])
        # add my name to recursion list for later
        my_nesting_list: List[str] = nesting_list + ["ImplementationGuide_Page"]
        schema = StructType(
            [
                # unique id for the element within a resource (for internal references). This
                # may be any string value that does not contain spaces.
                StructField("id", StringType(), True),
                # May be used to represent additional information that is not part of the basic
                # definition of the element. In order to make the use of extensions safe and
                # manageable, there is a strict set of governance applied to the definition and
                # use of extensions. Though any implementer is allowed to define an extension,
                # there is a set of requirements that SHALL be met as part of the definition of
                # the extension.
                StructField(
                    "extension",
                    ArrayType(
                        ExtensionSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                        )
                    ),
                    True,
                ),
                # The source address for the page.
                StructField("source", StringType(), True),
                # A short title used to represent this page in navigational structures such as
                # table of contents, bread crumbs, etc.
                StructField("title", StringType(), True),
                # The kind of page that this is. Some pages are autogenerated (list, example),
                # and other kinds are of interest so that tools can navigate the user to the
                # page of interest.
                StructField("kind", StringType(), True),
                # For constructed pages, what kind of resources to include in the list.
                StructField("type", ArrayType(StringType()), True),
                # For constructed pages, a list of packages to include in the page (or else
                # empty for everything).
                StructField("package", ArrayType(StringType()), True),
                # The format of the page.
                StructField("format", StringType(), True),
                # Nested Pages/Sections under this page.
                StructField(
                    "page",
                    ArrayType(
                        # Recursive self-reference; bounded by the checks above.
                        ImplementationGuide_PageSchema.get_schema(
                            max_nesting_depth=max_nesting_depth,
                            nesting_depth=nesting_depth + 1,
                            nesting_list=my_nesting_list,
                            max_recursion_limit=max_recursion_limit,
                            include_extension=include_extension,
                            extension_fields=extension_fields,
                            extension_depth=extension_depth,
                            max_extension_depth=max_extension_depth,
                        )
                    ),
                    True,
                ),
            ]
        )
        # When extensions are excluded, collapse the extension column to a
        # plain string so the overall schema stays flat.
        if not include_extension:
            schema.fields = [
                c
                if c.name != "extension"
                else StructField("extension", StringType(), True)
                for c in schema.fields
            ]
        return schema
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-15 01:52
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the post app: Article and Category tables plus the
    Article -> Category foreign key (CASCADE on delete)."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Article',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file_type', models.CharField(max_length=100)),
                ('article_title', models.CharField(max_length=1000)),
            ],
        ),
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('topic', models.CharField(max_length=250)),
                ('author', models.CharField(max_length=250)),
                ('author_avatar', models.CharField(max_length=1000)),
            ],
        ),
        # Added separately because Category is created after Article.
        migrations.AddField(
            model_name='article',
            name='category',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='post.Category'),
        ),
    ]
|
nilq/baby-python
|
python
|
from rallf.sdk.logger import Logger
from rallf.sdk.network import Network
from rallf.sdk.listener import Listener
class Task:
    """Base class for a robot task: wires up I/O channels and tracks the
    task's lifecycle state ('stopped' -> 'ready' -> 'finished'/'terminating')."""
    def __init__(self, manifest, robot, input, output):
        self.manifest = manifest
        self.robot = robot
        self.finished = False
        self.status = "stopped"
        fqtn = manifest['fqtn']
        self.network = Network(input, output, fqtn)
        self.logger = Logger(input, output, fqtn)
        self.listener = Listener(input, output)
        self.home = "%s/data/%s" % (robot.home, fqtn)
    def warmup(self):
        """Mark the task as ready to run."""
        self.status = "ready"
    def waitloop(self):
        """Block, dispatching listener events until finish() is called."""
        while True:
            if self.finished:
                break
            self.listener.listen(self)
    def main(self, input):
        """Task entry point; subclasses override. Default does nothing."""
        pass
    def cooldown(self):
        """Mark the task as finished running."""
        self.status = "finished"
    def finish(self):
        """Request termination: flips the flags that stop waitloop()."""
        self.finished = True
        self.status = "terminating"
|
nilq/baby-python
|
python
|
from django.urls import re_path
from . import views
urlpatterns = [
    # Site index.
    re_path(r'^$',
            views.IndexView.as_view(),
            name='index'),
    # Detail view for a single page, addressed by its pk (slug-like: word
    # characters and hyphens).
    re_path(r'^(?P<pk>[-\w]+)/$',
            views.PageDetailView.as_view(),
            name='page_detail'),
]
|
nilq/baby-python
|
python
|
import unittest
import time
from liquidtap.client import Client
class TestClient(unittest.TestCase):
    """Smoke test for liquidtap.Client.

    Connects to the exchange's pusher endpoint and subscribes to both sides
    of the BTC/JPY price ladder, printing whatever arrives.

    NOTE(review): this performs real network I/O and sleeps; it is a manual
    integration check, not a unit test.
    """
    def setUp(self) -> None:
        self.client = Client()
        self.client.pusher.connection.bind('pusher:connection_established', self._on_connect)
    def _on_connect(self, data: str):
        print('on_connect:', data)
        # Subscribe to both order-book sides once the connection is up.
        # (Plain strings: these had f-prefixes with no placeholders.)
        self.client.pusher.subscribe("price_ladders_cash_btcjpy_buy").bind(
            'updated', self._callback)
        self.client.pusher.subscribe("price_ladders_cash_btcjpy_sell").bind(
            'updated', self._callback)
    @staticmethod
    def _callback(data: str):
        print(data)
    def test_connect(self):
        """
        Connect and wait three seconds so a few ladder updates can arrive;
        output looks like:
        on_connect: {"activity_timeout":120,"socket_id":"..."}
        [["5251849.00000","0.02901661"], ...]
        """
        self.client.pusher.connect()
        time.sleep(3)
if __name__ == '__main__':
unittest.main()
|
nilq/baby-python
|
python
|
class NopBackpressureManager:
    """No-op backpressure policy: accepts all pressure, never reports a limit."""
    def __init__(self):
        pass
    def register_pressure(self):
        """Ignore a new unit of pressure."""
    def unregister_pressure(self):
        """Ignore a released unit of pressure."""
    def reached(self) -> bool:
        """Report that the limit is never reached."""
        return False
class BackpressureManager:
    """Counts outstanding work units against a fixed ceiling."""
    def __init__(self, max):
        self.max = max        # ceiling at which pressure is considered reached
        self.pressure = 0     # currently registered units
    def register_pressure(self):
        """Record one more outstanding unit."""
        self.pressure = self.pressure + 1
    def unregister_pressure(self):
        """Record that one outstanding unit completed."""
        self.pressure = self.pressure - 1
    def reached(self) -> bool:
        """True once the outstanding count is at or above the ceiling."""
        return not (self.pressure < self.max)
|
nilq/baby-python
|
python
|
import re
import croniter
from datetime import timedelta, datetime
from functools import partial
from airflow import DAG
from airflow.sensors.external_task import ExternalTaskSensor
from dagger import conf
from dagger.alerts.alert import airflow_task_fail_alerts
from dagger.dag_creator.airflow.operator_factory import OperatorFactory
from dagger.dag_creator.airflow.utils.macros import user_defined_macros
from dagger.dag_creator.graph_traverser_base import GraphTraverserBase
from dagger.graph.task_graph import Graph
# noinspection PyStatementEffect
class DagCreator(GraphTraverserBase):
    """Traverses a dagger task graph and materializes it as Airflow DAGs.

    Pipelines become DAGs (each with a synthetic control-flow head task),
    jobs become operators, and cross-pipeline edges become either
    ExternalTaskSensors or control-flow dependencies.
    """
    def __init__(self, task_graph: Graph, with_data_nodes: bool = conf.WITH_DATA_NODES):
        super().__init__(task_graph=task_graph, with_data_nodes=with_data_nodes)
        self._operator_factory = OperatorFactory()
    @staticmethod
    def _get_control_flow_task_id(pipe_id):
        """Task id of the synthetic control-flow head for pipeline *pipe_id*."""
        return "control_flow:{}".format(pipe_id)
    @staticmethod
    def _get_default_args():
        """Base Airflow default_args; pipelines can override/extend them."""
        return {
            "depends_on_past": True,
            "retries": 0,
            "retry_delay": timedelta(minutes=5),
        }
    @staticmethod
    def _get_execution_date_fn(from_dag_schedule: str, to_dag_schedule: str):
        """Build an ``execution_date_fn`` for an ExternalTaskSensor.

        Maps the downstream DAG's execution date to an upstream execution
        date by walking the two cron schedules.
        NOTE(review): the forward-then-two-back cron walk encodes a specific
        alignment between the schedules -- verify against a concrete pair
        before changing.
        """
        def execution_date_fn(execution_date, **kwargs):
            to_dag_cron = croniter.croniter(to_dag_schedule, execution_date)
            to_dag_next_schedule = to_dag_cron.get_next(datetime)
            from_dag_cron = croniter.croniter(from_dag_schedule, to_dag_next_schedule)
            from_dag_cron.get_next(datetime)
            # skipping one schedule
            from_dag_cron.get_prev(datetime)
            from_dag_target_schedule = from_dag_cron.get_prev(datetime)
            return from_dag_target_schedule
        return execution_date_fn
    def _get_external_task_sensor(self, from_task_id: str, to_task_id: str) -> ExternalTaskSensor:
        """Sensor that waits for *from_task_id* in its own (upstream) DAG."""
        from_pipeline_name = self._task_graph.get_node(from_task_id).obj.pipeline_name
        from_task_name = self._task_graph.get_node(from_task_id).obj.name
        from_pipeline_schedule = self._task_graph.get_node(from_task_id).obj.pipeline.schedule
        to_pipeline_schedule = self._task_graph.get_node(to_task_id).obj.pipeline.schedule
        return ExternalTaskSensor(
            task_id=f"{from_pipeline_name}-{from_task_name}-sensor",
            external_dag_id=from_pipeline_name,
            external_task_id=from_task_name,
            execution_date_fn=self._get_execution_date_fn(from_pipeline_schedule, to_pipeline_schedule),
            mode=conf.EXTERNAL_SENSOR_MODE,
            poke_interval=conf.EXTERNAL_SENSOR_POKE_INTERVAL,
            timeout=conf.EXTERNAL_SENSOR_TIMEOUT
        )
    def _create_control_flow_task(self, pipe_id, dag):
        """Create the pipeline's control-flow head operator and register it."""
        control_flow_task_id = self._get_control_flow_task_id(pipe_id)
        self._tasks[
            control_flow_task_id
        ] = self._operator_factory.create_control_flow_operator(conf.IS_DUMMY_OPERATOR_SHORT_CIRCUIT, dag)
    def _create_dag(self, pipe_id, node):
        """Build the Airflow DAG for one pipeline node (plus its head task)."""
        pipeline = node.obj
        default_args = DagCreator._get_default_args()
        default_args.update(pipeline.default_args)
        # Airflow owner is the local part of the pipeline owner's email.
        default_args["owner"] = pipeline.owner.split("@")[0]
        if len(pipeline.alerts) > 0:
            default_args["on_failure_callback"] = partial(
                airflow_task_fail_alerts, pipeline.alerts
            )
        dag = DAG(
            pipeline.name,
            description=pipeline.description,
            default_args=default_args,
            start_date=pipeline.start_date,
            schedule_interval=pipeline.schedule,
            user_defined_macros=user_defined_macros,
            **pipeline.parameters,
        )
        self._create_control_flow_task(pipe_id, dag)
        return dag
    def _create_job_task(self, node):
        """Create the operator for one job node in its pipeline's DAG."""
        pipeline_id = node.obj.pipeline_name
        return self._operator_factory.create_operator(node.obj, self._dags[pipeline_id])
    def _create_data_task(self, pipe_id, node):
        """Create (once per pipeline) the operator representing a dataset."""
        if pipe_id not in self._data_tasks:
            self._data_tasks[pipe_id] = {}
        dataset_id = node.obj.airflow_name
        if dataset_id not in self._data_tasks[pipe_id]:
            self._data_tasks[pipe_id][
                dataset_id
            ] = self._operator_factory.create_dataset_operator(
                # Airflow task ids only allow alphanumerics, '-' and '_'.
                re.sub("[^0-9a-zA-Z-_]+", "_", dataset_id), self._dags[pipe_id]
            )
    def _create_edge_without_data(self, from_task_id, to_task_ids, node):
        """Wire task-to-task dependencies when data nodes are disabled.

        Same-pipeline edges become direct dependencies; cross-pipeline edges
        become either an ExternalTaskSensor (both schedules must be plain
        cron, not '@'-aliases) or a control-flow dependency.
        """
        from_pipe = (
            self._task_graph.get_node(from_task_id).obj.pipeline_name
            if from_task_id
            else None
        )
        for to_task_id in to_task_ids:
            edge_properties = self._task_graph.get_edge(node.obj.alias(), to_task_id)
            to_pipe = self._task_graph.get_node(to_task_id).obj.pipeline_name
            if from_pipe and from_pipe == to_pipe:
                self._tasks[from_task_id] >> self._tasks[to_task_id]
            elif from_pipe and from_pipe != to_pipe and edge_properties.follow_external_dependency:
                from_schedule = self._task_graph.get_node(from_task_id).obj.pipeline.schedule
                to_schedule = self._task_graph.get_node(to_task_id).obj.pipeline.schedule
                if not from_schedule.startswith('@') and not to_schedule.startswith('@'):
                    external_task_sensor = self._get_external_task_sensor(from_task_id, to_task_id)
                    self._tasks[self._get_control_flow_task_id(to_pipe)] >> external_task_sensor >> self._tasks[to_task_id]
            else:
                self._tasks[self._get_control_flow_task_id(to_pipe)] >> self._tasks[
                    to_task_id
                ]
    def _create_edge_with_data(self, from_task_id, to_task_ids, node):
        """Wire dependencies through a dataset node when data nodes are enabled.

        Producer task -> dataset operator -> consumer tasks; consumers in
        other pipelines additionally depend on their control-flow head.
        """
        from_pipe = (
            self._task_graph.get_node(from_task_id).obj.pipeline_name
            if from_task_id
            else None
        )
        data_id = node.obj.airflow_name
        if from_pipe:
            self._tasks[from_task_id] >> self._data_tasks[from_pipe][data_id]
        for to_task_id in to_task_ids:
            to_pipe = self._task_graph.get_node(to_task_id).obj.pipeline_name
            self._data_tasks[to_pipe][data_id] >> self._tasks[to_task_id]
            if not from_pipe or (from_pipe != to_pipe):
                self._tasks[
                    self._get_control_flow_task_id(to_pipe)
                ] >> self._data_tasks[to_pipe][data_id]
|
nilq/baby-python
|
python
|
from __future__ import absolute_import
from collections import defaultdict, Sequence, OrderedDict
import operator
from string import capwords
import numpy as np
from .elements import ELEMENTS
# records
MODEL = 'MODEL '
ATOM = 'ATOM  '
HETATM = 'HETATM'
TER = 'TER '
MODEL_LINE = 'MODEL ' + ' ' * 4 + '{:>4d}\n'
ENDMDL_LINE = 'ENDMDL\n'
TER_LINE = 'TER   ' + '{:>5d}' + ' ' * 6 + '{:3s}' + ' ' + '{:1s}' + \
        '{:>4d}' + '{:1s}' + ' ' * 53 + '\n'
ATOM_LINE = '{:6s}' + '{:>5d}' + ' ' + '{:4s}' + '{:1s}' + '{:3s}' + ' ' + \
        '{:1s}' + '{:>4d}' + '{:1s}' + ' ' * 3 + '{:8.3f}' * 3 + '{:6.2f}' * 2 + \
        ' ' * 10 + '{:<2s}' + '{:2s}\n'
END_LINE = 'END   \n'
ATOM_DATA = ('record id name alt resn chain resi i x y z q b ' \
        'e charge').split()
TER_DATA = 'id resn chain resi i'.split()
def parse_pdb(infile):
    """Parse a PDB file into a dict of per-atom column lists.

    Only ATOM, HETATM and MODEL records are interpreted; other record types
    are ignored.  Column positions follow the fixed-width PDB format.

    NOTE(review): the stream is closed on return even when the caller
    supplied an open file object -- preserved from the original behavior.

    :param infile: open file-like object (anything with ``read``) or filename
    :return: defaultdict(list) keyed by column name ('record', 'id', 'name',
        ..., 'model'); each value holds one entry per parsed atom
    :raises TypeError: if *infile* is neither a string nor file-like
    """
    # Duck-type the file check: the Python 2 builtin ``file`` does not exist
    # on Python 3, so test for a ``read`` attribute instead.
    if isinstance(infile, str):
        f = open(infile)
    elif hasattr(infile, 'read'):
        f = infile
    else:
        raise TypeError('Input should be either a file or string.')
    pdb = defaultdict(list)
    model_number = 1
    for line in f:
        record = line[:6]
        if record in (ATOM, HETATM):
            pdb['model'].append(model_number)
            pdb['record'].append(record)
            pdb['id'].append(int(line[6:11]))
            name = line[12:16].strip()
            pdb['name'].append(name)
            pdb['alt'].append(line[16])
            pdb['resn'].append(line[17:20].strip())
            pdb['chain'].append(line[21])
            pdb['resi'].append(int(line[22:26]))
            pdb['i'].append(line[26])
            pdb['x'].append(float(line[30:38]))
            pdb['y'].append(float(line[38:46]))
            pdb['z'].append(float(line[46:54]))
            pdb['q'].append(float(line[54:60]))
            pdb['b'].append(float(line[60:66]))
            # Be forgiving when determining the element
            e = line[76:78].strip()
            if not e:
                # If element is not given, take the first non-numeric letter of
                # the name as element.
                for e in name:
                    if e.isalpha():
                        break
            pdb['e'].append(e)
            pdb['charge'].append(line[78: 80].strip())
        elif record == MODEL:
            model_number = int(line[10: 14])
    f.close()
    return pdb
def tofile(pdb, out):
    """Write a parsed-PDB dict (see parse_pdb) to PDB file `out`.

    Multi-model structures are wrapped in MODEL/ENDMDL records; a TER
    record is emitted at every chain change that follows an ATOM
    record. The file always ends with an END record.
    """
    nmodels = len(set(pdb['model']))
    natoms = len(pdb['id'])
    # Floor division: the original `natoms / nmodels` truncated only on
    # Python 2 and produced a float (breaking the range() below) on
    # Python 3. xrange was likewise Python 2-only.
    natoms_per_model = natoms // nmodels
    # `with` guarantees the handle is closed even if formatting fails.
    with open(out, 'w') as f:
        for nmodel in range(nmodels):
            offset = nmodel * natoms_per_model
            # write MODEL record
            if nmodels > 1:
                f.write(MODEL_LINE.format(nmodel + 1))
            prev_chain = pdb['chain'][offset]
            for natom in range(natoms_per_model):
                index = offset + natom
                # write TER record on a chain change, using the fields
                # of the previous chain's last atom
                current_chain = pdb['chain'][index]
                if prev_chain != current_chain:
                    prev_record = pdb['record'][index - 1]
                    if prev_record == ATOM:
                        line_data = [pdb[data][index - 1] for data in TER_DATA]
                        # TER takes the next serial number
                        line_data[0] += 1
                        f.write(TER_LINE.format(*line_data))
                    prev_chain = current_chain
                # write ATOM/HETATM record
                line_data = [pdb[data][index] for data in ATOM_DATA]
                # take care of the rules for atom name position:
                # single-letter elements with short names are shifted
                # one column right
                e = pdb['e'][index]
                name = pdb['name'][index]
                if len(e) == 1 and len(name) != 4:
                    line_data[2] = ' ' + name
                f.write(ATOM_LINE.format(*line_data))
            # write ENDMDL record
            if nmodels > 1:
                f.write(ENDMDL_LINE)
        f.write(END_LINE)
def pdb_dict_to_array(pdb):
    """Pack a parsed-PDB dict of column lists into a structured array.

    Returns a numpy structured array with one record per atom, covering
    every ATOM_DATA column plus the 'model' number.
    """
    dtype = [('record', np.str_, 6), ('id', np.int32),
             ('name', np.str_, 4), ('alt', np.str_, 1),
             ('resn', np.str_, 4), ('chain', np.str_, 2),
             ('resi', np.int32), ('i', np.str_, 1), ('x', np.float64),
             ('y', np.float64), ('z', np.float64),
             ('q', np.float64), ('b', np.float64),
             ('e', np.str_, 2), ('charge', np.str_, 2),
             ('model', np.int32)]
    pdb_array = np.empty(len(pdb['id']), dtype=dtype)
    # Copy every per-atom column, then the model-number column.
    for field in ATOM_DATA + ['model']:
        pdb_array[field] = pdb[field]
    return pdb_array
def pdb_array_to_dict(pdb_array):
    """Unpack a structured atom array back into a dict of lists.

    Inverse of pdb_dict_to_array: every ATOM_DATA field and the model
    column become plain Python lists.
    """
    result = defaultdict(list)
    result.update((field, pdb_array[field].tolist()) for field in ATOM_DATA)
    result['model'] = pdb_array['model'].tolist()
    return result
class Structure(object):
    """Structured-array-backed model of a molecular structure.

    `self.data` is a numpy structured array with the per-atom fields
    produced by pdb_dict_to_array / mmcif_dict_to_array.
    """
    @classmethod
    def fromfile(cls, fid):
        """Initialize Structure from PDB-file"""
        # Accept either an open file object (use its .name) or a path
        # string, and dispatch the parser on the filename extension.
        try:
            fname = fid.name
        except AttributeError:
            fname = fid
        if fname[-3:] in ('pdb', 'ent'):
            arr = pdb_dict_to_array(parse_pdb(fid))
        elif fname[-3:] == 'cif':
            arr = mmcif_dict_to_array(parse_mmcif(fid))
        else:
            raise IOError('Filetype not recognized.')
        return cls(arr)
    def __init__(self, pdb):
        # pdb: numpy structured array of per-atom records.
        self.data = pdb
    @property
    def atomnumber(self):
        """Return array of atom numbers"""
        # Per-atom 'number' attribute of each element in ELEMENTS.
        return self._get_property('number')
    @property
    def chain_list(self):
        """Return the unique chain identifiers (sorted by np.unique)."""
        return np.unique(self.data['chain'])
    def combine(self, structure):
        """Return a new Structure containing the atoms of both."""
        return Structure(np.hstack((self.data, structure.data)))
    @property
    def coor(self):
        """Return the coordinates"""
        # Shape (3, natoms): rows are x, y and z.
        return np.asarray([self.data['x'], self.data[ 'y'], self.data['z']])
    def duplicate(self):
        """Duplicate the object"""
        return Structure(self.data.copy())
    def _get_property(self, ptype):
        """Per-atom element property `ptype` looked up in ELEMENTS."""
        # Deduplicate element symbols, fetch the property once per
        # element, then scatter back to per-atom order via the inverse
        # index from np.unique.
        elements, ind = np.unique(self.data['e'], return_inverse=True)
        return np.asarray([getattr(ELEMENTS[capwords(e)], ptype)
                           for e in elements], dtype=np.float64)[ind]
    @property
    def mass(self):
        """Return the per-atom atomic masses."""
        return self._get_property('mass')
    def rmsd(self, structure):
        """Root-mean-square deviation to `structure` (no alignment).

        Assumes both structures have the same atoms in the same order.
        """
        # mean() averages over all 3*natoms components, hence * 3.
        return np.sqrt(((self.coor - structure.coor) ** 2).mean() * 3)
    def rotate(self, rotmat):
        """Rotate atoms"""
        # Matrix product yields a (3, natoms) matrix whose rows are the
        # rotated x, y and z; tuple-unpacking iterates over those rows.
        self.data['x'], self.data['y'], self.data['z'] = (
            np.asmatrix(rotmat) * np.asmatrix(self.coor)
        )
    def select(self, identifier, values, loperator='==', return_ind=False):
        """A simple way of selecting atoms

        identifier: field name in self.data; values: one value or a
        sequence of values; loperator: textual comparison operator;
        return_ind: return the boolean mask instead of a Structure.
        """
        # Map the textual operator onto the operator-module function.
        if loperator == '==':
            oper = operator.eq
        elif loperator == '<':
            oper = operator.lt
        elif loperator == '>':
            oper = operator.gt
        elif loperator == '>=':
            oper = operator.ge
        elif loperator == '<=':
            oper = operator.le
        elif loperator == '!=':
            oper = operator.ne
        else:
            raise ValueError('Logic operator not recognized.')
        # Normalize a scalar (or single string) to a 1-tuple.
        # NOTE(review): `basestring` is Python 2 only -- this line
        # raises NameError on Python 3; confirm the targeted runtime.
        if not isinstance(values, Sequence) or isinstance(values, basestring):
            values = (values,)
        selection = oper(self.data[identifier], values[0])
        if len(values) > 1:
            for v in values[1:]:
                if loperator == '!=':
                    # "differs from all values": AND the exclusions.
                    selection &= oper(self.data[identifier], v)
                else:
                    # any other operator: OR the matches together.
                    selection |= oper(self.data[identifier], v)
        if return_ind:
            return selection
        else:
            return Structure(self.data[selection])
    @property
    def sequence(self):
        """Residue names ordered by residue number.

        NOTE(review): np.unique sorts by resi and collapses duplicate
        residue ids across chains -- verify for multi-chain input.
        """
        resids, indices = np.unique(self.data['resi'], return_index=True)
        return self.data['resn'][indices]
    def translate(self, trans):
        """Translate atoms"""
        # trans: length-3 sequence (dx, dy, dz).
        self.data['x'] += trans[0]
        self.data['y'] += trans[1]
        self.data['z'] += trans[2]
    def tofile(self, fid):
        """Write instance to PDB-file"""
        tofile(pdb_array_to_dict(self.data), fid)
    @property
    def rvdw(self):
        """Return the per-atom van der Waals radii."""
        return self._get_property('vdwrad')
def parse_mmcif(infile):
    """Parse the _atom_site records of an mmCIF file.

    Parameters
    ----------
    infile : file-like object or str
        An already-open text stream, or a path to an mmCIF file.

    Returns
    -------
    OrderedDict
        Maps each _atom_site field name to a list of the per-atom
        string values, in file order.

    Raises TypeError for any other input type. The stream is closed
    before returning (the whole file is consumed inside `with`), even
    when the caller opened it.
    """
    # Duck-type file objects: the previous `isinstance(infile, file)`
    # check used the Python 2-only `file` builtin and raised NameError
    # for every file-object input under Python 3.
    if hasattr(infile, 'read'):
        pass
    elif isinstance(infile, str):
        infile = open(infile)
    else:
        raise TypeError("Input should either be a file or string.")
    atom_site = OrderedDict()
    with infile as f:
        for line in f:
            if line.startswith('_atom_site.'):
                # Header line: register the column in encounter order.
                words = line.split('.')
                atom_site[words[1].strip()] = []
            if line.startswith('ATOM'):
                # Data line: whitespace-separated values, zipped against
                # the registered columns in order.
                words = line.split()
                for key, word in zip(atom_site, words):
                    atom_site[key].append(word)
    return atom_site
def mmcif_dict_to_array(atom_site):
    """Pack parsed _atom_site columns into a structured numpy array.

    Takes the OrderedDict produced by parse_mmcif and returns an array
    with the same per-atom fields as pdb_dict_to_array produces.
    """
    dtype = [('record', np.str_, 6), ('id', np.int32),
             ('name', np.str_, 4), ('alt', np.str_, 1),
             ('resn', np.str_, 4), ('chain', np.str_, 2),
             ('resi', np.int32), ('i', np.str_, 1), ('x', np.float64),
             ('y', np.float64), ('z', np.float64),
             ('q', np.float64), ('b', np.float64),
             ('e', np.str_, 2), ('charge', np.str_, 2),
             ('model', np.int32)]
    cifdata = np.zeros(len(atom_site['id']), dtype=dtype)
    # Every _atom_site row is emitted as an ATOM record.
    cifdata['record'] = 'ATOM  '
    # Structured-array field -> mmCIF _atom_site column name. Numeric
    # fields rely on numpy casting the string values on assignment.
    field_to_key = (
        ('id', 'id'),
        ('name', 'label_atom_id'),
        ('resn', 'label_comp_id'),
        ('chain', 'label_asym_id'),
        ('resi', 'label_seq_id'),
        ('x', 'Cartn_x'),
        ('y', 'Cartn_y'),
        ('z', 'Cartn_z'),
        ('q', 'occupancy'),
        ('b', 'B_iso_or_equiv'),
        ('e', 'type_symbol'),
        ('charge', 'pdbx_formal_charge'),
        ('model', 'pdbx_PDB_model_num'),
    )
    for field, key in field_to_key:
        cifdata[field] = atom_site[key]
    return cifdata
|
nilq/baby-python
|
python
|
# Copyright 2017-2020 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import logging
import re
import os
import subprocess
class Scheme(object):
    """Configuration describing how to query one VCS.

    Holds the command, output-matching pattern and tolerated error
    codes for reading both the latest commit and the workspace status.
    """

    def __init__(self, name, commit_cmd, commit_pattern, commit_ok_errors,
                 status_cmd, status_pattern, status_ok_errors):
        """Store the per-VCS configuration verbatim."""
        self.name = name
        self.commit_cmd = commit_cmd
        self.commit_pattern = commit_pattern
        self.commit_ok_errors = commit_ok_errors
        self.status_cmd = status_cmd
        self.status_pattern = status_pattern
        self.status_ok_errors = status_ok_errors
# Known VCS schemes, tried in order by commit_for_dir. Only git is
# configured: the latest commit hash is parsed from `git log -1 .` and
# the workspace dirty state from `git status -s` (any output = dirty).
SCHEMES = [
    Scheme(
        "git",
        commit_cmd=["git", "log", "-1", "."],
        commit_pattern=re.compile(r"commit ([a-f0-9]+)"),
        # git exits 128 for "not a repository" - treated as no commit.
        commit_ok_errors=[128],
        status_cmd=["git", "status", "-s"],
        status_pattern=re.compile(r"(.)"),
        status_ok_errors=[],
    )
]
log = logging.getLogger("guild")
class NoCommit(Exception):
    """Raised when no VCS commit is available for a directory."""
    pass
class CommitReadError(Exception):
    """Raised when running a VCS command fails unexpectedly."""
    pass
def commit_for_dir(dir):
    """Returns a tuple of commit and workspace status.

    Tries each scheme in SCHEMES in order; the first scheme that yields
    a commit wins. The commit is formatted as "<scheme>:<hash>" and the
    status is a boolean (True = dirty workspace).

    Raises NoCommit if a commit is not available.
    """
    dir = os.path.abspath(dir)
    for scheme in SCHEMES:
        commit = _apply_scheme(
            dir,
            scheme.commit_cmd,
            scheme.commit_pattern,
            scheme.commit_ok_errors,
        )
        if commit is None:
            # Try the next scheme instead of giving up: the previous
            # `raise NoCommit(dir)` here made the loop (and the final
            # raise below) unreachable for any second scheme.
            continue
        status = _apply_scheme(
            dir,
            scheme.status_cmd,
            scheme.status_pattern,
            scheme.status_ok_errors,
        )
        return _format_commit(commit, scheme), _format_status(status)
    raise NoCommit(dir)
def _apply_scheme(repo_dir, cmd_template, pattern, ok_errors):
    """Run a scheme command in repo_dir and extract pattern group 1.

    Returns None when the command is missing, exits with a tolerated
    error code, or its output does not match `pattern`. Raises
    CommitReadError for any other failure.
    """
    cmd = [part.format(repo=repo_dir) for part in cmd_template]
    log.debug("vcs scheme cmd for repo %s: %s", repo_dir, cmd)
    try:
        raw = subprocess.check_output(
            cmd, cwd=repo_dir, env=os.environ, stderr=subprocess.STDOUT
        )
    except OSError as e:
        # errno 2: the VCS executable is not installed.
        if e.errno != 2:
            raise CommitReadError(e)
        return None
    except subprocess.CalledProcessError as e:
        # Tolerated exit codes (e.g. "not a repo") mean "no result".
        if e.returncode not in ok_errors:
            raise CommitReadError(e, e.output)
        return None
    text = raw.decode("ascii", errors="replace")
    log.debug("vcs scheme result: %s", text)
    match = pattern.match(text)
    if match:
        return match.group(1)
    return None
def _format_commit(commit, scheme):
return "%s:%s" % (scheme.name, commit)
def _format_status(status):
return bool(status)
|
nilq/baby-python
|
python
|
"""Module for raceplan exceptions."""
class CompetitionFormatNotSupportedException(Exception):
    """Raised when the competition format is not supported."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class NoRaceclassesInEventException(Exception):
    """Raised when an event has no raceclasses."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class InconsistentValuesInContestantsException(Exception):
    """Raised when contestant values are inconsistent."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class InconsistentValuesInRaceclassesException(Exception):
    """Raised when raceclass values are inconsistent."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class MissingPropertyException(Exception):
    """Raised when a required property is missing."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class InvalidDateFormatException(Exception):
    """Raised when a date string has an invalid format."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class NoRaceplanInEventException(Exception):
    """Raised when an event has no raceplan."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class DuplicateRaceplansInEventException(Exception):
    """Raised when an event has more than one raceplan."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class InconsistentInputDataException(Exception):  # pragma: no cover
    """Raised when the input data is inconsistent."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class NoRacesInRaceplanException(Exception):
    """Raised when a raceplan contains no races."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class CouldNotCreateRaceplanException(Exception):
    """Raised when a raceplan could not be created."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
class CouldNotCreateRaceException(Exception):
    """Raised when a race could not be created."""

    def __init__(self, message: str) -> None:
        """Store the explanatory message on the base Exception."""
        super().__init__(message)
|
nilq/baby-python
|
python
|
################ Running: F:\users\emiwar\edited_new\catAtt.py #################
COM1
Serial<id=0x52b47f0, open=True>(port='COM1', baudrate=115200, bytesize=8, parity='N', stopbits=1, timeout=2.5, xonxoff=False, rtscts=False, dsrdtr=False)
492
1 False [('b', 1.378630934454577)]
1 finished.
2 desert [('b', 1.3728325826727996)]
2 finished.
3 woods [('b', 0.2293752072882853)]
3 finished.
4 chair [('a', 0.6698945223711235)]
4 finished.
5 bathroom [('a', 1.068733033890112)]
5 finished.
6 car [('a', 0.7773190108109702)]
6 finished.
7 bathroom [('a', 0.7775817591646046)]
7 finished.
8 coast [('b', 0.5513196506735767)]
8 finished.
9 car [('b', 0.48504929610680847)]
9 finished.
10 bathroom [('a', 0.5399842292335961)]
10 finished.
11 desert [('a', 0.6085093517594942)]
11 finished.
12 False [('a', 0.8668944992050456)]
12 finished.
13 car [('b', 0.7476574781967429)]
13 finished.
14 car [('b', 0.3686060290774549)]
14 finished.
15 bathroom [('a', 1.200669363160614)]
15 finished.
16 desert [('a', 0.8890058288493492)]
16 finished.
17 chair [('a', 0.21403903161990456)]
17 finished.
18 desert [('b', 0.39270703235979454)]
18 finished.
19 car [('a', 0.5123246865859983)]
19 finished.
20 chair [('a', 0.3190014272281587)]
20 finished.
21 flower [('a', 0.5525155075779367)]
21 finished.
22 shoe [('b', 0.8203921460526544)]
22 finished.
23 coast [('b', 0.23459146589812008)]
23 finished.
24 coast [('b', 0.2188092631699874)]
24 finished.
25 chair [('a', 0.36736120010345985)]
25 finished.
26 car [('B', 0.16185797102525612)]
26 finished.
27 False [('b', 0.753368149445123)]
27 finished.
28 desert [('a', 0.35343201840896654)]
28 finished.
29 desert [('a', 0.41572713114283033)]
29 finished.
30 woods [('b', 0.6208063266049066)]
30 finished.
31 coast [('b', 0.24567528232955738)]
31 finished.
32 desert [('b', 0.4705515105538325)]
32 finished.
33 flower [('b', 0.18913981291507298)]
33 finished.
34 False [('b', 0.5904081602129736)]
34 finished.
35 woods [('b', 0.3012846810966039)]
35 finished.
36 False [('a', 0.7722517211336708)]
36 finished.
37 False [('b', 0.6514379167574589)]
37 finished.
38 bathroom [('b', 0.43740240408851605)]
38 finished.
39 flower [('a', 0.311466766292142)]
39 finished.
40 flower [('B', 0.197050121887969)]
40 finished.
41 False [('b', 0.6815756807627622)]
41 finished.
42 coast [('a', 0.4003460888470727)]
42 finished.
43 flower [('b', 0.4615614700655897)]
43 finished.
44 False [('a', 0.7023237100775077)]
44 finished.
45 shoe [('a', 0.9297171613679893)]
45 finished.
46 chair [('a', 0.696176982130055)]
46 finished.
47 False [('a', 0.6817715690444857)]
47 finished.
48 desert [('b', 0.3936137487767155)]
48 finished.
49 bathroom [('a', 0.40891092273659524)]
49 finished.
50 flower [('b', 0.27923933178635707)]
50 finished.
51 woods [('a', 0.3456536701046389)]
51 finished.
52 chair [('a', 0.7068977600997641)]
52 finished.
53 woods [('b', 0.8326407353197283)]
53 finished.
54 desert [('b', 0.27951996814181257)]
54 finished.
55 desert [('b', 0.9122499675231666)]
55 finished.
56 bathroom [('a', 0.31191337984409984)]
56 finished.
57 woods [('a', 0.3375410214398471)]
57 finished.
58 False [('a', 0.8180435393960579)]
58 finished.
59 chair [('a', 0.3158859824634419)]
59 finished.
60 desert [('a', 0.25162934770105494)]
60 finished.
61 shoe [('b', 0.6090976030955062)]
61 finished.
62 False [('a', 0.6494162793133)]
62 finished.
63 chair [('b', 0.37276894830574747)]
63 finished.
64 shoe [('b', 0.7006548474873853)]
64 finished.
65 woods [('b', 0.6106966731540524)]
65 finished.
66 coast [('b', 0.4466754269001285)]
66 finished.
67 desert [('b', 0.4042518313940491)]
67 finished.
68 False [('a', 0.6298359552938564)]
68 finished.
69 woods [('a', 1.6945318724606295)]
69 finished.
70 False [('a', 0.6370166683918796)]
70 finished.
71 car [('a', 0.8047627244541218)]
71 finished.
72 coast [('b', 0.3362410622303287)]
72 finished.
73 chair [('a', 0.19146877209414015)]
73 finished.
74 woods [('b', 0.974377636830468)]
74 finished.
75 car [('b', 0.3174794808492152)]
75 finished.
76 coast [('b', 0.8973571797014301)]
76 finished.
77 car [('b', 0.28512360462582365)]
77 finished.
78 False [('a', 0.6534501703317801)]
78 finished.
79 False [('b', 0.6501467549255722)]
79 finished.
80 woods [('a', 0.3114943314099037)]
80 finished.
81 bathroom [('a', 0.6465943737252928)]
81 finished.
82 shoe [('a', 0.28474238491617143)]
82 finished.
83 car [('a', 0.41531775982412)]
83 finished.
84 flower [('b', 0.21339330408090973)]
84 finished.
85 flower [('a', 0.29591564135671433)]
85 finished.
86 chair [('a', 0.5676411326680864)]
86 finished.
87 shoe [('b', 0.49978959604459305)]
87 finished.
88 False [('b', 0.6761151482933201)]
88 finished.
89 False [('a', 0.7256792822040552)]
89 finished.
90 bathroom [('a', 0.23374427840553835)]
90 finished.
91 False [('b', 0.5538893647617442)]
91 finished.
92 ('coast', 'car') [('b', 0.38653655149209953)]
92 finished.
93 desert [('a', 0.8663742809244468)]
93 finished.
94 car [('a', 1.045911462602362)]
94 finished.
95 coast [('b', 0.4071798920094807)]
95 finished.
96 coast [('a', 1.171189349776796)]
96 finished.
97 False [('b', 0.6872901642086617)]
97 finished.
98 False [('a', 0.714222456949301)]
98 finished.
99 chair [('a', 0.4120336986493385)]
99 finished.
100 car [('a', 0.5754499785493863)]
100 finished.
101 shoe [('a', 0.6480617763609189)]
101 finished.
102 woods [('b', 1.133653870685066)]
102 finished.
103 shoe [('a', 0.9177019958610799)]
103 finished.
104 coast [('b', 0.4209076137494776)]
104 finished.
105 bathroom [('a', 0.35304552027264435)]
105 finished.
106 bathroom [('a', 0.24814939824454996)]
106 finished.
107 False [('a', 0.6320100806219671)]
107 finished.
108 bathroom [('a', 0.7060127438812742)]
108 finished.
109 False [('b', 0.6406238865820342)]
109 finished.
110 bathroom [('a', 0.3036060158815417)]
110 finished.
111 flower [('b', 0.5142762382529327)]
111 finished.
112 chair [('a', 0.33121365399983915)]
112 finished.
113 woods [('a', 1.0191791635861591)]
113 finished.
114 coast [('b', 1.114690242857705)]
114 finished.
115 False [('a', 0.7907886761327063)]
115 finished.
116 flower [('b', 0.6469744204505332)]
116 finished.
117 chair [('a', 0.5013276709496495)]
117 finished.
118 coast [('a', 0.4840006486601851)]
118 finished.
119 desert [('a', 0.1940100413271466)]
119 finished.
120 False [('b', 0.7016685986686753)]
120 finished.
121 car [('b', 0.22344782729669532)]
121 finished.
122 coast [('b', 0.4494753390436017)]
122 finished.
123 shoe [('b', 1.4954950093942898)]
123 finished.
124 False [('a', 0.6772907712279448)]
124 finished.
125 woods [('a', 0.7492442319257862)]
125 finished.
126 bathroom [('b', 0.40211054961037007)]
126 finished.
127 False [('a', 0.5090705364964379)]
127 finished.
128 car [('b', 0.023127426793507766)]
128 finished.
129 shoe [('b', 0.21782219736815023)]
129 finished.
130 desert [('b', 0.7362953713768547)]
130 finished.
131 chair [('b', 0.5802260750174355)]
131 finished.
132 ('woods', 'chair') [('a', 1.5374714048562055)]
132 finished.
133 woods [('a', 0.2848174558748724)]
133 finished.
134 shoe [('a', 0.8176444316850393)]
134 finished.
135 flower [('b', 0.2320947700468423)]
135 finished.
136 False [('a', 0.6502578951335636)]
136 finished.
137 coast [('b', 0.24363018521125923)]
137 finished.
138 flower [('b', 0.2172087268818359)]
138 finished.
139 chair [('a', 0.4209718346082809)]
139 finished.
140 bathroom [('a', 0.5872669098066581)]
140 finished.
141 ('shoe', 'bathroom') [('b', 0.8831324061093255)]
141 finished.
142 shoe [('a', 0.6499930940585728)]
142 finished.
143 flower [('b', 0.16684696403945054)]
143 finished.
144 shoe [('a', 0.3412628987885)]
144 finished.
145 woods [('a', 1.1364839871594086)]
145 finished.
146 ('car', 'bathroom') [('b', 1.1306917935421552)]
146 finished.
147 shoe [('a', 1.0913197151880922)]
147 finished.
148 shoe [('b', 0.3711220791606138)]
148 finished.
149 False [('b', 0.7091097141528735)]
149 finished.
150 car [('a', 0.6771951730547698)]
150 finished.
151 flower [('b', 0.3703760615135252)]
151 finished.
152 False [('a', 0.6412144838859604)]
152 finished.
153 bathroom [('a', 0.2778027199733515)]
153 finished.
154 desert [('b', 0.2928975542408807)]
154 finished.
155 shoe [('a', 0.7593480204577645)]
155 finished.
156 coast [('a', 0.36913475149003716)]
156 finished.
157 False [('a', 0.6590250687145272)]
157 finished.
158 False [('a', 0.5562634838142912)]
158 finished.
159 flower [('a', 0.7076115206941722)]
159 finished.
160 chair [('b', 0.43174950228785747)]
160 finished.
161 flower [('a', 0.5486302922454342)]
161 finished.
162 woods [('b', 0.5503469539216894)]
162 finished.
163 car [('b', 0.7138388912726441)]
163 finished.
164 False [('a', 0.5912779276422953)]
164 finished.
165 False [('b', 0.6128054046403122)]
165 finished.
166 flower [('b', 0.23906082712346688)]
166 finished.
167 False [('a', 0.5863760286702018)]
167 finished.
168 chair [('b', 0.690766301468102)]
168 finished.
169 coast [('a', 0.8249151712834646)]
169 finished.
170 False [('b', 0.7172326264244475)]
170 finished.
171 bathroom [('a', 0.30437168100615963)]
171 finished.
172 desert [('a', 1.0236837143238517)]
172 finished.
173 car [('B', 0.20328775607322314)]
173 finished.
174 coast [('b', 0.6416587514704588)]
174 finished.
175 shoe [('b', 0.22412493215051654)]
175 finished.
176 False [('a', 0.7122958311865659)]
176 finished.
177 coast [('a', 0.208278801808774)]
177 finished.
178 coast [('a', 0.21451790222363343)]
178 finished.
179 desert [('a', 1.3078243585905511)]
179 finished.
180 ('shoe', 'desert') [('b', 1.4747094446011033)]
180 finished.
181 False [('b', 0.7989890053304407)]
181 finished.
182 flower [('b', 0.20828935866211395)]
182 finished.
183 bathroom [('a', 0.764251385659918)]
183 finished.
184 False [('', 2.527974342154266)]
184 finished.
185 shoe [('a', 1.5437530259314372)]
185 finished.
186 False [('a', 0.6155419756623814)]
186 finished.
187 False [('b', 0.6065824327506562)]
187 finished.
188 woods [('b', 0.21979955467668333)]
188 finished.
189 car [('b', 0.2621266722717337)]
189 finished.
190 False [('a', 0.6501435292202586)]
190 finished.
191 bathroom [('a', 0.16326144604772708)]
191 finished.
192 bathroom [('b', 0.3192292793164597)]
192 finished.
193 car [('a', 1.6502678654951524)]
193 finished.
194 False [('a', 0.5887747803808452)]
194 finished.
195 desert [('b', 0.2822017021753709)]
195 finished.
196 False [('a', 0.9592959986302958)]
196 finished.
197 woods [('a', 0.36274462942037644)]
197 finished.
198 woods [('a', 0.2755819685426104)]
198 finished.
199 False [('a', 0.7049177635772139)]
199 finished.
200 False [('a', 0.862835095790615)]
200 finished.
201 flower [('b', 0.8977046761292513)]
201 finished.
202 desert [('b', 0.18756185656366142)]
202 finished.
203 woods [('a', 0.4184634089197061)]
203 finished.
204 chair [('b', 0.8067166220889703)]
204 finished.
205 desert [('a', 0.4726728516143339)]
205 finished.
206 woods [('b', 0.3456610012526653)]
206 finished.
207 coast [('b', 0.22088984304627957)]
207 finished.
208 bathroom [('a', 0.06553254780556017)]
208 finished.
209 False [('b', 0.6243285034752262)]
209 finished.
210 car [('b', 0.35554984051850624)]
210 finished.
211 shoe [('a', 0.8698850212031175)]
211 finished.
212 desert [('b', 0.48484431720225984)]
212 finished.
213 car [('a', 0.47801520597477065)]
213 finished.
214 chair [('b', 0.37932064888263994)]
214 finished.
215 chair [('a', 0.7124920127143923)]
215 finished.
216 False [('a', 0.7216453911860299)]
216 finished.
217 chair [('a', 0.6548307721723177)]
217 finished.
218 car [('b', 0.28273628950591956)]
218 finished.
219 flower [('b', 0.3117356728098457)]
219 finished.
220 desert [('a', 0.5760065593249237)]
220 finished.
221 car [('a', 0.4983609018718198)]
221 finished.
222 car [('a', 0.2945347462700738)]
222 finished.
223 desert [('b', 0.5016191574040931)]
223 finished.
224 bathroom [('a', 0.4226327795577163)]
224 finished.
225 bathroom [('a', 0.708325281289035)]
225 finished.
226 False [('b', 0.6616414089057798)]
226 finished.
227 woods [('b', 0.04953158361331589)]
227 finished.
228 chair [('a', 1.302906037845787)]
228 finished.
229 flower [('b', 0.17862548008088197)]
229 finished.
230 car [('b', 0.37965289652220235)]
230 finished.
231 car [('B', 0.12259820586314163)]
231 finished.
232 False [('', 2.52378033885725)]
232 finished.
233 False [('b', 0.5865009514363919)]
233 finished.
234 coast [('b', 1.3435962566572925)]
234 finished.
235 shoe [('b', 0.3308212909450958)]
235 finished.
236 flower [('a', 0.7318702902875884)]
236 finished.
237 desert [('a', 0.6951975407228019)]
237 finished.
238 desert [('b', 0.4632320921309656)]
238 finished.
239 shoe [('b', 0.6035470441247526)]
239 finished.
240 woods [('b', 0.40509432695216674)]
240 finished.
241 car [('b', 0.20552434278488363)]
241 finished.
242 chair [('b', 0.4935842189115647)]
242 finished.
243 flower [('a', 0.20660876623514923)]
243 finished.
244 shoe [('b', 1.0181472311578545)]
244 finished.
245 desert [('a', 0.233792663983877)]
245 finished.
246 shoe [('b', 1.3972670065777493)]
246 finished.
247 bathroom [('a', 0.24647496398210933)]
247 finished.
248 chair [('a', 0.5309199975245065)]
248 finished.
249 False [('a', 0.5520383964485518)]
249 finished.
250 flower [('a', 0.3397377267046977)]
250 finished.
251 flower [('b', 1.352215927535326)]
251 finished.
252 bathroom [('a', 0.3173794839876791)]
252 finished.
253 desert [('a', 0.15316557515689055)]
253 finished.
254 coast [('b', 0.2759678801867267)]
254 finished.
255 chair [('a', 0.2639477294987955)]
255 finished.
256 chair [('a', 1.3918539799487917)]
256 finished.
257 coast [('a', 0.1973967385774813)]
257 finished.
258 flower [('b', 0.2560166000657773)]
258 finished.
259 flower [('a', 0.2797865286929664)]
259 finished.
260 coast [('b', 0.3157405324818683)]
260 finished.
261 desert [('a', 0.5886208262672881)]
261 finished.
262 ('desert', 'flower') [('a', 0.7331655575617333)]
262 finished.
263 chair [('a', 0.5034912394239655)]
263 finished.
264 flower [('b', 0.26835550907981087)]
264 finished.
265 flower [('a', 0.4129931993338687)]
265 finished.
266 shoe [('b', 0.30528719480025757)]
266 finished.
267 False [('b', 0.6625102965972474)]
267 finished.
268 chair [('b', 0.3745867798279505)]
268 finished.
269 chair [('b', 0.35715741470903595)]
269 finished.
270 coast [('a', 0.27323600109866675)]
270 finished.
271 flower [('b', 0.5279989748123626)]
271 finished.
272 coast [('b', 0.04906092389501282)]
272 finished.
273 shoe [('b', 0.20160364469484193)]
273 finished.
274 shoe [('a', 0.24021650933536876)]
274 finished.
275 woods [('b', 0.5104804629299906)]
275 finished.
276 ('bathroom', 'chair') [('b', 0.852873824780545)]
276 finished.
277 shoe [('a', 0.6472879003504204)]
277 finished.
278 bathroom [('a', 0.11819775743060745)]
278 finished.
279 bathroom [('a', 0.641296885992233)]
279 finished.
280 bathroom [('b', 0.42985161462638644)]
280 finished.
281 chair [('a', 0.3419268075749642)]
281 finished.
282 bathroom [('b', 0.23643539630938903)]
282 finished.
283 woods [('a', 0.40150939545310393)]
283 finished.
284 desert [('b', 0.6091820579240448)]
284 finished.
285 False [('a', 0.6700525819269387)]
285 finished.
286 False [('a', 0.7045893281356257)]
286 finished.
287 shoe [('a', 0.8672144305146503)]
287 finished.
288 False [('a', 0.6573937416042099)]
288 finished.
289 chair [('b', 0.8348104619590231)]
289 finished.
290 woods [('a', 0.4034005384582997)]
290 finished.
291 coast [('a', 0.8878392965380044)]
291 finished.
292 car [('a', 0.9283570867428352)]
292 finished.
293 False [('b', 0.6721463578714975)]
293 finished.
294 False [('a', 0.683560369220686)]
294 finished.
295 woods [('b', 1.268982762124324)]
295 finished.
296 coast [('b', 0.35359359691665304)]
296 finished.
297 car [('a', 0.3204178050718838)]
297 finished.
298 shoe [('b', 0.8818952015290051)]
298 finished.
299 False [('', 2.5198048037791523)]
299 finished.
300 chair [('a', 0.5857455499199204)]
300 finished.
301 car [('a', 0.3576535868232895)]
301 finished.
302 woods [('b', 0.4302580534867957)]
302 finished.
303 False [('b', 0.6325953994992233)]
303 finished.
304 False [('a', 0.6426912703909693)]
304 finished.
305 desert [('a', 0.13612974607531214)]
305 finished.
306 car [('b', 0.931864014824896)]
306 finished.
307 coast [('a', 0.25447060752048856)]
307 finished.
308 shoe [('b', 0.3096392576535436)]
308 finished.
309 False [('b', 1.4347497014023247)]
309 finished.
310 flower [('b', 0.37518734749392024)]
310 finished.
311 False [('a', 0.6429745459599872)]
311 finished.
312 flower [('a', 0.40895989480668504)]
312 finished.
313 car [('a', 0.46108523867405893)]
313 finished.
314 woods [('b', 0.26235833655664464)]
314 finished.
315 woods [('b', 0.16201808330333733)]
315 finished.
316 coast [('b', 0.06016086885210825)]
316 finished.
317 desert [('a', 0.3651202147029835)]
317 finished.
318 woods [('A', 0.16641706550581148)]
318 finished.
319 ('shoe', 'coast') [('a', 1.0337138981267344)]
319 finished.
320 woods [('a', 0.6613804200278537)]
320 finished.
321 shoe [('a', 0.22566916521918756)]
321 finished.
322 bathroom [('a', 0.26007131154528906)]
322 finished.
323 bathroom [('a', 0.3380371935536459)]
323 finished.
324 bathroom [('a', 0.9085884988362523)]
324 finished.
325 False [('a', 0.6686960262541106)]
325 finished.
326 shoe [('a', 0.929356762118914)]
326 finished.
327 False [('b', 0.7821836675502709)]
327 finished.
328 coast [('a', 0.9256378172303812)]
328 finished.
329 False [('a', 0.7059007239358834)]
329 finished.
330 False [('a', 0.6027934020839893)]
330 finished.
331 shoe [('a', 0.3181439761274305)]
331 finished.
332 desert [('a', 0.40932967792559793)]
332 finished.
333 woods [('b', 0.231437019425357)]
333 finished.
334 car [('B', 0.1370766371846912)]
334 finished.
335 car [('a', 0.1773205356894323)]
335 finished.
336 chair [('a', 0.6904798001933159)]
336 finished.
337 desert [('b', 0.5567018864803686)]
337 finished.
338 woods [('a', 0.3249936145693937)]
338 finished.
339 desert [('a', 0.30636223438159504)]
339 finished.
340 False [('a', 0.7145215677983288)]
340 finished.
341 flower [('a', 0.8080423869405422)]
341 finished.
342 woods [('a', 0.19285846455841238)]
342 finished.
343 False [('a', 0.7109454336768977)]
343 finished.
344 shoe [('b', 0.7865333844365523)]
344 finished.
345 bathroom [('b', 0.2408308595595372)]
345 finished.
346 desert [('a', 0.3854550605010445)]
346 finished.
347 chair [('b', 0.16525815758814133)]
347 finished.
348 coast [('b', 0.22066257744972972)]
348 finished.
349 coast [('b', 0.2821852804036098)]
349 finished.
350 car [('a', 0.30902432093807874)]
350 finished.
351 coast [('a', 0.1809869895578231)]
351 finished.
352 car [('b', 0.09572837450559746)]
352 finished.
353 chair [('a', 0.2373943105003491)]
353 finished.
354 car [('B', 0.02922049073549715)]
354 finished.
355 car [('a', 0.2867270733731857)]
355 finished.
356 coast [('b', 0.244228700154963)]
356 finished.
357 desert [('b', 0.6803622291026841)]
357 finished.
358 shoe [('a', 0.3092985058829072)]
358 finished.
359 shoe [('a', 0.28733907762944)]
359 finished.
360 chair [('b', 0.4682653652798763)]
360 finished.
361 shoe [('a', 0.442187004689913)]
361 finished.
362 chair [('a', 1.1985664965932301)]
362 finished.
363 chair [('b', 0.24968688665740046)]
363 finished.
364 False [('b', 0.67074464232428)]
364 finished.
365 desert [('a', 1.030670298615405)]
365 finished.
366 shoe [('b', 0.244121665390594)]
366 finished.
367 flower [('b', 0.017281275925597583)]
367 finished.
368 False [('b', 0.5719451031891367)]
368 finished.
369 bathroom [('a', 0.4958601005773744)]
369 finished.
370 car [('b', 0.83554240380181)]
370 finished.
371 car [('b', 0.06390737885885756)]
371 finished.
372 False [('a', 0.6734275493408859)]
372 finished.
373 woods [('a', 0.4551226691710326)]
373 finished.
374 False [('a', 0.5882184928505012)]
374 finished.
375 flower [('b', 0.333303324499866)]
375 finished.
376 flower [('a', 0.42850092387106997)]
376 finished.
377 coast [('b', 0.09649198690840421)]
377 finished.
378 car [('A', 0.1578748115534836)]
378 finished.
379 False [('b', 0.7024518585485566)]
379 finished.
380 car [('b', 0.04831373326396715)]
380 finished.
381 coast [('b', 0.48233119957876625)]
381 finished.
382 False [('a', 0.536959397461942)]
382 finished.
383 desert [('a', 0.3840052526211366)]
383 finished.
384 flower [('b', 0.2796762682219196)]
384 finished.
385 desert [('b', 0.27793116169141285)]
385 finished.
386 False [('a', 0.7687137089833413)]
386 finished.
387 desert [('b', 0.35663485046097776)]
387 finished.
388 desert [('b', 0.7045107382264177)]
388 finished.
389 False [('b', 1.1900723349735927)]
389 finished.
390 False [('a', 0.7229054689487384)]
390 finished.
391 False [('b', 0.5227387293125503)]
391 finished.
392 woods [('b', 0.11168036662729719)]
392 finished.
393 shoe [('b', 1.0824205809376508)]
393 finished.
394 bathroom [('b', 0.5544588483589905)]
394 finished.
395 shoe [('a', 0.5650291911661043)]
395 finished.
396 woods [('a', 0.34274173801532015)]
396 finished.
397 desert [('a', 0.20438537559130054)]
397 finished.
398 ('woods', 'shoe') [('b', 1.3715540304156093)]
398 finished.
399 False [('b', 0.6459457137270874)]
399 finished.
400 shoe [('a', 0.7641874580476724)]
400 finished.
401 bathroom [('b', 0.6211793354286783)]
401 finished.
402 False [('b', 0.9119100954894748)]
402 finished.
403 woods [('a', 0.24774295938550495)]
403 finished.
404 False [('a', 0.6737032005148649)]
404 finished.
405 False [('b', 0.7049602842371314)]
405 finished.
406 coast [('a', 0.4452303109546847)]
406 finished.
407 bathroom [('a', 0.7983400520861323)]
407 finished.
408 False [('b', 0.9654661862514331)]
408 finished.
409 False [('b', 0.7803860699968936)]
409 finished.
410 ('bathroom', 'car') [('b', 1.2188136618588032)]
410 finished.
411 flower [('b', 0.4508791073130851)]
411 finished.
412 woods [('a', 0.28899503739921784)]
412 finished.
413 coast [('b', 0.8116765837548883)]
413 finished.
414 False [('a', 1.0566454366389735)]
414 finished.
415 False [('b', 0.8033953186804865)]
415 finished.
416 bathroom [('b', 0.4729951288918528)]
416 finished.
417 flower [('b', 1.027734027114093)]
417 finished.
418 bathroom [('a', 0.4469748309948045)]
418 finished.
419 desert [('a', 1.1000508195193106)]
419 finished.
420 False [('a', 1.1430263038664634)]
420 finished.
421 shoe [('A', 0.2065603806568106)]
421 finished.
422 coast [('a', 0.7298504123182283)]
422 finished.
423 False [('a', 0.5772376057411748)]
423 finished.
424 bathroom [('a', 0.35366954761229863)]
424 finished.
425 False [('', 2.5224956284355358)]
425 finished.
426 chair [('b', 0.40128095687305176)]
426 finished.
427 False [('b', 0.5931356406117629)]
427 finished.
428 desert [('b', 0.5002021930686169)]
428 finished.
429 flower [('a', 0.3996161997265517)]
429 finished.
430 shoe [('a', 0.7058570302924636)]
430 finished.
431 ('chair', 'desert') [('b', 0.6933257519485778)]
431 finished.
432 car [('a', 0.2511941707398364)]
432 finished.
433 woods [('a', 0.16340572304579837)]
433 finished.
434 coast [('a', 0.7265601929793775)]
434 finished.
435 bathroom [('a', 0.4782116807473358)]
435 finished.
436 flower [('b', 0.275969053170229)]
436 finished.
437 car [('b', 0.7238508938280575)]
437 finished.
438 bathroom [('a', 0.19028317879747192)]
438 finished.
439 False [('b', 1.011443629187852)]
439 finished.
440 False [('b', 0.45072749916653265)]
440 finished.
441 flower [('a', 0.836957608661578)]
441 finished.
442 coast [('a', 0.1634077757680643)]
442 finished.
443 car [('b', 0.2932714428025065)]
443 finished.
444 shoe [('a', 1.0665662397104825)]
444 finished.
445 coast [('a', 0.3662298573035514)]
445 finished.
446 woods [('b', 0.27638692862092284)]
446 finished.
447 chair [('a', 0.32204444024773693)]
447 finished.
448 car [('b', 0.3332939406300284)]
448 finished.
449 ('chair', 'bathroom') [('a', 1.466034643488456)]
449 finished.
450 woods [('a', 1.1373050757638339)]
450 finished.
451 bathroom [('a', 0.650595421199796)]
451 finished.
452 coast [('b', 0.27028448081000533)]
452 finished.
453 flower [('b', 0.29694816027767956)]
453 finished.
454 car [('a', 0.3973966212788582)]
454 finished.
455 shoe [('a', 2.038953029919867)]
455 finished.
456 bathroom [('b', 0.3161581146869139)]
456 finished.
457 bathroom [('a', 1.4285736488618568)]
457 finished.
458 woods [('b', 0.2047830170722591)]
458 finished.
459 bathroom [('a', 0.3488837740287636)]
459 finished.
460 desert [('a', 0.30357375882977067)]
460 finished.
461 shoe [('B', 0.27115600771503523)]
461 finished.
462 coast [('b', 0.3067170619569879)]
462 finished.
463 coast [('a', 0.06388538541432354)]
463 finished.
464 woods [('a', 0.262398511248648)]
464 finished.
465 woods [('a', 0.28990116732347815)]
465 finished.
466 chair [('a', 0.4877676858823179)]
466 finished.
467 woods [('b', 0.36861834540650307)]
467 finished.
468 flower [('b', 0.5426979270741867)]
468 finished.
469 flower [('a', 0.528810972793508)]
469 finished.
470 bathroom [('a', 0.2125569666877709)]
470 finished.
471 chair [('b', 0.6296169005845513)]
471 finished.
472 chair [('a', 0.24945962106176012)]
472 finished.
473 coast [('a', 0.31549567212914553)]
473 finished.
474 flower [('b', 0.34411207624907547)]
474 finished.
475 chair [('a', 0.5656769714269103)]
475 finished.
476 False [('a', 0.45977736182521767)]
476 finished.
477 False [('b', 0.6086427786576678)]
477 finished.
478 bathroom [('a', 0.20282618697910948)]
478 finished.
479 desert [('b', 0.9308262174763513)]
479 finished.
480 chair [('b', 0.30117471387256955)]
480 finished.
481 chair [('a', 0.4742115130111415)]
481 finished.
482 desert [('a', 0.665592604572339)]
482 finished.
483 flower [('a', 0.7205970369841452)]
483 finished.
484 False [('b', 0.6841383569490063)]
484 finished.
485 shoe [('a', 0.3065056316409027)]
485 finished.
486 False [('a', 0.5851074467755097)]
486 finished.
487 shoe [('a', 0.4984019563016773)]
487 finished.
488 flower [('b', 0.3000955688485192)]
488 finished.
489 car [('b', 0.2350351469913221)]
489 finished.
490 woods [('a', 0.23135432407252665)]
490 finished.
491 chair [('a', 0.18968583683817997)]
491 finished.
492 chair [('b', 1.0585213308559105)]
492 finished.
|
nilq/baby-python
|
python
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: server_admin.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
# Default symbol database: generated message/enum types register themselves
# here so they can be resolved by full name at runtime.
_sym_db = _symbol_database.Default()
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
# File-level descriptor for server_admin.proto. `serialized_pb` is the binary
# FileDescriptorProto emitted by protoc -- machine-generated, do not hand-edit.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='server_admin.proto',
  package='protos',
  syntax='proto3',
  serialized_pb=_b('\n\x12server_admin.proto\x12\x06protos\x1a\x1bgoogle/protobuf/empty.proto\"\x9a\x01\n\x0cServerStatus\x12/\n\x06status\x18\x01 \x01(\x0e\x32\x1f.protos.ServerStatus.StatusCode\"Y\n\nStatusCode\x12\r\n\tUNDEFINED\x10\x00\x12\x0b\n\x07STARTED\x10\x01\x12\x0b\n\x07STOPPED\x10\x02\x12\n\n\x06PAUSED\x10\x03\x12\t\n\x05\x45RROR\x10\x04\x12\x0b\n\x07UNKNOWN\x10\x05\x32\xc1\x01\n\x05\x41\x64min\x12;\n\tGetStatus\x12\x16.google.protobuf.Empty\x1a\x14.protos.ServerStatus\"\x00\x12=\n\x0bStartServer\x12\x16.google.protobuf.Empty\x1a\x14.protos.ServerStatus\"\x00\x12<\n\nStopServer\x12\x16.google.protobuf.Empty\x1a\x14.protos.ServerStatus\"\x00\x62\x06proto3')
  ,
  dependencies=[google_dot_protobuf_dot_empty__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Enum descriptor for protos.ServerStatus.StatusCode (six lifecycle states).
_SERVERSTATUS_STATUSCODE = _descriptor.EnumDescriptor(
  name='StatusCode',
  full_name='protos.ServerStatus.StatusCode',
  filename=None,
  file=DESCRIPTOR,
  values=[
    _descriptor.EnumValueDescriptor(
      name='UNDEFINED', index=0, number=0,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='STARTED', index=1, number=1,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='STOPPED', index=2, number=2,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='PAUSED', index=3, number=3,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='ERROR', index=4, number=4,
      options=None,
      type=None),
    _descriptor.EnumValueDescriptor(
      name='UNKNOWN', index=5, number=5,
      options=None,
      type=None),
  ],
  containing_type=None,
  options=None,
  serialized_start=125,
  serialized_end=214,
)
_sym_db.RegisterEnumDescriptor(_SERVERSTATUS_STATUSCODE)
# Message descriptor for protos.ServerStatus: one enum-typed field `status`
# (type=14 is TYPE_ENUM in the descriptor wire format).
_SERVERSTATUS = _descriptor.Descriptor(
  name='ServerStatus',
  full_name='protos.ServerStatus',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='status', full_name='protos.ServerStatus.status', index=0,
      number=1, type=14, cpp_type=8, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
    _SERVERSTATUS_STATUSCODE,
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=60,
  serialized_end=214,
)
# Wire up the cross-references that cannot be expressed inline above.
_SERVERSTATUS.fields_by_name['status'].enum_type = _SERVERSTATUS_STATUSCODE
_SERVERSTATUS_STATUSCODE.containing_type = _SERVERSTATUS
DESCRIPTOR.message_types_by_name['ServerStatus'] = _SERVERSTATUS
# Concrete ServerStatus message class, built from the descriptor by the
# GeneratedProtocolMessageType metaclass and registered with the symbol db.
ServerStatus = _reflection.GeneratedProtocolMessageType('ServerStatus', (_message.Message,), dict(
  DESCRIPTOR = _SERVERSTATUS,
  __module__ = 'server_admin_pb2'
  # @@protoc_insertion_point(class_scope:protos.ServerStatus)
  ))
_sym_db.RegisterMessage(ServerStatus)
import abc
import six
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
class BetaAdminServicer(object):
    """Interface exported by the server.

    Beta-API base class for the Admin service: every RPC responds with
    UNIMPLEMENTED until a concrete subclass overrides it.
    """
    def GetStatus(self, request, context):
        """Return the server status.
        """
        context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
    def StartServer(self, request, context):
        """Start the server; unimplemented placeholder."""
        context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
    def StopServer(self, request, context):
        """Stop the server; unimplemented placeholder."""
        context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
class BetaAdminStub(object):
    """Interface exported by the server.

    Beta-API client stub skeleton for the Admin service; real stubs are
    produced by beta_create_Admin_stub below.
    """
    def GetStatus(self, request, timeout):
        """Return the server status.
        """
        raise NotImplementedError()
    # Each unary call also exposes a `.future` variant in the beta stub API.
    GetStatus.future = None
    def StartServer(self, request, timeout):
        """Start the server; implemented by the dynamic stub."""
        raise NotImplementedError()
    StartServer.future = None
    def StopServer(self, request, timeout):
        """Stop the server; implemented by the dynamic stub."""
        raise NotImplementedError()
    StopServer.future = None
def beta_create_Admin_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
    """Build a beta-API gRPC server hosting the protos.Admin service.

    Args:
        servicer: object implementing GetStatus/StartServer/StopServer
            (typically a BetaAdminServicer subclass).
        pool, pool_size: optional thread pool / pool size for handlers.
        default_timeout, maximum_timeout: optional RPC timeout bounds.

    Returns:
        A grpc.beta server wired with per-method (de)serializers.
    """
    # Deferred imports avoid a circular import with server_admin_pb2 at
    # module load; the compiler emitted the same pair once per RPC method,
    # but importing each module once is sufficient.
    import google.protobuf.empty_pb2
    import server_admin_pb2
    request_deserializers = {
        ('protos.Admin', 'GetStatus'): google.protobuf.empty_pb2.Empty.FromString,
        ('protos.Admin', 'StartServer'): google.protobuf.empty_pb2.Empty.FromString,
        ('protos.Admin', 'StopServer'): google.protobuf.empty_pb2.Empty.FromString,
    }
    response_serializers = {
        ('protos.Admin', 'GetStatus'): server_admin_pb2.ServerStatus.SerializeToString,
        ('protos.Admin', 'StartServer'): server_admin_pb2.ServerStatus.SerializeToString,
        ('protos.Admin', 'StopServer'): server_admin_pb2.ServerStatus.SerializeToString,
    }
    method_implementations = {
        ('protos.Admin', 'GetStatus'): face_utilities.unary_unary_inline(servicer.GetStatus),
        ('protos.Admin', 'StartServer'): face_utilities.unary_unary_inline(servicer.StartServer),
        ('protos.Admin', 'StopServer'): face_utilities.unary_unary_inline(servicer.StopServer),
    }
    server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
    return beta_implementations.server(method_implementations, options=server_options)
def beta_create_Admin_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
    """Build a beta-API dynamic client stub for the protos.Admin service.

    Args:
        channel: the grpc.beta channel to the server.
        host: optional host string to target.
        metadata_transformer: optional callable applied to call metadata.
        pool, pool_size: optional thread pool / pool size.

    Returns:
        A dynamic stub exposing GetStatus/StartServer/StopServer.
    """
    # Deferred imports avoid a circular import with server_admin_pb2 at
    # module load; one import of each module replaces the three duplicate
    # pairs the compiler emitted.
    import google.protobuf.empty_pb2
    import server_admin_pb2
    request_serializers = {
        ('protos.Admin', 'GetStatus'): google.protobuf.empty_pb2.Empty.SerializeToString,
        ('protos.Admin', 'StartServer'): google.protobuf.empty_pb2.Empty.SerializeToString,
        ('protos.Admin', 'StopServer'): google.protobuf.empty_pb2.Empty.SerializeToString,
    }
    response_deserializers = {
        ('protos.Admin', 'GetStatus'): server_admin_pb2.ServerStatus.FromString,
        ('protos.Admin', 'StartServer'): server_admin_pb2.ServerStatus.FromString,
        ('protos.Admin', 'StopServer'): server_admin_pb2.ServerStatus.FromString,
    }
    # All three Admin RPCs are simple request/response calls.
    cardinalities = {
        'GetStatus': cardinality.Cardinality.UNARY_UNARY,
        'StartServer': cardinality.Cardinality.UNARY_UNARY,
        'StopServer': cardinality.Cardinality.UNARY_UNARY,
    }
    stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
    return beta_implementations.dynamic_stub(channel, 'protos.Admin', cardinalities, options=stub_options)
# @@protoc_insertion_point(module_scope)
|
nilq/baby-python
|
python
|
import tensorflow as tf
from tensorflow.keras.applications import InceptionV3
__all__ = ["inception_score"]
def inception_score(images):
    r"""Compute the Inception Score (IS) for a batch of images.

    IS = exp( E_x[ KL( p(y|x) || p(y) ) ] ), where p(y|x) are the
    InceptionV3 class probabilities for image x and p(y) is the marginal
    over the batch.

    Args:
        images: a numpy array/tensor of images. Shape: NxHxWxC
    Return:
        inception score (float)
    """
    # InceptionV3 expects 299x299 RGB input; resize if either spatial
    # dimension differs (the original only checked the height).
    if images.shape[1] != 299 or images.shape[2] != 299:
        images = tf.image.resize(images, size=(299, 299))
    assert images.shape[1:] == (299, 299, 3), "images must be of shape 299x299x3"
    inception = InceptionV3(weights="imagenet")
    predictions = inception(images)
    # Marginal class distribution p(y), approximated over the batch.
    mean_pred = tf.reduce_mean(predictions, axis=0)
    kl_div = tf.keras.losses.KLDivergence()
    in_scores = []
    for i in range(predictions.shape[0]):
        # KLDivergence(y_true, y_pred) = sum(y_true * log(y_true / y_pred)),
        # so the conditional p(y|x) must be passed first to obtain
        # KL(p(y|x) || p(y)); the original call had the arguments reversed.
        in_scores.append(kl_div(predictions[i, :], mean_pred))
    return tf.math.exp(tf.reduce_mean(in_scores)).numpy()
|
nilq/baby-python
|
python
|
from django.dispatch import receiver
from django.db.models.signals import post_save, post_delete
from django.core.mail import send_mail
from .models import Profile
from django.conf import settings
# Welcome email sent by create_profile below.
SUBJECT = 'WELCOME TO DEVCONNECT'
# NOTE(review): runtime string left byte-identical; it contains typos
# ("Congratulation", "definatly") -- fixing them would change the email
# text users receive, so it is flagged here rather than edited.
MESSAGE = """ Congratulation I have to say thank you for creating a new account
with our team. we'll definatly try so hard to make you happy with our service """
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_profile(sender, instance, created, **kwargs):
    """Receiver to create profile after user creation.

    On the first save of a new user, creates the matching Profile and
    sends a welcome email to the new address.
    """
    if created:
        profile = Profile.objects.create(user=instance,
                                         username=instance.username,
                                         email=instance.email,
                                         name=instance.first_name)
        # django.core.mail.send_mail(subject, message, from_email,
        # recipient_list): from_email must be a string -- the original
        # passed [settings.EMAIL_HOST_USER], a list, in that position.
        send_mail(SUBJECT,
                  MESSAGE,
                  settings.EMAIL_HOST_USER,
                  [profile.email],
                  fail_silently=False,
                  )
@receiver(post_save, sender=Profile)
def update_user(sender, instance, created, **kwargs):
    """Propagate edits on a Profile back onto its owning User record."""
    owner = instance.user
    # Skip the initial save: the Profile was just created *from* the user,
    # so there is nothing to sync yet.
    if not created:
        owner.first_name = instance.name
        owner.username = instance.username
        owner.email = instance.email
        owner.save()
@receiver(post_delete, sender=Profile)
def delete_profile(sender, instance, **kwargs):
    """When a Profile is removed, delete the User it belonged to as well."""
    owner = instance.user
    owner.delete()
|
nilq/baby-python
|
python
|
"""
Setup file for installation of the dataduct code
"""
from setuptools import find_packages
from setuptools import setup
from dataduct import __version__ as version
# Package metadata and installation configuration for dataduct.
setup(
    name='dataduct',
    version=version,  # single-sourced from dataduct.__version__
    author='Coursera Inc.',
    # Ship every package except the test suites.
    packages=find_packages(
        exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
    namespace_packages=['dataduct'],
    include_package_data=True,
    url='https://github.com/coursera/dataduct',
    long_description=open('README.rst').read(),
    author_email='data-infra@coursera.org',
    license='Apache License 2.0',
    description='DataPipeline for Humans',
    install_requires=[
        'boto>=2.38',
        'MySQL-python>=1.2.3',
        'pandas>=0.14',
        'psycopg2>=2.6',
        'pyparsing>=1.5.6',
        'pytimeparse>=1.1.4',
        'PyYAML>=3.11',
        'testfixtures>=4.1.2'
    ],
    scripts=['bin/dataduct'],  # the `dataduct` command-line entry point
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Operating System :: MacOS',
        'Operating System :: MacOS :: MacOS 9',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Unix',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Unix Shell',
        'Topic :: Database',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Information Analysis',
        'Topic :: Scientific/Engineering :: Visualization',
        'Topic :: Utilities',
    ],
)
|
nilq/baby-python
|
python
|
import re
# Print every line of regex.txt that contains an email-like token,
# e.g. 'A message from cs@uni.edu to is@uni.edu'.
# `with` guarantees the file handle is closed (the original leaked it),
# iterating the handle avoids materializing all lines, and the raw string
# avoids the invalid-escape-sequence warning of '[\w\.]+@[\w\.]+'.
with open("regex.txt", "r") as f:
    for line in f:
        if re.findall(r'[\w.]+@[\w.]+', line):
            print(line, end='')
|
nilq/baby-python
|
python
|
"""
Command-line interface implementing synthetic MDS Provider data generation:
- custom geographic area, device inventory, time periods
- generates complete "days" of service
- saves data as JSON files to container volume
All fully customizable through extensive parameterization and configuration options.
"""
import argparse
from datetime import datetime, timedelta
import json
import math
import mds
from mds.fake import geometry
from mds.fake.data import random_string
from mds.fake.provider import ProviderDataGenerator
from mds.json import parse_boundary, CustomJsonEncoder
from mds.schema import ProviderSchema
import os
import random
import time
import uuid
def setup_cli():
    """
    Create the cli argument interface, and parses incoming args.
    Returns a tuple:
    - the argument parser
    - the parsed args
    """
    # The trips schema is consulted only to list the valid vehicle_type /
    # propulsion_type choices in the --help text below.
    schema = ProviderSchema(mds.TRIPS)
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--boundary",
        type=str,
        help="Path to a data file with geographic bounds for the generated data. Overrides the MDS_BOUNDARY environment variable."
    )
    parser.add_argument(
        "--close",
        type=int,
        help="The hour of the day (24-hr format) that provider stops operations. Overrides --start and --end."
    )
    parser.add_argument(
        "--date_format",
        type=str,
        help="Format for datetime input (to this CLI) and output (to stdout and files). Options:\
            - 'unix' for Unix timestamps (default)\
            - 'iso8601' for ISO 8601 format\
            - '<python format string>' for custom formats,\
            see https://docs.python.org/3/library/datetime.html#strftime-strptime-behavior"
    )
    parser.add_argument(
        "--devices",
        type=int,
        help="The number of devices to model in the generated data"
    )
    parser.add_argument(
        "--end",
        type=str,
        help="The latest event in the generated data, in --date_format format"
    )
    parser.add_argument(
        "--inactivity",
        type=float,
        help="Describes the portion of the fleet that remains inactive."
    )
    parser.add_argument(
        "--open",
        type=int,
        help="The hour of the day (24-hr format) that provider begins operations. Overrides --start and --end."
    )
    parser.add_argument(
        "--output",
        type=str,
        help="Path to a directory to write the resulting data file(s)"
    )
    parser.add_argument(
        "--propulsion_types",
        type=str,
        nargs="+",
        help="A list of propulsion_types to use for the generated data, e.g. '{}'".format(", ".join(schema.propulsion_types()))
    )
    parser.add_argument(
        "--provider_name",
        type=str,
        help="The name of the fake mobility as a service provider"
    )
    parser.add_argument(
        "--provider_id",
        type=uuid.UUID,
        help="The ID of the fake mobility as a service provider"
    )
    parser.add_argument(
        "--start",
        type=str,
        help="The earliest event in the generated data, in --date_format format"
    )
    # Speed flags: --speed_ms wins over --speed_mph when both are given
    # (see the precedence logic in __main__ below).
    parser.add_argument(
        "--speed_mph",
        type=float,
        help="The average speed of devices in miles per hour. Cannot be used with --speed_ms"
    )
    parser.add_argument(
        "--speed_ms",
        type=float,
        help="The average speed of devices in meters per second. Always takes precedence"
    )
    parser.add_argument(
        "--vehicle_types",
        type=str,
        nargs="+",
        help="A list of vehicle_types to use for the generated data, e.g. '{}'".format(", ".join(schema.vehicle_types()))
    )
    return parser, parser.parse_args()
if __name__ == "__main__":
    T0 = time.time()
    parser, args = setup_cli()
    print(f"Parsed args: {args}")
    # Resolve the boundary file from the CLI flag or the environment.
    # Only KeyError (missing MDS_BOUNDARY) is expected here; the original
    # bare `except:` also swallowed SystemExit and KeyboardInterrupt.
    try:
        boundary_file = args.boundary or os.environ["MDS_BOUNDARY"]
    except KeyError:
        print("A boundary file is required")
        exit(1)
    # collect the parameters for data generation
    provider_name = args.provider_name or f"Provider {random_string(3)}"
    provider_id = args.provider_id or uuid.uuid4()
    N = args.devices or random.randint(100, 500)
    date_format = "unix" if args.date_format is None else args.date_format
    encoder = CustomJsonEncoder(date_format=date_format)
    # Generation window defaults to "today"; --start/--end override it,
    # parsed according to --date_format.
    date_start = datetime.today()
    date_end = date_start
    if date_format == "unix":
        date_start = datetime.fromtimestamp(
            int(args.start)) if args.start else date_start
        date_end = datetime.fromtimestamp(
            int(args.end)) if args.end else date_end
    elif date_format == "iso8601":
        date_start = datetime.fromisoformat(
            args.start) if args.start else date_start
        date_end = datetime.fromisoformat(args.end) if args.end else date_end
    else:
        date_start = datetime.strptime(
            args.start, date_format) if args.start else date_start
        date_end = datetime.strptime(
            args.end, date_format) if args.end else date_end
    # Operating hours and the inactive share of the fleet.
    hour_open = 7 if args.open is None else args.open
    hour_closed = 19 if args.close is None else args.close
    inactivity = random.uniform(
        0, 0.05) if args.inactivity is None else args.inactivity
    # convert speed to meters/second; --speed_ms takes precedence,
    # otherwise fall back to a random 8-15 mph equivalent.
    ONE_MPH_METERSSEC = 0.44704
    if args.speed_ms is not None:
        speed = args.speed_ms
    elif args.speed_mph is not None:
        speed = args.speed_mph * ONE_MPH_METERSSEC
    else:
        speed = random.uniform(8 * ONE_MPH_METERSSEC, 15 * ONE_MPH_METERSSEC)
    # setup a data directory
    outputdir = "data" if args.output is None else args.output
    os.makedirs(outputdir, exist_ok=True)
    print(f"Parsing boundary file: {boundary_file}")
    t1 = time.time()
    boundary = parse_boundary(boundary_file, downloads=outputdir)
    print(f"Valid boundary: {boundary.is_valid} ({time.time() - t1} s)")
    gen = ProviderDataGenerator(
        boundary=boundary,
        speed=speed,
        vehicle_types=args.vehicle_types,
        propulsion_types=args.propulsion_types)
    print(f"Generating {N} devices for '{provider_name}'")
    t1 = time.time()
    devices = gen.devices(N, provider_name, provider_id)
    print(f"Generating devices complete ({time.time() - t1} s)")
    status_changes, trips = [], []
    print(
        f"Generating data from {encoder.encode(date_start)} to {encoder.encode(date_end)}")
    t1 = time.time()
    # Generate one full service day at a time, inclusive of date_end.
    date = date_start
    while date <= date_end:
        formatted_date = encoder.encode(date)
        print(
            f"Starting day: {formatted_date} (open hours {hour_open} to {hour_closed})")
        t2 = time.time()
        day_status_changes, day_trips = gen.service_day(
            devices, date, hour_open, hour_closed, inactivity)
        status_changes.extend(day_status_changes)
        trips.extend(day_trips)
        date = date + timedelta(days=1)
        print(f"Finished day: {formatted_date} ({time.time() - t2} s)")
    print(f"Finished generating data ({time.time() - t1} s)")
    # Write trips.json and status_changes.json only if anything was produced.
    if len(status_changes) > 0 or len(trips) > 0:
        print("Generating data files")
        t1 = time.time()
        trips_file = os.path.join(outputdir, "trips.json")
        print("Writing to:", trips_file)
        t2 = time.time()
        with open(trips_file, "w") as f:
            payload = gen.make_payload(trips=trips)
            f.write(encoder.encode(payload))
        print(f"Finished ({time.time() - t2} s)")
        sc_file = os.path.join(outputdir, "status_changes.json")
        print("Writing to:", sc_file)
        t2 = time.time()
        with open(sc_file, "w") as f:
            payload = gen.make_payload(status_changes=status_changes)
            f.write(encoder.encode(payload))
        print(f"Finished ({time.time() - t2} s)")
        print(f"Generating data files complete ({time.time() - t1} s)")
    print(f"Data generation complete ({time.time() - T0} s)")
|
nilq/baby-python
|
python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.