id stringlengths 2 8 | text stringlengths 16 264k | dataset_id stringclasses 1 value |
|---|---|---|
6680585 | '''Desenvolva um programa que pergunte a distância de uma viagem em Km.
Calcule o preço da passagem, cobrando R$0,50 por Km para viagens de até 200Km
e R$0,45 para viagens mais longas.'''
def preco_passagem(distancia):
    """Return the ticket price in R$ for a trip of ``distancia`` km.

    Charges R$0.50 per km for trips up to 200 km and R$0.45 per km for
    longer trips.
    """
    taxa = 0.50 if distancia <= 200 else 0.45
    return distancia * taxa


if __name__ == '__main__':
    dist = float(input('qual a distância da viagem? '))
    print(f'você está prestes a fazer uma viagem de {dist:.2f}km.')
    # Keep the two original messages: "viagem" for short trips, "passagem" otherwise.
    if dist <= 200:
        print(f'o preço da viagem será de R${preco_passagem(dist):.2f}')
    else:
        print(f'o preço da passagem será de R${preco_passagem(dist):.2f}')
| StarcoderdataPython |
3426306 | <reponame>corochann/chainerchem
# --- Configuration ---
# --- Constant definitions ---
# The maximum atomic number in rdkit
# Used as the upper bound when sizing per-element feature vectors/embeddings.
# NOTE(review): presumably matches the highest atomic number rdkit supports —
# confirm against the rdkit periodic table before changing.
MAX_ATOMIC_NUM = 117
| StarcoderdataPython |
3366063 | <filename>predict.py
import random
import torch
from torch.autograd import Variable
from train_util import variable_from_sentence
class ModelPredictor(object):
    """Greedy decoding wrapper around a trained seq2seq encoder/decoder pair.

    NOTE(review): uses the legacy ``torch.autograd.Variable`` API — these
    wrappers are no-ops on modern PyTorch but date this code to torch < 0.4.
    """
    def __init__(self, encoder, decoder, input_lang, output_lang, max_length):
        # Trained modules plus the vocabularies used to (de)tokenize text.
        self.encoder = encoder
        self.decoder = decoder
        self.input_lang = input_lang
        self.output_lang = output_lang
        self.max_length = max_length
    def evaluate(self, sentence):
        """Decode ``sentence`` greedily and return the predicted output tokens."""
        SOS_token = self.input_lang.word2index["SOS"]
        EOS_token = self.input_lang.word2index["EOS"]
        input_variable = variable_from_sentence(self.input_lang, sentence, self.max_length)
        input_length = input_variable.size()[0]
        encoder_hidden = self.encoder.init_hidden()
        # One encoder-output slot per possible source position (attention keys).
        encoder_outputs = Variable(torch.zeros(self.max_length, self.encoder.hidden_size))
        for ei in range(input_length):
            encoder_output, encoder_hidden = self.encoder(input_variable[ei],
                                                          encoder_hidden)
            encoder_outputs[ei] = encoder_outputs[ei] + encoder_output[0][0]
        decoder_input = Variable(torch.LongTensor([[SOS_token]]))  # SOS
        decoder_hidden = encoder_hidden
        decoded_words = []
        # Greedy decoding: always take the single most likely next token.
        for di in range(self.max_length):
            decoder_output, decoder_hidden, decoder_attention = self.decoder(
                decoder_input, decoder_hidden, encoder_outputs)
            topv, topi = decoder_output.data.topk(1)
            # NOTE(review): on torch >= 0.4 this is a 0-d tensor, not a plain
            # int — confirm index2word accepts it (or add .item()).
            ni = topi[0][0]
            if ni == EOS_token:
                break
            else:
                decoded_words.append(self.output_lang.index2word[ni])
            decoder_input = Variable(torch.LongTensor([[ni]]))
        return decoded_words
    def evaluate_randomly(self, pairs, n=10):
        """Decode ``n`` random (source, target) pairs, printing each and the
        exact-match accuracy percentage at the end."""
        match = 0
        for i in range(n):
            pair = random.choice(pairs)
            print('>', pair[0])
            print('=', pair[1])
            output_words = self.evaluate(pair[0])
            output_sentence = ' '.join(output_words)
            print('<', output_sentence)
            print('')
            # Exact string match against the reference translation.
            if pair[1] == output_sentence:
                match += 1
        print("accuracy: ", (match / n) * 100, "%")
    def predict_sentence(self, sentence):
        """Return the decoded output for ``sentence`` as a single string."""
        return ' '.join(self.evaluate(sentence))
| StarcoderdataPython |
8187203 | <filename>tests/test_dataset_KDD99.py
import os
import pandas as pd
from pandas import Int64Index
from tests.abstract.t_roughset import AbstractClasses
class TestRoughSet(AbstractClasses.TBase):
    """
    Run tests for dataset: KDD99

    Loads the (very large) KDD Cup 1999 "corrected" dataset and compares the
    computed rough-set approximation regions against precomputed fixtures.
    NOTE(review): ``squeeze=True`` in read_csv and ``pandas.Int64Index`` were
    removed in pandas 2.0 — this module appears to target an older pandas.
    """
    def setUp(self):
        self.enabled = True  # Dataset contains > 4 mln records
        self.test_dataset_num = "KDD99"
        # Local folder override via env var; falls back to downloading the
        # gzipped dataset directly from the UCI mirror.
        self.KDD99_FILE_PATH = os.environ.get(
            "ROUGHSETS_KDD99_TEST_DATA_FOLDER",
            "http://kdd.ics.uci.edu/databases/kddcup99/corrected.gz")
        # Don't use setUpDataSet() from Parent class
        # super().setUpDataSet()
        self.setUpDataSet()
    def setUpDataSet(self):
        """Load the KDD99 frame and build the RoughSetDT under test."""
        if not self.enabled:
            return "Test not enabled, so skip loading data"
        # import roughsets_pandas as rst
        from roughsets_base.roughset_dt import RoughSetDT
        # NOTE(review): ``columns`` is defined but never applied to the frame
        # (read_csv is called with header=None and no names=) — kept here as
        # documentation of the 41 features + target; confirm intent.
        columns = ["duration", "protocol_type", "service", "flag", "src_bytes", "dst_bytes", "land", "wrong_fragment",
                   "urgent", "hot", "num_failed_logins", "logged_in", "num_compromised", "root_shell", "su_attempted",
                   "num_root", "num_file_creations", "num_shells", "num_access_files", "num_outbound_cmds",
                   "is_hot_login", "is_guest_login", "count", "srv_count", "serror_rate", "srv_serror_rate",
                   "rerror_rate", "srv_rerror_rate", "same_srv_rate", "diff_srv_rate", "srv_diff_host_rate",
                   "dst_host_count", "dst_host_srv_count", "dst_host_same_srv_rate", "dst_host_diff_srv_rate",
                   "dst_host_same_src_port_rate", "dst_host_srv_diff_host_rate", "dst_host_serror_rate",
                   "dst_host_srv_serror_rate", "dst_host_rerror_rate", "dst_host_srv_rerror_rate", "target"]
        df = pd.read_csv(self.KDD99_FILE_PATH, header=None)
        # %timeit -r1 -n1 df = pd.read_csv(self.KDD99_FILE_PATH, header=None)
        # 54.3 s ± 0 ns per loop (mean ± std. dev. of 1 run, 1 loop each)
        # First 41 columns are condition attributes, the last is the decision.
        self.X = df.iloc[:, 0:41]
        self.y = df.iloc[:, 41]
        self.rough_set: RoughSetDT = RoughSetDT(self.X, self.y)
    def tearDown(self) -> None:
        # TODO: Destroy shared data
        pass
    def test_kdd_A(self):
        """Check the three approximation regions over the full attribute set."""
        if not self.enabled:
            return "Test not enabled"
        # Expected regions precomputed and stored as CSV/zip fixtures.
        filename = self.get_test_dataset_path("kdd_positive_region_A.zip")
        true_positive_region_of_X = pd.read_csv(filename, compression='zip', header=0, squeeze=True)
        true_positive_region_of_X: Int64Index = Int64Index(true_positive_region_of_X)
        filename = self.get_test_dataset_path("kdd_boundary_region_A.csv")
        true_boundary_region_of_X = pd.read_csv(filename, header=0, squeeze=True)
        true_boundary_region_of_X = Int64Index(true_boundary_region_of_X)
        filename = self.get_test_dataset_path("kdd_negative_region_A.csv")
        true_negative_region_of_X = pd.read_csv(filename, header=0, squeeze=True)
        true_negative_region_of_X = Int64Index(true_negative_region_of_X)
        self.rough_set.get_indiscernibility_relations()
        positive_region_of_X, boundary_region_of_X, upper_approximation_of_X, negative_region_of_X = \
            self.rough_set.get_approximation_indices()
        # %timeit -r1 -n1 positive_region_of_X, boundary_region_of_X, upper_approximation_of_X, negative_region_of_X = RST.get_approximation_indices()
        # 1min 3s ± 0 ns per loop (mean ± std. dev. of 1 run, 1 loop each)
        self.assert_check_eqality_of_2_dataframe_indices(positive_region_of_X, true_positive_region_of_X)
        self.assert_check_eqality_of_2_dataframe_indices(boundary_region_of_X, true_boundary_region_of_X)
        self.assert_check_eqality_of_2_dataframe_indices(negative_region_of_X, true_negative_region_of_X)
    def test_kdd_B(self):
        """Check the approximation regions over an attribute subset (cols 0-1)."""
        if not self.enabled:
            return "Test not enabled"
        # subset = ["duration", "protocol_type"]
        subset = [0, 1]
        filename = self.get_test_dataset_path("kdd_positive_region_B.zip")
        true_positive_region_of_X = pd.read_csv(filename, compression='zip', header=0, squeeze=True)
        true_positive_region_of_X: Int64Index = Int64Index(true_positive_region_of_X)
        # Compression is inferred from the .zip extension here.
        filename = self.get_test_dataset_path("kdd_boundary_region_B.zip")
        true_boundary_region_of_X = pd.read_csv(filename, header=0, squeeze=True)
        true_boundary_region_of_X = Int64Index(true_boundary_region_of_X)
        filename = self.get_test_dataset_path("kdd_negative_region_B.csv")
        true_negative_region_of_X = pd.read_csv(filename, header=0, squeeze=True)
        true_negative_region_of_X = Int64Index(true_negative_region_of_X)
        # self.rough_set.get_indiscernibility_relations(
        #     subset=subset
        # )
        # X_IND, y_IND, IND_OF_X_EXT = self.rough_set.get_Xy_with_indiscernibility_relations_index(subset=subset)
        positive_region_of_X, boundary_region_of_X, upper_approximation_of_X, negative_region_of_X = \
            self.rough_set.get_approximation_indices(subset=subset)
        self.assert_check_eqality_of_2_dataframe_indices(positive_region_of_X, true_positive_region_of_X)
        self.assert_check_eqality_of_2_dataframe_indices(boundary_region_of_X, true_boundary_region_of_X)
        self.assert_check_eqality_of_2_dataframe_indices(negative_region_of_X, true_negative_region_of_X)
| StarcoderdataPython |
6631229 | <reponame>brnor/dipl
from gym_puyopuyo.env import register # noqa: F401
| StarcoderdataPython |
1616950 | # Functions to convert HSV colors to RGB colors lovingly ported from FastLED
#
# The basically fall into two groups: spectra, and rainbows.
# Spectra and rainbows are not the same thing. Wikipedia has a good
# illustration here
# http://upload.wikimedia.org//wikipedia//commons//f//f6//Prism_compare_rainbow_01.png
# from this article
# http://en.wikipedia.org//wiki//Rainbow#Number_of_colours_in_spectrum_or_rainbow
# that shows a 'spectrum' and a 'rainbow' side by side. Among other
# differences, you'll see that a 'rainbow' has much more yellow than
# a plain spectrum. "Classic" LED color washes are spectrum based, and
# usually show very little yellow.
#
# Wikipedia's page on HSV color space, with pseudocode for conversion
# to RGB color space
# http://en.wikipedia.org//wiki//HSL_and_HSV
# Note that their conversion algorithm, which is (naturally) very popular
# is in the "maximum brightness at any given hue" style, vs the "uniform
# brightness for all hues" style.
#
# You can't have both; either purple is the same brightness as red, e.g
# red = #FF0000 and purple = #800080 -> same "total light" output
# OR purple is 'as bright as it can be', e.g.
# red = #FF0000 and purple = #FF00FF -> purple is much brighter than red.
# The colorspace conversions here try to keep the apparent brightness
# constant even as the hue varies.
#
# Adafruit's "Wheel" function, discussed here
# http://forums.adafruit.com//viewtopic.php?f=47&t=22483
# is also of the "constant apparent brightness" variety.
#
# More details here: https://github.com//FastLED//FastLED//wiki//FastLED-HSV-Colors
from __future__ import division
import colorsys
import math
def color_scale(color, level):
    """Scale each channel of an RGB tuple by ``level`` (0-255) using the
    integer ``(channel * level) >> 8`` approximation."""
    return tuple((channel * level) >> 8 for channel in color)
def color_blend(a, b):
    """Perform a Screen blend of two RGB color tuples ``a`` and ``b``.

    Per channel: ``255 - ((255 - a) * (255 - b) >> 8)``.
    """
    def screen(x, y):
        return 255 - (((255 - x) * (255 - y)) >> 8)
    return (screen(a[0], b[0]), screen(a[1], b[1]), screen(a[2], b[2]))
def gamma_correct(color, gamma):
    """Map each channel of an RGB tuple through the ``gamma`` lookup table."""
    r, g, b = color
    return (gamma[r], gamma[g], gamma[b])
def hsv2rgb_raw(hsv):
    """
    Converts an HSV tuple to RGB. Intended for internal use.
    You should use hsv2rgb_spectrum or hsv2rgb_rainbow instead.
    """
    HSV_SECTION_3 = 0x40
    h, s, v = hsv
    # Every channel is lifted to at least this level; it encodes the
    # desaturation (white) component derived from (255 - s).
    floor = (v * (255 - s)) // 256
    # Amount of value left over to express the hue itself.
    amplitude = v - floor
    section = h // HSV_SECTION_3  # which third of the wheel (0..2)
    offset = h % HSV_SECTION_3    # position within that third (0..63)
    # Hue-scaled rising and falling ramps, lifted by the brightness floor.
    rising = ((offset * amplitude) // 64) + floor
    falling = (((HSV_SECTION_3 - 1 - offset) * amplitude) // 64) + floor
    if section == 0:
        return (falling, rising, floor)
    if section == 1:
        return (floor, falling, rising)
    return (rising, floor, falling)
def hsv2rgb_spectrum(hsv):
    """Generates RGB values from HSV values in line with a typical light spectrum.

    Compresses the full 0-255 hue range into hsv2rgb_raw's 0-191 wheel.
    """
    h, s, v = hsv
    scaled_hue = (h * 192) >> 8
    return hsv2rgb_raw((scaled_hue, s, v))
def _nscale8x3_video(r, g, b, scale):
"""Internal Use Only"""
nonzeroscale = 0
if scale != 0:
nonzeroscale = 1
if r != 0:
r = ((r * scale) >> 8) + nonzeroscale
if g != 0:
g = ((g * scale) >> 8) + nonzeroscale
if b != 0:
b = ((b * scale) >> 8) + nonzeroscale
return (r, g, b)
def _scale8_video_LEAVING_R1_DIRTY(i, scale):
"""Internal Use Only"""
nonzeroscale = 0
if scale != 0:
nonzeroscale = 1
if i != 0:
i = ((i * scale) >> 8) + nonzeroscale
return i
def hsv2rgb_rainbow(hsv):
    """Generates RGB values from HSV that have an even visual distribution.
    Be careful as this method is only half as fast as hsv2rgb_spectrum.
    """
    h, s, v = hsv
    # Position within the current 1/8 slice of the hue wheel.
    offset = h & 0x1F  # 0..31
    offset8 = offset * 8
    # Roughly a third of full scale across one slice (85 at offset 31).
    third = (offset8 * (256 // 3)) >> 8
    r, g, b = (0, 0, 0)
    # The top three hue bits select one of eight wheel octants; each octant
    # blends linearly between fixed FastLED anchor colors.
    if not (h & 0x80):
        if not (h & 0x40):
            if not (h & 0x20):
                # octant 0: red -> orange
                r = 255 - third
                g = third
                b = 0
            else:
                # octant 1: orange -> yellow
                r = 171
                g = 85 + third
                b = 0x00
        else:
            if not (h & 0x20):
                # octant 2: yellow -> green (green rises twice as fast)
                twothirds = (third << 1)
                r = 171 - twothirds
                g = 171 + third
                b = 0
            else:
                # octant 3: green -> aqua
                r = 0
                g = 255 - third
                b = third
    else:
        if not (h & 0x40):
            if not (h & 0x20):
                # octant 4: aqua -> blue
                r = 0x00
                twothirds = (third << 1)
                g = 171 - twothirds
                b = 85 + twothirds
            else:
                # octant 5: blue -> purple
                r = third
                g = 0
                b = 255 - third
        else:
            if not (h & 0x20):
                # octant 6: purple -> pink
                r = 85 + third
                g = 0
                b = 171 - third
            else:
                # octant 7: pink -> back to red
                r = 171 + third
                g = 0x00
                b = 85 - third
    # Desaturation: scale toward a brightness floor of (255 - s)^2 / 256 so
    # apparent brightness stays roughly constant as saturation drops.
    if s != 255:
        r, g, b = _nscale8x3_video(r, g, b, s)
        desat = 255 - s
        desat = (desat * desat) >> 8
        brightness_floor = desat
        r = r + brightness_floor
        g = g + brightness_floor
        b = b + brightness_floor
    # Value (brightness) scaling; the video variants keep nonzero channels lit.
    if v != 255:
        v = _scale8_video_LEAVING_R1_DIRTY(v, v)
        r, g, b = _nscale8x3_video(r, g, b, v)
    return (r, g, b)
def hsv2rgb_360(hsv):
    """
    Python default hsv to rgb conversion for when hue values 0-359 are preferred.
    Due to requiring float math, this method is slower than hsv2rgb_rainbow and hsv2rgb_spectrum.
    """
    hue, sat, val = hsv
    channels = colorsys.hsv_to_rgb(hue / 360.0, sat, val)
    return tuple(int(c * 255.0) for c in channels)
# pre-generated spectrums for the sake of speed: 256-entry (or 360-entry)
# lookup tables at full saturation/value, so the hue2rgb_* helpers below
# reduce to a single list index at runtime.
hue_raw = [hsv2rgb_raw((hue, 255, 255)) for hue in range(256)]
hue_rainbow = [hsv2rgb_rainbow((hue, 255, 255)) for hue in range(256)]
hue_spectrum = [hsv2rgb_spectrum((hue, 255, 255)) for hue in range(256)]
hue_360 = [hsv2rgb_360((hue, 1.0, 1.0)) for hue in range(360)]
def hue2rgb_raw(hue):
    """Look up the raw-wheel RGB tuple for ``hue`` (0-255).

    Raises ValueError for out-of-range hues.
    """
    # Bug fix: the original test used `or`, which is true for every integer,
    # so negative hues silently indexed from the end of the table.
    if 0 <= hue < 256:
        return hue_raw[hue]
    else:
        raise ValueError("hue must be between 0 and 255")
def hue2rgb_rainbow(hue):
    """Look up the rainbow-wheel RGB tuple for ``hue`` (0-255).

    Raises ValueError for out-of-range hues.
    """
    # Bug fix: `or` made the range test always true for integers, letting
    # negative hues index from the end of the table instead of raising.
    if 0 <= hue < 256:
        return hue_rainbow[hue]
    else:
        raise ValueError("hue must be between 0 and 255")
def hue2rgb_spectrum(hue):
    """Look up the spectrum-wheel RGB tuple for ``hue`` (0-255).

    Raises ValueError for out-of-range hues.
    """
    # Bug fix: `or` made the range test always true for integers, letting
    # negative hues index from the end of the table instead of raising.
    if 0 <= hue < 256:
        return hue_spectrum[hue]
    else:
        raise ValueError("hue must be between 0 and 255")
def hue2rgb_360(hue):
    """Look up the colorsys-derived RGB tuple for ``hue`` (0-359).

    Raises ValueError for out-of-range hues.
    """
    # Bug fix: `or` made the range test always true for integers, letting
    # negative hues index from the end of the table instead of raising.
    if 0 <= hue < 360:
        return hue_360[hue]
    else:
        raise ValueError("hue must be between 0 and 359")
hsv2rgb = hsv2rgb_rainbow
hue2rgb = hue2rgb_rainbow
def hex2rgb(hex):
    """Helper for converting RGB and RGBA hex values to Color.

    Accepts an optional leading '#'. Returns ``(r, g, b)`` for 6-digit input
    and ``(r, g, b, a)`` for 8-digit input; raises ValueError otherwise.
    (The docstring always promised RGBA support; 8-digit input previously
    raised — this generalization is backward-compatible for 6-digit input.)
    """
    hex = hex.strip('#')
    if len(hex) not in (6, 8):
        raise ValueError('Must pass in a 6 or 8 character hex value!')
    # Split into 2-character channel pairs and parse each as base-16.
    pairs = [hex[i:i + 2] for i in range(0, len(hex), 2)]
    return tuple(int(pair, 16) for pair in pairs)
WHEEL_MAX = 384
def _gen_wheel():
for p in range(385):
if p < 128:
r = 127 - p % 128
g = p % 128
b = 0
elif p < 256:
g = 127 - p % 128
b = p % 128
r = 0
else:
b = 127 - p % 128
r = p % 128
g = 0
return (r, g, b)
_wheel = _gen_wheel()
def wheel_color(position):
    """Get color from wheel value (0 - 384), clamping out-of-range positions.
    Provided for those used to using it from Adafruit libraries.
    """
    clamped = min(max(position, 0), 384)
    return _wheel[clamped]
def even_dist(start, stop, steps):
    """Return ``steps`` integers spread evenly from start to stop, inclusive."""
    intervals = steps - 1
    start, stop = float(start), float(stop)
    step_size = (stop - start) / intervals
    points = [int(round(start + i * step_size)) for i in range(intervals)]
    points.append(int(stop))
    return points
def hue_gradient(start, stop, steps):
    """Return ``steps`` hues evenly spanning start..stop (each 0-255),
    descending when start > stop."""
    assert 0 <= start < 256 and 0 <= stop < 256, "hue must be between 0 and 255"
    reverse = start > stop
    lo, hi = (stop, start) if reverse else (start, stop)
    gradient = even_dist(lo, hi, steps)
    return gradient[::-1] if reverse else gradient
def hue_helper(pos, length, cycle_step):
    """Spread the 0-255 hue range across ``length`` positions, rotated by ``cycle_step``."""
    hue = ((pos * 255 // length) + cycle_step) % 255
    return hue2rgb(hue)
def hue_helper360(pos, length, cycle_step):
    """Like hue_helper, but over the 0-359 hue range."""
    hue = ((pos * 360 // length) + cycle_step) % 360
    return hue2rgb_360(hue)
def wheel_helper(pos, length, cycle_step):
    """Helper for wheel_color that distributes colors over length and allows shifting position"""
    wheel_pos = ((pos * WHEEL_MAX // length) + cycle_step) % WHEEL_MAX
    return wheel_color(wheel_pos)
def genVector(width, height, x_mult=1, y_mult=1):
    """Generate a map of vector lengths from the center point to each coordinate.

    width  - width of matrix to generate
    height - height of matrix to generate
    x_mult - value to scale x-axis by
    y_mult - value to scale y-axis by
    """
    center_x = (width - 1) / 2.0
    center_y = (height - 1) / 2.0
    rows = []
    for y in range(height):
        row = [
            int(math.sqrt(math.pow(x - center_x, 2 * x_mult) + math.pow(y - center_y, 2 * y_mult)))
            for x in range(width)
        ]
        rows.append(row)
    return rows
def diagonal_matrix(d, offset=0):
    """Return a d x d matrix of RGB tuples where cells on the same diagonal share a hue.

    NOTE(review): ``offset`` is accepted but never used — confirm whether it
    was meant to rotate the hue selection.
    """
    # 2*d - 1 hues cover every diagonal of a d x d grid; the index expression
    # folds wrap-around diagonals onto the upper half of the hue list.
    hues = hue_gradient(0, 255, d+d-1)
    return [[hues[(x+y+(d*y))%d + ((d-1) if x >= (d-y) else 0)] for x in range(d)] for y in range(d)]
# Named color constants: standard CSS/X11-style RGB triples (0-255 per channel).
Off = (0, 0, 0)
Blue = (0, 0, 255)
Pink = (255, 192, 203)
Honeydew = (240, 255, 240)
Purple = (128, 0, 128)
Fuchsia = (255, 0, 255)
LawnGreen = (124, 252, 0)
AliceBlue = (240, 248, 255)
Crimson = (220, 20, 60)
White = (255, 255, 255)
NavajoWhite = (255, 222, 173)
Cornsilk = (255, 248, 220)
Bisque = (255, 228, 196)
PaleGreen = (152, 251, 152)
Brown = (165, 42, 42)
DarkTurquoise = (0, 206, 209)
DarkGreen = (0, 100, 0)
MediumOrchid = (186, 85, 211)
Chocolate = (210, 105, 30)
PapayaWhip = (255, 239, 213)
Olive = (128, 128, 0)
DarkSalmon = (233, 150, 122)
PeachPuff = (255, 218, 185)
Plum = (221, 160, 221)
DarkGoldenrod = (184, 134, 11)
MintCream = (245, 255, 250)
CornflowerBlue = (100, 149, 237)
HotPink = (255, 105, 180)
DarkBlue = (0, 0, 139)
LimeGreen = (50, 205, 50)
DeepSkyBlue = (0, 191, 255)
DarkKhaki = (189, 183, 107)
LightGrey = (211, 211, 211)
Yellow = (255, 255, 0)
LightSalmon = (255, 160, 122)
MistyRose = (255, 228, 225)
SandyBrown = (244, 164, 96)
DeepPink = (255, 20, 147)
Magenta = (255, 0, 255)
Amethyst = (153, 102, 204)
DarkCyan = (0, 139, 139)
GreenYellow = (173, 255, 47)
DarkOrchid = (153, 50, 204)
OliveDrab = (107, 142, 35)
Chartreuse = (127, 255, 0)
Peru = (205, 133, 63)
Orange = (255, 165, 0)
Red = (255, 0, 0)
Wheat = (245, 222, 179)
LightCyan = (224, 255, 255)
LightSeaGreen = (32, 178, 170)
BlueViolet = (138, 43, 226)
Cyan = (0, 255, 255)
MediumPurple = (147, 112, 219)
MidnightBlue = (25, 25, 112)
Gainsboro = (220, 220, 220)
PaleTurquoise = (175, 238, 238)
PaleGoldenrod = (238, 232, 170)
Gray = (128, 128, 128)
MediumSeaGreen = (60, 179, 113)
Moccasin = (255, 228, 181)
Ivory = (255, 255, 240)
SlateBlue = (106, 90, 205)
Green = (0, 255, 0)
Green_HTML = (0, 128, 0)
DarkSlateBlue = (72, 61, 139)
Teal = (0, 128, 128)
Azure = (240, 255, 255)
LightSteelBlue = (176, 196, 222)
Tan = (210, 180, 140)
AntiqueWhite = (250, 235, 215)
WhiteSmoke = (245, 245, 245)
GhostWhite = (248, 248, 255)
MediumTurquoise = (72, 209, 204)
FloralWhite = (255, 250, 240)
LavenderBlush = (255, 240, 245)
SeaGreen = (46, 139, 87)
Lavender = (230, 230, 250)
BlanchedAlmond = (255, 235, 205)
DarkOliveGreen = (85, 107, 47)
DarkSeaGreen = (143, 188, 143)
Violet = (238, 130, 238)
Navy = (0, 0, 128)
Beige = (245, 245, 220)
SaddleBrown = (139, 69, 19)
IndianRed = (205, 92, 92)
Snow = (255, 250, 250)
SteelBlue = (70, 130, 180)
MediumSlateBlue = (123, 104, 238)
Black = (0, 0, 0)
LightBlue = (173, 216, 230)
Turquoise = (64, 224, 208)
MediumVioletRed = (199, 21, 133)
DarkViolet = (148, 0, 211)
DarkGray = (169, 169, 169)
Salmon = (250, 128, 114)
DarkMagenta = (139, 0, 139)
Tomato = (255, 99, 71)
SkyBlue = (135, 206, 235)
Goldenrod = (218, 165, 32)
MediumSpringGreen = (0, 250, 154)
DodgerBlue = (30, 144, 255)
Aqua = (0, 255, 255)
ForestGreen = (34, 139, 34)
DarkRed = (139, 0, 0)
SlateGray = (112, 128, 144)
Indigo = (75, 0, 130)
CadetBlue = (95, 158, 160)
LightYellow = (255, 255, 224)
DarkOrange = (255, 140, 0)
PowderBlue = (176, 224, 230)
RoyalBlue = (65, 105, 225)
Sienna = (160, 82, 45)
Thistle = (216, 191, 216)
Lime = (0, 255, 0)
Seashell = (255, 245, 238)
LemonChiffon = (255, 250, 205)
LightSkyBlue = (135, 206, 250)
YellowGreen = (154, 205, 50)
Plaid = (204, 85, 51)
Aquamarine = (127, 255, 212)
LightCoral = (240, 128, 128)
DarkSlateGray = (47, 79, 79)
Coral = (255, 127, 80)
Khaki = (240, 230, 140)
BurlyWood = (222, 184, 135)
LightGoldenrodYellow = (250, 250, 210)
MediumBlue = (0, 0, 205)
LightSlateGray = (119, 136, 153)
RosyBrown = (188, 143, 143)
Silver = (192, 192, 192)
PaleVioletRed = (219, 112, 147)
FireBrick = (178, 34, 34)
SpringGreen = (0, 255, 127)
LightGreen = (144, 238, 144)
Linen = (250, 240, 230)
OrangeRed = (255, 69, 0)
DimGray = (105, 105, 105)
Maroon = (128, 0, 0)
LightPink = (255, 182, 193)
MediumAquamarine = (102, 205, 170)
Gold = (255, 215, 0)
Orchid = (218, 112, 214)
OldLace = (253, 245, 230)
| StarcoderdataPython |
74161 | """
--- Day 21: RPG Simulator 20XX ---
<NAME> got a new video game for Christmas. It's an RPG, and he's stuck on a boss. He needs to know what
equipment to buy at the shop. He hands you the controller.
In this game, the player (you) and the enemy (the boss) take turns attacking. The player always goes first. Each attack
reduces the opponent's hit points by at least 1. The first character at or below 0 hit points loses.
Damage dealt by an attacker each turn is equal to the attacker's damage score minus the defender's armor score. An
attacker always does at least 1 damage. So, if the attacker has a damage score of 8, and the defender has an armor score
of 3, the defender loses 5 hit points. If the defender had an armor score of 300, the defender would still lose 1 hit
point.
Your damage score and armor score both start at zero. They can be increased by buying items in exchange for gold. You
start with no items and have as much gold as you need. Your total damage or armor is equal to the sum of those stats
from all of your items. You have 100 hit points.
Here is what the item shop is selling:
Weapons: Cost Damage Armor
Dagger 8 4 0
Shortsword 10 5 0
Warhammer 25 6 0
Longsword 40 7 0
Greataxe 74 8 0
Armor: Cost Damage Armor
Leather 13 0 1
Chainmail 31 0 2
Splintmail 53 0 3
Bandedmail 75 0 4
Platemail 102 0 5
Rings: Cost Damage Armor
Damage +1 25 1 0
Damage +2 50 2 0
Damage +3 100 3 0
Defense +1 20 0 1
Defense +2 40 0 2
Defense +3 80 0 3
You must buy exactly one weapon; no dual-wielding. Armor is optional, but you can't use more than one. You can buy 0-2
rings (at most one for each hand). You must use any items you buy. The shop only has one of each item, so you can't buy,
for example, two rings of Damage +3.
For example, suppose you have 8 hit points, 5 damage, and 5 armor, and that the boss has 12 hit points, 7 damage, and 2
armor:
The player deals 5-2 = 3 damage; the boss goes down to 9 hit points.
The boss deals 7-5 = 2 damage; the player goes down to 6 hit points.
The player deals 5-2 = 3 damage; the boss goes down to 6 hit points.
The boss deals 7-5 = 2 damage; the player goes down to 4 hit points.
The player deals 5-2 = 3 damage; the boss goes down to 3 hit points.
The boss deals 7-5 = 2 damage; the player goes down to 2 hit points.
The player deals 5-2 = 3 damage; the boss goes down to 0 hit points.
In this scenario, the player wins! (Barely.)
You have 100 hit points. The boss's actual stats are in your puzzle input. What is the least amount of gold you can
spend and still win the fight?
Your puzzle answer was 121.
--- Part Two ---
Turns out the shopkeeper is working with the boss, and can persuade you to buy whatever items he wants. The other rules
still apply, and he still only has one of each item.
What is the most amount of gold you can spend and still lose the fight?
Your puzzle answer was 201.
"""
class Character:
    """A combatant: hit points, damage, armor, and the gold spent on gear."""

    def __init__(self, hit_points, damage, armor, inventory_cost=0):
        self.hit_points = hit_points
        self.damage = damage
        self.armor = armor
        self.inventory_cost = inventory_cost

    def attack(self, other_character):
        """Deal damage to the opponent (always at least 1, per the rules).

        Returns True while both combatants are still alive.
        """
        dealt = max(self.damage - other_character.armor, 1)
        other_character.hit_points -= dealt
        return self.is_alive and other_character.is_alive

    @property
    def is_alive(self):
        """True while hit points remain above zero."""
        return self.hit_points > 0
def encounter(character1, character2):
    """Run one round (character1 strikes first, then character2 if both live).

    Returns the surviving character once someone drops, or None while the
    fight is still undecided.
    """
    undecided = character1.attack(character2)
    if undecided:
        undecided = character2.attack(character1)
    if undecided:
        return None
    return character1 if character1.is_alive else character2
class Inventory:
    """A purchasable shop item: gold cost plus damage/armor bonuses."""

    def __init__(self, cost, damage, armor):
        self.cost, self.damage, self.armor = cost, damage, armor
# Shop stock from the puzzle text. Weapons grant damage only.
weapons = [
    Inventory(cost, damage, 0) for cost, damage in [
        (8, 4),    # Dagger
        (10, 5),   # Shortsword
        (25, 6),   # Warhammer
        (40, 7),   # Longsword
        (74, 8)    # Greataxe
    ]
]
# Armor grants defense only; the free (0, 0) entry means "no armor bought".
armors = [
    Inventory(cost, 0, armor) for cost, armor in [
        (0, 0),
        (13, 1),   # Leather
        (31, 2),   # Chainmail
        (53, 3),   # Splintmail
        (75, 4),   # Bandedmail
        (102, 5)   # Platemail
    ]
]
# Rings add damage or armor; the two free (0, 0, 0) entries let the
# combination generator model zero, one, or two real rings.
rings = [
    Inventory(cost, damage, armor) for cost, damage, armor in [
        (0, 0, 0),
        (0, 0, 0),
        (25, 1, 0),   # Damage +1
        (50, 2, 0),   # Damage +2
        (100, 3, 0),  # Damage +3
        (20, 0, 1),   # Defense +1
        (40, 0, 2),   # Defense +2
        (80, 0, 3)    # Defense +3
    ]
]
def character_variant(hit_points):
    """Yield a Character for every legal shop purchase.

    Exactly one weapon, optional armor (the zero-cost armors entry), and
    0-2 distinct rings (the two zero-cost rings entries stand in for
    empty hands).
    """
    for weapon in weapons:
        for armor in armors:
            for left_hand_ring in rings:
                # Inventory defines no __eq__, so `!=` compares identity here:
                # the same ring object cannot be worn on both hands, while the
                # two separate dummy rings can both be picked.
                for right_hand_ring in [ring for ring in rings if ring != left_hand_ring]:
                    equipment = [weapon, armor, left_hand_ring, right_hand_ring]
                    yield Character(
                        hit_points=hit_points,
                        damage=sum(e.damage for e in equipment),
                        armor=sum(e.armor for e in equipment),
                        inventory_cost=sum(e.cost for e in equipment))
def simulate_battle(character1, character2):
    """Fight round after round until someone drops; return the winner."""
    while True:
        victor = encounter(character1, character2)
        if victor is not None:
            return victor
def find_winners(boss_stats, is_player_winner=True):
    """Yield every 100-HP equipment loadout whose battle outcome matches
    ``is_player_winner`` against a fresh boss built from ``boss_stats``."""
    for player in character_variant(100):
        boss = Character(**boss_stats)
        victor = simulate_battle(player, boss)
        if (victor is player) == is_player_winner:
            yield player
def find_cheapest_winning_inventory(victors):
    """Return the lowest total gold spent among the winning loadouts."""
    return min(v.inventory_cost for v in victors)
def find_most_expensive_losing_inventory(losers):
    """Return the highest total gold spent among the losing loadouts."""
    return max(loadout.inventory_cost for loadout in losers)
if __name__ == "__main__":
    boss_stats = {
        "hit_points": 103,
        "damage": 9,
        "armor": 2,
    }
    # Part one: cheapest gear that still beats the boss.
    winners = find_winners(boss_stats, is_player_winner=True)
    # Part two: priciest gear that still loses.
    losers = find_winners(boss_stats, is_player_winner=False)
    print("Cheapest inventory: {}".format(find_cheapest_winning_inventory(winners)))
    print("Most expensive losing inventory: {}".format(find_most_expensive_losing_inventory(losers)))
| StarcoderdataPython |
5169090 | <reponame>Ne02ptzero/swift3
# Copyright (c) 2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from urllib import quote
from swift.common.utils import public
from swift3.controllers.base import Controller
from swift3.response import HTTPOk
from swift3.etree import tostring
class S3AclController(Controller):
    """
    Handles the following APIs:
    - GET Bucket acl
    - PUT Bucket acl
    - GET Object acl
    - PUT Object acl
    Those APIs are logged as ACL operations in the S3 server log.
    """
    @public
    def GET(self, req):
        """
        Handles GET Bucket acl and GET Object acl.
        """
        # Pass the request through; the backend response carries the stored
        # ACL for either the object or the bucket.
        resp = req.get_response(self.app)
        acl = resp.object_acl if req.is_object_request else resp.bucket_acl
        # Re-serialize the ACL element tree as the XML body of a fresh 200.
        resp = HTTPOk()
        resp.body = tostring(acl.elem())
        return resp
    @public
    def PUT(self, req):
        """
        Handles PUT Bucket acl and PUT Object acl.
        """
        if req.is_object_request:
            headers = {}
            src_path = '/%s/%s' % (req.container_name, req.object_name)
            # object-sysmeta' can be updated by 'Copy' method,
            # but can not be by 'POST' method.
            # So headers['X-Copy-From'] for copy request is added here.
            headers['X-Copy-From'] = quote(src_path)
            headers['Content-Length'] = 0
            # In case of a MPU, copy only the manifest
            req.get_response(self.app, 'PUT', headers=headers,
                             query={'multipart-manifest': 'get'})
        else:
            # Bucket ACLs are stored via a container POST instead.
            req.get_response(self.app, 'POST')
        return HTTPOk()
| StarcoderdataPython |
8005744 | #!/usr/bin/env python
import os
def test_simple():
    """Assert that main.py exists in the same directory as this file."""
    here = os.path.dirname(os.path.realpath(__file__))
    main_path = os.path.join(here, "main.py")
    assert os.path.isfile(main_path)


if __name__ == "__main__":
    test_simple()
    print("Test passed. main.py file exists.")
| StarcoderdataPython |
4905096 | <gh_stars>100-1000
# Copyright (c) 2021, <NAME>
# License: MIT License
from pathlib import Path
import ezdxf
# Demo: reading/writing DXF header variables with ezdxf.
DIR = Path("~/Desktop/Outbox").expanduser()
doc = ezdxf.new()
# setting the data
# $USERI1/$USERR1 are header variables reserved for user integers/reals.
doc.header["$USERI1"] = 4711
doc.header["$USERR1"] = 3.141592
# reading the data
i1 = doc.header["$USERI1"]
r1 = doc.header["$USERR1"]
# setting the data
doc.header.custom_vars.append("MyFirstVar", "First Value")
# getting the data
my_first_var = doc.header.custom_vars.get("MyFirstVar", "Default Value")
doc.saveas(DIR / "header_vars.dxf")
| StarcoderdataPython |
5041474 |
import os
import jieba
import numpy as np
from scipy.special import softmax
from onnxruntime import GraphOptimizationLevel, InferenceSession, SessionOptions, get_all_providers
from gpt2_tokenizer import GPT2Tokenizer
def create_model_for_provider(model_path: str, provider: str = 'CPUExecutionProvider') -> InferenceSession:
    """Load an ONNX model into an InferenceSession pinned to one provider."""
    assert provider in get_all_providers(), f"provider {provider} not found, {get_all_providers()}"
    # Few properties that might have an impact on performances (provided by MS)
    opts = SessionOptions()
    opts.intra_op_num_threads = int(os.environ.get('NUM_THREADS', 4))
    opts.graph_optimization_level = GraphOptimizationLevel.ORT_ENABLE_ALL
    # Load the model as a graph and prepare the backend; never fall back to
    # a different provider than the one requested.
    session = InferenceSession(model_path, opts, providers=[provider])
    session.disable_fallback()
    return session
print('model loading...')
# Byte-level GPT2 tokenizer backed by a Chinese sentencepiece vocab model.
tokenizer = GPT2Tokenizer(
    'tokenizer/vocab.json',
    'tokenizer/merges.txt',
    model_file='tokenizer/chinese_vocab.model')
# Quantized ONNX models: `cpm` runs the full prompt once, `cpm_kv` decodes
# incrementally using an attention key/value cache.
cpm = create_model_for_provider('./onnx_q/cpm.onnx')
cpm_kv = create_model_for_provider('./onnx_kv_q/cpm.onnx')
# Pre-load jieba's dictionary so first-request latency is predictable.
jieba.initialize()
print('model green')
def generate(
    text,
    max_len = 100,
    temperature = 1.0,
    top_p = 0.95,
    top_k = 50,
    eod=tokenizer.eod_id,
    ban = [
        8,  # a single whitespace token
    ]):
    """Autoregressively continue ``text`` with the CPM ONNX models.

    The first step runs the full prompt through ``cpm``; later steps feed
    only the newest token to ``cpm_kv`` together with the accumulated
    key/value cache.  Sampling uses temperature plus truncated top-p/top-k
    over the softmaxed final-position logits.

    NOTE(review): the mutable default ``ban`` list (never mutated here) and
    the ``eod`` default (evaluated once at import time) look intentional but
    are worth confirming.
    """
    ids = tokenizer.encode(text)
    kv_cache = None
    for i in range(max_len):
        if i == 0:
            # Full-prompt pass; also produces the initial key/value cache.
            logits, kv_cache = cpm.run(None, {
                "input_ids": np.array([ids], dtype=np.int64),
            })
        else:
            # Incremental pass: one token in, cache grows along axis -2.
            logits, new_kv = cpm_kv.run(None, {
                "input_ids": np.array([[next_token]], dtype=np.int64),
                'kv_cache': kv_cache,
            })
            kv_cache = np.concatenate([kv_cache, new_kv], axis=-2)
        # Hard-ban unwanted tokens by pushing their logits far down.
        for x in ban:
            logits[:, -1, x] = -9999
        logits = logits / temperature
        scores = softmax(logits[:, -1, :])
        next_probs = np.sort(scores)[:, ::-1]
        # NOTE(review): this keeps the top (1 - top_p) *fraction of tokens*,
        # not nucleus sampling by cumulative probability — confirm intent.
        if top_p > 0.0 and top_p < 1.0:
            next_probs = next_probs[:, :int(next_probs.shape[1] * (1 - top_p))]
        if top_k > 0 and top_k < next_probs.shape[1]:
            next_probs = next_probs[:, :top_k]
        # Renormalize the surviving probabilities before sampling.
        next_probs_1 = next_probs / next_probs.sum(axis=1).reshape((-1, 1))
        next_tokens = np.argsort(scores)[:, ::-1]
        if top_p > 0.0 and top_p < 1.0:
            next_tokens = next_tokens[:, :int(next_tokens.shape[1] * (1 - top_p))]
        if top_k > 0 and top_k < next_tokens.shape[1]:
            next_tokens = next_tokens[:, :top_k]
        next_token = np.random.choice(next_tokens[0], p=next_probs_1[0])
        # Stop (without emitting) once the end-of-document token is drawn.
        if eod is not None:
            if eod == next_token:
                break
        ids.append(next_token)
    return tokenizer.decode(ids).replace(' ', '')
if __name__ == '__main__':
    # Smoke test: continue a Chinese prompt ("whether the world is at peace
    # depends on ...").
    print(generate('天下是否太平,取决于'))
| StarcoderdataPython |
3508381 | <gh_stars>0
# -*- coding: utf-8 -*-
# Import dependencies
import uuid
import bcrypt # https://github.com/pyca/bcrypt/, https://pypi.python.org/pypi/bcrypt/2.0.0
# Import the database object from the main app module
from flask import json
from app import login_manager, app
# create logger with 'spam_application'
from app.helpers import get_custom_logger
from app.mod_auth.models import User
from app.mod_database.helpers import get_db_cursor
logger = get_custom_logger('mod_auth_helpers')
def get_account_by_id(cursor=None, account_id=None):
    """
    Look up an account with its primary identity, particulars and prime
    email address by account id.

    Uses a parameterised query so account_id is never interpolated into the
    SQL string (prevents SQL injection).

    :param cursor: open DB cursor (driver must support %s placeholders)
    :param account_id: id of the row in MyDataAccount.Accounts
    :return: (cursor, User) on success, (cursor, None) if the account could
             not be found or the query failed
    """
    try:
        ###
        # User info by acoount_id
        logger.debug('User info by acoount_id')

        if app.config["SUPER_DEBUG"]:
            logger.debug('account_id: ' + repr(account_id))

        sql_query = "SELECT " \
                    "MyDataAccount.Accounts.id, " \
                    "MyDataAccount.LocalIdentities.id, " \
                    "MyDataAccount.LocalIdentities.username, " \
                    "MyDataAccount.Particulars.firstname, " \
                    "MyDataAccount.Particulars.lastname, " \
                    "MyDataAccount.Emails.email, " \
                    "MyDataAccount.Particulars.img_url, " \
                    "MyDataAccount.Particulars.dateOfBirth " \
                    "FROM MyDataAccount.Accounts " \
                    "INNER JOIN MyDataAccount.LocalIdentities " \
                    "ON MyDataAccount.Accounts.id = MyDataAccount.LocalIdentities.Accounts_id " \
                    "INNER JOIN MyDataAccount.Particulars " \
                    "ON MyDataAccount.Accounts.id = MyDataAccount.Particulars.Accounts_id " \
                    "INNER JOIN MyDataAccount.Emails " \
                    "ON MyDataAccount.Accounts.id = MyDataAccount.Emails.Accounts_id " \
                    "WHERE MyDataAccount.Accounts.id = %s AND MyDataAccount.Emails.prime = 1"

        if app.config["SUPER_DEBUG"]:
            logger.debug('sql_query: ' + repr(sql_query))

        # Let the DB driver escape the value (was string-formatted into the
        # query, which is an SQL-injection vector).
        cursor.execute(sql_query, (account_id,))

        data = cursor.fetchone()

        if app.config["SUPER_DEBUG"]:
            logger.debug('data: ' + repr(data))

        account_id_from_db = unicode(data[0])
        identity_id_from_db = str(data[1])
        username_from_db = str(data[2])
        firstname_from_db = str(data[3])
        lastname_from_db = str(data[4])
        email_from_db = str(data[5])
        img_url_from_db = str(data[6])
        data_of_birth_from_db = str(data[7])

    except Exception as exp:
        logger.debug('Account not found: ' + repr(exp))
        if app.config["SUPER_DEBUG"]:
            logger.debug('Exception: ' + repr(exp))
        return cursor, None

    else:
        logger.debug('Account found with given id: ' + str(account_id))
        if app.config["SUPER_DEBUG"]:
            logger.debug('account_id_from_db: ' + str(account_id_from_db))
            logger.debug('identity_id_from_db: ' + str(identity_id_from_db))
            logger.debug('username_from_db: ' + str(username_from_db))
            logger.debug('firstname_from_db: ' + str(firstname_from_db))
            logger.debug('lastname_from_db: ' + str(lastname_from_db))
            logger.debug('email_from_db: ' + str(email_from_db))
            logger.debug('img_url_from_db: ' + str(img_url_from_db))
            logger.debug('data_of_birth_from_db: ' + str(data_of_birth_from_db))

        user = User(
            account_id=account_id_from_db,
            identity_id=identity_id_from_db,
            username=username_from_db,
            firstname=firstname_from_db,
            lastname=lastname_from_db,
            email=email_from_db,
            img_url=img_url_from_db,
            date_of_birth=data_of_birth_from_db
        )

        return cursor, user
def get_account_by_username_and_password(cursor=None, username=None, password=None):
    """
    Authenticate a user by username/password against the stored bcrypt hash.

    Uses a parameterised query so the user-supplied username is never
    interpolated into the SQL string (prevents SQL injection).

    :param cursor: open DB cursor (driver must support %s placeholders)
    :param username: username of a LocalIdentity
    :param password: plaintext password to verify
    :return: (cursor, User) when authentication succeeds, (cursor, None) otherwise
    """
    username_to_check = str(username)
    logger.debug('username_to_check: ' + username_to_check)

    # NOTE(review): logging the plaintext password is a security risk —
    # consider removing these debug lines in production builds.
    password_to_check = str(password)
    logger.debug('password_to_check: ' + password_to_check)

    try:
        ###
        # User info by username
        logger.debug('credentials')

        sql_query = "SELECT " \
                    "MyDataAccount.LocalIdentities.Accounts_id, " \
                    "MyDataAccount.LocalIdentities.id, " \
                    "MyDataAccount.LocalIdentities.username, " \
                    "MyDataAccount.LocalIdentityPWDs.password, " \
                    "MyDataAccount.Salts.salt " \
                    "FROM MyDataAccount.LocalIdentities " \
                    "INNER JOIN MyDataAccount.LocalIdentityPWDs " \
                    "ON MyDataAccount.LocalIdentityPWDs.id = MyDataAccount.LocalIdentities.LocalIdentityPWDs_id " \
                    "INNER JOIN MyDataAccount.Salts " \
                    "ON MyDataAccount.Salts.LocalIdentities_id = MyDataAccount.LocalIdentities.id " \
                    "WHERE MyDataAccount.LocalIdentities.username = %s"

        if app.config["SUPER_DEBUG"]:
            logger.debug('sql_query: ' + repr(sql_query))

        # Let the DB driver escape the user-controlled value (was
        # string-formatted into the query, a classic SQL-injection vector).
        cursor.execute(sql_query, (username_to_check,))

        data = cursor.fetchone()

        account_id_from_db = str(data[0])
        identity_id_from_db = str(data[1])
        username_from_db = str(data[2])
        password_from_db = str(data[3])
        salt_from_db = str(data[4])

    except Exception as exp:
        logger.debug('Authentication failed: ' + repr(exp))
        if app.config["SUPER_DEBUG"]:
            logger.debug('Exception: ' + repr(exp))
        return cursor, None

    else:
        logger.debug('User found with given username: ' + username)
        if app.config["SUPER_DEBUG"]:
            logger.debug('account_id_from_db: ' + account_id_from_db)
            logger.debug('identity_id_from_db: ' + identity_id_from_db)
            logger.debug('username_from_db: ' + username_from_db)
            logger.debug('password_from_db: ' + password_from_db)
            logger.debug('salt_from_db: ' + salt_from_db)

        # NOTE(review): plain == is not a constant-time comparison;
        # bcrypt.checkpw would be preferable if the bcrypt version allows.
        if bcrypt.hashpw(password_to_check, salt_from_db) == password_from_db:
            if app.config["SUPER_DEBUG"]:
                logger.debug('Password hash from client: ' + bcrypt.hashpw(password_to_check, salt_from_db))
                logger.debug('Password hash from db     : ' + password_from_db)

            logger.debug('Authenticated')
            cursor, user = get_account_by_id(cursor=cursor, account_id=int(account_id_from_db))

            return cursor, user
        else:
            if app.config["SUPER_DEBUG"]:
                logger.debug('Password hash from client: ' + bcrypt.hashpw(password_to_check, salt_from_db))
                logger.debug('Password hash from db     : ' + password_from_db)

            logger.debug('Not Authenticated')
            return cursor, None
# user_loader callback for Flask-Login.
# https://flask-login.readthedocs.org/en/latest/#how-it-works
@login_manager.user_loader
def load_user(account_id):
    """Flask-Login callback: rebuild the User object for a session's account id."""
    if app.config["SUPER_DEBUG"]:
        logger.debug("load_user(account_id), account_id=" + account_id)
    db_cursor = get_db_cursor()
    db_cursor, user = get_account_by_id(cursor=db_cursor, account_id=unicode(account_id))
    return user
# For API Auth module
def get_account_id_by_username_and_password(username=None, password=None):
    """
    API-auth variant: authenticate a user and return a small dict instead of
    a User object.

    Fixes: the previous default value for `password` was a corrupted
    placeholder (`<PASSWORD>`), which is not valid Python — restored to None.
    The username is now bound via a parameterised query (prevents SQL
    injection).

    :param username: username of a LocalIdentity
    :param password: plaintext password to verify
    :return: {'account_id': ..., 'username': ...} on success, None otherwise
    """
    username_to_check = str(username)
    logger.debug('username_to_check: ' + username_to_check)

    # NOTE(review): logging the plaintext password is a security risk —
    # consider removing these debug lines in production builds.
    password_to_check = str(password)
    logger.debug('password_to_check: ' + password_to_check)

    try:
        ###
        # User info by username
        logger.debug('User info by username from DB')

        sql_query = "SELECT " \
                    "MyDataAccount.LocalIdentities.Accounts_id, " \
                    "MyDataAccount.LocalIdentities.id, " \
                    "MyDataAccount.LocalIdentities.username, " \
                    "MyDataAccount.LocalIdentityPWDs.password, " \
                    "MyDataAccount.Salts.salt " \
                    "FROM MyDataAccount.LocalIdentities " \
                    "INNER JOIN MyDataAccount.LocalIdentityPWDs " \
                    "ON MyDataAccount.LocalIdentityPWDs.id = MyDataAccount.LocalIdentities.LocalIdentityPWDs_id " \
                    "INNER JOIN MyDataAccount.Salts " \
                    "ON MyDataAccount.Salts.LocalIdentities_id = MyDataAccount.LocalIdentities.id " \
                    "WHERE MyDataAccount.LocalIdentities.username = %s"

        if app.config["SUPER_DEBUG"]:
            logger.debug('sql_query: ' + repr(sql_query))

        # DB cursor
        cursor = get_db_cursor()

        # Let the DB driver escape the user-controlled value.
        cursor.execute(sql_query, (username_to_check,))

        data = cursor.fetchone()

        account_id_from_db = str(data[0])
        identity_id_from_db = str(data[1])
        username_from_db = str(data[2])
        password_from_db = str(data[3])
        salt_from_db = str(data[4])

    except Exception as exp:
        logger.debug('Authentication failed: ' + repr(exp))
        if app.config["SUPER_DEBUG"]:
            logger.debug('Exception: ' + repr(exp))
        return None

    else:
        logger.debug('User found with given username: ' + username)
        logger.debug('account_id_from_db: ' + account_id_from_db)
        logger.debug('identity_id_from_db: ' + identity_id_from_db)
        logger.debug('username_from_db: ' + username_from_db)
        logger.debug('password_from_db: ' + password_from_db)
        logger.debug('salt_from_db: ' + salt_from_db)

        logger.info("Checking password")
        # NOTE(review): plain == is not a constant-time comparison;
        # bcrypt.checkpw would be preferable if the bcrypt version allows.
        if bcrypt.hashpw(password_to_check, salt_from_db) == password_from_db:
            logger.debug('Password hash from client: ' + bcrypt.hashpw(password_to_check, salt_from_db))
            logger.debug('Password hash from db     : ' + password_from_db)

            logger.debug('Authenticated')
            #cursor, user = get_account_by_id(cursor=cursor, account_id=int(account_id_from_db))
            user = {'account_id': account_id_from_db, 'username': username_from_db}
            logger.debug('User dict created')
            return user
        else:
            logger.debug('Password hash from client: ' + bcrypt.hashpw(password_to_check, salt_from_db))
            logger.debug('Password hash from db     : ' + password_from_db)

            logger.debug('Not Authenticated')
            return None
| StarcoderdataPython |
8078749 | from symbl.utils import wrap_keyboard_interrupt, Thread, Log
from symbl.utils.Helper import initialize_api_client
from symbl.jobs_api.JobStatus import JobStatus
from symbl_rest import JobsApi
import time
class Job():
    """Handle to an asynchronous Symbl processing job.

    Wraps the job id returned by the Symbl API together with the
    conversation id it produces, and polls the Jobs API until the job
    reaches a terminal state (COMPLETED or FAILED).
    """
    __INTERVAL_TIME_IN_SECONDS = 5 ## in seconds (default polling interval)
    def __init__(self, job_id: str, conversation_id: str, wait=True):
        self.__job_id = job_id
        self.__conversation_id = conversation_id
        self.__success_func = None
        self.__jobs_api = JobsApi()
        self.__error_func = None
        self.__job_status = JobStatus.IN_PROGRESS
        # NOTE(review): stored but never read in this class — monitor_job
        # takes its own `wait` argument. Confirm before removing.
        self.__wait = wait
    def getConversationId(self):
        # Conversation id associated with this job.
        return self.__conversation_id
    def get_job_status(self):
        # Last observed status, as the JobStatus member's string value.
        return self.__job_status.value
    def get_job_id(self):
        return self.__job_id
    def on_complete(self, func):
        # Register a callback invoked with the conversation on job success;
        # returns self so calls can be chained.
        self.__success_func = func
        return self
    def on_error(self, func):
        # Register a callback invoked with the conversation on job failure;
        # returns self so calls can be chained.
        self.__error_func = func
        return self
    @initialize_api_client
    def __fetch_current_job_status(self, credentials=None):
        """Query the Jobs API once and update the cached job status."""
        if self.__jobs_api is not None:
            response = self.__jobs_api.get_job_status(self.__job_id)
            self.__job_status = JobStatus(response.status)
        else:
            raise ValueError("Job object not initialized correctly. Please contact administrator.")
    def synchronous_monitor_job(self, conversation, interval=None, wait=True, credentials=None):
        """Poll the job status every `interval` seconds (default 5) until it
        is COMPLETED or FAILED, then fire the matching registered callback."""
        if interval is None:
            interval = self.__INTERVAL_TIME_IN_SECONDS
        while self.__job_status != JobStatus.COMPLETED and self.__job_status != JobStatus.FAILED:
            time.sleep(interval)
            self.__fetch_current_job_status(credentials=credentials)
            Log.getInstance().info("Fetching latest status of job {0}, current status is {1}".format(self.__job_id, self.__job_status.value))
        if self.__job_status == JobStatus.COMPLETED and self.__success_func != None:
            self.__success_func(conversation)
        elif self.__error_func != None:
            self.__error_func(conversation)
    @wrap_keyboard_interrupt
    def monitor_job(self, conversation, interval=None, wait=True, credentials=None):
        """Monitor the job: blocking when wait=True, otherwise on a
        background thread."""
        if wait:
            self.synchronous_monitor_job(conversation, interval, wait, credentials)
        else:
            Thread.getInstance().start_on_thread(self.synchronous_monitor_job, conversation, interval, wait, credentials)
| StarcoderdataPython |
8085245 | <gh_stars>0
# Your Token for Telegram Bot, get it on Bot Father
# NOTE(review): "TOKEN" is a placeholder — supply the real token via a
# secure channel (e.g. an environment variable) rather than committing it.
TOKEN = "TOKEN"
# Start message
start_msg = "Benvenuto nel Bot relativo allo stand del MakerSpace di Fabriano! Digita il comando /info per ottenere "\
"maggiori informazioni riguardanti questa realtà o digita il comando /timeline per visualizzare un'ampia raccolta "\
"di date che hanno caratterizzato la storia dell'evoluzione dei ChatterBot!"
# Help message
help_msg = "I don't need help"
#Info message
info_msg = {}
info_msg[0] = "Il progetto Makerspace in Library a Fabriano è un progetto innovativo a livello internazionale. In America il primo MakerSpace in una biblioteca " \
"nacque nel 2011 presso la biblioteca di Fayetteville (FFL) dello Stato di New York. In Europa, invece, è ancora poco diffuso. In Italia le biblioteche " \
"che offrono spazi simili, benché con caratteristiche diverse, si trovano a Pistoia, a Cinisello Balsamo e a Settimo Torinese. \n" \
"Molto frequentato e liberamente accessibile nel cuore della città di Fabriano, come è la nuova Biblioteca Multimediale Pubblica, rende " \
"possibile l’incontro e la sperimentazione di diverse realtà, dalle associazioni agli studenti delle scuole di ogni ordine e grado." \
info_msg[1] = "Il Maker Space si inserisce nel movimento culturale dei makers, degli _artigiani digitali_, del software e dell'hardware libero." \
"La creazione di uno spazio fisico, la sperimentazione di diverse realtà, dalle associazioni agli studenti delle scuole di ogni ordine e grado. " \
"Questa iniziativa è autogestita dagli utenti stessi, giovani e meno giovani, di qualsiasi cultura, etnia, genere ed estrazione economica e sociale. " \
"In maniera gratuita e volontaria, il gruppo propone, organizza e realizza le attività che si svolgono in modo continuativo: workshop per " \
"la realizzazione di piccoli circuiti elettrici ed elettronici, progetti di open hardware (Arduino, Makey Makey, Raspberry Pi, Little Bits), " \
"proposte per l’introduzione alla programmazione e alla robotica, in collaborazione con associazioni di altre aree geografiche." \
info_msg[2] = "Ad oggi tutto il materiale di cui è dotato il Maker Space, dalle schede elettroniche, ai materiali di consumo, ai libri di making, è stato ottenuto " \
"tramite partecipazioni ad attività educative sponsorizzate da Google, Make e da alcune aziende locali. Fabriano è nella rete delle città " \
"creative dell’Unesco per l'artigianato, le arti e le tradizioni popolari (in Italia solo Bologna, Torino e Fabriano hanno questo riconoscimento) " \
"ed è famosa in tutto il mondo per la qualità della carta, vantando una tradizione cartaria dal 1264.\n\n" \
"Se vuoi saperne di più puoi visitare il seguente [sito](https://goo.gl/9BhA7B)!\n\n"
#Timeline message
timeline_msg = "Seleziona una data dalla seguente lista per ottenere maggiori informazioni!"
#Events timeline
msg_style = {}
msg_style['1950'] = "Nel 1950 <NAME> pubblicò un articolo dal titolo Computing Machinery and Intelligence, " \
"in cui propose un criterio - oggi definito Test di Turing - in grado di determinare se una " \
"macchina è in grado di pensare o meno. Per soddisfare questo criterio un software deve fingere di " \
"essere umano in una conversazione in tempo reale in modo che l'interlocutore non sia in grado di " \
"distinguere, basandosi solo sul contenuto della conversazione, se stia conversando con un programma " \
"o con un essere umano."
msg_style['1966'] = """ELIZA è un Chatterbot scritto nel 1966 da <NAME>. ELIZA si contraddistinse quale la parodia di un terapeuta Rogersiano, """ \
"""in buona parte rispondendo al paziente con domande ottenute dalla riformulazione delle affermazioni del paziente stesso. """ \
"""Così, per esempio, alla frase "Mi fa male la testa" il programma può ribattere con "Perché dici che ti fa male la testa?" """ \
"""oppure la risposta a "Mia madre mi odia" potrebbe essere "Chi altro nella tua famiglia ti odia?"."""
msg_style['1972'] = "Lo psichiatra americano <NAME> introdusse nel 1972 il bot Parry, ideato in modo da possedere i" \
" tratti tipici di un paziente affetto da schizofrenia. Il bot dimostrò di essere più avanzato del suo" \
" predecessore, tanto da venir definito “ELIZA con una mentalità”. "
msg_style['1988'] = "Scritto da <NAME> nel 1988, Jabberwacky era capace di tenere conversazioni che fossero diver" \
"tenti e umoristiche. Fu uno dei primi tentativi di creazione di un’Intelligenza Artificiale attravers" \
"o le interazioni umane. Il suo diretto successore fu Cleverbot, ideato dallo stesso Carpenter e acces" \
"sibile online ancora oggi."
msg_style['1992 - Parte 1'] = "Nel 1992 Creative Labs rilasciò per la piattaforma MS-Dos Dr. Sbaitso. La conversazione con Sbaitso a" \
"veva la tipica impostazione di una seduta dallo psicologo con la possibilità per gli utenti di avere " \
"un’interazione ancora più viva con il programma grazie alla sintesi vocale delle sue risposte."
msg_style['1992 - Parte 2'] = "Nello stesso anno il giornalista e ricercatore <NAME> pubblicò un articolo sulla Stampa in" \
" cui descriveva la possibilità di dialogare con una macchina. Prendendo spunto dalla prima edizione d" \
"el <NAME>, svoltasi al Computer Museum di Boston, <NAME> creò ELOISA, invitando es" \
"perti di informatica, ricercatori di tutte le discipline e semplici curiosi a dialogarci."
msg_style['2000'] = "Nel 2000 <NAME> e <NAME> fondarono la società ActiveBuddy allo scopo di creare agenti int" \
"elligenti, basati sulla conversazione, in grado di comunicare attraverso piattaforme di messaggistica" \
" istantanea. Hoffer ebbe l'idea di creare agenti interattivi per aggiungere funzionalità ai sempre pi" \
"ù popolari servizi di messaggistica. L'implementazione originale fu in un gioco di avventura, ma pres" \
"to furono aggiunte una vasta gamma di applicazioni basate sui database quali l'accesso alle notizie, " \
"al meteo, a informazioni di borsa oltre che svariati altri strumenti (calcolatrici, traduttori, ecc.)" \
". Le applicazioni furono inserite in un unico pacchetto e lanciate, nel 2001, sotto il nome di Smarte" \
"rChild, una vetrina per il rapido accesso alle informazioni che offriva la possibilità di divertirsi " \
"anche attraverso la conversazione. Il successo del progetto - si raggiunsero oltre 13 milioni di uten" \
"ti - portò alla realizzazione di prodotti promozionali in ambito musicale, cinematografico e televisi" \
"vo - Radiohead, <NAME>, The Sporting News sono solo alcuni dei brand coinvolti."
msg_style['2002'] = "Jargon, Web Agency Milanese, ottenne nel 2002 l'esclusiva per l'Italia della piattaforma tedesca per " \
"la creazione di chatbots LingoBot. Grazie all’operato di Jargon arrivò la versione in lingua italiana" \
""" di LingoBot, a partire dalla quale si sviluppò "Alfa, Il Robot" con applicazioni personalizzate come""" \
" l'invio di cartoline virtuali."
msg_style['2005'] = "Dreams and Co realizza Giorgia e, successivamente, AvClick una piattaforma per creare assistenti virt" \
"uali"
msg_style['2006'] = "Società come INPS, Gruppo BPM, IKEA e Bettini fanno uso di assistenti virtuali"
msg_style['2007'] = "Microsoft lancia Doretta, un assistente virtuale in grado di effettuare ricerche su internet attraver" \
"so MSN"
msg_style['2015'] = "Telegram lancia la possibilità inserire Agenti Virtuali in grado di rispondere a comandi, programmabi" \
"li sia in maniera visuale che utilizzando librerie proprietarie"
msg_style['2016 - Parte uno'] = "Facebook decide di aprire l'accesso ai bot sulla sua piattaforma di messaggistica istantanea Messenger "
msg_style['2016 - Parte due'] = "Viene presentato GETrid, uno tra i primi Chatterbot con risvolti pratici nella vita comune: un assist" \
"ente virtuale in grado di consigliare il corretto modus operandi nella fase di riciclo di un oggetto."
msg_style['2016 - Parte tre'] = "Amazon Echo (abbreviato e indicato come Echo) è un marchio di altoparlanti intelligenti sviluppato da" \
" Amazon.com. I dispositivi si connettono all'assistente personale intelligente a comando vocale denom" \
"""inato Amazon Alexa, che risponde all'utente al nome "Alexa". Questa "parola sveglia" può essere modif""" \
"""icata dall'utente in "Amazon", "Echo" o "Computer"[1]. Il dispositivo è in grado di interagire con la""" \
" voce, riprodurre musica, creare elenchi di cose da fare, impostare allarmi, streaming di podcast, ri" \
"produrre audiolibri e fornire informazioni meteorologiche, sul traffico e altre informazioni in tempo" \
" reale. Può anche controllare diversi smart devices casalinghi agendo da hub di domotica."
msg_style['2017 - Parte 1'] = "Google Home è un marchio di altoparlanti intelligenti sviluppato da Google"
msg_style['2017 - Parte 2'] = "SnatchBot, società israeliana, lancia un sito web per la creazione di chatbot, che ha rivendicato la " \
"capacità di costruire bot in grado di compiere analisi sentimentali."
| StarcoderdataPython |
364977 | # ----------------------------------------------------------------------------
# Gimel Studio Copyright 2019-2021 by <NAME> and contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
import os
import sys
import glob
import wx
from gswidgetkit import (NumberField, EVT_NUMBERFIELD,
Button, EVT_BUTTON, TextCtrl,
DropDown, EVT_DROPDOWN)
from gimelstudio import constants
from gimelstudio.datafiles import ICON_ARROW_DOWN, ICON_ARROW_RIGHT
# Enum-like constants for widgets
SLIDER_WIDGET = "slider"
SPINBOX_WIDGET = "spinbox"
class Property(object):
    """
    The base node property class.

    Holds an idname (unique identifier), a current value, a UI label and a
    visibility flag. Subclasses override ``_RunErrorCheck`` to validate the
    value and ``CreateUI`` to build the editing widget.
    """
    def __init__(self, idname, default, label, visible=True):
        self.idname = idname          # unique identifier of the property
        self.value = default          # current value (validated by subclasses)
        self.label = label            # human-readable label shown in the UI
        self.visible = visible
        self.widget_eventhook = None  # callable set via SetWidgetEventHook

    def _RunErrorCheck(self):
        # Subclasses override this to validate self.value; the base class
        # accepts any value.
        pass

    @property
    def IdName(self):  # read-only alias for the idname
        return self.idname

    def GetIdname(self):
        return self.idname

    def GetValue(self):
        return self.value

    def SetValue(self, value, render=True):
        """ Set the value of the node property.

        NOTE: This is only to be used to AFTER the node init.
        Use ``self.EditProperty`` for other cases, instead.
        """
        self.value = value
        self._RunErrorCheck()
        # Notify the registered hook so the node graph can re-render.
        self.WidgetEventHook(self.idname, self.value, render)

    def GetLabel(self):
        return self.label

    def SetLabel(self, label):
        self.label = label

    def GetIsVisible(self):
        return self.visible

    def SetIsVisible(self, is_visible):
        self.visible = is_visible

    def SetWidgetEventHook(self, event_hook):
        self.widget_eventhook = event_hook

    def WidgetEventHook(self, idname, value, render):
        self.widget_eventhook(idname, value, render)

    def CreateFoldPanel(self, panel_bar, label=None):
        # Build a collapsible panel (with open/closed arrow icons) for this
        # property's widgets; uses the property label unless one is given.
        images = wx.ImageList(24, 24)
        images.Add(ICON_ARROW_DOWN.GetBitmap())
        images.Add(ICON_ARROW_RIGHT.GetBitmap())
        if label is None:
            lbl = self.GetLabel()
        else:
            lbl = label
        return panel_bar.AddFoldPanel(lbl, foldIcons=images)

    def AddToFoldPanel(self, panel_bar, fold_panel, item, spacing=10):
        # From https://discuss.wxpython.org/t/how-do-you-get-the-
        # captionbar-from-a-foldpanelbar/24795
        fold_panel._captionBar.SetSize(fold_panel._captionBar.DoGetBestSize())
        panel_bar.AddFoldPanelWindow(fold_panel, item, spacing=spacing)
class PositiveIntegerProp(Property):
    """ Allows the user to select a positive integer.

    The value is validated to lie inside [min_val, max_val] and edited in
    the UI with a numeric slider/spinbox field.
    """
    def __init__(self, idname, default=0, lbl_suffix="", min_val=0,
                 max_val=10, widget="slider", label="", visible=True):
        Property.__init__(self, idname, default, label, visible)
        self.min_value = min_val
        self.max_value = max_val
        self.widget = widget
        self.lbl_suffix = lbl_suffix  # unit suffix shown after the number

        self._RunErrorCheck()

    def _RunErrorCheck(self):
        # Validate that the value lies inside [min_value, max_value].
        # Messages previously referred to a nonexistent "PositiveIntegerField"
        # class and misstated the bounds as strict; both fixed.
        if self.value > self.max_value:
            raise TypeError(
                "PositiveIntegerProp value must be set to an integer less than or equal to 'max_val'"
            )
        if self.value < self.min_value:
            raise TypeError(
                "PositiveIntegerProp value must be set to an integer greater than or equal to 'min_val'"
            )

    def GetMinValue(self):
        return self.min_value

    def GetMaxValue(self):
        return self.max_value

    def CreateUI(self, parent, sizer):
        # Build a collapsible panel containing a single NumberField widget.
        fold_panel = self.CreateFoldPanel(sizer)
        fold_panel.SetBackgroundColour(wx.Colour("#464646"))

        self.numberfield = NumberField(fold_panel,
                                       default_value=self.GetValue(),
                                       label=self.GetLabel(),
                                       min_value=self.GetMinValue(),
                                       max_value=self.GetMaxValue(),
                                       suffix=self.lbl_suffix, show_p=False,
                                       size=(-1, 32))
        self.AddToFoldPanel(sizer, fold_panel, self.numberfield, spacing=10)
        self.numberfield.Bind(EVT_NUMBERFIELD, self.WidgetEvent)

    def WidgetEvent(self, event):
        # Push the widget's new value into the property (triggers re-render).
        self.SetValue(event.value)
class ChoiceProp(Property):
    """ Allows the user to select from a list of choices.

    Rendered in the UI as a dropdown.
    """
    def __init__(self, idname, default="", choices=None, label="", visible=True):
        Property.__init__(self, idname, default, label, visible)
        # Default to a fresh list per instance: the previous ``choices=[]``
        # default was a shared mutable object, so SetChoices-style mutation
        # on one instance could leak into others.
        self.choices = [] if choices is None else choices

        self._RunErrorCheck()

    def GetChoices(self):
        return self.choices

    def SetChoices(self, choices=None):
        # Same mutable-default fix as __init__.
        self.choices = [] if choices is None else choices

    def CreateUI(self, parent, sizer):
        # Build a collapsible panel containing a DropDown of the choices.
        fold_panel = self.CreateFoldPanel(sizer)
        fold_panel.SetBackgroundColour(wx.Colour("#464646"))

        self.dropdown = DropDown(
            fold_panel,
            default=self.GetValue(),
            items=self.GetChoices(),
            size=(-1, 32)
        )
        self.AddToFoldPanel(sizer, fold_panel, self.dropdown, spacing=10)
        self.dropdown.Bind(EVT_DROPDOWN, self.WidgetEvent)

    def WidgetEvent(self, event):
        value = event.value
        if not value:
            # Kept from the original: warn on a falsy selection but still
            # assign it, preserving existing behavior.
            print("Value is null!")
        self.SetValue(value)
class OpenFileChooserProp(Property):
    """ Allows the user to select a file to open.

    (e.g: use this to open an .PNG, .JPG, .JPEG image, etc.)
    """
    def __init__(self, idname, default="", dlg_msg="Choose file...",
                 wildcard="All files (*.*)|*.*", btn_lbl="Choose...",
                 label="", visible=True):
        Property.__init__(self, idname, default, label, visible)
        self.dlg_msg = dlg_msg      # message shown in the file dialog
        self.wildcard = wildcard    # file-type filter for the dialog
        self.btn_lbl = btn_lbl      # label of the "Choose..." button

        self._RunErrorCheck()

    def _RunErrorCheck(self):
        # isinstance (rather than an exact type() comparison) also accepts
        # str subclasses; the message previously named a nonexistent
        # "OpenFileChooserField" class.
        if not isinstance(self.value, str):
            raise TypeError("OpenFileChooserProp value must be a string!")

    def GetDlgMessage(self):
        return self.dlg_msg

    def GetWildcard(self):
        return self.wildcard

    def GetBtnLabel(self):
        return self.btn_lbl

    def CreateUI(self, parent, sizer):
        # Read-only path text box plus a button that opens the file dialog.
        fold_panel = self.CreateFoldPanel(sizer)
        pnl = wx.Panel(fold_panel)
        pnl.SetBackgroundColour(wx.Colour("#464646"))

        vbox = wx.BoxSizer(wx.VERTICAL)
        hbox = wx.BoxSizer(wx.HORIZONTAL)

        self.textcontrol = TextCtrl(pnl,
                                    value=self.GetValue(), style=wx.BORDER_SIMPLE,
                                    placeholder="", size=(-1, 32))
        hbox.Add(self.textcontrol, proportion=1, flag=wx.EXPAND | wx.BOTH)

        self.button = Button(pnl, label=self.GetBtnLabel(), size=(-1, 32))
        hbox.Add(self.button, flag=wx.LEFT, border=5)
        self.button.Bind(EVT_BUTTON, self.WidgetEvent)

        vbox.Add(hbox, flag=wx.EXPAND | wx.BOTH)
        vbox.Fit(pnl)
        pnl.SetSizer(vbox)
        self.AddToFoldPanel(sizer, fold_panel, pnl, spacing=10)

    def WidgetEvent(self, event):
        # Open a native file dialog; accept only supported image formats.
        dlg = wx.FileDialog(
            None,
            message=self.GetDlgMessage(),
            defaultDir=os.getcwd(),
            defaultFile="",
            wildcard=self.GetWildcard(),
            style=wx.FD_OPEN | wx.FD_CHANGE_DIR | wx.FD_FILE_MUST_EXIST | wx.FD_PREVIEW
        )
        if dlg.ShowModal() == wx.ID_OK:
            paths = dlg.GetPaths()
            filetype = os.path.splitext(paths[0])[1]

            if filetype not in constants.SUPPORTED_FT_OPEN_LIST:
                dlg = wx.MessageDialog(
                    None,
                    "That file type isn't currently supported!",
                    "Cannot Open Image!",
                    style=wx.ICON_EXCLAMATION
                )
                dlg.ShowModal()
            else:
                self.SetValue(paths[0])
                self.textcontrol.ChangeValue(self.GetValue())
class LabelProp(Property):
    """A read-only property rendered as a static caption/value pair."""
    def __init__(self, idname, default="", label="", visible=True):
        Property.__init__(self, idname, default, label, visible)
        self._RunErrorCheck()

    def CreateUI(self, parent, sizer):
        # Caption text (the property label) followed by the value text,
        # both in white on the dark panel background.
        caption = wx.StaticText(parent, label=self.GetLabel())
        caption.SetForegroundColour("#fff")
        sizer.Add(caption, flag=wx.LEFT | wx.TOP, border=5)

        value_text = wx.StaticText(parent, label=self.GetValue())
        value_text.SetForegroundColour("#fff")
        sizer.Add(value_text, flag=wx.LEFT | wx.TOP, border=5)
class StringProp(Property):
    """A string property edited through a modal text-entry dialog.

    The current value is shown in a read-only text box; an "Edit" button
    opens a wx.TextEntryDialog to change it.
    """
    def __init__(self, idname, default="Text", dlg_msg="Edit text:",
                 dlg_title="Edit Text", label="", visible=True):
        Property.__init__(self, idname, default, label, visible)
        self.dlg_msg = dlg_msg      # prompt shown inside the edit dialog
        self.dlg_title = dlg_title  # title of the edit dialog

        self._RunErrorCheck()

    def GetDlgMessage(self):
        return self.dlg_msg

    def GetDlgTitle(self):
        return self.dlg_title

    def CreateUI(self, parent, sizer):
        # Label, then a row with the read-only value box and the Edit button.
        label = wx.StaticText(parent, label=self.GetLabel())
        label.SetForegroundColour("#fff")
        sizer.Add(label, flag=wx.LEFT | wx.TOP, border=5)

        vbox = wx.BoxSizer(wx.VERTICAL)
        hbox = wx.BoxSizer(wx.HORIZONTAL)

        self.textcontrol = wx.TextCtrl(
            parent,
            id=wx.ID_ANY,
            value=self.GetValue(),
            style=wx.TE_READONLY
        )
        hbox.Add(self.textcontrol, proportion=1)

        self.button = wx.Button(
            parent,
            id=wx.ID_ANY,
            label="Edit"
        )
        hbox.Add(self.button, flag=wx.LEFT, border=5)
        self.button.Bind(
            wx.EVT_BUTTON,
            self.WidgetEvent
        )

        vbox.Add(hbox, flag=wx.EXPAND)
        sizer.Add(vbox, flag=wx.ALL | wx.EXPAND, border=5)

    def WidgetEvent(self, event):
        # Open the modal edit dialog pre-filled with the current value and,
        # on OK, store the new value and refresh the read-only box.
        dlg = wx.TextEntryDialog(None, self.GetDlgMessage(),
                                 self.GetDlgTitle(), self.GetValue())
        if dlg.ShowModal() == wx.ID_OK:
            value = dlg.GetValue()
            self.SetValue(value)
            self.textcontrol.ChangeValue(self.GetValue())
| StarcoderdataPython |
369547 | <reponame>minhduccao/PomoBot
import os
import discord
from dotenv import load_dotenv
from discord.ext import commands
import configparser
import asyncio
from enum import Enum
from timer import Timer
from timer import TimerStatus
DEBUG = True # For debug messages
SETTING_OPTIONS = ['work_time', 'short_break_time', 'long_break_time', 'sessions', 'use_long_breaks']
COMMAND_PREFIX = '*'
TIMER_COMMANDS = ['start', 'pause', 'stop', 'time', 'notify', 'set', 'setextra', 'togglebreak']
GENERAL_COMMANDS = ['reset', 'help']
load_dotenv()
TOKEN = os.getenv('DISCORD_TOKEN') # Grabs Discord bot token from .env file
bot = commands.Bot(command_prefix=COMMAND_PREFIX, help_command=None)
timer = Timer()
pingList = []
# ------------ Overall Work List ---------
# TODO: Complete remaining commands
# TODO: Complete all error handling
# TODO: Store user-set times
# TODO: Add break functionality + settings to adjust long breaks, sessions
# TODO: Add docstrings
# TODO: Create empty .env file before finalizing
# TODO: Remove all DEBUG statements and check imports before finalizing
# TODO: Update Enum with more colors
class MsgColors(Enum):
    """Hex color codes used as Discord embed accent colors."""
    AQUA = 0x33c6bb    # informational embeds (start/resume/confirm)
    YELLOW = 0xFFD966  # warning embeds
    RED = 0xEA3546     # stop/error embeds
    PURPLE = 0x6040b1  # not used by the commands in this file section
@bot.event
async def on_ready():
    # Fired once the bot has finished logging in to Discord.
    print(f'{bot.user} has connected to Discord.')
@bot.command(name='start', help='Starts a Pomodoro timer')
async def start_timer(ctx):
    """Start a new work session, or resume a paused one.

    Three cases: STOPPED -> start a fresh countdown using the configured
    work_time; PAUSED -> resume the remaining time; RUNNING -> warn.
    When a countdown finishes naturally, everyone in pingList is mentioned.
    NOTE(review): the two countdown loops below are near-duplicates — a
    candidate for extraction into a shared coroutine.
    """
    if timer.get_status() == TimerStatus.STOPPED:
        work_mins = config['CURRENT_SETTINGS']['work_time']  # work duration (minutes) from user settings
        work_secs = '00'
        desc = f'Time Remaining: `{work_mins}:{work_secs}`'  # formatted countdown message
        em = discord.Embed(title=':timer: Starting Timer',
                           description=desc,
                           color=MsgColors.AQUA.value)
        await ctx.send(embed=em)
        if DEBUG:
            print('Command: *start (from stopped timer)')

        work_time = int(work_mins) * 60  # convert minutes to seconds
        timer.start(work_time)
        # Tick once per second until the timer stops (or is paused/stopped
        # by another command mutating the shared timer).
        while timer.get_status() == TimerStatus.RUNNING:
            await asyncio.sleep(1)  # sleep 1s between ticks
            timer.tick()
        if timer.get_status() == TimerStatus.STOPPED:  # ping users when timer stops
            for user in pingList:
                await ctx.send(f'Pinging {user}')
            pingList.clear()
    elif timer.get_status() == TimerStatus.PAUSED:  # resuming timer from paused state
        em = discord.Embed(title=':timer: Resuming Timer',
                           description=getFrmtTime(timer),
                           color=MsgColors.AQUA.value)
        await ctx.send(embed=em)
        if DEBUG:
            print('Command: *start (from paused timer)')
        timer.resume()
        while timer.get_status() == TimerStatus.RUNNING:
            await asyncio.sleep(1)
            timer.tick()
        if timer.get_status() == TimerStatus.STOPPED:  # ping users when timer stops
            for user in pingList:
                await ctx.send(f'Pinging {user}')
            pingList.clear()
    else:
        # Timer is already RUNNING.
        em = discord.Embed(title=':warning: Warning',
                           description='Timer is already running.',
                           color=MsgColors.YELLOW.value)
        await ctx.send(embed=em)
@bot.command(name='pause', help='Pauses the timer')
async def pause_timer(ctx):
    """Pause the running timer; warn if there is nothing to pause."""
    if timer.pause():
        embed = discord.Embed(title=':pause_button: Paused Timer',
                              description='Timer has been paused.\n' + getFrmtTime(timer),
                              color=MsgColors.AQUA.value)
    else:
        embed = discord.Embed(title=':warning: Warning',
                              description='Timer has already been paused or stopped.',
                              color=MsgColors.YELLOW.value)
    await ctx.send(embed=embed)
@bot.command(name='stop', help='Stops the timer')
async def stop_timer(ctx):
    """Stop the timer outright; warn if it was not running."""
    if timer.stop():
        # Session cancelled: nobody should be pinged for it.
        pingList.clear()
        embed = discord.Embed(title=':stop_button: Stopped Timer',
                              description='Timer has been stopped.',
                              color=MsgColors.RED.value)
    else:
        embed = discord.Embed(title=':warning: Warning',
                              description='Timer has already been stopped or paused.',
                              color=MsgColors.YELLOW.value)
    await ctx.send(embed=embed)
@bot.command(name='time', help='Displays the current timer status', aliases=['timer', 'status'])
async def current_time(ctx):
    """Report whether the timer is stopped, running, or paused."""
    status = timer.get_status()
    if status == TimerStatus.STOPPED:
        title, desc, color = (':stop_button: Timer Stopped',
                              'Time Remaining: 0:00',
                              MsgColors.RED.value)
    elif status == TimerStatus.RUNNING:
        title, desc, color = (':timer: Timer Running',
                              getFrmtTime(timer),
                              MsgColors.AQUA.value)
    else:
        # Remaining case: PAUSED.
        title, desc, color = (':pause_button: Timer Paused',
                              getFrmtTime(timer),
                              MsgColors.YELLOW.value)
    await ctx.send(embed=discord.Embed(title=title, description=desc, color=color))
@bot.command(name='notify', help='Signs up the user to be pinged when the timer ends')
async def notify_user(ctx):
    """Register the invoking user to be mentioned when the timer stops."""
    author = ctx.message.author
    pingList.append(author.mention)
    confirmation = discord.Embed(
        title=':ballot_box_with_check: Notification Confirmed',
        description='Timer will ping ' + author.name + ' when the timer stops.',
        color=MsgColors.AQUA.value)
    await ctx.send(embed=confirmation)
@bot.command(name='set', help='Sets duration for work and short breaks')
async def set_options_simple(ctx, work_time: int, short_break_time: int):
    """Persist work/short-break durations (minutes) to settings.ini and confirm."""
    config.set('CURRENT_SETTINGS', 'work_time', str(work_time))
    config.set('CURRENT_SETTINGS', 'short_break_time', str(short_break_time))
    # Write the whole config back so the change survives a restart.
    with open('settings.ini', 'w') as configFile:
        config.write(configFile)
    em = discord.Embed(title=':gear: Adjusting Timer Settings',
                       description=f'Setting work time to {work_time} minutes and break time to {short_break_time} minutes',
                       color=MsgColors.AQUA.value)
    await ctx.send(embed=em)
    if DEBUG:
        print(f'Command: *set: Work Time: {work_time} Break Time: {short_break_time}')
@bot.command(name='setextra', help='Sets duration for long breaks and number of work sessions')
async def set_options_extra(ctx, long_break_time: int, sessions: int):
    """Persist long-break duration (minutes) and session count to settings.ini."""
    config.set('CURRENT_SETTINGS', 'long_break_time', str(long_break_time))
    config.set('CURRENT_SETTINGS', 'sessions', str(sessions))
    with open('settings.ini', 'w') as configFile:
        config.write(configFile)
    em = discord.Embed(title=':gear: Adjusting Timer Settings',
                       description=f'Setting long break time to {long_break_time} minutes and number of work sessions to {sessions}.',
                       color=MsgColors.AQUA.value)
    await ctx.send(embed=em)
@bot.command(name='togglebreak', help='Toggles the option to enable/disable long breaks')
async def toggle_long_break(ctx):
    """Flip the long-break option in settings.ini and confirm the change."""
    # getboolean accepts 'True'/'true'/'1'/'yes' etc., so a hand-edited
    # settings file no longer needs the exact string 'True' to count as on.
    break_option = config.getboolean('CURRENT_SETTINGS', 'use_long_breaks')
    config.set('CURRENT_SETTINGS', 'use_long_breaks', str(not break_option))
    with open('settings.ini', 'w') as configFile:
        config.write(configFile)
    desc = 'Disabled long breaks.' if break_option else 'Enabled long breaks.'
    em = discord.Embed(title=':gear: Adjusting Timer Settings',
                       description=desc,
                       color=MsgColors.AQUA.value)
    await ctx.send(embed=em)
@bot.command(name='reset', help='Reset timer settings to default values.')
async def reset_settings(ctx):
    """Copy every option from the [DEFAULT] section back into CURRENT_SETTINGS."""
    for option in SETTING_OPTIONS:
        config.set('CURRENT_SETTINGS', option, config['DEFAULT'][option])
    with open('settings.ini', 'w') as configFile:
        config.write(configFile)
    em = discord.Embed(title=':leftwards_arrow_with_hook: Reset Timer Settings',
                       description='Timer settings have been reset to default values.',
                       color=MsgColors.AQUA.value)
    await ctx.send(embed=em)
@bot.command(name='help', help='Describes all bot commands.')
async def help(ctx):
    """Send an embed listing every registered command, grouped into timer and
    general sections ordered by TIMER_COMMANDS / GENERAL_COMMANDS.

    NOTE(review): registering a command named 'help' assumes discord.py's
    built-in help command was disabled at bot creation -- confirm.
    """
    # TODO: Fill in help command
    help_commands = dict()  # Dict of help commands + their description
    for command in bot.commands:
        help_commands[command.name] = command.help
    desc = 'The prefix for this bot is `' + COMMAND_PREFIX + '`\n'  # Prints ordered list of timer commands
    desc += f'\n**Timer Commands | {len(TIMER_COMMANDS)}**\n'
    for command in TIMER_COMMANDS:
        desc += '`{:12s}` {}\n'.format(command, help_commands[command])
    desc += f'\n**General Commands | {len(GENERAL_COMMANDS)}**\n'  # Prints ordered list of general commands
    for command in GENERAL_COMMANDS:
        desc += '`{:12s}` {}\n'.format(command, help_commands[command])
    em = discord.Embed(title='Bot Commands',
                       description=desc,
                       color=MsgColors.PURPLE.value)
    await ctx.send(embed=em)
# TODO: Remove command later
@bot.command(name='t', help='Temporary for testing commands')
async def t(ctx):
    """Debug command: echo the raw use_long_breaks setting string."""
    await ctx.send(config['CURRENT_SETTINGS']['use_long_breaks'])
# ----------------------- ERROR HANDLING -----------------------------
# TODO: Fill in remaining method errors
@set_options_simple.error
async def set_options_simple_error(ctx, error):
    """Error handler for *set: friendly embeds for argument errors; anything
    unexpected is appended to error.log."""
    if DEBUG:
        print(f'*set error: {ctx.message.content} \n{ctx.message}\n')
    if isinstance(error, commands.errors.MissingRequiredArgument):
        em = discord.Embed(title=':warning: Invalid *set Command Usage',
                           description='Specify both a valid work and break time.\nFormat: `*set # #`',
                           color=MsgColors.YELLOW.value)
    elif isinstance(error, commands.errors.BadArgument):
        em = discord.Embed(title=':warning: Invalid *set Command Usage',
                           description='Specify whole numbers for both work and break times. \nFormat: `*set # #`',
                           color=MsgColors.YELLOW.value)
    else:
        # Unknown error type: tell the user it was logged, then log it.
        em = discord.Embed(title=':x: Invalid *set Command Usage Error',
                           description=f'Unhandled *set error has been logged.',
                           color=MsgColors.RED.value)
        with open('error.log', 'a') as errorLog:
            errorLog.write(f'Unhandled *set message: {ctx.message.content} \n{ctx.message}\n')
    await ctx.send(embed=em)
# ----------------------- UTILITY FUNCTIONS -----------------------------
def getFrmtTime(clock: 'Timer') -> str:
    """Format a timer's remaining time as 'Time Remaining: `M:SS`'.

    Args:
        clock: object exposing get_time() -> remaining whole seconds.
    Returns:
        The formatted string with seconds always zero-padded to two digits.
    """
    # divmod replaces the manual modulo/subtract dance; :02d replaces the
    # hand-rolled '0' + str(...) padding for seconds < 10.
    work_mins, work_secs = divmod(clock.get_time(), 60)
    return f'Time Remaining: `{work_mins}:{work_secs:02d}`'
if __name__ == '__main__':
    # Load persisted timer settings, then start the bot (blocks until exit).
    config = configparser.ConfigParser()
    config.read('settings.ini')  # Read in settings from settings.ini
    bot.run(TOKEN)
| StarcoderdataPython |
# Standard library
import base64
import json
import os
import pickle
import re
import sys
from email.mime.text import MIMEText

# Third-party
import requests
from googleapiclient.discovery import build
from googleapiclient.errors import HttpError
def config_check():
    """Load and return config.json from the current working directory.

    Exits the program with a message when the file does not exist.
    """
    config_path = os.path.join(os.getcwd(), 'config.json')
    if os.path.exists(config_path):
        # Open the exact path that was checked; the old code checked the
        # joined path but opened the bare relative name 'config.json'.
        with open(config_path, 'r') as f:
            return json.load(f)
    print('No config file! Come back when you get one.')
    sys.exit()
def authentify():
    """Obtain a Spotify client-credentials access token and verify it.

    Returns the token string on success; implicitly returns None when the
    verification request fails (callers should be prepared for that).
    """
    client_id = config_check()['AUTHENTIFY']['client_id']
    client_secret = config_check()['AUTHENTIFY']['client_secret']
    grant_type = 'client_credentials'
    body_params = {'grant_type' : grant_type}
    url = 'https://accounts.spotify.com/api/token'
    # Client-credentials flow: POST with HTTP basic auth (id / secret).
    response = requests.post(url, data=body_params, auth = (client_id, client_secret))
    resp_json = json.loads(response.text)
    token = resp_json['access_token']
    headers = {'Authorization': 'Bearer {0}'.format(token)}
    # Probe a known artist endpoint to confirm the token actually works.
    response = requests.get('https://api.spotify.com/v1/artists/2Sp19cOHSqAUlE64hekARW/albums', headers=headers)
    if response.status_code == 200:
        return token
    else:
        print('Could not authenticate!')
def spotify_band_ids():
    """Read comma-separated Spotify artist IDs from spotify_artists.txt.

    Returns the list of IDs when all of them are well-formed; exits the
    program when the file is missing or any ID fails validation.
    """
    path = os.getcwd()
    if os.path.exists(os.path.join(path, 'spotify_artists.txt')):
        with open('spotify_artists.txt', 'r') as inputter:
            temp_list = []
            band_ids = inputter.read().split(', ')
            for band_id in band_ids:
                # Spotify artist IDs are exactly 22 alphanumeric characters.
                matcher = re.fullmatch('[0-9A-Za-z]{22}', band_id.strip())
                if matcher:
                    pass
                else:
                    temp_list.append(band_id)
            if len(temp_list) >= 1:
                bad_ids = ', '.join(temp_list)
                print('Looks like you got some Spotify artist IDs that, uh, aren\'t formulated well. ' \
                      'I\'m gonna shut this down, and you should look at these artist IDs: {0}'.format(bad_ids))
                sys.exit()
            elif len(temp_list) == 0:
                return band_ids
    else:
        print('You don\'t have any artists to check on! Now I have to quit!')
        sys.exit()
def metadata_funnel(h, x, y):
    """Fetch the newest release of one kind for a Spotify artist.

    Args:
        h: authorization headers dict for the Spotify Web API.
        x: Spotify artist ID.
        y: include_groups value ('single', 'album' or 'compilation').
    Returns:
        Dict with keys id/name/date/url; all values are 'n/a' when the
        artist has no release of that kind or the response is unusable.
    """
    try:
        latest_album = requests.get('https://api.spotify.com/v1/artists/{0}/albums?include_groups={1}'.format(x, y), headers=h)
        latest_album_id = json.loads(latest_album.text)['items'][0]['id']
        latest_album_name = json.loads(latest_album.text)['items'][0]['name']
        latest_album_rd = json.loads(latest_album.text)['items'][0]['release_date']
        latest_album_url = 'https://open.spotify.com/album/{0}'.format(latest_album_id)
        latest_album_block = {'id': latest_album_id, 'name': latest_album_name, 'date': latest_album_rd, 'url': latest_album_url}
    except Exception:
        # Best-effort fallback (missing releases, bad payloads, network
        # errors).  `except Exception` instead of a bare except so Ctrl-C
        # and SystemExit still propagate.
        latest_album_block = {'id': 'n/a', 'name': 'n/a', 'date': 'n/a', 'url': 'n/a'}
    return latest_album_block
def dict_checker():
    """Return cached sniffer data, creating the cache on first run.

    When sniffer_data.json exists it is loaded and returned.  Otherwise a
    fresh profile is built from the Spotify API for every configured artist,
    written to disk, and the program exits; the comparison run happens on a
    later invocation.
    """
    path = os.getcwd()
    if os.path.exists(os.path.join(path, 'sniffer_data.json')):
        with open(os.path.join(path, 'sniffer_data.json'), 'r') as f:
            return json.load(f)
    else:
        try:
            token = authentify()
            headers = {'Authorization': 'Bearer {0}'.format(token)}
            spotify_bands = spotify_band_ids()
            spotify_data_list = []
            for band in spotify_bands:
                artist_payload = requests.get('https://api.spotify.com/v1/artists/{0}'.format(band), headers=headers)
                artist_id = json.loads(artist_payload.text)['id']
                artist_name = json.loads(artist_payload.text)['name']
                # BUG FIX: metadata_funnel(h, x, y) requires the auth headers
                # as its first argument; the old calls passed only
                # (band, kind) and raised TypeError at runtime.
                latest_single_block = metadata_funnel(headers, band, 'single')
                latest_album_block = metadata_funnel(headers, band, 'album')
                latest_comp_block = metadata_funnel(headers, band, 'compilation')
                whole_entry = {'id': artist_id, 'name': artist_name, 'latest_single': latest_single_block,
                               'latest_album': latest_album_block, 'latest_comp': latest_comp_block}
                spotify_data_list.append(whole_entry)
            all_data = {'bands': spotify_data_list}
            with open(os.path.join(path, 'sniffer_data.json'), 'w') as f:
                json.dump(all_data, f)
            print('We just created a data profile based on your list of Spotify '\
                  'artists. Run Spotify Sniffer tomorrow to see if anyone drops '\
                  'new music!')
            sys.exit()
        except Exception:
            # BUG FIX: the old bare `except:` also caught the SystemExit from
            # the success path above, printing this failure message after a
            # successful run.  `except Exception` lets sys.exit() propagate.
            print('Something is wrong. You may not have sufficient privileges '\
                  'to make a file. Bogus!')
            sys.exit()
def release_checker(h, xx, yy, zz, new_entries_list):
    """Compare an artist's newest Spotify release of one kind with the cache.

    Args:
        h: authorization headers for the Spotify Web API.
        xx: cached artist entry dict (mutated in place when a new release
            is found).
        yy: release kind for include_groups ('single', 'album', 'compilation').
        zz: key inside xx holding the cached release of that kind
            (e.g. 'latest_single').
        new_entries_list: accumulator list; a summary dict is appended for
            each newly discovered release.
    """
    band_id = xx['id']
    try:
        band_release = requests.get('https://api.spotify.com/v1/artists/{0}/albums?include_groups={1}'.format(band_id, yy), headers=h)
        newest_release_id = json.loads(band_release.text)['items'][0]['id']
        last_release_pulled_from_spotify = xx[zz]['id']
        if last_release_pulled_from_spotify == newest_release_id:
            pass
        else:
            newest_release_name = json.loads(band_release.text)['items'][0]['name']
            newest_release_rd = json.loads(band_release.text)['items'][0]['release_date']
            newest_release_url = 'https://open.spotify.com/album/{0}'.format(newest_release_id)
            # Update the cached entry in place so the caller can persist it.
            xx[zz]['id'] = newest_release_id
            xx[zz]['name'] = newest_release_name
            xx[zz]['date'] = newest_release_rd
            xx[zz]['url'] = newest_release_url
            new_entries_list.append({'artist': xx['name'], 'kind': yy, 'title': newest_release_name,
                                     'date': newest_release_rd, 'url': newest_release_url})
    except IndexError:
        # Artist has no releases of this kind ('items' is empty).
        pass
def mail():
    """Build a Gmail API service from a previously stored OAuth token.

    Returns the service object; implicitly returns None when token.pickle
    is absent (no interactive authorization flow is attempted here).
    """
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
        service = build('gmail', 'v1', credentials=creds)
        return service
def create_message(sender, to, subject, message_text):
message = MIMEText(message_text)
message['to'] = to
message['from'] = sender
message['subject'] = subject
return {'raw': base64.urlsafe_b64encode(message.as_string().encode()).decode()}
def send_message(service, user_id, message):
    """Send a prepared message via the Gmail API.

    Args:
        service: authorized Gmail API service instance.
        user_id: sender mailbox id ('me' for the authenticated user).
        message: body dict as produced by create_message().
    Returns:
        The sent message resource dict, or None when the API call failed.
    """
    try:
        sent = (service.users().messages().send(userId=user_id, body=message).execute())
        print('Message Id: %s' % sent['id'])
        return sent
    except HttpError as error:
        # BUG FIX: HttpError was referenced but never imported, so a failed
        # send raised NameError instead of being reported.  The import is
        # added at module level (googleapiclient.errors.HttpError).
        print(error)
| StarcoderdataPython |
6674338 | # unet.py
#
# <NAME>
# 25-10-2019
#
# Implementation of the U-net architecture for image segmentation for the
# Kaggle cloud classifaction competition. Note that the code is somewhat
# specific to the image sizes in this compeitition and would require a
# fair bit of tweaking to adapt to other problems.
import tensorflow as tf
import pdb
from tensorflow.keras.layers import (Dense, Conv2D, Conv2DTranspose,
MaxPool2D, concatenate, ZeroPadding2D)
def U_net(optimizer, activation, metrics):
    """
    Parameters:
        - optimizer (String): Keras optimizer to use
        - activation (String): Keras layer activation function to use in hidden
          layers
        - metrics to use for model evaluation

    Returns: (Model)
        a compiled U-net designed for binary segmentation.

    Note: the graph is hard-coded to 2100x1400x3 inputs; the two
    ZeroPadding2D layers compensate for the odd spatial sizes produced by
    repeated 2x2 pooling, so changing the input shape requires re-deriving
    the padding.
    """
    inputs = tf.keras.Input(shape = (2100, 1400, 3))

    # downsampling layers
    conv_1 = Conv2D(64, (3,3), activation = activation,
                    kernel_initializer = 'he_normal', padding = 'same')(inputs)
    conv_2 = Conv2D(64, (3,3), activation = activation,
                    kernel_initializer = 'he_normal', padding = 'same')(conv_1)
    pool_1 = MaxPool2D((2,2))(conv_2)
    conv_3 = Conv2D(128, (3,3), activation = activation,
                    kernel_initializer = 'he_normal', padding = 'same')(pool_1)
    conv_4 = Conv2D(128, (3,3), activation = activation,
                    kernel_initializer = 'he_normal', padding = 'same')(conv_3)
    pool_2 = MaxPool2D((2,2))(conv_4)
    conv_5 = Conv2D(256, (3,3), activation = activation,
                    kernel_initializer = 'he_normal', padding = 'same')(pool_2)
    conv_6 = Conv2D(256, (3,3), activation = activation,
                    kernel_initializer = 'he_normal', padding = 'same')(conv_5)
    pool_3 = MaxPool2D((2,2))(conv_6)
    conv_7 = Conv2D(512, (3,3), activation = activation,
                    kernel_initializer = 'he_normal', padding = 'same')(pool_3)
    conv_8 = Conv2D(512, (3,3), activation = activation,
                    kernel_initializer = 'he_normal', padding = 'same')(conv_7)
    pool_4 = MaxPool2D((2,2))(conv_8)
    conv_9 = Conv2D(1024, (3,3), activation = activation,
                    kernel_initializer = 'he_normal', padding = 'same')(pool_4)
    conv_10 = Conv2D(1024, (3,3), activation = activation,
                     kernel_initializer = 'he_normal', padding = 'same')(conv_9)

    # upsampling layers (each merge_* is a U-net skip connection to the
    # matching downsampling stage)
    upconv_1 = Conv2DTranspose(512, (2,2), strides = (2, 2), padding = 'same')(conv_10)
    upconv_1 = ZeroPadding2D(padding = ((0, 0), (0, 1)))(upconv_1) # we need to padd with zeroes on the right
    merge_1 = concatenate([conv_8, upconv_1])
    conv_11 = Conv2D(512, (3,3), activation = activation,
                     kernel_initializer = 'he_normal', padding = 'same')(merge_1)
    conv_12 = Conv2D(512, (3,3), activation = activation,
                     kernel_initializer = 'he_normal', padding = 'same')(conv_11)
    upconv_2 = Conv2DTranspose(256, (2, 2), strides = (2, 2), padding='same')(conv_12)
    upconv_2 = ZeroPadding2D(padding = ((0,1),(0,0)))(upconv_2) # we need to padd with zeroes on top
    merge_2 = concatenate([conv_6, upconv_2])
    conv_13 = Conv2D(256, (3,3), activation = activation,
                     kernel_initializer = 'he_normal', padding = 'same')(merge_2)
    conv_14 = Conv2D(256, (3,3), activation = activation,
                     kernel_initializer = 'he_normal', padding = 'same')(conv_13)
    upconv_3 = Conv2DTranspose(128, (2, 2), strides = (2, 2), padding='same')(conv_14)
    merge_3 = concatenate([conv_4, upconv_3])
    conv_15 = Conv2D(128, (3,3), activation = activation,
                     kernel_initializer = 'he_normal', padding = 'same')(merge_3)
    conv_16 = Conv2D(128, (3,3), activation = activation,
                     kernel_initializer = 'he_normal', padding = 'same')(conv_15)
    upconv_4 = Conv2DTranspose(64, (2, 2), strides = (2, 2), padding='same')(conv_16)
    merge_4 = concatenate([conv_2, upconv_4])
    conv_17 = Conv2D(64, (3,3), activation = activation,
                     kernel_initializer = 'he_normal', padding = 'same')(merge_4)
    conv_18 = Conv2D(64, (3,3), activation = activation,
                     kernel_initializer = 'he_normal', padding = 'same')(conv_17)
    # Per-pixel 4-class softmax head.
    output = Dense(4, activation = 'softmax')(conv_18)

    model = tf.keras.Model(inputs, output)
    model.compile(optimizer = optimizer, loss = 'categorical_crossentropy',
                  metrics = metrics)
    return model
s | StarcoderdataPython |
4988056 | <gh_stars>0
###################################################
# Here you'll find the model itself. Tune it, and #
# don't forget to take out all hyperparameters to #
# config files! #
###################################################
from keras.layers import Input, Conv2D, MaxPooling2D, UpSampling2D, \
Flatten, Dense, Lambda, concatenate, Reshape, Dropout
from keras import Model
import os
from keras.models import load_model
from keras.callbacks import TensorBoard
import keras.backend as K
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.model_selection import train_test_split
from utils import ensure_folder, save_array_as_images
from callbacks import SaveCallback
import numpy as np
from layers import SplitBagLayer, _attach_to_pipeline
# Subdirectory (under the run label) where model checkpoints are written.
WEIGHTS_DIRECTORY = 'weights'
# Subdirectory (under the run label) for TensorBoard event files.
TENSORBOARD_DIRECTORY = 'tensorboard-logs'
# TODO: maybe move it to config?
# TODO: HARDCODED link. It can be broken or unaccessable
IMAGE_DIR = '/nfs/nas22.ethz.ch/fs2202/biol_imsb_claassen_1/akozharin/images'
# TODO: make better name for the class
class BagModel(BaseEstimator, ClassifierMixin):
    """Multiple-instance ("bag") classifier with an auxiliary autoencoder.

    Each sample is a bag of images.  A shared convolutional encoder embeds
    every image in the bag, the embeddings are max-aggregated across the bag
    and fed to a binary classifier head, while a shared decoder reconstructs
    the individual images as a second (regularizing) output.  Implements the
    sklearn estimator interface (fit / predict / predict_proba).
    """

    def __init__(self,
                 load_weights_from_file=None,
                 optimizer='adadelta',
                 label='unlabeled',
                 classifier_loss='binary_crossentropy',
                 classifier_activation='sigmoid',
                 decoder_loss='binary_crossentropy',
                 classifier_metrics='accuracy',
                 classifier_loss_weight=1.0,
                 decoder_loss_weight=1.0,
                 num_epochs=10,
                 batch_size=128,
                 verbose=False,
                 save_best_only=True,
                 debug=False):
        """Store hyper-parameters; the Keras model is built lazily in fit().

        `label` names the run (checkpoints/TensorBoard dirs are created under
        it); `load_weights_from_file` skips training and loads a saved model.
        """
        self.optimizer = optimizer
        self.label = label
        self.classifier_loss = classifier_loss
        self.classifier_activation = classifier_activation
        self.decoder_loss = decoder_loss
        self.classifier_metrics = classifier_metrics
        self.classifier_loss_weight = classifier_loss_weight
        self.decoder_loss_weight = decoder_loss_weight
        self.num_epochs = num_epochs
        self.batch_size = batch_size
        self.model_ = None  # set by fit(); trailing underscore per sklearn convention
        self.load_weights_from_file = load_weights_from_file
        self.verbose = verbose
        self.save_best_only = save_best_only
        self.debug = debug

    def _create_model(self, input_shape):
        """Build and compile the two-headed Keras model.

        Args:
            input_shape: shape of one bag, (bag_size, height, width, channels).
        Returns:
            Compiled Model with outputs (classifier_output, decoded_output).
        """
        input_img = Input(shape=input_shape)

        # Create shared encoder.
        encoder_pipeline = [
            Conv2D(64, (3, 3), activation='relu', padding='same'),
            Conv2D(128, (3, 3), activation='relu', padding='same'),
            MaxPooling2D((2, 2), padding='same', strides=2),
            Conv2D(128, (3, 3), activation='relu', padding='same'),
            MaxPooling2D((2, 2), padding='same', strides=2),
            Conv2D(64, (3, 3), activation='relu', padding='same'),
            Conv2D(8, (3, 3), activation='relu', padding='same'),
        ]

        # Split bag into single images to get encoded vectors
        splitted_imgs = SplitBagLayer(bag_size=input_shape[0])(input_img)
        encoded_img_matrices = []
        for single_image in splitted_imgs:
            encoded_img = _attach_to_pipeline(single_image, encoder_pipeline)
            encoded_img_matrices.append(encoded_img)

        # We have v=(vec1, ... , vecN) where N is number of images in one bag
        # Now we need to do aggregation
        concat_matrix = concatenate(
            [Reshape((1, -1))(
                Flatten()(img)
            ) for img in encoded_img_matrices],
            axis=1)

        # Now we have array with shape (num_vectors, latent_features). Let's aggregate them
        # NOTE: Aggregator is based on maximum
        # THIS IS THE PART WHERE WE LOOSE 1 DIMENSION (dimension of bags)
        aggregator = Lambda(lambda matrix: K.max(matrix, axis=1))(concat_matrix)

        # After encoding, we need to classify images
        classifier = Dense(128, activation=self.classifier_activation)(aggregator)
        classifier = Dropout(rate=0.5)(classifier)
        classifier = Dense(1, activation=self.classifier_activation, name='classifier_output')(classifier)

        decoder_pipeline = [
            # TODO: maybe make activation functions tunable?
            Conv2D(128, (3, 3), activation='relu', padding='same'),
            UpSampling2D((2, 2)),
            Conv2D(64, (3, 3), activation='relu', padding='same'),
            UpSampling2D((2, 2)),
            Conv2D(32, (3, 3), activation='relu', padding='same'),
            Conv2D(input_shape[-1], (3, 3), activation='relu', padding='same'),
            # reshape (None, w, h, c) -> (None, 1, w, h, c) where 'w'=width, 'h'=height, 'c'=color_channel
            Reshape((1, *input_shape[1:]))
        ]
        decoded_images = [_attach_to_pipeline(single_image, decoder_pipeline) for single_image in encoded_img_matrices]
        decoded_images = concatenate(decoded_images, axis=1, name='decoded_output')

        model = Model(inputs=[input_img], outputs=[classifier, decoded_images])
        model.compile(optimizer=self.optimizer,
                      loss={'classifier_output': self.classifier_loss, 'decoded_output': self.decoder_loss},
                      loss_weights={'classifier_output': self.classifier_loss_weight,
                                    'decoded_output': self.decoder_loss_weight},
                      metrics={'classifier_output': self.classifier_metrics}
                      )
        return model

    def fit(self, x_train, y_train):
        """Train on bags x_train with bag labels y_train; returns self.

        Labels are binarized (y > 0 -> 1) before training.  Checkpoints go
        to <cwd>/<label>/weights, TensorBoard logs to
        <cwd>/<label>/tensorboard-logs.
        """
        # TODO: Validation of parameters
        # Train/validation split
        x_train, x_val, y_train, y_val = train_test_split(x_train, y_train,
                                                          # TODO: mb make `test_size` tunable?
                                                          test_size=0.4, random_state=42)
        # NOTE: we make category matrix from y_train here!
        y_train = (y_train > 0).astype(int)
        self.model_ = self._create_model(x_train.shape[1:])

        weights_folder = os.path.join(os.getcwd(), self.label, WEIGHTS_DIRECTORY)
        # TODO: hardcoded monitor variable. Move it to config file
        callbacks = [SaveCallback(monitor_variable='val_classifier_output_acc',
                                  save_dir=weights_folder,
                                  model=self.model_,
                                  verbose=self.verbose,
                                  save_best_only=self.save_best_only,
                                  debug=self.debug)]

        # Take care of tensorboard
        tb_folder = os.path.join(os.getcwd(), self.label, TENSORBOARD_DIRECTORY)
        ensure_folder(tb_folder)
        callbacks.append(TensorBoard(log_dir=tb_folder))

        if self.load_weights_from_file:
            self.model_ = load_model(self.load_weights_from_file)
        else:
            # Train it
            self.model_.fit(
                # Train data
                x_train,
                # Test data. Note that each output has its own data to train on!
                {'decoded_output': x_train, 'classifier_output': y_train},
                epochs=self.num_epochs,
                batch_size=self.batch_size,
                shuffle=True,
                validation_data=(x_val, {'classifier_output': y_val, 'decoded_output': x_val}),
                callbacks=callbacks
            )
        return self

    def predict(self, x_data):
        """Return hard 0/1 labels by rounding the classifier scores."""
        return np.round(self.predict_proba(x_data).reshape(-1))

    def predict_proba(self, x_data):
        """Return raw classifier scores for each bag (decoder output is discarded)."""
        # NOTE. We do not return decoded pictures for two reasons:
        # 1. sklearn expect `predict` method to return one value
        # 2. We actually don't need decoded images
        # NOTE: uncomment these two pieces if you want decoded and original pictures to be saved on NAS
        '''
        # TODO: better post-processing of image (mb create some reverse function to pre-processing)
        save_array_as_images((255.*x_data).reshape(-1, *x_data.shape[2:]),
                             os.path.join(os.getcwd(), IMAGE_DIR, 'original'))
        '''
        classes, decoded_imgs = self.model_.predict(x_data)
        '''
        # TODO: better post-processing of image (mb create some reverse function to pre-processing)
        save_array_as_images((255.*x_data).reshape(-1, *x_data.shape[2:]),
        save_array_as_images((255.*decoded_imgs).reshape(-1, *decoded_imgs.shape[2:]),
                             os.path.join(os.getcwd(), IMAGE_DIR, 'decoded'))
        '''
        return classes
| StarcoderdataPython |
11261560 | <filename>homeassistant/components/sense/__init__.py
"""Support for monitoring a Sense energy sensor."""
import asyncio
from datetime import timedelta
import logging
from sense_energy import (
ASyncSenseable,
SenseAPITimeoutException,
SenseAuthenticationException,
)
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD, CONF_TIMEOUT
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.event import async_track_time_interval
from .const import (
ACTIVE_UPDATE_RATE,
DEFAULT_TIMEOUT,
DOMAIN,
SENSE_DATA,
SENSE_DEVICE_UPDATE,
SENSE_DEVICES_DATA,
SENSE_DISCOVERED_DEVICES_DATA,
SENSE_TIMEOUT_EXCEPTIONS,
)
_LOGGER = logging.getLogger(__name__)

# Entity platforms this integration forwards its config entries to.
PLATFORMS = ["binary_sensor", "sensor"]

# Legacy YAML configuration schema; imported into a config entry on setup.
CONFIG_SCHEMA = vol.Schema(
    {
        DOMAIN: vol.Schema(
            {
                vol.Required(CONF_EMAIL): cv.string,
                vol.Required(CONF_PASSWORD): cv.string,
                vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
            }
        )
    },
    extra=vol.ALLOW_EXTRA,
)
class SenseDevicesData:
    """Holds the most recent realtime data snapshot for each Sense device."""

    def __init__(self):
        """Start with no device data recorded."""
        self._data_by_device = {}

    def set_devices_data(self, devices):
        """Replace the stored snapshot with a fresh device update."""
        self._data_by_device = {device["id"]: device for device in devices}

    def get_device_by_id(self, sense_device_id):
        """Return the latest data for a device id, or None if unknown."""
        return self._data_by_device.get(sense_device_id)
async def async_setup(hass: HomeAssistant, config: dict):
    """Set up the Sense component from YAML (legacy path).

    When a YAML section is present its credentials are imported into a
    config entry; otherwise setup happens purely via config entries.
    """
    hass.data.setdefault(DOMAIN, {})
    conf = config.get(DOMAIN)
    if not conf:
        return True

    # Kick off the import flow; async_setup_entry does the real work.
    hass.async_create_task(
        hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": SOURCE_IMPORT},
            data={
                CONF_EMAIL: conf[CONF_EMAIL],
                CONF_PASSWORD: conf[CONF_PASSWORD],
                CONF_TIMEOUT: conf[CONF_TIMEOUT],
            },
        )
    )

    return True
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Set up Sense from a config entry.

    Authenticates against the Sense cloud, stores shared state for the
    entity platforms, forwards setup to them, and starts a fixed-interval
    realtime poll.
    """
    entry_data = entry.data
    email = entry_data[CONF_EMAIL]
    password = entry_data[CONF_PASSWORD]
    timeout = entry_data[CONF_TIMEOUT]

    gateway = ASyncSenseable(api_timeout=timeout, wss_timeout=timeout)
    gateway.rate_limit = ACTIVE_UPDATE_RATE
    try:
        await gateway.authenticate(email, password)
    except SenseAuthenticationException:
        _LOGGER.error("Could not authenticate with sense server")
        return False
    except SENSE_TIMEOUT_EXCEPTIONS:
        # Transient failure: ask Home Assistant to retry setup later.
        raise ConfigEntryNotReady

    sense_devices_data = SenseDevicesData()
    sense_discovered_devices = await gateway.get_discovered_device_data()

    # Shared per-entry state consumed by the sensor platforms.
    hass.data[DOMAIN][entry.entry_id] = {
        SENSE_DATA: gateway,
        SENSE_DEVICES_DATA: sense_devices_data,
        SENSE_DISCOVERED_DEVICES_DATA: sense_discovered_devices,
    }

    for component in PLATFORMS:
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, component)
        )

    async def async_sense_update(now):
        """Retrieve latest state."""
        try:
            await gateway.update_realtime()
        except SenseAPITimeoutException:
            _LOGGER.error("Timeout retrieving data")
        data = gateway.get_realtime()
        if "devices" in data:
            sense_devices_data.set_devices_data(data["devices"])
        # Notify entities listening for this monitor's updates.
        async_dispatcher_send(hass, f"{SENSE_DEVICE_UPDATE}-{gateway.sense_monitor_id}")

    # Keep the interval-cancel callback so async_unload_entry can stop polling.
    hass.data[DOMAIN][entry.entry_id][
        "track_time_remove_callback"
    ] = async_track_time_interval(
        hass, async_sense_update, timedelta(seconds=ACTIVE_UPDATE_RATE)
    )

    return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
    """Unload a config entry: unload all platforms and stop the poll timer."""
    unload_ok = all(
        await asyncio.gather(
            *[
                hass.config_entries.async_forward_entry_unload(entry, component)
                for component in PLATFORMS
            ]
        )
    )

    # The polling interval is cancelled regardless of whether the platform
    # unloads succeeded.
    track_time_remove_callback = hass.data[DOMAIN][entry.entry_id][
        "track_time_remove_callback"
    ]
    track_time_remove_callback()

    if unload_ok:
        hass.data[DOMAIN].pop(entry.entry_id)

    return unload_ok
| StarcoderdataPython |
6415187 | <reponame>aliaskar25/instagram_copy
from rest_framework import routers
from .views import PostView
# Register PostView at the API root; router.urls generates the route list.
router = routers.DefaultRouter()
router.register('', PostView)

urlpatterns = router.urls
4875143 | <reponame>ermekaitygulov/STIT
## Pretrained models paths
e4e = './pretrained_models/e4e_ffhq_encode.pt'
stylegan2_ada_ffhq = 'pretrained_models/ffhq.pkl'
ir_se50 = './pretrained_models/model_ir_se50.pth'

## Dirs for output files
checkpoints_dir = './checkpoints'

## Keywords
# Tag used to mark PTI result checkpoints.
pti_results_keyword = 'STIT'

## Edit directions
interfacegan_folder = 'editings/w_directions/'
styleclip_fs3 = 'editings/styleclip_global/fs3.npy'
stylespace_mean_std = 'editings/styleclip_global/S_mean_std'

# 68-point face landmark predictor weights (used for alignment).
shape_predictor_path = 'pretrained_models/shape_predictor_68_face_landmarks.dat'
# Face segmentation model weights.
segmentation_model_path = 'pretrained_models/79999_iter.pth'
| StarcoderdataPython |
1960021 | """Post utilities"""
import re
from typing import Union
from koabot import koakuma
from koabot.utils.base import list_contains
def get_name_or_id(url: str, /, *, start: Union[str, list, None] = None,
                   end: Union[str, list, None] = None,
                   pattern: str = "") -> Union[str, None]:
    """Get a name or an id from a url.

    Arguments:
        url::str
            Url to extract the id from.
    Keywords:
        start::str | list
            Marker(s) to start looking from.  When several markers match,
            the last matching one wins (preserved behaviour).
        end::str | list
            Marker(s) to stop at.  Defaults to trimming anything past a
            question mark.
        pattern::str (regex pattern)
            Applied after <start> and <end> have done their job.
    Returns:
        The extracted substring, or None when no start marker matches or
        when *pattern* is given but finds nothing.
    """
    # None sentinels replace the old mutable list defaults ([] and ['?']).
    if start is None:
        start = []
    elif not isinstance(start, list):
        start = [start]

    starting_match = None
    for marker in start:
        if marker in url:
            starting_match = marker
    if not starting_match:
        return None

    # Index 1: everything after the first occurrence of the start marker.
    url = url.split(starting_match)[1]

    if end is None:
        end = ['?']
    elif not isinstance(end, list):
        end = [end]

    ending_match = None
    for marker in end:
        if marker in url:
            ending_match = marker
    if ending_match:
        # Index 0: everything before the end marker.
        url = url.split(ending_match)[0]

    if pattern:
        matches = re.findall(pattern, url)
        # Guard against IndexError when the pattern matches nothing.
        return matches[0] if matches else None

    return url
def combine_tags(tags: Union[str, list], /, *, maximum: int = 5) -> str:
    """Combine tags into a readable, comma-separated phrase.

    Arguments:
        tags::str or list
            Whitespace-separated string or list of tag strings.
    Keywords:
        maximum::int
            How many tags are shown before collapsing into 'and N more'.
    """
    words = tags.split() if isinstance(tags, str) else tags
    total = len(words)

    if total <= 1:
        # Zero or one tag: nothing to join with 'and'.
        return ''.join(words).strip().replace('_', ' ')

    shown = words[:maximum]
    if total > maximum:
        phrase = f"{', '.join(shown)} and {total - maximum} more"
    else:
        phrase = f"{', '.join(shown[:-1])} and {shown[-1]}"
    return phrase.strip().replace('_', ' ')
def post_is_missing_preview(post, /, *, board: str = 'danbooru') -> bool:
    """Determine whether or not a post is missing its preview

    Arguments:
        post::json object

    Keywords:
        board::str
            The board to check the rules with. Default is 'danbooru'
    """
    if board == 'e621':
        # e621: hidden when tagged with any configured "no preview" tag and
        # the post is not rated safe.
        return list_contains(post['tags']['general'], koakuma.bot.rules['no_preview_tags'][board]) and post['rating'] != 's'

    if board == 'sankaku':
        # sankaku posts are always treated as missing a preview.
        return True

    # danbooru (default): "no preview" tags or banned posts.
    return list_contains(post['tag_string_general'].split(), koakuma.bot.rules['no_preview_tags'][board]) or post['is_banned']
| StarcoderdataPython |
6435119 | import os
from ConfigParser import ConfigParser
from ingenico.connect.sdk.communicator_configuration import CommunicatorConfiguration
from ingenico.connect.sdk.defaultimpl.authorization_type import AuthorizationType
from ingenico.connect.sdk.defaultimpl.default_authenticator import DefaultAuthenticator
from ingenico.connect.sdk.defaultimpl.default_connection import DefaultConnection
from ingenico.connect.sdk.factory import Factory
from ingenico.connect.sdk.meta_data_provider import MetaDataProvider
from ingenico.connect.sdk.session import Session
"""File containing a number of creation methods for integration tests"""
PROPERTIES_URL = os.path.abspath(os.path.join(__file__, os.pardir, "../resources/configuration.ini"))
PROPERTIES_URL_PROXY = os.path.abspath(os.path.join(__file__, os.pardir, "../resources/configuration.proxy.ini"))
# API_KEY_ID, SECRET_API_KEY and MERCHANT_ID are stored in OS and should be retrieved
API_KEY_ID = os.getenv("connect.api.apiKeyId")
SECRET_API_KEY = os.getenv("connect.api.secretApiKey")
MERCHANT_ID = str(os.getenv("connect.api.merchantId"))
if API_KEY_ID is None:
raise EnvironmentError("could not access environment variable connect.api.apiKeyId required for testing")
if SECRET_API_KEY is None:
raise EnvironmentError("could not access environment variable connect.api.secretApiKey required for testing")
if MERCHANT_ID == 'None':
raise EnvironmentError("could not access environment variable connect.api.merchantId required for testing")
def create_communicator_configuration(properties_url=PROPERTIES_URL, max_connections=False):
    """Convenience method to create a communicator configuration that connects
    to a host stored in system variables.

    Raises RuntimeError when the properties file cannot be read.
    """
    try:
        parser = ConfigParser()
        # Parse the file once via the open handle; the old code called both
        # parser.read() and parser.readfp(), parsing the file twice.  Using
        # only the handle also makes a missing file surface as IOError
        # (parser.read silently ignores unreadable files).
        with open(properties_url) as f:
            parser.readfp(f)
        configuration = CommunicatorConfiguration(parser, api_key_id=API_KEY_ID, secret_api_key=SECRET_API_KEY,
                                                  max_connections=max_connections)
    except IOError as e:
        raise RuntimeError("Unable to read configuration", e)
    host = os.getenv("connect.api.endpoint.host")
    if host is not None:
        scheme = os.getenv("connect.api.endpoint.scheme", "https")
        port = int(os.getenv("connect.api.endpoint.port", -1))
        # NOTE(review): when the port variable is unset this embeds ':-1' in
        # the endpoint URL; presumably downstream code tolerates it -- verify.
        configuration.api_endpoint = "{2}://{0}:{1}".format(host, port, scheme)
    return configuration
def create_session():
    """Build a Session against the endpoint described by environment variables.

    Raises RuntimeError when connect.api.endpoint.host is not set.
    """
    host = os.getenv("connect.api.endpoint.host")
    scheme = os.getenv("connect.api.endpoint.scheme", "https")
    port = int(os.getenv("connect.api.endpoint.port", -1))
    if not host:
        raise RuntimeError("unable to read environment variables to find api_endpoint")
    api_endpoint = "{2}://{0}:{1}".format(host, port, scheme)
    # V1HMAC authentication with the API key pair taken from the environment.
    authenticator = DefaultAuthenticator(api_id_key=API_KEY_ID, secret_api_key=SECRET_API_KEY,
                                         authorization_type=AuthorizationType.V1HMAC)
    return Session(api_endpoint=api_endpoint, authenticator=authenticator,
                   connection=DefaultConnection(3, 3), meta_data_provider=MetaDataProvider("Ingenico"))
def create_client(max_connections=False):
    """Create a client from the default test configuration, tagged with test meta info."""
    configuration = create_communicator_configuration(max_connections=max_connections)
    return Factory.create_client_from_configuration(configuration).with_client_meta_info('{"test":"test"}')
def create_client_with_proxy(max_connections=False):
    """Create a client using the proxy test configuration, tagged with test meta info."""
    configuration = create_communicator_configuration(PROPERTIES_URL_PROXY, max_connections=max_connections)
    return Factory.create_client_from_configuration(configuration).with_client_meta_info('{"test":"test"}')
| StarcoderdataPython |
1985468 | """
input
"""
name = input("Please enter your name:")
print("Hello,")
print(name)
| StarcoderdataPython |
1952860 | from .material_lib import MaterialLib
from .treat_material import TreatMaterial
from .libraries import *
from . import colors
| StarcoderdataPython |
8014251 | import json
import unittest
from baseSetUp import Base
class EditRides(Base):
    """Integration tests for PUT /api/v1/users/rides/<id> (editing ride offers)."""

    def setUp(self):
        """Create one ride offer to edit, then prepare a replacement payload."""
        super().setUp()
        # Post an initial ride so an offer with id 1 exists.
        # NOTE(review): self.ride is used here before being reassigned below, so it
        # is presumably initialised in Base.setUp() -- confirm against baseSetUp.
        self.app.post('/api/v1/users/rides',
                      data=json.dumps(self.ride),
                      content_type='application/json',
                      headers=self.headers)
        # Replacement details used by the edit tests below.
        self.ride = {
            "start point": "Juja",
            "destination": "Ngara Market",
            "route": "Thika superhighway",
            "start time": "2030-09-10 6:00",
            "available space": 5
        }

    def tearDown(self):
        super().tearDown()

    def test_can_edit_ride_offer(self):
        """test that user can change offer details """
        response = self.app.put('/api/v1/users/rides/1',
                                data=json.dumps(self.ride),
                                content_type='application/json',
                                headers=self.headers)
        self.assertEqual(response.status_code, 200)
        response_data = json.loads(response.get_data().decode('utf-8'))
        self.assertEqual(response_data['start point'], 'Juja')

    def test_user_cannot_edit_ride_offer_he_does_not_own(self):
        """test that user cannot change offer details he does not own """
        response = self.app.put('/api/v1/users/rides/1',
                                data=json.dumps(self.ride),
                                content_type='application/json',
                                headers=self.headers_for_passenger)
        self.assertEqual(response.status_code, 401)
        response_data = json.loads(response.get_data().decode('utf-8'))
        # The continuation line below is part of the expected message string;
        # its leading text must match the server's response verbatim.
        self.assertEqual(response_data['message'], 'You cannot change \
details of ride offer you do not own')

    def test_can_edit_non_existing_ride_offer(self):
        """test that user cannot change details of a non-existing offer"""
        response = self.app.put('/api/v1/users/rides/-1',
                                data=json.dumps(self.ride),
                                content_type='application/json',
                                headers=self.headers)
        self.assertEqual(response.status_code, 404)

    def test_cannot_update_ride_with_wrong_date_time(self):
        """test for invalid date format when updating offer details
        The date format is Month day Year hour:minutes."""
        # NOTE(review): self.ride_with_wrong_date is not defined in this class,
        # presumably it comes from Base.setUp() -- verify.
        response = self.app.put('/api/v1/users/rides/1',
                                data=json.dumps(self.ride_with_wrong_date),
                                content_type='application/json',
                                headers=self.headers)
        self.assertEqual(response.status_code, 400)
        response_data = json.loads(response.get_data().decode('utf-8'))
        self.assertEqual(response_data['message'],
                         "use correct format for date and time.")


if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
11343752 | import os
import sqlite3
# Bootstrap Django so the ORM can be used from this standalone seed script.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Hotel_Booking.settings")
import django
django.setup()
from Aconchego.models import Room, Hotel, Fotos

# Create the hotel record.
hotel = Hotel(
    name="Kirimizi Hotel",
    localizacao='Pemba, Mozambique',
    categoria =4,
    descricao="Kirimizi Hotel & Restaurante disponibiliza acomodações com um restaurante, estacionamento privado gratuito, uma piscina exterior e um bar. Quarto espaçoso e confortável , com vista maravilhosa da praia, do recanto da piscina.",
    price = 436,
    cover = 'img/kirimizi/k1.jpg'
)
hotel.save()

# Create one room and attach it to the hotel (hotel must be saved first).
room = Room(
    tipo='Quarto Standard',
    descricao='Quarto de hotel composto por cama Queen size ou duas camas single, varanda com vistas para o jardim e mar.',
    Numero_cama=1,
    cover = 'img/kirimizi/k2.jpg',
    price= 9230,
)
room.save()
hotel.rooms.add(room)

# Create the photo gallery entries and attach them to the hotel.
f1 =Fotos(path='img/kirimizi/k3.jpg')
f2 =Fotos(path='img/kirimizi/k4.jpg')
f3 =Fotos(path='img/kirimizi/k5.jpg')
f4 =Fotos(path='img/kirimizi/k6.jpg')
f5 =Fotos(path='img/kirimizi/k7.jpg')
f6 =Fotos(path='img/kirimizi/k8.jpg')
f7 =Fotos(path='img/kirimizi/k9.jpg')
f8 =Fotos(path='img/kirimizi/k10.jpg')
f1.save()
f2.save()
f3.save()
f4.save()
f5.save()
f6.save()
f7.save()
f8.save()
hotel.fotos.add(f1, f2, f3, f4, f5, f6, f7, f8)
hotel.save()
print('Hello There')
3279496 | # Generated by Django 3.1.3 on 2020-11-28 05:18
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make Enrollment.reward nullable and blankable."""

    dependencies = [
        ('bagiapi', '0003_auto_20201128_1209'),
    ]

    operations = [
        migrations.AlterField(
            model_name='enrollment',
            name='reward',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
| StarcoderdataPython |
4819081 | import requests
from django.shortcuts import render
from django.conf import settings
def oauthtest(request):
    """Render a page with the OAuth2 authorization link for this deployment."""
    scheme = 'https://' if request.is_secure() else 'http://'
    redirect_base = scheme + request.META['HTTP_HOST']
    # Same URL layout as before: <api>o/authorize/?...&redirect_uri=<here>/oauthdone/
    authorize_link = (
        '{}o/authorize/?response_type=code&client_id={}&redirect_uri={}/oauthdone/'
        .format(settings.API_URL, settings.OAUTH_CLIENT_ID, redirect_base)
    )
    return render(request, 'oauthtest.html', {'link': authorize_link})
def oauthdone(request):
    """Exchange the authorization code for a token, then show the user's accounts."""
    scheme = 'https://' if request.is_secure() else 'http://'
    redirect_uri = '{}{}/oauthdone/'.format(scheme, request.META['HTTP_HOST'])
    # Exchange the one-time ?code= from the provider for an access token.
    token_response = requests.post(settings.API_URL + 'o/token/', data={
        'grant_type': 'authorization_code',
        'code': request.GET['code'],
        'redirect_uri': redirect_uri,
        'client_id': settings.OAUTH_CLIENT_ID,
        'client_secret': settings.OAUTH_CLIENT_SECRET,
    })
    oauth = token_response.json()
    # Fetch the user object (accounts expanded) with the fresh bearer token.
    auth_headers = {
        'Authorization': oauth['token_type'] + ' ' + oauth['access_token'],
    }
    user_response = requests.get(settings.API_URL + 'api/users/me/?expand_accounts=True',
                                 headers=auth_headers)
    return render(request, 'accounts.html', {
        'user': user_response.json(),
    })
| StarcoderdataPython |
6452357 | # Generated by Django 2.2.8 on 2020-04-10 01:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: repoint Question.exam FK (CASCADE, related_name='questions')."""

    dependencies = [
        ('exams', '0007_auto_20200410_0111'),
    ]

    operations = [
        migrations.AlterField(
            model_name='question',
            name='exam',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='questions', to='exams.Exam'),
        ),
    ]
| StarcoderdataPython |
325765 | <gh_stars>10-100
# ASP solver test fixture: ground program in smodels numeric format plus the
# expected solver output.
# NOTE(review): "input" and "output" shadow Python builtins, but the test
# harness presumably reads these module attributes by name -- do not rename.
input = """
8 2 2 3 0 0
8 2 4 5 0 0
8 2 6 7 0 0
6 0 4 0 2 3 4 5 1 1 1 1
0
4 c
3 b
7 f
2 a
6 e
5 d
0
B+
0
B-
1
0
1
"""

# Expected result: an answer with cost 2 at level 1.
output = """
COST 2@1
"""
| StarcoderdataPython |
1824433 | <reponame>IntelPython/scikit-ipp
# -*- coding: utf-8 -*-
"""
==============
Edge operators
==============
Edge operators are used in image processing within edge detection algorithms.
They are discrete differentiation operators, computing an approximation of the
gradient of the image intensity function.
"""
import numpy as np
import matplotlib.pyplot as plt
from skipp import filters
from skimage.data import camera
from skimage.util import compare_images
# Use the float32 "camera" test image.
image = camera().astype(np.float32)
# Edge maps from the two discrete gradient operators.
edge_prewitt = filters.prewitt(image)
edge_sobel = filters.sobel(image)
# Show both results side by side with linked axes.
fig, axes = plt.subplots(ncols=2, sharex=True, sharey=True,
                         figsize=(8, 4))
axes[0].imshow(edge_prewitt, cmap=plt.cm.gray)
axes[0].set_title('Prewitt Edge Detection')
axes[1].imshow(edge_sobel, cmap=plt.cm.gray)
axes[1].set_title('Sobel Edge Detection')
for ax in axes:
    ax.axis('off')
plt.tight_layout()
plt.show()
| StarcoderdataPython |
3357788 | """
Defines the tbmodels command-line interface.
"""
import os
from collections.abc import Iterable
from functools import singledispatch
import click
import bands_inspect as bi
import symmetry_representation as sr
from . import Model
@click.group()
def cli():
    """Entry point of the ``tbmodels`` command-line interface."""
    pass
def _output_option(**kwargs):
    # Shared "--output/-o" option factory; per-command kwargs (default, help) vary.
    return click.option('--output', '-o', type=click.Path(dir_okay=False), **kwargs)
# Shared "--input/-i" option: path of the HDF5 file holding the input model.
_input_option = click.option(  # pylint: disable=invalid-name
    '--input',
    '-i',
    type=click.Path(exists=True, dir_okay=False),
    default='model.hdf5',
    help='File containing the input model (in HDF5 format).'
)
def _read_input(input):  # pylint: disable=redefined-builtin
    """Load and return a Model from the given HDF5 file, echoing progress."""
    click.echo("Reading initial model from file '{}' ...".format(input))
    return Model.from_hdf5_file(input)
def _write_output(model, output):
    """Write the given Model to an HDF5 file, echoing progress."""
    click.echo("Writing output model to file '{}' ...".format(output))
    model.to_hdf5_file(output)
    click.echo("Done!")
@cli.command(short_help='Parse Wannier90 output files to an HDF5 file.')
@click.option(
    '--folder',
    '-f',
    type=click.Path(exists=True, file_okay=False),
    default='.',
    help='Directory containing the Wannier90 output files.'
)
@click.option('--prefix', '-p', type=str, default='wannier', help='Common prefix of the Wannier90 output files.')
@click.option(
    '--pos-kind',
    type=click.Choice(['wannier', 'nearest_atom']),
    default='wannier',
    help="Which position to use for the orbitals."
)
@_output_option(default='model.hdf5', help='Path of the output file.')
def parse(folder, prefix, output, pos_kind):
    """
    Parse Wannier90 output files and create an HDF5 file containing the tight-binding model.
    """
    click.echo("Parsing output files '{}*' ...".format(os.path.join(folder, prefix)))
    model = Model.from_wannier_folder(folder=folder, prefix=prefix, ignore_orbital_order=True, pos_kind=pos_kind)
    _write_output(model, output)
@cli.command(short_help='Create symmetrized tight-binding model.')
@_input_option
@_output_option(default='model_symmetrized.hdf5', help='Output file for the symmetrized model.')
@click.option(
    '--symmetries',
    '-s',
    type=click.Path(),
    default='symmetries.hdf5',
    help='File containing symmetry_representation.SymmetryGroup objects (in HDF5 form).'
)
@click.option(
    '--full-group/--no-full-group',
    '-f',
    default=None,
    help="""
    Full group: The full symmetry group is given in the symmetries.
    No full group: The symmetries only contain a generating subset of the full group. Overrides the option given in the symmetries file (if any).
    """
)
def symmetrize(input, output, symmetries, full_group):  # pylint: disable=redefined-builtin
    """
    Symmetrize tight-binding model with given symmetry group(s).
    """
    model = _read_input(input)
    click.echo("Reading symmetries from file '{}' ...".format(symmetries))
    sym = sr.io.load(symmetries)
    # Dispatch on the loaded object's type (group, operation, or nested lists).
    model_sym = _symmetrize(sym, model, full_group)  # pylint: disable=assignment-from-no-return
    _write_output(model_sym, output)
@singledispatch
def _symmetrize(sym, model, full_group):  # pylint: disable=unused-argument
    """
    Implementation for the symmetrization procedure. The singledispatch is used
    to treat (nested) lists of symmetries or symmetry groups.
    """
    # Base overload: only reached for unsupported argument types.
    raise ValueError("Invalid type '{}' for _symmetrize".format(type(sym)))
@_symmetrize.register(Iterable)
def _(sym, model, full_group):
    # Fold over a (possibly nested) collection of symmetries / groups.
    # NOTE(review): str is also Iterable, so a stray string would be iterated
    # character-by-character -- confirm callers never pass strings here.
    for s in sym:
        model = _symmetrize(s, model, full_group)  # pylint: disable=assignment-from-no-return
    return model
@_symmetrize.register(sr.SymmetryGroup)
def _(sym, model, full_group):  # pylint: disable=missing-docstring
    # Apply a whole symmetry group; an explicit full_group flag from the CLI
    # overrides the flag stored on the group.
    symmetries = sym.symmetries
    if full_group is None:
        full_group = sym.full_group
    click.echo(
        "Symmetrizing model with {} symmetr{}, full_group={} ...".format(
            len(symmetries), 'y' if len(symmetries) == 1 else 'ies', full_group
        )
    )
    return model.symmetrize(symmetries=symmetries, full_group=full_group)
@_symmetrize.register(sr.SymmetryOperation)
def _(sym, model, full_group):  # pylint: disable=missing-docstring
    # Wrap a single operation into a one-element group and recurse.
    sym_group = sr.SymmetryGroup(
        symmetries=[sym],
        full_group=full_group or False  # catches 'None', does nothing for 'True' or 'False'
    )
    return _symmetrize(sym_group, model, full_group)
@cli.command(short_help="Slice specific orbitals from model.")
@_input_option
@_output_option(default='model_sliced.hdf5', help='Output file for the sliced model.')
@click.argument(
    'slice-idx',
    type=int,
    nargs=-1,
)  # pylint: disable=redefined-builtin
def slice(input, output, slice_idx):  # pylint: disable=redefined-builtin
    """
    Create a model containing only the orbitals given in the SLICE_IDX.
    """
    # NOTE: the function name deliberately shadows the builtin "slice";
    # click derives the CLI command name from the function name.
    model = _read_input(input)
    click.echo("Slicing model with indices {} ...".format(slice_idx))
    model_slice = model.slice_orbitals(slice_idx=slice_idx)
    _write_output(model_slice, output)
@cli.command(short_help="Calculate energy eigenvalues.")
@_input_option
@click.option(
    '-k',
    '--kpoints',
    type=click.Path(exists=True, dir_okay=False),
    default='kpoints.hdf5',
    help='File containing the k-points for which the eigenvalues are evaluated.'
)
@_output_option(default='eigenvals.hdf5', help='Output file for the energy eigenvalues.')
def eigenvals(input, kpoints, output):  # pylint: disable=redefined-builtin
    """
    Calculate the energy eigenvalues for a given set of k-points (in reduced coordinates). The input and output is given in an HDF5 file.
    """
    model = _read_input(input)
    click.echo("Reading kpoints from file '{}' ...".format(kpoints))
    kpts = bi.io.load(kpoints)
    # Accept either a raw k-point list or an EigenvalsData container.
    if isinstance(kpts, bi.eigenvals.EigenvalsData):
        kpts = kpts.kpoints
    click.echo("Calculating energy eigenvalues ...")
    eigenvalues = bi.eigenvals.EigenvalsData.from_eigenval_function(kpoints=kpts, eigenval_function=model.eigenval)
    click.echo("Writing kpoints and energy eigenvalues to file '{}' ...".format(output))
    bi.io.save(eigenvalues, output)
    click.echo("Done!")
| StarcoderdataPython |
6605128 | from typing import Callable, Any
from py4j.java_gateway import JavaObject
from keanu.functional.hash_shortener import shorten_hash
class BiConsumer:
    """Python-side proxy implementing Java's ``java.util.function.BiConsumer``."""

    def __init__(self, lambda_function: Callable[[JavaObject, JavaObject], None]) -> None:
        # Callable invoked by accept(); wrapped so py4j can call it from Java.
        self.lambda_function = lambda_function

    def accept(self, arg1: JavaObject, arg2: JavaObject) -> None:
        """
        >>> c = BiConsumer(lambda x,y : print(x + y))
        >>> c.accept("foo", "bar")
        foobar
        """
        self.lambda_function(arg1, arg2)

    def hashCode(self) -> int:
        # Java-compatible hash for the proxy, shortened to fit Java's int range.
        return shorten_hash(hash(self.lambda_function))

    class Java:
        # Tells py4j which Java interface this object implements.
        implements = ["java.util.function.BiConsumer"]
| StarcoderdataPython |
1746042 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
# Description: delivering inputs and targets for the dlrm benchmark
# The inpts and outputs are used according to the following two option(s)
# 1) random distribution, generated and loaded based on uniform distribution
# 2) synthetic data, the synthetic pre-generated data would be loaded.
from __future__ import absolute_import, division, print_function, unicode_literals
import sys
import numpy as np
from numpy import random as ra
import torch
from torch.utils.data import Dataset # , RandomSampler
class RandomDataset(Dataset):
    """ Uniform distribution """

    def __init__(
        self,
        m_den,
        ln_emb,
        data_size,
        num_batches,
        mini_batch_size,
        num_indices_per_lookup,
        num_indices_per_lookup_fixed,
        num_targets=1,
        round_targets=False,
        data_generation="random",
        trace_file="",
        enable_padding=False,
        reset_seed_on_access=False,
        rand_seed=0
    ):
        # compute batch size
        nbatches = int(np.ceil((data_size * 1.0) / mini_batch_size))
        if num_batches != 0:
            # An explicit num_batches overrides the size-derived count, and
            # data_size is recomputed to match.
            nbatches = num_batches
            data_size = nbatches * mini_batch_size
        # print("Total number of batches %d" % nbatches)

        # save args (recompute data_size if needed)
        self.m_den = m_den                    # number of dense features
        self.ln_emb = ln_emb                  # per-embedding-table sizes
        self.data_size = data_size
        self.num_batches = nbatches
        self.mini_batch_size = mini_batch_size
        self.num_indices_per_lookup = num_indices_per_lookup
        self.num_indices_per_lookup_fixed = num_indices_per_lookup_fixed
        self.num_targets = num_targets
        self.round_targets = round_targets
        self.data_generation = data_generation
        self.trace_file = trace_file
        self.enable_padding = enable_padding
        self.reset_seed_on_access = reset_seed_on_access
        self.rand_seed = rand_seed

    def reset_numpy_seed(self, numpy_rand_seed):
        # Reseed numpy's global RNG (used by the batch generators).
        np.random.seed(numpy_rand_seed)
        # torch.manual_seed(numpy_rand_seed)

    def __getitem__(self, index):
        """Return one whole mini-batch (X, lS_o, lS_i, T) for batch `index`."""
        if isinstance(index, slice):
            return [
                self[idx] for idx in range(
                    index.start or 0, index.stop or len(self), index.step or 1
                )
            ]

        # WARNING: reset seed on access to first element
        # (e.g. if same random samples needed across epochs)
        if self.reset_seed_on_access and index == 0:
            self.reset_numpy_seed(self.rand_seed)

        # number of data points in a batch (last batch may be short)
        n = min(self.mini_batch_size, self.data_size - (index * self.mini_batch_size))

        # generate a batch of dense and sparse features
        # NOTE(review): X/lS_o/lS_i are only assigned for "random" generation;
        # any other data_generation value would raise NameError below.
        if self.data_generation == "random":
            (X, lS_o, lS_i) = generate_uniform_input_batch(
                self.m_den,
                self.ln_emb,
                n,
                self.num_indices_per_lookup,
                self.num_indices_per_lookup_fixed
            )

        # generate a batch of target (probability of a click)
        T = generate_random_output_batch(n, self.num_targets, self.round_targets)

        return (X, lS_o, lS_i, T)

    def __len__(self):
        # WARNING: note that we produce bacthes of outputs in __getitem__
        # therefore we should use num_batches rather than data_size below
        return self.num_batches
def collate_wrapper_random(list_of_tuples):
    """Collate function for DataLoader(batch_size=1): unwrap the single batch.

    Each element of *list_of_tuples* is a pre-built (X, lS_o, lS_i, T) batch;
    only the first is used. The per-table offset tensors are stacked into one
    2-D tensor, everything else passes through unchanged.
    """
    dense, offsets, indices, targets = list_of_tuples[0]
    return dense, torch.stack(offsets), indices, targets
def make_random_data_and_loader(args, ln_emb, m_den):
    """Build a RandomDataset plus a DataLoader over it from the CLI args.

    :param args: namespace with data_size, num_batches, mini_batch_size, etc.
    :param ln_emb: per-embedding-table sizes.
    :param m_den: number of dense features.
    :returns: (dataset, dataloader) pair.
    """
    train_data = RandomDataset(
        m_den,
        ln_emb,
        args.data_size,
        args.num_batches,
        args.mini_batch_size,
        args.num_indices_per_lookup,
        args.num_indices_per_lookup_fixed,
        1,  # num_targets
        args.round_targets,
        args.data_generation,
        args.data_trace_file,
        args.data_trace_enable_padding,
        reset_seed_on_access=True,
        rand_seed=args.numpy_rand_seed
    )  # WARNING: generates a batch of lookups at once

    # batch_size=1 because each dataset item is already a whole mini-batch;
    # collate_wrapper_random unwraps it.
    train_loader = torch.utils.data.DataLoader(
        train_data,
        batch_size=1,
        shuffle=False,
        num_workers=args.num_workers,
        collate_fn=collate_wrapper_random,
        pin_memory=False,
        drop_last=False,  # True
    )
    return train_data, train_loader
def generate_random_output_batch(n, num_targets, round_targets=False):
    """Draw an (n, num_targets) float32 tensor of click probabilities in [0, 1).

    When *round_targets* is True, the probabilities are rounded to hard
    0.0/1.0 labels.
    """
    probs = ra.rand(n, num_targets).astype(np.float32)
    if round_targets:
        probs = np.round(probs).astype(np.float32)
    return torch.tensor(probs)
# uniform distribution (input data)
def generate_uniform_input_batch(
    m_den,
    ln_emb,
    n,
    num_indices_per_lookup,
    num_indices_per_lookup_fixed,
):
    """Generate one batch of dense and sparse features from U[0, 1).

    :returns: (Xt, lS_emb_offsets, lS_emb_indices) where the latter two are
        per-embedding-table lists of offset and index tensors.
    """
    # dense feature
    # NOTE(review): only a single dense row is generated (1 x m_den), not n
    # rows -- the commented-out line shows the n-row variant, so this looks
    # deliberate (presumably broadcast downstream); confirm.
    #Xt = torch.tensor(ra.rand(n, m_den).astype(np.float32))
    Xt = torch.tensor(ra.rand(1, m_den).astype(np.float32))

    # sparse feature (sparse indices)
    lS_emb_offsets = []
    lS_emb_indices = []
    # for each embedding generate a list of n lookups,
    # where each lookup is composed of multiple sparse indices
    for size in ln_emb:
        lS_batch_offsets = []
        lS_batch_indices = []
        offset = 0
        for _ in range(n):
            # num of sparse indices to be used per embedding (between
            if num_indices_per_lookup_fixed:
                sparse_group_size = np.int64(num_indices_per_lookup)
            else:
                # random between [1,num_indices_per_lookup])
                r = ra.random(1)
                sparse_group_size = np.int64(
                    np.round(max([1.0], r * min(size, num_indices_per_lookup)))
                )
            # sparse indices to be used per embedding
            r = ra.random(sparse_group_size)
            sparse_group = np.unique(np.round(r * (size - 1)).astype(np.int64))
            # reset sparse_group_size in case some index duplicates were removed
            sparse_group_size = np.int64(sparse_group.size)
            # store lengths and indices (CSR-style: offsets + flat indices)
            lS_batch_offsets += [offset]
            lS_batch_indices += sparse_group.tolist()
            # update offset for next iteration
            offset += sparse_group_size
        lS_emb_offsets.append(torch.tensor(lS_batch_offsets))
        lS_emb_indices.append(torch.tensor(lS_batch_indices))

    return (Xt, lS_emb_offsets, lS_emb_indices)
class SyntheticDataset(Dataset):
    """Dataset that serves mini-batches sliced out of pre-generated tensors on disk."""

    def __init__(
        self,
        mini_batch_size,
        nbatches=1,
        synthetic_data_folder="./synthetic_data/syn_data_bs65536/",
    ):
        self.synthetic_data_folder = synthetic_data_folder
        self.num_batches = nbatches
        self.mini_batch_size = mini_batch_size
        # Pre-generated whole-dataset tensors: dense features, per-table
        # offsets/indices (CSR-style), and targets.
        self.X = torch.load(f"{self.synthetic_data_folder}/X_0.pt")
        self.lS_o = torch.load(f"{self.synthetic_data_folder}/lS_o_0.pt")
        self.lS_i = torch.load(f"{self.synthetic_data_folder}/lS_i_0.pt")
        self.T = torch.load(f"{self.synthetic_data_folder}/T_0.pt")
        # print('data loader initiated ...')

    def __getitem__(self, index):
        """Slice out mini-batch `index` from the pre-loaded tensors."""
        sInd = index * self.mini_batch_size
        eInd = sInd + self.mini_batch_size
        if sInd >= len(self.X):
            sys.exit(f' mini_batch_size({self.mini_batch_size}) * '
                     f'num_batches({self.num_batches}) has to be less'
                     f' than size of data({len(self.X)})'
                     )
        X = self.X[sInd:eInd]
        # Rebase each table's offsets so the slice starts at 0.
        lS_o = [i[:][sInd:eInd] - i[:][sInd] for i in self.lS_o]
        # NOTE(review): when sInd >= len(self.lS_o[0]) neither branch below
        # runs and lS_i is unbound (NameError at return) -- the guard above
        # only checks len(self.X); confirm the data files always align.
        if eInd < len(self.lS_o[0]):
            lS_i = [val[self.lS_o[ind][sInd]:self.lS_o[ind][eInd]] for ind, val in enumerate(self.lS_i)]
        elif sInd < len(self.lS_o[0]):
            lS_i = [val[self.lS_o[ind][sInd]:] for ind, val in enumerate(self.lS_i)]
        T = self.T[sInd:eInd]
        return (X, lS_o, lS_i, T)

    def __len__(self):
        return self.num_batches
def synthetic_data_loader(args, ln_emb, m_den):
    """Build a SyntheticDataset plus a DataLoader over it from the CLI args.

    ln_emb and m_den are unused here (the synthetic files fix the shapes) but
    kept for a uniform factory signature with make_random_data_and_loader.
    """
    train_data = SyntheticDataset(
        args.mini_batch_size,
        nbatches=args.num_batches,
        synthetic_data_folder=args.synthetic_data_folder,
    )

    # batch_size=1: each dataset item is already a whole mini-batch.
    train_loader = torch.utils.data.DataLoader(
        train_data,
        batch_size=1,
        shuffle=False,
        num_workers=args.num_workers,
        collate_fn=collate_wrapper_random,
        pin_memory=False,
        drop_last=False,
    )
    return train_data, train_loader
def data_loader(args, ln_emb, m_den):
    """Dispatch to the factory selected by args.data_generation.

    Supported values: "random" and "synthetic"; each factory returns a
    (dataset, dataloader) pair.
    """
    factories = {
        "random": make_random_data_and_loader,
        "synthetic": synthetic_data_loader,
    }
    factory = factories[args.data_generation]
    return factory(args, ln_emb, m_den)
| StarcoderdataPython |
3452660 | <gh_stars>0
'''Example 4.5 c-Section wlps (LRFD).
<NAME>; <NAME> "Cold-formed steel design" (2020). WILEY. p137
'''
import steeldesign as sd

# Build the lipped C-section profile.
p1 = sd.c_w_lps_profile(H= 10.0, B= 3.5, D= 0.720, t= 0.075, r_out= (0.075+3/32) )
# Define the material.
s = sd.steel(FY= 50, E0= 27000, nu= 0.3, n= 4.58, offset= 0.002, name= 'SA301_1_4Hard')
# Design parameters (defaults).
dp = sd.designParameters()
# Create a member.
m = sd.member(L= 100, profile= p1, steel= s, designParameters= dp)
# Create the analysis and compute the effective section modulus at FY = 50.
analysis = sd.ASCE_8_02(m)
Se, nEffAreas= analysis.s3_Se_effective(50)
print(Se)
# Reference value:   3.211
# steeldesign value: 3.196
# NOTE: the reference uses AISI S100, which differs slightly from ASCE 8.
| StarcoderdataPython |
8195893 | <gh_stars>0
# -*- coding:utf-8 -*-
"""
导表工具GUI界面
@author: 覃贵锋
@date: 2020-02-17
"""
from PyQt4.Qt import *
import os
import json
import uuid
import time
import random
import Utils
import Language
import BackService
from Config import Config
class ExportToolGUI(QMainWindow):
    """
    Main window of the Excel export tool GUI.
    """

    def __init__(self):
        """Set up the window, load the local config, then build the widgets."""
        super(ExportToolGUI, self).__init__()
        # Window title.
        self.setWindowTitle(Language.TITLE)
        # Minimum window size.
        self.setMinimumSize(*Language.SIZE_DEFAULT)
        # Apply the shared stylesheet.
        self.setStyleSheet(Language.PYQT_QSS)
        # Only build the UI when the local configuration loaded successfully.
        if self.load_local_config():
            self.init_componets()

    def init_componets(self):
        """
        Create the menu bar, the central panel and the background-service hookup.
        """
        self.create_sys_menu()
        self.mContent = ExportMainPanel(self)
        self.setCentralWidget(self.mContent)
        # Subscribe to asynchronous results from the background service.
        service = BackService.get_service()
        service.callback.connect(self.event_service_callback)

    def load_local_config(self):
        """
        Load the local JSON configuration; show an error dialog and return
        False when the file is missing or malformed.
        """
        if not os.path.exists(Config.LOCAL_PATH):
            QMessageBox.critical(self, Language.MSG_TITLE, Language.MSG_LOCALPATH_MISS)
            return False
        try:
            with open(Config.LOCAL_PATH) as pf:
                config = json.load(pf)
                if not Config.serialize(config):
                    # NOTE(review): Python 2 raise syntax -- consistent with the
                    # PyQt4 target, but invalid under Python 3.
                    raise ValueError,"config error"
        except Exception as e:
            print(e)
            QMessageBox.critical(self, Language.MSG_TITLE, Language.MSG_LOCALPATH_ERROR)
            return False
        return True

    def create_sys_menu(self):
        """
        Build the menu bar (tool menu with settings and exit actions).
        """
        _memuBar = QMenuBar(self)
        # Tool menu.
        toolMenu = _memuBar.addMenu(Language.MENU_TOOL)
        # Settings action.
        settingAct = QAction(Language.MENU_TOOL_SETTING, self)
        settingAct.triggered.connect(self.event_menu_localsetting)
        toolMenu.addAction(settingAct)
        exitAct = QAction(Language.MENU_EXIT, self)
        exitAct.triggered.connect(self.event_menu_exit)
        toolMenu.addAction(exitAct)
        self.setMenuBar(_memuBar)

    def event_service_callback(self, task):
        """Handle results delivered by the background service."""
        if task.signal() == "excel_list_files":
            if task.error():
                return QMessageBox.critical(self, Language.MSG_TITLE, Language.MSG_SVNPATH_UPERROR)
            filelist = task.get_result()
            self.mContent.update_file_list(filelist)

    def event_menu_localsetting(self):
        """
        Menu handler: open the settings dialog.
        """
        # SettingDialog.show() returns True when the configuration changed,
        # in which case the file list must be rebuilt.
        if SettingDialog.show(self):
            self.mContent.async_search_files(True)

    def event_menu_exit(self):
        """
        Menu handler: quit the application.
        """
        self.close()
class SettingDialog(QDialog):
    """
    Dialog for editing the local tool configuration.
    """

    def __init__(self, parent=None):
        super(SettingDialog, self).__init__(parent)
        self.setWindowTitle(Language.TITLE_LOCAL_SETTING)
        self.setMinimumSize(*Language.SIZE_SETTING_DIALOG)
        self.setStyleSheet(Language.PYQT_QSS)
        # Set to True once the user saves; read back by SettingDialog.show().
        self.mIsChange = False
        self.init_componets()

    def init_componets(self):
        """
        Build the dialog layout.
        """
        # Overall grid layout.
        vLayout = QGridLayout()
        # Row 1: Excel directory path.
        vLayout.addWidget(QLabel(Language.LABEL_EXCEL_PATH), 0, 0)
        # Path input field.
        self.mExcelPathInput = QLineEdit()
        self.mExcelPathInput.setText(Config.InConfig['path'])
        vLayout.addWidget(self.mExcelPathInput,0,1,1,5)
        # Directory browse button.
        excelPathPreviewButton = QPushButton(Language.BTN_PREVIEW)
        excelPathPreviewButton.clicked.connect(self.event_btn_excel_path)
        vLayout.addWidget(excelPathPreviewButton,0,6)
        # Row 2: properties of the Excel directory.
        vLayout.addWidget(QLabel(Language.LABEL_FILE_FILTER), 1, 0)
        # Comma-separated file-filter patterns.
        self.mFileFilterInput = QLineEdit()
        self.mFileFilterInput.setText(u",".join(Config.InConfig['filter']))
        self.mFileFilterInput.setPlaceholderText(Language.MSG_FILEFILTER_HOLD)
        vLayout.addWidget(self.mFileFilterInput,1, 1, 1, 3)
        # SVN-related settings.
        self.mUsingSVN = QCheckBox(Language.LABEL_USING_SVN)
        self.mUsingSVN.setChecked(Config.InConfig['svn'])
        vLayout.addWidget(self.mUsingSVN, 1, 4)
        vLayout.addWidget(QLabel(Language.LABEL_SVN_ACCOUNT), 1, 5)
        self.mSVNAccountInput = QLineEdit()
        self.mSVNAccountInput.setText(Config.InConfig['svn_account'])
        vLayout.addWidget(self.mSVNAccountInput, 1, 6, 1, 1)
        # Last row: save button.
        save_config_button = QPushButton(Language.BTN_SAVESETTING)
        save_config_button.clicked.connect(self.event_btn_save)
        vLayout.addWidget(save_config_button, 2, 6)
        # Export path settings.
        self.setLayout(vLayout)

    def event_btn_excel_path(self):
        """
        Open a directory chooser and fill the Excel path input.
        """
        dirPath = QFileDialog.getExistingDirectory(self, Language.LABEL_EXCEL_PATH, "./")
        self.mExcelPathInput.setText(dirPath)

    def event_btn_save(self):
        """
        Validate inputs, persist the configuration, and close the dialog.
        """
        path = Utils.ufromQString(self.mExcelPathInput.text())
        if not path or len(path) <= 0:
            return
        # Reject paths that do not exist or are not directories.
        if not os.path.exists(path) or not os.path.isdir(path):
            return
        Config.InConfig['path'] = path
        Config.InConfig['filter'] = Utils.ufromQString(self.mFileFilterInput.text()).split(",")
        Config.InConfig['svn'] = self.mUsingSVN.isChecked()
        Config.InConfig['svn_account'] = Utils.ufromQString(self.mSVNAccountInput.text())
        Config.save_to_file()
        self.mIsChange = True
        self.close()

    @staticmethod
    def show(parent):
        """
        Open the settings dialog modally; return True when settings changed.
        NOTE(review): shadows QWidget.show -- callers invoke it as
        SettingDialog.show(parent), never on an instance.
        """
        settingDialog = SettingDialog(parent)
        settingDialog.exec_()
        return settingDialog.mIsChange
class ConfirmDialog(QDialog):
    """
    Input confirmation dialog: prompts for a line of text and returns it.
    Usage: ConfirmDialog.show(parent, title, "placeholder text", "default text")
    The return value is the entered text.
    """

    def __init__(self, title,parent=None, **kwargs):
        """Build the dialog; kwargs: dtext (initial text), placehold (hint)."""
        super(ConfirmDialog, self).__init__(parent)
        self.setWindowTitle(title)
        self.setMinimumSize(*Language.SIZE_INPUT_DIALOG)
        self.setStyleSheet(Language.PYQT_QSS)
        hLayout = QGridLayout()
        self.mInput = QLineEdit()
        self.mInput.setText(kwargs.get("dtext", u""))
        self.mInput.setPlaceholderText(kwargs.get("placehold", u""))
        hLayout.addWidget(self.mInput,0, 0, 1, 3)
        comfirm_button = QPushButton(Language.BTN_CONFIRM)
        comfirm_button.clicked.connect(self.event_btn_confirm)
        hLayout.addWidget(comfirm_button, 0, 3)
        self.setLayout(hLayout)
        # Result text; stays empty when the dialog is closed without confirming.
        self.mInputText = u""

    def event_btn_confirm(self):
        """Confirm button handler: capture the text and close."""
        # NOTE(review): uses Utils.fromQString here while the rest of the file
        # uses ufromQString -- confirm both exist and behave consistently.
        self.mInputText = Utils.fromQString(self.mInput.text())
        self.close()

    def get_input_value(self):
        """Return the text captured on confirm (empty if cancelled)."""
        return self.mInputText

    @staticmethod
    def show(parent, title, placehold=u"", default=u""):
        """
        Open the input dialog modally and return the entered text.
        """
        dialog = ConfirmDialog(title, parent, placehold=placehold, dtext=default)
        dialog.exec_()
        return dialog.get_input_value()
class ExportMainPanel(QWidget):
"""
导表主界面
"""
list_update = pyqtSignal(str)
    def __init__(self, parent=None):
        """Initialise data, build the widgets, and start the refresh timer."""
        super(ExportMainPanel, self).__init__(parent)
        self.mExcelData = []        # ExcelItemData entries backing the list widget
        self.mLastRefreshTime = 0   # unix time of the last refresh (throttling)
        self.init_componets()
        self.init_data()
        # Poll for file-list changes every 10 seconds.
        self.mTimer = QTimer()
        self.mTimer.setInterval(10000)
        self.mTimer.timeout.connect(self.event_time_update)
        self.mTimer.start()
    def init_componets(self):
        """
        Build the panel layout: filter row, saved-selection row, file list,
        and the export button row.
        """
        self.mVLayout = QVBoxLayout()
        # Search/filter row.
        filter_layout = QHBoxLayout()
        filter_layout.addWidget(QLabel(Language.LABEL_FILTER))
        self.mFilterInput = QLineEdit()
        self.mFilterInput.textChanged.connect(self.event_input_filter)
        filter_layout.addWidget(self.mFilterInput)
        self.mVLayout.addLayout(filter_layout)
        # Saved-selection row.
        cache_layout = QHBoxLayout()
        cache_layout.addWidget(QLabel(Language.LABEL_SAVESELECTED))
        self.mFiletrCombox = QComboBox()
        cache_layout.addWidget(self.mFiletrCombox)
        # First combo entry is the "none" option; the rest are saved selections.
        select_keys = Config.LocalData.keys()
        select_keys.insert(0, u"无")
        self.mFiletrCombox.addItems(select_keys)
        self.mFiletrCombox.currentIndexChanged.connect(self.event_cbox_selectupdate)
        self.mSaveFilterButton = QPushButton(Language.BTN_SAVESELECTED)
        self.mSaveFilterButton.clicked.connect(self.event_btn_savefilter)
        cache_layout.addWidget(self.mSaveFilterButton)
        cache_layout.insertStretch(2)
        self.mRefreshButton = QPushButton(Language.BTN_REFRESH)
        self.mRefreshButton.clicked.connect(self.event_btn_refresh)
        cache_layout.addWidget(self.mRefreshButton)
        self.mVLayout.addLayout(cache_layout)
        # Excel file list area.
        excel_list_layout = QVBoxLayout()
        excel_list_layout.addWidget(QLabel(Language.LABEL_EXCEL_LIST))
        self.mExcelList = QListWidget()
        excel_list_layout.addWidget(self.mExcelList)
        self.mVLayout.addLayout(excel_list_layout)
        # Export button row, one button per configured export target.
        operator_layout = QHBoxLayout()
        self.mSelectAllCheck = QCheckBox(Language.LABEL_SELECT_ALL)
        self.mSelectAllCheck.clicked.connect(self.event_cbox_selectall)
        operator_layout.addWidget(self.mSelectAllCheck)
        operator_layout.insertStretch(3)
        for export_btn_config in Config.OutConfig:
            export_btn = QPushButton(export_btn_config["export_btn"])
            export_btn.clicked.connect(self.event_btn_export)
            operator_layout.addWidget(export_btn)
        self.mVLayout.addLayout(operator_layout)
        self.setLayout(self.mVLayout)
    def init_data(self):
        """
        Clear the list widget and trigger the first file scan.
        """
        self.mExcelList.clear()
        self.event_time_update()
    def event_time_update(self):
        """
        Timer tick: refresh the file information periodically.
        """
        self.async_search_files()
def async_search_files(self, isReset=False):
"""
异步获取文件列表
"""
if isReset:
self.mExcelData = []
self.mExcelList.clear()
# 加个定时限制
if not isReset and time.time() - self.mLastRefreshTime < 5:
return
self.mLastRefreshTime = time.time()
service = BackService.get_service()
# 异步调用获取文件列表
if Config.InConfig["svn"]:
service.exec_async(Utils.get_svn_files,"excel_list_files", Config.InConfig["path"], Config.InConfig["filter"])
else:
print("asdasdasdsadsa")
service.exec_async(Utils.get_file_list,"excel_list_files", Config.InConfig["path"], Config.InConfig["filter"])
    def update_file_list(self, filelist):
        """
        Merge a freshly fetched {name: info} mapping into mExcelData, then redraw.
        """
        # First refresh entries already tracked, consuming them from filelist ...
        for item in self.mExcelData:
            key = item.get_name()
            if key in filelist:
                item.init_item_data(filelist[key])
                del filelist[key]
        # ... then append what remains. Rationale: once mExcelData is populated,
        # later updates only need to refresh states, so anything left over here
        # is a genuinely new file.
        for filepath, info in filelist.items():
            item = ExcelItemData(info)
            self.mExcelData.append(item)
        self.show_all_excel()
def sort_excel_data(self):
"""
"""
new_list = []
# 写while循环就只需要一次就搞定
for item in self.mExcelData:
if item.is_checked():
new_list.append(item)
for item in self.mExcelData:
if not item.is_checked():
new_list.append(item)
self.mExcelData = new_list
    def get_list_item(self, item_data):
        """
        Build the row widget (checkbox + state label + lock/explorer buttons)
        for one Excel entry.
        """
        item = QWidget()
        item.setObjectName(item_data.get_id())
        grid_layout = QGridLayout()
        excel_name = QCheckBox(item_data.get_name())
        excel_name.setObjectName("checkbox")
        excel_name.clicked.connect(self.event_cbox_change)
        excel_name.setChecked(item_data.is_checked())
        excel_state = QLabel(item_data.get_state())
        # Lock button: only meaningful when SVN support is enabled.
        if Config.InConfig['svn']:
            if not item_data.is_locked():
                excel_lock_btn = QPushButton(Language.BTN_LOCK)
            else:
                lock_account = item_data.lock_account()
                if lock_account == Config.InConfig["svn_account"]:
                    excel_lock_btn = QPushButton(Language.BTN_UNLOCK)
                else:
                    # Locked by someone else: show who and disable the button.
                    excel_lock_btn = QPushButton(Language.BTN_LOCK_BY.format(lock_account))
                    excel_lock_btn.setEnabled(False)
        else:
            excel_lock_btn = QPushButton(Language.BTN_SVN_SUPPORT)
            excel_lock_btn.setEnabled(False)
        excel_lock_btn.setObjectName("btn_lock")
        excel_explorer_btn = QPushButton(Language.BTN_EXPLORER)
        excel_explorer_btn.setObjectName("btn_explorer")
        # Both buttons share one handler; it uses self.sender() plus the
        # objectName to tell which button fired.
        excel_lock_btn.clicked.connect(self.event_list_btn_event)
        excel_explorer_btn.clicked.connect(self.event_list_btn_event)
        grid_layout.addWidget(excel_name, 0, 0, 1, 3)
        grid_layout.addWidget(excel_state, 0, 4, 1, 2)
        grid_layout.addWidget(excel_lock_btn, 0, 6)
        grid_layout.addWidget(excel_explorer_btn, 0, 7)
        item.setLayout(grid_layout)
        return item
    def event_cbox_change(self, *event):
        """
        Checkbox click handler: sync every checkbox state back into
        mExcelData, then redraw through the current filter.
        """
        # Walks the whole list and re-syncs all checked flags; alternatively
        # self.sender() could identify the single toggled checkbox (its
        # objectName carries the data key).
        for i in range(self.mExcelList.count()):
            item = self.mExcelList.item(i)
            widgets = self.mExcelList.itemWidget(item)
            checkbox = widgets.findChild(QCheckBox, "checkbox")
            for item_data in self.mExcelData:
                if item_data.is_this_item(Utils.fromQString(checkbox.text())):
                    item_data.set_checked(checkbox.isChecked())
        self.event_input_filter()
    def event_cbox_selectall(self, checked):
        """
        "Select all" handler: apply the checked state to every visible row
        and its backing data entry.
        """
        for i in range(self.mExcelList.count()):
            item = self.mExcelList.item(i)
            widgets = self.mExcelList.itemWidget(item)
            checkbox = widgets.findChild(QCheckBox, "checkbox")
            checkbox.setChecked(checked)
            uid = Utils.fromQString(widgets.objectName())
            for item_data in self.mExcelData:
                if item_data.is_this_id(uid):
                    item_data.set_checked(checked)
def event_cbox_selectupdate(self, event):
    """Apply a saved quick-selection preset chosen from the combo box."""
    # Nothing to do when there is no data or the list is empty.
    if not self.mExcelData or self.mExcelList.count() <= 0:
        return
    if self.mFiletrCombox.currentIndex() == 0:
        # Index 0 is the "no preset" entry: just redraw everything.
        self.show_all_excel()
    else:
        select_text = Utils.ufromQString(self.mFiletrCombox.currentText())
        # Presets are stored as {preset name: [file names]} in local config.
        data = Config.LocalData.get(select_text, None)
        if not data or not isinstance(data, list):
            return
        # Check exactly the files named by the preset, uncheck the rest.
        for item_data in self.mExcelData:
            if item_data.get_name() in data:
                item_data.set_checked(True)
            else:
                item_data.set_checked(False)
        self.show_all_excel()
def event_list_btn_event(self, sender):
    """Dispatch per-row button clicks (SVN lock/unlock, "open folder").

    The source widget comes from self.sender(); the row is matched via
    the uuid stored in the parent row widget's objectName.
    """
    btn_obj = self.sender()
    objectName = Utils.fromQString(btn_obj.objectName())
    itemWidget = btn_obj.parentWidget()
    uid = Utils.fromQString(itemWidget.objectName())
    item_data = None
    for item in self.mExcelData:
        if item.is_this_id(uid):
            item_data = item
            break
    if not item_data:
        return
    # NOTE: below, `item` still refers to the matched record (same object
    # as item_data after the break); the two names are used interchangeably.
    update_flag = False
    if objectName == "btn_lock":
        # The button label doubles as the state machine: lock vs unlock.
        label = Utils.ufromQString(btn_obj.text())
        if label == Language.BTN_LOCK:
            res, account = Utils.lock_file(Config.InConfig["path"], item.get_name())
            if res:
                item.set_locked(account)
                update_flag = True
        elif label == Language.BTN_UNLOCK:
            res = Utils.unlock_file(Config.InConfig["path"], item.get_name())
            if res:
                item.set_locked("")
                update_flag = True
        if update_flag:
            # Redraw so the button text reflects the new lock state.
            self.event_input_filter()
    elif objectName == "btn_explorer":
        Utils.open_file_dir(Config.InConfig["path"], item_data.get_name())
def event_input_filter(self):
    """Re-filter the list from the search box content.

    An empty box shows everything; otherwise the comma-separated
    keywords are matched against file names via show_part_excel().
    """
    raw_text = Utils.fromQString(self.mFilterInput.text())
    tokens = raw_text.strip().split(",")
    if raw_text and tokens:
        self.show_part_excel(tokens)
    else:
        self.show_all_excel()
def show_all_excel(self):
    """Rebuild the list widget with every Excel row.

    When a preset is active (combo index != 0) only checked rows show.
    """
    # Whether to restrict the listing to checked rows.
    need_filter = self.mFiletrCombox.currentIndex() != 0
    self.mExcelList.clear()
    # Order rows first; ordering rules live in self.sort_excel_data()
    # (not visible in this chunk).
    self.sort_excel_data()
    for item_data in self.mExcelData:
        if need_filter and not item_data.is_checked():
            continue
        item = QListWidgetItem()
        item_w = self.get_list_item(item_data)
        item.setSizeHint(item_w.sizeHint())
        self.mExcelList.addItem(item)
        self.mExcelList.setItemWidget(item, item_w)
def show_part_excel(self, keywords_lst):
    """Rebuild the list with rows matching any keyword.

    Checked rows are always shown regardless of the keyword filter.
    """
    self.mExcelList.clear()
    self.sort_excel_data()
    for item_data in self.mExcelData:
        if item_data.is_checked() or item_data.contain_keys(keywords_lst):
            item = QListWidgetItem()
            item_w = self.get_list_item(item_data)
            item.setSizeHint(item_w.sizeHint())
            self.mExcelList.addItem(item)
            self.mExcelList.setItemWidget(item, item_w)
def event_btn_export(self):
    """Export button handler: process every checked, visible row.

    Only rows currently present in the list widget are considered,
    matched back to their data records via the uuid stored in each row
    widget's objectName. (Removed unused locals: the sender button and
    the checkbox lookup were fetched but never read.)
    """
    for i in range(self.mExcelList.count()):
        item = self.mExcelList.item(i)
        widgets = self.mExcelList.itemWidget(item)
        uid = Utils.fromQString(widgets.objectName())
        for item_data in self.mExcelData:
            if item_data.is_this_id(uid) and item_data.is_checked():
                print(item_data)
def event_btn_savefilter(self):
    """Save the currently-checked file set as a named preset.

    Prompts for a preset name, collects the names of checked visible
    rows, and persists them under that name in the local config.
    (Removed an unused local: the checkbox widget was looked up per row
    but never read.)
    """
    text = ConfirmDialog.show(self, Language.TITLE_INPUT_SELECTED, Language.MSG_SELECTED_HOLD)
    if not text or len(text) <= 0:
        return
    save_file_list = []
    for i in range(self.mExcelList.count()):
        item = self.mExcelList.item(i)
        widgets = self.mExcelList.itemWidget(item)
        uid = Utils.fromQString(widgets.objectName())
        for item_data in self.mExcelData:
            if item_data.is_this_id(uid) and item_data.is_checked():
                save_file_list.append(item_data.get_name())
    Config.LocalData[text] = save_file_list
    Config.save_to_file()
def event_btn_refresh(self):
    """Refresh button: re-run the periodic update to reload file state."""
    self.event_time_update()
class ExcelItemData(QWidget):
    """Data record backing one row of the Excel list.

    Holds the file name (both byte-encoded and raw forms), SVN status
    and lock owner, a checked flag, and a uuid that links the Qt row
    widget (via objectName) back to this record.
    """
    def __init__(self, fileitem):
        """fileitem: a unicode file name, or a dict with keys
        'filename', 'status' and 'lock'."""
        super(ExcelItemData, self).__init__()
        # Encoded/raw file name views plus per-row state.
        self.mStates = u""
        self.mLocked = u""
        self.mFileName = ""
        self.mRawFileName = u""
        self.mChecked = False
        # Unique id used to match the Qt row widget to this record.
        self.mID = str(uuid.uuid4())
        self.init_item_data(fileitem)
    def init_item_data(self, fileitem):
        """Populate fields from either form of `fileitem`.

        NOTE(review): the `unicode` check implies this is Python 2 code.
        """
        if isinstance(fileitem, unicode):
            self.mFileName = Utils.fromUnicode(fileitem)
            self.mRawFileName = fileitem
        elif isinstance(fileitem, dict):
            self.mFileName = Utils.fromUnicode(fileitem["filename"])
            self.mRawFileName = fileitem["filename"]
            self.mStates = fileitem["status"]
            self.mLocked = fileitem["lock"]
    def get_id(self):
        """Return the row uuid."""
        return self.mID
    def is_this_id(self, uid):
        """True if `uid` identifies this record."""
        return self.mID == uid
    def get_name(self):
        """Return the raw (unicode) file name."""
        return self.mRawFileName
    def get_state(self):
        """Return the SVN status string."""
        return self.mStates
    def is_locked(self):
        """True if an SVN lock owner is recorded."""
        return len(self.mLocked) > 0
    def lock_account(self):
        """Return the account holding the SVN lock ('' when unlocked)."""
        return self.mLocked
    def set_checked(self, b):
        """Set the checked flag."""
        self.mChecked = b
    def set_locked(self, account):
        """Record `account` as the lock owner ('' clears the lock)."""
        self.mLocked = account
    def is_this_item(self, filename):
        """True if `filename` matches the encoded file name."""
        return self.mFileName == filename
    def contain_key(self, keyword):
        """True if the file name contains `keyword` as a substring."""
        if self.mFileName.find(keyword)>=0:
            return True
        return False
    def contain_keys(self, keywords):
        """True if the file name contains any non-empty keyword."""
        for keyword in keywords:
            if keyword != '' and self.contain_key(keyword):
                return True
        return False
    def is_checked(self):
        """True if selected; checked files skip filtering and sort first."""
        return self.mChecked
    def event_cbox_change(self):
        """Checkbox toggle handler.

        NOTE(review): self.mExcelName is never assigned anywhere in this
        class, so this handler would raise AttributeError if connected —
        it appears to be dead/leftover code; confirm before removing.
        """
        self.mChecked = self.mExcelName.isChecked()
        #ExportMainPanel.list_update.emit(self.mFileName)
4996150 | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 23 20:00:49 2016
@author: lykke
"""
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""
import kivy
import kivy.uix
kivy.require('1.9.0')
from kivy.garden.mapview import MapView
from kivy.garden.mapview import MapMarker
from kivy.app import App
from kivy.uix.anchorlayout import AnchorLayout
from kivy.uix.relativelayout import RelativeLayout
from kivy.uix.button import Button
from kivy.garden.mapview.mapview.geojson import GeoJsonMapLayer
from kivy.uix.togglebutton import ToggleButton
from kivy.uix.stacklayout import StackLayout
from kivy.core.window import Window
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.label import Label
from kivy.graphics import *
import api
import time
class Main(App):
    """Kivy map app: OpenStreetMap view centred on ITU with a no-fly-zone
    overlay, refocus / no-fly toggle buttons, a settings dropdown and an
    on-demand weather panel backed by the OpenWeatherMap-style api module."""
    # Portrait window at a 9:16 ratio (class-level, runs at import time).
    height = 720
    width = (height/16) * 9
    Window.size = (width,height)
    def build(self):
        """Assemble the widget tree and return the root layout."""
        # ITU campus coordinates used as the home position.
        self.itu_lat = 55.6593807
        self.itu_lon = 12.5910774
        # Cached weather observation and its fetch time (see
        # show_weather_data for the refresh policy).
        self.obs_dic = None
        self.old_time = 0.0
        self.weatherbox = AnchorLayout(anchor_x = 'center', anchor_y = 'bottom')
        self.Layout = RelativeLayout()
        self.mapview = MapView(zoom=11, lat=self.itu_lat, lon=self.itu_lon)
        mapview = self.mapview
        self.Layout.add_widget(mapview)
        # Add the 5-mile airport no-fly-zone GeoJSON overlay.
        self.jsonmap = GeoJsonMapLayer(source='5_mile_airport.json')
        self.mapview.add_layer(self.jsonmap)
        self.overlay = AnchorLayout(anchor_x='right', anchor_y='top')
        lowerLeft = AnchorLayout(anchor_x='left', anchor_y='bottom')
        self.lowerLeftStack = StackLayout(orientation='lr-bt',size_hint=(0.15,0.15))
        lowerLeft.add_widget(self.lowerLeftStack)
        # Re-centre button and no-fly-zone toggle, bottom-left.
        btnre = Button(background_normal='refocus_normal.png', background_down='refocus_down.png', size_hint = (2,1))
        btnre.bind(on_press=self.resetloc)
        btnnf = ToggleButton(background_normal='nofly_normal.png', background_down='nofly_down.png',size_hint = (2,1))
        btnnf.bind(on_press=self.nofly)
        self.lowerLeftStack.add_widget(btnre)
        self.lowerLeftStack.add_widget(btnnf)
        # Settings toggle, top-right, opens the dropdown.
        btn = ToggleButton(background_normal='Settings B.png', background_down="Settings G.png")
        btn.bind(on_press= self.show_dropdown)
        self.settings = StackLayout(size_hint=(0.2,0.2))
        self.settings.add_widget(btn)
        self.overlay.add_widget(self.settings)
        self.Layout.add_widget(lowerLeft)
        self.Layout.add_widget(self.overlay)
        # Marker at the home position.
        marker = MapMarker(anchor_x = 0.5, anchor_y = 0.5, lat=self.itu_lat, lon=self.itu_lon)
        self.mapview.add_marker(marker)
        return self.Layout
    def resetloc(self,instance):
        """Re-centre the map on the home (ITU) position."""
        self.mapview.center_on(self.itu_lat,self.itu_lon)
    def nofly(self,instance):
        """Toggle the no-fly-zone overlay (down = hidden)."""
        if instance.state == 'down':
            self.mapview.remove_layer(self.jsonmap)
        else:
            self.mapview.add_layer(self.jsonmap)
    def show_dropdown(self,instance):
        """Open/close the settings dropdown under the gear button."""
        if instance.state == 'down':
            size = (1,0.5)
            btn1 = ToggleButton(text='Weather', size_hint = size)
            btn2 = Button(text='Level',size_hint = size)
            btn3 = Button(text='Nearby\nusers', size_hint = size)
            btn1.bind(on_press = self.show_weather_data)
            self.settings.add_widget(btn1)
            self.settings.add_widget(btn2)
            self.settings.add_widget(btn3)
        else:
            # Remove all dropdown entries; the gear button itself has an
            # empty text, which is how it survives this sweep.
            for child in self.settings.children[:]:
                if child.text != "":
                    self.settings.remove_widget(child)
    def show_weather_data(self,instance):
        """Toggle the weather panel for the current map centre."""
        weatherbox = self.weatherbox
        if instance.state == 'down':
            layout = BoxLayout(orientation='vertical', size_hint = (0.2,0.1) )
            clat = self.mapview.lat
            clon = self.mapview.lon
            ctime = time.time()
            # Refetch only if the cache is empty or older than 0.5 s.
            if(self.obs_dic == None or ctime > (self.old_time + 0.5)):
                self.old_time = ctime
                self.obs_dic = api.loc_weather(clat,clon)
            weList = self.obs_dic['weather']
            we = weList[0]
            wi = self.obs_dic['wind']
            l1 = Label(text = 'Current weather: ' + we['main'], color = (0.,0.,0.,1))
            main = self.obs_dic['main']
            k = main['temp']
            # Kelvin to Celsius (comment said "imperial"; the -273.15
            # offset is a Kelvin conversion).
            temp = k-273.15
            l2 = Label(text = 'temp: ' + str(temp) + ' ' + u'\u00B0' + 'C', color = (0.,0.,0.,1))
            hu = main['humidity']
            l3 = Label(text = 'humidity: ' + str(hu) + '%', color = (0.,0.,0.,1))
            pre = main['pressure']
            l4 = Label(text = 'pressure' + str(pre) + ' hPa', color = (0.,0.,0.,1))
            wispeed = wi['speed']
            widir = wi['deg']
            l5 = Label(text = 'wind speed: ' + str(wispeed) + 'm/s', color = (0.,0.,0.,1))
            l6 = Label(text = 'wind direction '+ str(widir) + u'\u00B0', color = (0.,0.,0.,1))
            # Dew-point approximation T - (100 - RH)/5.
            # NOTE(review): under Python 2 this is integer division.
            Tdp = temp - ((100-hu)/5)
            l7 = Label(text = 'dew point: ' + str(Tdp) + ' ' + u'\u00B0' + 'C', color = (0.,0.,0.,1))
            layout.add_widget(l1)
            layout.add_widget(l2)
            layout.add_widget(l3)
            layout.add_widget(l4)
            layout.add_widget(l5)
            layout.add_widget(l6)
            layout.add_widget(l7)
            weatherbox.add_widget(layout)
            # NOTE(review): bare attribute access below is a no-op
            # (missing call parentheses/arguments) -- likely leftover.
            weatherbox.add_widget
            self.Layout.add_widget(weatherbox)
        else:
            # Tear the panel down completely before removing it.
            for c in self.weatherbox.children[:]:
                for child in c.children[:]:
                    c.remove_widget(child)
                self.weatherbox.remove_widget(c)
            self.overlay.remove_widget(weatherbox)
# Entry point: launch the Kivy application.
Main().run()
| StarcoderdataPython |
5117072 | <filename>flow/function.py
from . import autograd
from .utils import _make_pair
from .tensor import Tensor, ones, zeros, transpose
import numpy as np
class Add(autograd.Function):
    """Elementwise addition; the gradient w.r.t. both operands is 1."""

    @staticmethod
    def forward(ctx, a, b, inplace=False):
        """Return a + b, mutating `a` when inplace=True."""
        if not inplace:
            return Tensor(a.data + b.data)
        a.data += b.data
        return a

    @staticmethod
    def backward(ctx, grad_output):
        """d(a+b)/da = d(a+b)/db = identity."""
        grad_a = grad_output * 1
        grad_b = grad_output * 1
        return grad_a, grad_b
class Mul(autograd.Function):
    """Elementwise multiplication with product-rule gradients."""
    @staticmethod
    def forward(ctx, a, b, inplace=False):
        # Both operands are needed by the product rule in backward.
        ctx.save_for_backward(a, b)
        if inplace:
            # NOTE(review): `a` saved above is mutated here, so backward
            # sees the product instead of the original operand -- the
            # inplace path's gradient looks wrong; confirm intended use.
            a.data *= b.data
            return a
        else:
            new_tensor = Tensor(a.data * b.data)
            return new_tensor
    @staticmethod
    def backward(ctx, grad_output):
        a, b = ctx.saved_tensors
        # d(a*b)/db = a ; d(a*b)/da = b
        b_grad = grad_output * a.data
        a_grad = grad_output * b.data
        return a_grad, b_grad
class Sub(autograd.Function):
    """Elementwise subtraction a - b."""
    @staticmethod
    def forward(ctx, a, b, inplace=False):
        if inplace:
            a.data -= b.data
            return a
        else:
            new_tensor = Tensor(a.data - b.data)
            return new_tensor
    @staticmethod
    def backward(ctx, grad_output):
        # d(a-b)/db = -1 ; d(a-b)/da = 1
        b_grad = grad_output * (-1)
        a_grad = grad_output * 1
        return a_grad, b_grad
class Truediv(autograd.Function):
    """Elementwise true division a / b."""
    @staticmethod
    def forward(ctx, a, b, inplace=False):
        ctx.save_for_backward(a, b)
        if inplace:
            # NOTE(review): the saved `a` is mutated below, so backward
            # sees the quotient instead of the numerator -- confirm.
            a.data /= b.data
            return a
        else:
            new_tensor = Tensor(a.data / b.data)
            return new_tensor
    @staticmethod
    def backward(ctx, grad_output):
        a, b = ctx.saved_tensors
        # d(a/b)/db = -a / b^2 ; d(a/b)/da = 1 / b
        b_grad = grad_output * (-a.data) / (b.data ** 2)
        a_grad = grad_output / b.data
        return a_grad, b_grad
class MM(autograd.Function):
    """Matrix multiplication via np.matmul."""
    @staticmethod
    def forward(ctx, a, b):
        ctx.save_for_backward(a, b)
        new_tensor = Tensor(np.matmul(a.data, b.data))
        return new_tensor
    @staticmethod
    def backward(ctx, grad_output):
        a, b = ctx.saved_tensors
        # Standard matmul gradients: dA = G @ B^T, dB = A^T @ G.
        a_grad = np.matmul(grad_output.data, np.transpose(b.data))
        b_grad = np.matmul(np.transpose(a.data), grad_output.data)
        return Tensor(a_grad), Tensor(b_grad)
class ReLU(autograd.Function):
    """Rectified linear unit: clamps negatives to zero elementwise."""
    @staticmethod
    def forward(ctx, a):
        ctx.save_for_backward(a)
        # copy() presumably returns a Tensor supporting boolean-mask
        # assignment -- TODO confirm against Tensor's implementation.
        copy = a.copy()
        copy[copy < 0] = 0
        return copy
    @staticmethod
    def backward(ctx, grad_output):
        a, = ctx.saved_tensors
        # Gradient passes where the input was >= 0 (inputs exactly 0
        # pass gradient 1 under this convention).
        a_grad = grad_output.copy()
        a_grad[a < 0] = 0
        return a_grad
class Sum(autograd.Function):
    """Sum reduction over all elements or along `axis`."""
    @staticmethod
    def forward(ctx, a, axis=None):
        ctx.save_for_backward(a)
        new_tensor = Tensor(np.sum(a.data, axis=axis))
        return new_tensor
    @staticmethod
    def backward(ctx, grad_output):
        a, = ctx.saved_tensors
        # Broadcast the incoming gradient over the full input shape.
        # NOTE(review): `axis` is not saved; for axis-reduced sums this
        # relies on Tensor multiplication broadcasting grad_output
        # against ones(a.shape), which may misalign shapes -- confirm.
        a_grad = grad_output * ones(a.shape)
        return a_grad
class Min(autograd.Function):
    """Minimum over all elements or along `axis`; the gradient flows
    only to the argmin position(s)."""
    @staticmethod
    def forward(ctx, a, axis=None):
        idx = np.argmin(a.data, axis=axis)
        ctx.save_for_backward(a, axis, idx)
        new_tensor = Tensor(np.min(a.data, axis=axis))
        return new_tensor
    @staticmethod
    def backward(ctx, grad_output):
        a, axis, idx = ctx.saved_tensors
        grad = np.zeros(a.shape)
        if axis is None:
            # Scalar reduction: scatter the single gradient to argmin.
            grad.itemset(idx, grad_output.item())
        else:
            # Scatter each slice's gradient to its argmin along `axis`.
            expand_idx = np.expand_dims(idx, axis=axis)
            filled_grad = np.expand_dims(grad_output.data, axis=axis)
            np.put_along_axis(grad, expand_idx, filled_grad, axis=axis)
        return Tensor(grad)
class Max(autograd.Function):
    """Maximum over all elements or along `axis`; the gradient flows
    only to the argmax position(s)."""
    @staticmethod
    def forward(ctx, a, axis=None):
        idx = np.argmax(a.data, axis=axis)
        ctx.save_for_backward(a, axis, idx)
        new_tensor = Tensor(np.max(a.data, axis=axis))
        return new_tensor
    @staticmethod
    def backward(ctx, grad_output):
        a, axis, idx = ctx.saved_tensors
        grad = np.zeros(a.shape)
        if axis is None:
            # Scalar reduction: scatter the single gradient to argmax.
            grad.itemset(idx, grad_output.item())
        else:
            # Scatter each slice's gradient to its argmax along `axis`.
            expand_idx = np.expand_dims(idx, axis=axis)
            filled_grad = np.expand_dims(grad_output.data, axis=axis)
            np.put_along_axis(grad, expand_idx, filled_grad, axis=axis)
        return Tensor(grad)
class SquareLoss(autograd.Function):
    """Sum of squared differences: sum((a - b)^2)."""
    @staticmethod
    def forward(ctx, a, b):
        ctx.save_for_backward(a, b)
        new_tensor = Tensor(np.sum(np.square((a - b).data)))
        return new_tensor
    @staticmethod
    def backward(ctx, grad_output):
        a, b = ctx.saved_tensors
        # d/da sum((a-b)^2) = 2(a-b) ; d/db = -2(a-b)
        a_grad = grad_output * 2.0 * (a - b)
        b_grad = grad_output * -2.0 * (a - b)
        return a_grad, b_grad
class MaxPool2d(autograd.Function):
    """2-D max pooling over (batch, channel, height, width) tensors."""
    @staticmethod
    def forward(ctx, tensor, kernel_size, stride=1, padding=0):
        kernel_size = _make_pair(kernel_size)
        stride = _make_pair(stride)
        padding = _make_pair(padding)
        data = tensor.data
        # Zero-pad the spatial dims symmetrically.
        data = np.pad(data, ((0, 0), (0, 0), (padding[0], padding[0]), (padding[1], padding[1])), 'constant', constant_values=0)
        batchsize, channel, height, width = data.shape
        output = np.zeros(
            (batchsize,
             channel,
             (height - kernel_size[0] + 2 * padding[0]) // stride[0] + 1,
             (width - kernel_size[1] + 2 * padding[1]) // stride[1] + 1
             ))
        batchsize, channel, output_height, output_width = output.shape
        # Naive window scan; each output cell is the window maximum.
        for i in range(batchsize):
            for j in range(channel):
                for h in range(0, height - kernel_size[0] + 1, stride[0]):
                    for w in range(0, width - kernel_size[1] + 1, stride[1]):
                        output[i, j, h // stride[0], w // stride[1]] = np.max(data[
                            i,
                            j,
                            h : h + kernel_size[0],
                            w : w + kernel_size[1]
                        ])
        ctx.save_for_backward(tensor, kernel_size, stride, padding)
        return Tensor(output)
    @staticmethod
    def backward(ctx, grad_output):
        tensor, kernel_size, stride, padding = ctx.saved_tensors
        batchsize, channel, height, width = tensor.shape
        batchsize, channel, output_height, output_width = grad_output.shape
        grad = zeros(tensor.shape)
        # Route each output gradient to the window position(s) that held
        # the maximum. `max(...)` here is this module's Max.apply alias
        # (defined at the bottom of the file), not the builtin.
        # NOTE(review): forward pools over the *padded* data while this
        # loop indexes the unpadded saved tensor, then trims padding at
        # the end -- for padding > 0 the two disagree; confirm.
        for i in range(batchsize):
            for j in range(channel):
                for h in range(0, height - kernel_size[0] + 1, stride[0]):
                    for w in range(0, width - kernel_size[1] + 1, stride[1]):
                        mask = tensor[i, j, h : h + kernel_size[0], w : w + kernel_size[1]] == max(tensor[i, j, h : h + kernel_size[0], w : w + kernel_size[1]])
                        grad[i, j, h : h + kernel_size[0], w : w + kernel_size[1]] += mask * grad_output[i, j, h // stride[0], w // stride[1]]
        return grad[:, :, padding[0]: height-padding[0], padding[1]: width-padding[1]], None, None, None
def im2col(image, kernel_height, kernel_width, stride):
    """Unfold a 4-D array [batch, channel, height, width] into a 2-D
    matrix whose rows are flattened kernel-sized patches, scanned
    row-major with the given (stride_h, stride_w)."""
    rows = []
    height, width = image.shape[2], image.shape[3]
    for top in range(0, height - kernel_height + 1, stride[0]):
        for left in range(0, width - kernel_width + 1, stride[1]):
            patch = image[:, :, top:top + kernel_height, left:left + kernel_width]
            rows.append(patch.reshape([-1]))
    return np.array(rows)
class Conv2d(autograd.Function):
    """2-D convolution implemented via im2col + matrix multiplication.

    forward args: input (B, C_in, H, W), weight (C_out, C_in, kH, kW),
    bias (broadcast over output positions, or None), stride, padding.
    """
    @staticmethod
    def forward(ctx, input, weight, bias, stride, padding):
        input, weight = input.data, weight.data
        batchsize, input_channel, height, width = input.shape
        output_channel, input_channel, kernel_height, kernel_width = weight.shape
        col_weight = weight.reshape([output_channel, -1])
        # Zero-pad spatial dims; note `input` is rebound to the padded
        # array, so the shape saved below is the padded shape (backward
        # relies on this when scanning and when trimming the padding).
        input = np.pad(input, ((0, 0), (0, 0), (padding[0], padding[0]), (padding[1], padding[1])), 'constant', constant_values=0)
        conv_out = np.zeros(
            (batchsize,
             output_channel,
             (height - kernel_height + 2 * padding[0]) // stride[0] + 1,
             (width - kernel_width + 2 * padding[1]) // stride[1] + 1
             ))
        col_image = []
        for i in range(batchsize):
            # Per-sample im2col, then one matmul per sample.
            img_i = input[i][np.newaxis, :]
            col_image_i = im2col(img_i, kernel_height, kernel_width, stride)
            col_image.append(col_image_i)
            if bias is not None:
                conv_out[i] = np.reshape(np.dot(col_weight, np.transpose(col_image_i)) + bias.data, conv_out[0].shape)
            else:
                conv_out[i] = np.reshape(np.dot(col_weight, np.transpose(col_image_i)), conv_out[0].shape)
        col_image = np.array(col_image)
        ctx.save_for_backward(col_image, col_weight, bias,
                              input.shape,
                              weight.shape,
                              stride,
                              padding
                              )
        return Tensor(conv_out)
    @staticmethod
    def backward(ctx, grad_output):
        col_image, col_weight, bias, input_shape, weight_shape, stride, padding = ctx.saved_tensors
        batchsize, output_channel, output_height, output_width = grad_output.shape
        # input_shape is the *padded* input shape saved by forward.
        batchsize, input_channel, height, width = input_shape
        output_channel, input_channel, kernel_height, kernel_width = weight_shape
        # init gradient for img2col
        col_weight_gradient = zeros(col_weight.shape)
        conv_out_gradient = grad_output.reshape(batchsize, output_channel, -1)
        # Bias gradient: sum of the output gradient over batch and
        # spatial positions, one value per output channel.
        # FIX: the previous code had the condition inverted (it produced
        # a gradient only when bias was None) and returned a constant
        # ones() placeholder instead of the actual gradient.
        # NOTE(review): returned with shape (output_channel,); reshape
        # if bias is stored as a column vector.
        if bias is not None:
            bias_gradient = Tensor(np.sum(grad_output.data, axis=(0, 2, 3)))
        else:
            bias_gradient = None
        # init gradient for input tensor
        input_gradient = zeros(input_shape)
        for i in range(batchsize):
            col_image_gradient = mm(transpose(conv_out_gradient[i]), col_weight)
            col_weight_gradient += mm(conv_out_gradient[i], col_image[i])
            j = 0
            # Scatter patch gradients back to their input positions
            # (overlapping windows accumulate).
            for h in range(0, height - kernel_height + 1, stride[0]):
                for w in range(0, width - kernel_width + 1, stride[1]):
                    input_gradient[i, :, h: h + kernel_height, w: w + kernel_width] += col_image_gradient[j].reshape((input_channel, kernel_height, kernel_width))
                    j += 1
        weight_gradient = col_weight_gradient.reshape(output_channel, input_channel, kernel_height, kernel_width)
        # remove padding
        input_gradient = input_gradient[:, :, padding[0]: height-padding[0], padding[1]: width-padding[1]]
        return input_gradient, weight_gradient, bias_gradient, None, None
class View(autograd.Function):
    """Reshape to `shape`; backward reshapes the gradient back."""
    @staticmethod
    def forward(ctx, tensor, shape):
        # Only the original shape is needed for backward.
        ctx.save_for_backward(tensor.shape)
        new_tensor = tensor.copy().reshape(shape)
        return new_tensor
    @staticmethod
    def backward(ctx, grad_output):
        original_shape, = ctx.saved_tensors
        grad = grad_output.copy().reshape(original_shape)
        return grad
class LogSoftmax(autograd.Function):
    """Numerically-stabilised log-softmax along `dim` for (N, C) input."""
    @staticmethod
    def forward(ctx, tensor, dim):
        # tensor size is (N, C)
        data = tensor.data
        # Subtract the global max so exp() cannot overflow.
        data_shift = data - np.max(data)
        data_shift_exp = np.exp(data_shift)
        exp_sum = np.sum(data_shift_exp, axis=dim, keepdims=True)
        # Guard against log(0) for fully-underflowed rows.
        exp_sum[exp_sum == 0] = 1e-10
        res = data_shift - np.log(exp_sum)
        ctx.save_for_backward(data_shift_exp, exp_sum)
        return Tensor(res)
    @staticmethod
    def backward(ctx, grad_output):
        data_shift_exp, exp_sum = ctx.saved_tensors
        # e holds -softmax(row); the per-row Jacobian of log-softmax is
        # I - softmax broadcast across columns.
        e = - data_shift_exp / exp_sum
        N, C = e.shape
        grad = zeros((N, C))
        # Row-by-row Jacobian-vector product.
        for i in range(N):
            jac = np.tile(e[i], (C, 1))
            jac[np.diag_indices_from(jac)] += 1
            grad[i] = mm(Tensor(np.transpose(jac)), grad_output[i])
        return grad
class NllLoss(autograd.Function):
    """Negative log-likelihood over per-class log-probabilities."""
    @staticmethod
    def forward(ctx, input, target, reduction="average"):
        # input is size (N, C), target is size (N, 1), output is size (N, 1)
        input, target = input.data, target.data
        # Pick the log-probability of the true class for each sample.
        nll = [- log_pro[target[idx]] for idx, log_pro in enumerate(input)]
        if reduction == "average":
            loss = np.average(nll)
        elif reduction == "sum":
            loss = np.sum(nll)
        else:
            # TODO: "reducetion" is a typo in this user-facing message.
            raise RuntimeError("unsupported reducetion type.")
        ctx.save_for_backward(target, input, reduction)
        return Tensor(loss)
    @staticmethod
    def backward(ctx, grad_output):
        # grad_output is size (N, 1), output is size (N, C)
        target, input, reduction = ctx.saved_tensors
        output = zeros(input.shape)
        batch_size = output.shape[0]
        # -1 at each sample's true-class column, 0 elsewhere.
        for idx in range(batch_size):
            output[idx, target[idx]] = - 1
        if reduction == "average":
            output = output * grad_output / batch_size
        elif reduction == "sum":
            output = output * grad_output
        else:
            raise RuntimeError("unsupported reducetion type.")
        return output, None, None
# Functional entry points: each alias binds a Function's apply() so the
# ops can be called like plain functions.
# NOTE: `max`, `min` and `sum` shadow the Python builtins inside this
# module from here on.
add = Add.apply
mul = Mul.apply
sub = Sub.apply
true_div = Truediv.apply
max = Max.apply
min = Min.apply
mm = MM.apply
sum = Sum.apply
square_loss = SquareLoss.apply
relu = ReLU.apply
conv2d = Conv2d.apply
max_pool2d = MaxPool2d.apply
log_softmax = LogSoftmax.apply
view = View.apply
nll_loss = NllLoss.apply
5104908 | <filename>python/filemgmt/metadefs.py
#!/usr/bin/env python
# Section keys used in a WCL document for file metadata.
WCL_META_SECT = 'filemeta'
WCL_META_HEADERS = 'headers'
WCL_META_COMPUTE = 'compute'
WCL_META_WCL = 'wcl'
# Prefix/key used for header-update entries.
WCL_UPDATE_HEAD_PREFIX = 'hdrupd_'
WCL_UPDATE_WHICH_HEAD = 'headers'
# Required vs optional metadata section names.
WCL_META_REQ = 'req_metadata'
WCL_META_OPT = 'opt_metadata'
# Process exit status signalling a metadata failure.
MD_EXIT_FAILURE = 1
| StarcoderdataPython |
8159706 | <filename>zyte_api/apikey.py
# -*- coding: utf-8 -*-
import os
from typing import Optional
from .constants import ENV_VARIABLE
class NoApiKey(Exception):
    """Raised when no API key was supplied or found in the environment."""
def get_apikey(key: Optional[str] = None) -> str:
    """Return the API key, falling back to the environment variable.

    An explicit `key` wins; otherwise the value of the ENV_VARIABLE
    environment variable is used. Raises NoApiKey when neither is set.
    """
    if key is not None:
        return key
    value = os.environ.get(ENV_VARIABLE)
    if value is None:
        raise NoApiKey("API key not found. Please set {} "
                       "environment variable.".format(ENV_VARIABLE))
    return value
| StarcoderdataPython |
1841859 | import uuid
import pytest
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm.exc import ObjectDeletedError
from quetz import errors, rest_models
from quetz.dao import Dao
from quetz.database import get_session
from quetz.db_models import Channel, Package, PackageVersion
@pytest.fixture
def package_name():
    """Name used by the `package` fixture for its test package."""
    return "my-package"
@pytest.fixture
def channel_name():
    """Name used by the `channel` fixture for its test channel."""
    return "my-channel"
@pytest.fixture
def channel(dao, db, user, channel_name):
    """Create a public channel owned by `user`; delete it on teardown."""
    channel_data = rest_models.Channel(name=channel_name, private=False)
    channel = dao.create_channel(channel_data, user.id, "owner")
    yield channel
    try:
        db.delete(channel)
        db.commit()
    except ObjectDeletedError:
        # The test itself may have already deleted the channel row.
        pass
@pytest.fixture
def package(dao, channel, package_name, user, db):
    """Create a package in `channel` owned by `user`; delete on teardown."""
    package_data = rest_models.Package(name=package_name)
    package = dao.create_package(channel.name, package_data, user.id, "owner")
    yield package
    db.delete(package)
    db.commit()
def test_create_version(dao, package, channel_name, package_name, db, user):
    """Exercise Dao.create_version: fresh insert, insert-only conflict,
    and upsert (which must update fields and bump time_modified)."""
    # Precondition: no version rows exist yet for this package.
    assert (
        not db.query(PackageVersion)
        .filter(PackageVersion.package_name == package_name)
        .first()
    )
    assert dao.db == db
    dao.create_version(
        channel_name=channel_name,
        package_name=package_name,
        package_format="tarbz2",
        platform="noarch",
        version="0.0.1",
        build_number="0",
        build_string="",
        filename="filename.tar.bz2",
        info="{}",
        uploader_id=user.id,
        upsert=False,
    )
    created_version = (
        db.query(PackageVersion)
        .filter(PackageVersion.package_name == package_name)
        .first()
    )
    assert created_version
    assert created_version.version == "0.0.1"
    assert created_version.build_number == 0
    assert created_version.filename == "filename.tar.bz2"
    assert created_version.info == "{}"
    # A freshly-inserted row has identical created/modified timestamps.
    assert created_version.time_created == created_version.time_modified
    # error for insert-only with existing row
    with pytest.raises(IntegrityError):
        dao.create_version(
            channel_name=channel_name,
            package_name=package_name,
            package_format="tarbz2",
            platform="noarch",
            version="0.0.1",
            build_number="0",
            build_string="",
            filename="filename-2.tar.bz2",
            info="{}",
            uploader_id=user.id,
            upsert=False,
        )
    # update with upsert
    dao.create_version(
        channel_name=channel_name,
        package_name=package_name,
        package_format="tarbz2",
        platform="noarch",
        version="0.0.1",
        build_number="0",
        build_string="",
        filename="filename-2.tar.bz2",
        info='{"version": "x.y.z"}',
        uploader_id=user.id,
        upsert=True,
    )
    created_version = (
        db.query(PackageVersion)
        .filter(PackageVersion.package_name == package_name)
        .first()
    )
    assert created_version
    assert created_version.version == "0.0.1"
    assert created_version.build_number == 0
    assert created_version.filename == "filename-2.tar.bz2"
    assert created_version.info == '{"version": "x.y.z"}'
    # The upsert must have bumped time_modified past time_created.
    assert created_version.time_created != created_version.time_modified
def test_update_channel(dao, channel, db):
    """Dao.update_channel must persist attribute changes (private flag)."""
    assert not channel.private
    dao.update_channel(channel.name, {"private": True})
    channel = db.query(Channel).filter(Channel.name == channel.name).one()
    assert channel.private
def test_create_user_with_profile(dao: Dao, user_without_profile):
    """exist_ok=True attaches a profile to an existing user; with
    exist_ok=False the duplicate identity must raise IntegrityError."""
    user = dao.create_user_with_profile(
        user_without_profile.username,
        provider="github",
        identity_id="1",
        name="new user",
        avatar_url="http://avatar",
        role=None,
        exist_ok=True,
    )
    assert user.profile
    with pytest.raises(IntegrityError):
        dao.create_user_with_profile(
            user_without_profile.username,
            provider="github",
            identity_id="1",
            name="new user",
            avatar_url="http://avatar",
            role=None,
            exist_ok=False,
        )
@pytest.fixture
def db_extra(database_url):
    """A second, independent DB session.

    Use only for tests that require two concurrent sessions; for most
    cases use the `db` fixture (from quetz.testing.fixtures)."""
    session = get_session(database_url)
    yield session
    session.close()
@pytest.fixture
def dao_extra(db_extra):
    """Dao bound to the second (concurrent) DB session."""
    return Dao(db_extra)
@pytest.fixture
def user_with_channel(dao, db):
    """Create a user owning 'new-test-channel'; yields the user id and
    removes both rows on teardown."""
    channel_data = rest_models.Channel(name="new-test-channel", private=False)
    user = dao.create_user_with_role("new-user")
    user_id = user.id
    channel = dao.create_channel(channel_data, user_id, "owner")
    db.commit()
    yield user_id
    db.delete(channel)
    db.delete(user)
    db.commit()
# disable running tests in transaction and use on disk database
# because we want to connect to the db with two different
# client concurrently
@pytest.mark.parametrize("sqlite_in_memory", [False])
@pytest.mark.parametrize("auto_rollback", [False])
def test_rollback_on_collision(dao: Dao, db, dao_extra, user_with_channel):
    """testing rollback on concurrent writes.

    Two sessions create the same package; the loser must raise DBError
    and the winner's row must still be committed.
    """
    new_package = rest_models.Package(name=f"new-package-{uuid.uuid4()}")
    user_id = user_with_channel
    channel_name = "new-test-channel"
    dao.create_package(channel_name, new_package, user_id, "owner")
    with pytest.raises(errors.DBError, match="(IntegrityError)|(UniqueViolation)"):
        dao_extra.create_package(channel_name, new_package, user_id, "owner")
    requested = db.query(Package).filter(Package.name == new_package.name).one_or_none()
    assert requested
    # need to clean up because we didn't run the test in a transaction
    db.delete(requested)
    db.commit()
| StarcoderdataPython |
6448122 | """
Download youtube video
"""
import pytube
# url = 'https://youtu.be/fp0O7kp0uW8'
# Load url in function Youtube
# youtube = pytube.YouTube(url)
# Set Streams Resolution
# video = youtube.streams.first()
# or
# video = youtube.streams.get_highest_resolution()
# Download Video
# video.download() # In Same Folder
# or
# video.download('D://Downloads') # In Other Folder
# Get Information of Video
# video.title # Title
# video.video_id # Id
# video.age_restricted # Age
# Streams Format
# video.streams.all()
# stream = video.streams.all()
# for i in stream:
# print(i)
# Prompt for a video URL and download its highest-resolution stream.
url = input('Give URL: ')
# NOTE(review): destination folder is hard-coded to D://Downloads.
pytube.YouTube(url).streams.get_highest_resolution().download('D://Downloads')
| StarcoderdataPython |
11234231 | import os
import numpy as np
import scipy.sparse
from sklearn import datasets
def load_dataset(args):
    """Load dataset args.function_name from args.data_folder: features
    and labels from the .svm (svmlight) file, weights from the .npy."""
    path = f"{args.data_folder}/{args.function_name}"
    X, y = datasets.load_svmlight_file(f"{path}.svm")
    w = np.load(f"{path}.npy")
    return X, y, w
def store_dataset(X, y, w, args):
    """Persist (X, y) in svmlight format and w as .npy under
    args.data_folder, creating the folder if missing.

    Uses os.makedirs(..., exist_ok=True) instead of the previous
    exists()+mkdir(), which raced with concurrent writers and failed
    outright for nested data_folder paths.
    """
    folder = f"{args.data_folder}"
    path = f"{folder}/{args.function_name}"
    os.makedirs(folder, exist_ok=True)
    datasets.dump_svmlight_file(X, y.squeeze(), f"{path}.svm")
    np.save(f"{path}.npy", w)
def generate_X(args):
    """Build a sparse random design matrix of shape
    (args.number_samples, args.number_features) with density 0.1.

    When args.remove_bias is False, the last column is an all-ones bias
    column and only number_features - 1 columns are random.
    """
    if args.remove_bias:
        return scipy.sparse.rand(args.number_samples, args.number_features,
                                 density=0.1)
    features = scipy.sparse.rand(args.number_samples, args.number_features - 1,
                                 density=0.1)
    bias_column = np.ones((args.number_samples, 1))
    return scipy.sparse.hstack((features, bias_column))
def generate_dataset_poisson_regression(args):
    """Sample a sparse design matrix, random weights, and Poisson
    targets with rate exp(X @ w); y has shape (number_samples, 1)."""
    X = generate_X(args)
    w = np.random.rand(args.number_features, 1)
    log_rate = X.dot(w)
    y = np.random.poisson(lam=np.exp(log_rate))
    return X, y, w
def generate_dataset_linear(args):
    """Sample a sparse design matrix, random weights, and noiseless
    linear targets y = X @ w."""
    X = generate_X(args)
    w = np.random.rand(args.number_features, 1)
    return X, X.dot(w), w
def generate_dataset(args):
    """Dispatch to the generator selected by args.function_name."""
    generators = {
        "poisson_regression": generate_dataset_poisson_regression,
        "linear": generate_dataset_linear,
    }
    if args.function_name not in generators:
        raise RuntimeError(f"The function name doesn't determined: {args.function_name}")
    return generators[args.function_name](args)
| StarcoderdataPython |
177633 | <filename>python/verify.py
##-----------------------------------------------------------------------------
## Import
##-----------------------------------------------------------------------------
import argparse, os
from time import time
from fnc.extractFeature import extractFeature
from fnc.matching import matching
#------------------------------------------------------------------------------
# Argument parsing
#------------------------------------------------------------------------------
parser = argparse.ArgumentParser()
parser.add_argument("--file", type=str,
help="Path to the file that you want to verify.")
parser.add_argument("--temp_dir", type=str, default=os.path.join(os.getcwd(), "templates", "CASIA1/"),
help="Path to the directory containing templates.")
parser.add_argument("--thres", type=float, default=0.38,
help="Threshold for matching.")
args = parser.parse_args()
##-----------------------------------------------------------------------------
## Execution
##-----------------------------------------------------------------------------
# Extract feature
def execute(image_file):
    """Extract the iris template from `image_file` and match it against
    the registered templates.

    Returns a list of {"path": <static image path>, "id": <subject id>}
    dicts, ordered by descending match reliability.

    FIX: the previous code printed args.thres and args.temp_dir but
    then matched with a hard-coded 0.35 threshold and a rebuilt
    hard-coded template directory; the CLI values are now actually used.
    """
    final_result = []
    print('>>> Start verifying {}\n'.format(os.path.join(os.getcwd(), image_file)))
    template, mask, file = extractFeature(os.path.join(os.getcwd(), image_file))
    print("Threshold recieved : {}".format(args.thres))
    # Matching
    print("Templates path : {}".format(args.temp_dir))
    result = matching(template, mask, args.temp_dir, args.thres)
    if result == -1:
        print('>>> No registered sample.')
    elif result == 0:
        print('>>> No sample matched.')
    else:
        print('>>> {} samples matched (descending reliability):'.format(len(result)))
        for res in result:
            # Template names look like "<subject>_<rest>.<ext>"; rebuild
            # the static image path and extract the subject id.
            base = os.path.split(res)[-1]
            subject = base.split("_", 1)[0]
            parts = base.split(".", 2)
            image = os.path.join("static", "CASIA1", str(int(subject)),
                                 parts[0] + "." + parts[1])
            final_result.append({"path": image, "id": subject})
    return final_result
# if __name__ == '__main__':
# execute() | StarcoderdataPython |
class SymbolTable:
    """Maps identifiers to their declared types for type checking.

    Duplicate declarations are rejected (first declaration wins) and
    reported as a uniqueness-check error.
    """

    def __init__(self):
        # symbol name -> type name
        self.table = {}

    def add_symbol(self, symbol, symbol_type):
        """Register `symbol`; a duplicate is reported, not overwritten."""
        if not self.symbol_exists(symbol):
            self.table[symbol] = symbol_type
        else:
            print("TYPE CHECKING ERROR, SYMBOL ALREADY EXISTS (UNIQUENESS CHECK):", symbol, symbol_type)

    def symbol_exists(self, symbol):
        """True if `symbol` has been declared."""
        return symbol in self.table

    def get_type(self, symbol):
        """Return the declared type; raises KeyError if undeclared."""
        return self.table[symbol]

    def print_table(self):
        """Dump the table in a simple ASCII layout."""
        print('- Symbol Table -')
        divider = "|-------|-------|"
        for symbol, symbol_type in self.table.items():
            print(divider)
            print(f"| {symbol} \t| {symbol_type} \t|")
            print(divider)
11342811 | <reponame>baquerrj/ECEN5623
#!/usr/bin/python3.7
import numpy as np
import sys
def print_execution_times(transform, file):
    """Parse *file* for completed *transform* measurements and print stats.

    A line is counted when its upper-cased form contains both *transform*
    (callers pass it upper-cased already) and the word 'END'; the duration
    is the second-to-last whitespace-separated token (e.g. "... 1.5 ms").
    Prints average/worst/best execution times followed by a separator.
    """
    delta_times = []
    with open(file) as f:
        for line in f.readlines():
            upper_line = line.upper()  # hoisted: computed once per line
            if transform in upper_line and 'END' in upper_line:
                items = line.split(' ')
                delta_times.append(items[-2])
    if not delta_times:
        # guard: max()/min() on an empty array would raise ValueError
        print('{}: no measurements found'.format(transform))
        print('*'*80)
        return
    delta_times = np.asarray(delta_times, dtype=np.float64)
    worst = delta_times.max()
    best = delta_times.min()
    average = delta_times.mean()
    print(transform)
    print('average execution time: {} ms\n'.format(average))
    print('worst execution time: {} ms\n'.format(worst))
    print('best execution time: {} ms\n'.format(best))
    print('*'*80)
def main():
    """CLI entry point: ``script.py <logfile> [TRANSFORM ...]``.

    Prints the geometry recorded in the log (last 'geometry' line wins),
    then per-transform execution-time statistics for every transform name
    given on the command line.
    """
    file = sys.argv[1]
    # Every remaining argument names a transform; the original wrapped the
    # upper-casing in a dead try/except IndexError — the index range is
    # bounded by len(sys.argv[2:]), so IndexError can never occur.
    transforms = [arg.upper() for arg in sys.argv[2:]]
    geometry = ''
    with open(file) as f:
        for line in f.readlines():
            if 'geometry' in line.lower():
                # keep the last occurrence, matching the original behaviour
                geometry = line.split(' ')[-1].rstrip('\n')
    print('*'*80)
    print('file: {}'.format(file))
    print('geometry: {}'.format(geometry))
    print('*'*80)
    for transform in transforms:
        print_execution_times(transform, file)


if __name__ == '__main__':
    main()
1962081 | <filename>.github/script/tags_to_plugins.py
import os
import shutil
from pathlib import Path
from typing import Dict
import yaml
from git import Repo, Diff
poc_dir_list = ['cves', 'cnvd', 'vulnerabilities', 'default-logins', 'exposures', 'miscellaneous']
class MyDumper(yaml.Dumper):
    """yaml.Dumper variant that always indents nested block sequences."""

    def increase_indent(self, flow=False, indentless=False):
        # Force indentless=False so list items are visibly indented under
        # their parent key instead of flush with it.
        return super().increase_indent(flow, False)
def update_tags_yaml_format():
    """Rewrite plugins/tags.yaml in canonical form and return its contents.

    The file is re-dumped sorted, unicode-preserving and block-styled via
    MyDumper so diffs stay stable regardless of how it was last edited.
    """
    with open("plugins/tags.yaml", "r") as src:
        data = yaml.safe_load(src)
    canonical = yaml.dump(data, Dumper=MyDumper, sort_keys=True, allow_unicode=True,
                          default_flow_style=False, explicit_start=False, indent=2, width=2)
    with open("plugins/tags.yaml", "w") as dst:
        dst.write(canonical)
    return data
plugins_path_dict = {}
nuclei_path_dict = {}
fingerprint_path_dict = {}
tags_dict = update_tags_yaml_format()

# plugins/: index every *.yaml (except tags.yaml) by its bare file name.
for root, _dirs, files in os.walk("plugins"):
    for fname in files:
        if not fname.startswith('.') and fname.endswith('.yaml') and not fname == "tags.yaml":
            plugins_path_dict.setdefault(fname, os.path.abspath(os.path.join(root, fname)))

# nuclei-templates/: only templates that live inside a recognised PoC directory.
for root, _dirs, files in os.walk("nuclei-templates"):
    for fname in files:
        if len(Path(root).parts) > 1 and Path(root).parts[1] in poc_dir_list:
            if not fname.startswith('.') and fname.endswith('.yaml'):
                nuclei_path_dict.setdefault(fname, os.path.abspath(os.path.join(root, fname)))

# fingerprint/: keyed by the file name with its extension stripped.
# NOTE(review): for a name without any suffix, fname[:-0] yields '' — confirm
# fingerprint files always carry an extension.
for root, _dirs, files in os.walk("fingerprint"):
    for fname in files:
        fingerprint_path_dict.setdefault(fname[:-len(Path(fname).suffix)], fname)
class NucleiDiffGitMode:
    """Applies one git diff entry from nuclei-templates/ to the plugins/ tree,
    dispatching on the diff's change type (A/C/D/R/M/T)."""

    def __init__(self, c_ins: Diff, g_tags_dict: Dict):
        # c_ins: a single GitPython diff entry; g_tags_dict: tag name -> tag lists
        self.c_ins = c_ins
        self.tags_dict = g_tags_dict
        # Git change codes mapped to handler names.  M (modified) and
        # C (copied) are deliberately handled as re-adds; T (type change)
        # maps to the no-op changed() handler.
        self.mode_map = {"A": 'added', "C": 'added', "D": 'deleted', "R": 'renamed', "M": 'added', "T": 'changed'}
        # NOTE(review): an unknown change type makes self.mode None, and
        # hasattr(self, None) in run() would raise TypeError — confirm all
        # change types are covered.
        self.mode = self.mode_map.get(c_ins.change_type)
        self.abs_filename = 'nuclei-templates/' + self.c_ins.a_path
        self.file_name = Path(self.abs_filename).name

    def added(self, add_abs_filename=None):
        """Copy a new/updated template into every plugins/<tag>/ directory
        whose tag entry is contained in the template's info.tags set."""
        if add_abs_filename is None:
            add_abs_filename = self.abs_filename
        else:
            print("added", add_abs_filename)
        with open(add_abs_filename, 'r') as y:
            yaml_template = yaml.safe_load(y)
        try:
            tags = set(yaml_template.get('info')['tags'].split(','))
            for name, tags_list in self.tags_dict.items():
                for tag in tags_list:
                    # NOTE(review): if `tag` is a plain string (not a list/set
                    # of tag names), issuperset() iterates its *characters* —
                    # confirm tags.yaml stores lists here.
                    tags_set = tags.issuperset(tag)
                    if tags_set:
                        to_file = os.path.join("plugins", name, self.file_name)
                        if not Path(to_file).parent.is_dir():
                            Path(to_file).parent.mkdir()
                        shutil.copy(add_abs_filename, to_file)
        except KeyError:
            # template without an info.tags entry: nothing to classify
            pass

    def changed(self):
        # type-change (T) entries are deliberately ignored
        pass

    def deleted(self):
        """Remove every plugins/ copy of a deleted template."""
        print("deleted", self.file_name)
        for file_path in plugins_path_dict.values():
            if file_path.endswith(self.file_name):
                if Path(file_path).exists():
                    os.unlink(file_path)

    def renamed(self):
        """Drop copies under the old name, then re-add under the new one."""
        print("renamed", self.file_name)
        for file_path in plugins_path_dict.values():
            if file_path.endswith(self.file_name):
                if Path(file_path).exists():
                    os.unlink(file_path)
        self.added('nuclei-templates/' + self.c_ins.rename_to)

    def modified(self):
        # unreachable via mode_map ("M" dispatches to added); kept for completeness
        pass

    def copied(self):
        # unreachable via mode_map ("C" dispatches to added); kept for completeness
        pass

    def run(self):
        """Dispatch to the handler selected from the diff's change type."""
        if hasattr(self, self.mode):
            func = getattr(self, self.mode)
            func()
def tags_to_plugins_all():
    """Full rebuild: classify every known nuclei template into plugins/<tag>/
    directories, report unclassified tag sets, then print tag names that
    have no matching fingerprint file."""
    for nuclei_file_name, file_path in nuclei_path_dict.items():
        with open(file_path, 'r') as y:
            yaml_template = yaml.safe_load(y)
        try:
            tags = set(yaml_template.get('info')['tags'].split(','))
            is_match = False
            for name, tags_list in tags_dict.items():
                for tag in tags_list:
                    # NOTE(review): issuperset() on a plain string tests its
                    # *characters*, not the tag as a whole — confirm tags.yaml
                    # stores lists/sets here.
                    tags_set = tags.issuperset(tag)
                    if tags_set:
                        to_file = os.path.join("plugins", name, nuclei_file_name)
                        if not Path(to_file).parent.is_dir():
                            Path(to_file).parent.mkdir()
                        shutil.copy(file_path, to_file)
                        is_match = True
            if not is_match:
                # templates whose tags matched no configured category
                print("未分类Tags:", tags, file_path)
        except KeyError:
            # template without an info.tags entry: skip silently
            pass
    # tag names that have no fingerprint file (and could need one)
    all_fingerprints = set(fingerprint_path_dict.keys())
    all_tags = set(tags_dict.keys())
    print(all_tags.difference(all_fingerprints))
if __name__ == '__main__':
    # Walk the last 100 commits of the nuclei-templates checkout and mirror
    # every PoC-template change into the plugins/ tree.
    repo = Repo('nuclei-templates')
    head_sha = repo.head.object.hexsha
    for change in repo.commit('HEAD~100').diff(head_sha):
        is_poc_template = (not change.a_path.startswith('.')
                           and change.a_path.endswith('.yaml')
                           and Path(change.a_path).parts[0] in poc_dir_list)
        if is_poc_template:
            NucleiDiffGitMode(c_ins=change, g_tags_dict=tags_dict).run()
    # tags_to_plugins_all()
| StarcoderdataPython |
8016069 | <filename>sql2eml.py
import re
import os
import argparse
import sys
import errno
import optparse
import sqlite3
import uuid
import email
import email.utils
from email.message import EmailMessage
from email.parser import BytesParser, Parser
from email.policy import default
from datetime import datetime
import hashlib
import notesdb
import common
import constants
#
# MIT License
#
# https://opensource.org/licenses/MIT
#
# Copyright 2020 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Description:
#
# This program exports notes as a directory of RFC822 email files from a SQLite database.
#
# Module metadata.  The original preceded these with a module-level
# ``global __name__, __author__, ...`` statement, which is a no-op at module
# scope (plain assignment already creates module-level names), so it was
# removed.
__program_name__ = 'sql2eml'
__author__ = '<NAME>'
__email__ = '<EMAIL>'
__version__ = '1.00'
__license__ = 'MIT License (https://opensource.org/licenses/MIT)'
__website__ = 'https://github.com/renesugar'
# database schema versions this tool writes / still accepts
__db_schema_version__ = '1'
__db_schema_min_version__ = '1'
def _get_option_parser():
    """Build the optparse parser for the sql2eml command line.

    Options: --email (From: address), --input (SQLite directory) and
    --output (directory that receives the generated .eml files).
    """
    parser = optparse.OptionParser('%prog [options]',
                                   version='%prog ' + __version__)
    # (flag, dest, default, help) — order determines --help listing order
    option_specs = [
        ("--email", "email_address", None, "Email address"),
        ("--input", "input_path", [], "Path to input SQLite directory"),
        ("--output", "output_path", None, "Path to output emails directory"),
    ]
    for flag, dest, default, help_text in option_specs:
        parser.add_option('', flag, action="store", dest=dest,
                          default=default, help=help_text)
    return parser
def _save_email(output_path, columns):
    """Write one note (described by *columns*) as an RFC822 .eml file.

    output_path -- directory receiving the generated file.
    columns     -- dict with the email_* keys produced by the process_*
                   helpers; None values are filled with defaults below.
    The output file name is a freshly generated UUID plus ".eml".
    """
    # process email messages as notes
    email_address = columns["email_address"]
    email_date = columns["email_date"]
    email_x_mail_created_date = columns["email_x_mail_created_date"]
    email_subject = columns["email_subject"]
    email_body = columns["email_body"]
    email_content_type = columns["email_content_type"]
    email_x_universally_unique_identifier = columns["email_x_universally_unique_identifier"]
    email_message_id = columns["email_message_id"]
    filename = common.create_uuid_string() + ".eml"
    outputFilename = os.path.join(output_path, filename)
    print("processing %s" % (outputFilename,))
    # use the email address from the command line for the "From" header
    # (any "From" header from the source message is discarded)
    email_from = email_address
    # Date: fall back to the creation date, then to "now"
    if email_date is None:
        email_date = email_x_mail_created_date
    if email_date is None:
        email_date = email.utils.formatdate()
    # X-Mail-Created-Date defaults to the Date header
    if email_x_mail_created_date is None:
        email_x_mail_created_date = email_date
    # untitled notes get a placeholder subject
    if email_subject is None:
        email_subject = constants.NOTES_UNTITLED
    # plain-text bodies are converted so every note is stored as HTML
    if email_content_type == 'text/plain':
        # email_body
        email_body = common.text_to_html(email_body)
        # email_content_type
        email_content_type = 'text/html'
    # X-Uniform-Type-Identifier marks the message as an Apple Mail note
    email_x_uniform_type_identifier = "com.apple.mail-note"
    # email_mime_version (currently unused below)
    email_mime_version = "1.0"
    # Apple stores the UUID upper-cased; generate one when missing
    if email_x_universally_unique_identifier is None:
        email_x_universally_unique_identifier = common.create_universally_unique_identifier().upper()
    else:
        email_x_universally_unique_identifier = email_x_universally_unique_identifier.upper()
    # generate a Message-Id when the source had none
    if email_message_id is None:
        email_message_id = common.create_message_id()
    msg = EmailMessage()
    msg['From'] = email_from
    msg['Date'] = email_date
    msg['X-Uniform-Type-Identifier'] = email_x_uniform_type_identifier
    msg['X-Mail-Created-Date'] = email_x_mail_created_date
    msg['X-Universally-Unique-Identifier'] = email_x_universally_unique_identifier
    msg['Subject'] = email_subject
    msg['Message-Id'] = email_message_id
    # NOTE(review): EmailMessage has no public ``content_subtype`` attribute;
    # this assignment does not affect serialisation — confirm intent.
    msg.content_subtype = 'html'
    #msg.set_param("charset", "utf8", header='Content-Type', requote=True, charset=None, language='', replace=True)
    #msg.set_content(email_body)
    msg.set_payload(email_body, 'utf8')
    # NOTE(review): replace_header drops the charset parameter that
    # set_payload just attached to Content-Type — confirm this is intended.
    msg.replace_header('Content-Type','text/html')
    # save email message to file
    with open(outputFilename, 'wb') as fp:
        fp.write(msg.as_bytes())
def process_icloud_note(output_path, email_address, row):
    """Convert one iCloud note row into an .eml file via _save_email."""
    # NOTE: SQLite3 returns this column as text even with PARSE_DECLTYPES,
    # so it is parsed back into a datetime here.
    created = common.string_to_datetime(row['note_internal_date'])
    date_header = email.utils.format_datetime(created)

    title = row['note_title']
    if title is None:
        subject = constants.NOTES_UNTITLED
    else:
        subject = common.remove_line_breakers(title).strip()
    print("processing '%s'" % (subject,))

    # markdown notes are rendered to HTML; anything else passes through
    if row['note_data_format'] == 'text/markdown':
        body = common.markdown_to_html(row['note_data'])
        content_type = 'text/html'
    else:
        body = row['note_data']
        content_type = row['note_data_format']

    _save_email(output_path, {
        "email_address": email_address,
        "email_date": date_header,
        "email_x_mail_created_date": date_header,
        "email_subject": subject,
        "email_body": body,
        "email_content_type": content_type,
        "email_x_universally_unique_identifier": None,
        "email_message_id": None,
    })
def process_email(output_path, email_address, row):
    """Convert a note stored as an email message back into an .eml file."""
    subject = row['email_subject']
    if subject is None:
        subject = constants.NOTES_UNTITLED
    print("processing %s" % (subject,))

    # Prefer the Date header; fall back to the creation header, then "now".
    date_header = row['email_date']
    if date_header is None:
        date_header = row['email_x_mail_created_date']
    if date_header is None:
        date_header = email.utils.formatdate()
    created_header = row['email_x_mail_created_date']
    if created_header is None:
        created_header = date_header

    # HTML bodies pass through; markdown note data is rendered to HTML.
    if row['email_content_type'] == 'text/html':
        body = row['email_body']
        content_type = 'text/html'
    elif row['note_data_format'] == 'text/markdown':
        body = common.markdown_to_html(row['note_data'])
        content_type = 'text/html'
    else:
        body = row['note_data']
        content_type = row['note_data_format']

    _save_email(output_path, {
        "email_address": email_address,
        "email_date": date_header,
        "email_x_mail_created_date": created_header,
        "email_subject": subject,
        "email_body": body,
        "email_content_type": content_type,
        "email_x_universally_unique_identifier": row['email_x_universally_unique_identifier'],
        "email_message_id": row['email_message_id'],
    })
def process_joplin_note(output_path, email_address, row):
    """Convert one Joplin note row (markdown body) into an .eml file."""
    # NOTE: Apple Notes App does not allow attachments in GMail notes
    subject = row['note_title']
    if subject is None:
        subject = constants.NOTES_UNTITLED
    print("processing %s" % (subject,))

    date_header = email.utils.format_datetime(
        datetime.strptime(row['note_internal_date'], "%Y-%m-%d %H:%M:%S"))
    if date_header is None:
        date_header = email.utils.formatdate()
    created_header = email.utils.format_datetime(
        datetime.strptime(row['apple_created'], "%Y-%m-%d %H:%M:%S"))
    if created_header is None:
        created_header = date_header

    _save_email(output_path, {
        "email_address": email_address,
        "email_date": date_header,
        "email_x_mail_created_date": created_header,
        "email_subject": subject,
        # Joplin bodies are always markdown; render to HTML
        "email_body": common.markdown_to_html(row['note_data']),
        "email_content_type": 'text/html',
        "email_x_universally_unique_identifier": None,
        "email_message_id": None,
    })
def process_apple_note(output_path, email_address, row):
    """Convert one Apple Notes row into an .eml file.

    NOTE: the Apple Notes app does not allow attachments in GMail notes.
    """
    subject = row['note_title']
    if subject is None:
        subject = constants.NOTES_UNTITLED
    print("processing %s" % (subject,))

    date_header = email.utils.format_datetime(
        datetime.strptime(row['note_internal_date'], "%Y-%m-%d %H:%M:%S"))
    if date_header is None:
        date_header = email.utils.formatdate()
    created_header = email.utils.format_datetime(
        datetime.strptime(row['apple_created'], "%Y-%m-%d %H:%M:%S"))
    if created_header is None:
        created_header = date_header

    # markdown notes are rendered to HTML; anything else passes through
    if row['note_data_format'] == 'text/markdown':
        body = common.markdown_to_html(row['note_data'])
        content_type = 'text/html'
    else:
        body = row['note_data']
        content_type = row['note_data_format']

    _save_email(output_path, {
        "email_address": email_address,
        "email_date": date_header,
        "email_x_mail_created_date": created_header,
        "email_subject": subject,
        "email_body": body,
        "email_content_type": content_type,
        "email_x_universally_unique_identifier": None,
        "email_message_id": None,
    })
def process_bookmark_note(output_path, email_address, row):
    """Convert one bookmark note row into an .eml file.

    The original body was a line-for-line duplicate of process_apple_note
    (same columns, same date handling, same markdown conversion), so it now
    delegates to it to keep the two formats from drifting apart.
    """
    process_apple_note(output_path, email_address, row)
def main(args):
    """Command-line entry point: export every note row in notesdb.sqlite
    as an RFC822 .eml file in the output directory.

    args -- argument list (typically sys.argv[1:]).
    Exits via common.error() on any invalid option or database problem.
    """
    parser = _get_option_parser()
    (options, args) = parser.parse_args(args)
    # --email is required and becomes the From: header of every message
    email_address = ''
    if hasattr(options, 'email_address') and options.email_address:
        email_address = options.email_address
        if common.check_email_address(email_address) == False:
            # Check if email address is valid
            common.error("email address '%s' is not valid." % (email_address,))
    else:
        common.error("email address not specified.")
    # --input must name an existing directory containing notesdb.sqlite
    inputPath = ''
    if hasattr(options, 'input_path') and options.input_path:
        inputPath = os.path.abspath(os.path.expanduser(options.input_path))
        if os.path.isdir(inputPath) == False:
            # Check if input directory exists
            common.error("input path '%s' does not exist." % (inputPath,))
    else:
        common.error("input path not specified.")
    # --output must name an existing directory to receive the .eml files
    outputPath = ''
    if hasattr(options, 'output_path') and options.output_path:
        outputPath = os.path.abspath(os.path.expanduser(options.output_path))
        if os.path.isdir(outputPath) == False:
            # Check if output directory exists
            common.error("output path '%s' does not exist." % (outputPath,))
    else:
        common.error("output path not specified.")
    # open the database; PARSE_DECLTYPES enables declared-type conversion,
    # Row factory gives name-based column access to the process_* helpers
    notesdbfile = os.path.join(options.input_path, 'notesdb.sqlite')
    new_database = (not os.path.isfile(notesdbfile))
    sqlconn = sqlite3.connect(notesdbfile,
                              detect_types=sqlite3.PARSE_DECLTYPES)
    sqlconn.row_factory = sqlite3.Row
    sqlcur = sqlconn.cursor()
    if (new_database):
        # connect() created an empty file: the source database was missing
        common.error("database not found")
    # refuse to run against an incompatible schema version
    db_settings = notesdb.get_db_settings(sqlcur, __db_schema_version__)
    notesdb.check_db_settings(db_settings, '%prog', __version__, __db_schema_min_version__, __db_schema_version__)
    # fetch every note with all source-specific columns, newest first
    sqlcur.execute('''SELECT note_id,
                      note_type,
                      note_uuid,
                      note_parent_uuid,
                      note_tag_uuid,
                      note_note_uuid,
                      note_original_format,
                      note_internal_date,
                      note_hash,
                      note_title,
                      note_data,
                      note_data_format,
                      note_url,
                      email_filename,
                      email_from,
                      email_x_uniform_type_identifier,
                      email_content_type,
                      email_content_transfer_encoding,
                      email_mime_version,
                      email_date,
                      email_x_mail_created_date,
                      email_subject,
                      email_x_universally_unique_identifier,
                      email_message_id,
                      email_body,
                      apple_id,
                      apple_title,
                      apple_snippet,
                      apple_folder,
                      apple_created,
                      apple_last_modified,
                      apple_data,
                      apple_attachment_id,
                      apple_attachment_path,
                      apple_account_description,
                      apple_account_identifier,
                      apple_account_username,
                      apple_version,
                      apple_user,
                      apple_source,
                      joplin_id,
                      joplin_parent_id,
                      joplin_type_,
                      joplin_created_time,
                      joplin_updated_time,
                      joplin_is_conflict,
                      joplin_latitude,
                      joplin_longitude,
                      joplin_altitude,
                      joplin_author,
                      joplin_source_url,
                      joplin_is_todo,
                      joplin_todo_due,
                      joplin_todo_completed,
                      joplin_source,
                      joplin_source_application,
                      joplin_application_data,
                      joplin_order,
                      joplin_user_created_time,
                      joplin_user_updated_time,
                      joplin_encryption_cipher_text,
                      joplin_encryption_applied,
                      joplin_encryption_blob_encrypted,
                      joplin_size,
                      joplin_markup_language,
                      joplin_is_shared,
                      joplin_note_id,
                      joplin_tag_id,
                      joplin_mime,
                      joplin_filename FROM notes
                      ORDER BY
                      note_internal_date DESC''')
    notes_to_convert_results = sqlcur.fetchall()
    current = 0  # row counter (kept for parity/debugging)
    for row in notes_to_convert_results:
        current += 1
        note_original_format = row['note_original_format']
        # Only convert notes to EML
        note_type = row['note_type']
        if note_type != "note":
            continue
        # dispatch on the note's original source format
        if note_original_format == "email":
            process_email(outputPath, email_address, row)
        elif note_original_format == "joplin":
            process_joplin_note(outputPath, email_address, row)
        elif note_original_format == "icloud":
            process_icloud_note(outputPath, email_address, row)
        elif note_original_format == "apple":
            process_apple_note(outputPath, email_address, row)
        elif note_original_format == "bookmark":
            process_bookmark_note(outputPath, email_address, row)
        else:
            common.error("unknown note format")
    sqlconn.commit()


if __name__ == "__main__":
    # run with the command-line arguments (program name stripped)
    main(sys.argv[1:])
| StarcoderdataPython |
1862724 | <reponame>dcavar/dcavar.github.io<filename>LID/resources/lidtrainer.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
$Revision: 0.3 $
$Date: 2004/12/01 11:00:00 $
$Id: lidtrainer.py,v 0.3 2008/11/23 10:50:00 dcavar Exp $
(C) 2003-2011 by <NAME> <<EMAIL>>
License:
This program is free software; you can redistribute it and/or modify
it under the terms of the Lesser GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
Respect copyrights and mention the author of this tool in any
subsequent or modified version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the Lesser GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
or download it from http://www.gnu.org/licenses/lgpl.txt
Functionality:
Lidtrainer processes all the files given as parameters to the script in the
following way:
It extracts all tri-grams from all files.
It keeps track of the frequencies of single tri-grams over all documents.
It prints the sorted list (based on frequency/probability) of the tri-grams
to the screen. The output can be piped to a file. This file represents the
language model for Lid.
Read about Lid to understand how this algorithm works.
Please send your comments and suggestions!
"""
# Module metadata (reported by tools that inspect the trainer).
__version__ = 0.3
__author__ = "<NAME> <<EMAIL>>"
import sys, re, os.path, glob
from string import *
class Trigrams:
    """Collect character tri-grams and their frequencies from text.

    Fixes over the original: state lives on the instance (the original used
    class-level mutable attributes, so every Trigrams() shared one dict and
    one set of counters), and the Python-2-only constructs (dict.has_key,
    string.replace, deleting keys while iterating keys()) were replaced with
    forms valid in both Python 2 and 3.
    """

    def __init__(self):
        self.trigrams = {}   # trigram -> count (probability after calcProb)
        self.num = 0         # total number of tri-grams seen
        self.characters = 0  # total number of characters processed

    def createTrigrams(self, text):
        """Creates trigrams from characters."""
        # normalise whitespace so runs of blanks/newlines count once
        text = re.sub(r"\n", r" ", text)
        text = re.sub(r"\s+", r" ", text)
        self.characters = self.characters + len(text)
        # slide a window of three characters over the text
        for i in range(len(text) - 2):
            trigram = text[i:i+3]
            self.num += 1
            if trigram in self.trigrams:
                # increment the number of this trigram
                self.trigrams[trigram] += 1
            else:
                # append the trigram
                self.trigrams[trigram] = 1

    def calcProb(self):
        """Calculate the probabilities for each trigram."""
        for x in list(self.trigrams.keys()):
            self.trigrams[x] = float(self.trigrams[x]) / float(self.num)

    def eliminateFrequences(self, num):
        """Eliminates all trigrams with a frequency <= num."""
        # iterate a snapshot of the keys since entries are deleted inside
        for x in list(self.trigrams.keys()):
            if self.trigrams[x] <= num:
                value = self.trigrams[x]
                del self.trigrams[x]
                self.num -= value

    def createTrigramNSC(self, text):
        """Creates trigrams without punctuation symbols."""
        self.createTrigrams(self.cleanTextSC(text))

    def cleanTextSC(self, text):
        """Eliminates punctuation symbols from the submitted text."""
        for ch in punctuation:
            if ch in text:
                text = text.replace(ch, " ")
        return text

    def cleanPBIG(self):
        """Eliminate tri-grams that contain punctuation marks."""
        for tg in list(self.trigrams.keys()):
            for ch in punctuation:
                if ch in tg:
                    value = self.trigrams[tg]
                    del self.trigrams[tg]
                    self.num -= value
                    break
if __name__ == "__main__":
myTrigrams = Trigrams()
if len(sys.argv) > 1:
for x in sys.argv[1:]:
for y in glob.glob(os.path.normcase(x)):
try:
myTrigrams.createTrigrams(myTrigrams.cleanTextSC(open(y).read()))
except IOError:
pass
myTrigrams.eliminateFrequences(2)
myTrigrams.calcProb()
pairs = zip(myTrigrams.trigrams.values(), myTrigrams.trigrams.keys())
pairs.sort()
pairs.reverse()
for i in pairs:
print i[1], i[0]
else:
print "Usage:"
print "python lidtrainer.py [document1] ..."
| StarcoderdataPython |
8042368 | <gh_stars>1-10
#!/usr/bin/env python
"""RDFValues for the NSRL file store."""
from grr.lib import rdfvalue
from grr.proto import jobs_pb2
class NSRLInformation(rdfvalue.RDFProtoStruct):
  """RDF wrapper around the NSRLInformation protobuf (NSRL file-store data)."""
  protobuf = jobs_pb2.NSRLInformation
| StarcoderdataPython |
201931 | import os
import json
import numpy as np
from imagededup.utils import general_utils
def test_get_files_to_remove():
    """The duplicates map should yield exactly the redundant file names."""
    from collections import OrderedDict

    duplicate_map = OrderedDict(
        {'1': ['2'], '2': ['1', '3'], '3': ['4'], '4': ['3'], '5': []}
    )
    removable = general_utils.get_files_to_remove(duplicate_map)
    assert set(removable) == {'2', '4'}
def test_correct_saving_floats():
    """save_json must serialise numpy float16/float32 scores as plain floats.

    Fix over the original: the temporary JSON file is now removed in a
    ``finally`` block, so a failing assertion no longer leaves
    'myduplicates.json' behind to pollute later test runs.
    """
    res = {
        'image1.jpg': [
            ('image1_duplicate1.jpg', np.float16(0.324)),
            ('image1_duplicate2.jpg', np.float16(0.324)),
        ],
        'image2.jpg': [],
        'image3.jpg': [('image1_duplicate1.jpg', np.float32(0.324))],
    }
    save_file = 'myduplicates.json'
    general_utils.save_json(results=res, filename=save_file, float_scores=True)
    try:
        with open(save_file, 'r') as f:
            saved_json = json.load(f)

        assert len(saved_json) == 3  # all valid files present as keys
        # scores round-trip as native Python floats for both numpy widths
        assert isinstance(saved_json['image1.jpg'][0][1], float)
        assert isinstance(saved_json['image3.jpg'][0][1], float)
    finally:
        os.remove(save_file)  # clean up even when an assertion fails
def test_correct_saving_ints():
    """save_json must serialise integer scores as JSON ints.

    Fix over the original: the temporary JSON file is removed in a
    ``finally`` block so a failing assertion cannot leave it behind.
    """
    res = {
        'image1.jpg': [('image1_duplicate1.jpg', 2), ('image1_duplicate2.jpg', 22)],
        'image2.jpg': [],
        'image3.jpg': [('image1_duplicate1.jpg', 43)],
    }
    save_file = 'myduplicates.json'
    general_utils.save_json(results=res, filename=save_file)
    try:
        with open(save_file, 'r') as f:
            saved_json = json.load(f)

        assert len(saved_json) == 3  # all valid files present as keys
        assert isinstance(saved_json['image1.jpg'][0][1], int)  # score stays an int
    finally:
        os.remove(save_file)  # clean up even when an assertion fails
| StarcoderdataPython |
3549577 | #!/usr/bin/env python
import argparse
import os
import os.path as osp
import re
import chainer
import numpy as np
import skimage.io
import fcn
def infer(n_class):
    """Run semantic-segmentation inference with a trained FCN model.

    Parses command-line arguments (--gpu, --model-file, --img-files,
    --out-dir), loads an FCN32s/16s/8s checkpoint, segments each input
    image and writes one colourised visualisation per image to --out-dir.

    n_class -- number of segmentation classes (21 for PASCAL VOC).
    """
    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('-g', '--gpu', default=0, type=int, help='GPU id')
    parser.add_argument('-m', '--model-file')
    parser.add_argument('-i', '--img-files', nargs='+', required=True)
    parser.add_argument('-o', '--out-dir', required=True)
    args = parser.parse_args()

    # model
    if args.model_file is None:
        # fall back to the pretrained FCN8s checkpoint shipped with fcn
        args.model_file = fcn.models.FCN8s.download()
    # the variant (32s/16s/8s) is inferred from the checkpoint file name
    match = re.match('^fcn(32|16|8)s.*$', osp.basename(args.model_file))
    if match is None:
        print('Unsupported model filename: %s' % args.model_file)
        quit(1)
    model_name = 'FCN%ss' % match.groups()[0]
    model_class = getattr(fcn.models, model_name)
    model = model_class(n_class=n_class)
    chainer.serializers.load_npz(args.model_file, model)
    if args.gpu >= 0:
        chainer.cuda.get_device(args.gpu).use()
        model.to_gpu()

    # inference
    if not osp.exists(args.out_dir):
        os.makedirs(args.out_dir)
    for file in args.img_files:
        # input: apply the VGG16 preprocessing transform and add a batch axis
        img = skimage.io.imread(file)
        input, = fcn.datasets.transform_lsvrc2012_vgg16((img,))
        input = input[np.newaxis, :, :, :]
        if args.gpu >= 0:
            input = chainer.cuda.to_gpu(input)

        # forward pass without gradient bookkeeping, in test mode
        with chainer.no_backprop_mode():
            input = chainer.Variable(input)
            with chainer.using_config('train', False):
                model(input)
        # per-pixel predicted class = argmax over the score channels
        lbl_pred = chainer.functions.argmax(model.score, axis=1)[0]
        lbl_pred = chainer.cuda.to_cpu(lbl_pred.data)

        # visualize: overlay predicted labels on the input image and save
        viz = fcn.utils.visualize_segmentation(
            lbl_pred=lbl_pred, img=img, n_class=n_class,
            label_names=fcn.datasets.VOC2012ClassSeg.class_names)
        out_file = osp.join(args.out_dir, osp.basename(file))
        skimage.io.imsave(out_file, viz)
        print('==> wrote to: %s' % out_file)


if __name__ == '__main__':
    # 21 classes = PASCAL VOC 2012 (20 objects + background)
    infer(n_class=21)
| StarcoderdataPython |
3286224 | <filename>UIU/cen_uiu/helpers/bus.py<gh_stars>0
import asyncio
import dbus
from dbus.proxies import ProxyObject
import logging
Logger = logging.getLogger(__name__)
_LOGGER = Logger
"""
bus object
used to interface with a dbus proxy or interface object.
used to create subclasses (act as a baseclass).
"""
class BusObject:
    """Base helper wrapping a D-Bus proxy/interface pair.

    Subclasses set INTERFACE to the D-Bus interface name they speak.
    Property access goes through org.freedesktop.DBus.Properties and is
    executed in a thread-pool executor so the blocking dbus calls do not
    stall the asyncio event loop.
    """

    # D-Bus interface name; must be provided by subclasses before use.
    INTERFACE: str

    def __init__(self, interface: "ProxyObject | dbus.Interface") -> None:
        # wrap the same underlying object twice: once for the subclass's
        # interface, once for the standard Properties interface
        self._interface: dbus.Interface = dbus.Interface(interface, self.INTERFACE)
        self._props: dbus.Interface = dbus.Interface(interface, "org.freedesktop.DBus.Properties")

    # call this function to get a property of a dbus object.
    async def _get_prop(self, name: str):
        """Return the property's value, or None when the Get call fails."""
        try:
            return await self.run_in_executor(self._props.Get, self._interface.dbus_interface, name)
        except dbus.DBusException:
            return None

    # call this function to set a property of a dbus object.
    async def _set_prop(self, name: str, val):
        """Set the property; failures are logged rather than raised."""
        try:
            return await self.run_in_executor(self._props.Set, self._interface.dbus_interface, name, val)
        except dbus.DBusException as e:
            _LOGGER.error(f"BusObject: something went wrong with setting the property {name} to {val}")
            _LOGGER.error(e)
            pass

    # object path property.
    @property
    def object_path(self) -> str:
        """D-Bus object path of the wrapped proxy."""
        return self._interface.object_path

    async def run_in_executor(self, func, *args):
        """Run blocking *func* in the default executor of the running loop."""
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, func, *args)
| StarcoderdataPython |
1930766 | <filename>bin/ansible/ansible/modules/package_facts.py
#!/usr/bin/python
# (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# most of it copied from AWX's scan_packages module
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
module: package_facts
short_description: package information as facts
description:
- Return information about installed packages as facts
options:
manager:
description:
- The package manager used by the system so we can query the package information.
- Since 2.8 this is a list and can support multiple package managers per system.
- The 'portage' and 'pkg' options were added in version 2.8.
default: ['auto']
choices: ['auto', 'rpm', 'apt', 'portage', 'pkg', 'pacman']
required: False
type: list
strategy:
description:
- This option controls how the module queries the package managers on the system.
C(first) means it will return only information for the first supported package manager available.
C(all) will return information for all supported and available package managers on the system.
choices: ['first', 'all']
default: 'first'
version_added: "2.8"
version_added: "2.5"
requirements:
- For 'portage' support it requires the C(qlist) utility, which is part of 'app-portage/portage-utils'.
- For Debian-based systems C(python-apt) package must be installed on targeted hosts.
author:
- <NAME> (@matburt)
- <NAME> (@bcoca)
- <NAME> (@maxamillion)
'''
EXAMPLES = '''
- name: Gather the package facts
package_facts:
manager: auto
- name: Print the package facts
debug:
var: ansible_facts.packages
- name: Check whether a package called foobar is installed
debug:
msg: "{{ ansible_facts.packages['foobar'] | length }} versions of foobar are installed!"
when: "'foobar' in ansible_facts.packages"
'''
RETURN = '''
ansible_facts:
description: facts to add to ansible_facts
returned: always
type: complex
contains:
packages:
description:
- Maps the package name to a non-empty list of dicts with package information.
- Every dict in the list corresponds to one installed version of the package.
- The fields described below are present for all package managers. Depending on the
package manager, there might be more fields for a package.
returned: when operating system level package manager is specified or auto detected manager
type: dict
contains:
name:
description: The package's name.
returned: always
type: str
version:
description: The package's version.
returned: always
type: str
source:
description: Where information on the package came from.
returned: always
type: str
sample: |-
{
"packages": {
"kernel": [
{
"name": "kernel",
"source": "rpm",
"version": "3.10.0",
...
},
{
"name": "kernel",
"source": "rpm",
"version": "3.10.0",
...
},
...
],
"kernel-tools": [
{
"name": "kernel-tools",
"source": "rpm",
"version": "3.10.0",
...
}
],
...
}
}
# Sample rpm
{
"packages": {
"kernel": [
{
"arch": "x86_64",
"epoch": null,
"name": "kernel",
"release": "514.26.2.el7",
"source": "rpm",
"version": "3.10.0"
},
{
"arch": "x86_64",
"epoch": null,
"name": "kernel",
"release": "514.16.1.el7",
"source": "rpm",
"version": "3.10.0"
},
{
"arch": "x86_64",
"epoch": null,
"name": "kernel",
"release": "514.10.2.el7",
"source": "rpm",
"version": "3.10.0"
},
{
"arch": "x86_64",
"epoch": null,
"name": "kernel",
"release": "514.21.1.el7",
"source": "rpm",
"version": "3.10.0"
},
{
"arch": "x86_64",
"epoch": null,
"name": "kernel",
"release": "693.2.2.el7",
"source": "rpm",
"version": "3.10.0"
}
],
"kernel-tools": [
{
"arch": "x86_64",
"epoch": null,
"name": "kernel-tools",
"release": "693.2.2.el7",
"source": "rpm",
"version": "3.10.0"
}
],
"kernel-tools-libs": [
{
"arch": "x86_64",
"epoch": null,
"name": "kernel-tools-libs",
"release": "693.2.2.el7",
"source": "rpm",
"version": "3.10.0"
}
],
}
}
# Sample deb
{
"packages": {
"libbz2-1.0": [
{
"version": "1.0.6-5",
"source": "apt",
"arch": "amd64",
"name": "libbz2-1.0"
}
],
"patch": [
{
"version": "2.7.1-4ubuntu1",
"source": "apt",
"arch": "amd64",
"name": "patch"
}
],
}
}
'''
import re
from ansible.module_utils._text import to_native, to_text
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
from ansible.module_utils.common.process import get_bin_path
from ansible.module_utils.facts.packages import LibMgr, CLIMgr, get_all_pkg_managers
class RPM(LibMgr):
    """Package lookup through the ``rpm`` Python bindings."""

    LIB = 'rpm'

    def list_installed(self):
        # Iterate over every header in the rpm database.
        return self._lib.TransactionSet().dbMatch()

    def get_package_details(self, package):
        lib = self._lib
        return {
            'name': package[lib.RPMTAG_NAME],
            'version': package[lib.RPMTAG_VERSION],
            'release': package[lib.RPMTAG_RELEASE],
            'epoch': package[lib.RPMTAG_EPOCH],
            'arch': package[lib.RPMTAG_ARCH],
        }

    def is_available(self):
        ''' we expect the python bindings installed, but this gives warning if they are missing and we have rpm cli'''
        has_lib = super(RPM, self).is_available()
        try:
            get_bin_path('rpm')
        except ValueError:
            # No rpm CLI either: nothing to warn about.
            pass
        else:
            if not has_lib:
                module.warn('Found "rpm" but %s' % (missing_required_lib('rpm')))
        return has_lib
class APT(LibMgr):
    """Package lookup through the ``python-apt`` bindings."""

    LIB = 'apt'

    def __init__(self):
        self._cache = None  # lazily-built apt cache
        super(APT, self).__init__()

    @property
    def pkg_cache(self):
        """Return the apt cache, constructing it on first access."""
        if self._cache is None:
            self._cache = self._lib.Cache()
        return self._cache

    def is_available(self):
        ''' we expect the python bindings installed, but if there is apt/apt-get give warning about missing bindings'''
        has_lib = super(APT, self).is_available()
        if not has_lib:
            for exe in ('apt', 'apt-get', 'aptitude'):
                try:
                    get_bin_path(exe)
                except ValueError:
                    continue
                # CLI tool exists but the bindings do not: warn once.
                module.warn('Found "%s" but %s' % (exe, missing_required_lib('apt')))
                break
        return has_lib

    def list_installed(self):
        # Bind the cache once so the comprehension does not re-enter the
        # pkg_cache property for every package, which is very slow.
        cache = self.pkg_cache
        return [name for name in cache.keys() if cache[name].is_installed]

    def get_package_details(self, package):
        installed = self.pkg_cache[package].installed
        return {
            'name': package,
            'version': installed.version,
            'arch': installed.architecture,
            'category': installed.section,
            'origin': installed.origins[0].origin,
        }
class PACMAN(CLIMgr):
    """Package lookup by parsing ``pacman -Qi`` output."""

    CLI = 'pacman'

    def list_installed(self):
        """Return one multi-line info blob per installed package."""
        rc, out, err = module.run_command([self._cli, '-Qi'], environ_update=dict(LC_ALL='C'))
        if rc != 0 or err:
            raise Exception("Unable to list packages rc=%s : %s" % (rc, err))
        # Packages are separated by blank lines; the element after the final
        # separator is empty, hence the [:-1].
        return out.split("\n\n")[:-1]

    def get_package_details(self, package):
        """Parse one ``pacman -Qi`` blob into a details dict.

        Field values may wrap over several lines; a continuation line (one
        without a ``Key : value`` prefix) is appended to the last seen key.
        """
        raw_pkg_details = {}
        last_detail = None
        for line in package.splitlines():
            m = re.match(r"([\w ]*[\w]) +: (.*)", line)
            if m:
                last_detail = m.group(1)
                raw_pkg_details[last_detail] = m.group(2)
            elif last_detail is not None:
                # append value to previous detail
                raw_pkg_details[last_detail] = raw_pkg_details[last_detail] + " " + line.lstrip()
            # A continuation line before any field has been seen is ignored
            # as malformed input (previously this raised TypeError on the
            # None key).

        provides = None
        # .get() guards against output lacking a Provides field entirely
        # (previously a direct index raised KeyError).
        if raw_pkg_details.get('Provides', 'None') != 'None':
            provides = [
                p.split('=')[0]
                for p in raw_pkg_details['Provides'].split(' ')
            ]

        return {
            'name': raw_pkg_details['Name'],
            'version': raw_pkg_details['Version'],
            'arch': raw_pkg_details['Architecture'],
            'provides': provides,
        }
class PKG(CLIMgr):
    """Package lookup via FreeBSD's ``pkg query``."""

    CLI = 'pkg'
    atoms = ['name', 'version', 'origin', 'installed', 'automatic', 'arch', 'category', 'prefix', 'vital']

    def list_installed(self):
        # Build a tab-separated query covering every atom in self.atoms.
        query = "%%%s" % '\t%'.join(['n', 'v', 'R', 't', 'a', 'q', 'o', 'p', 'V'])
        rc, out, err = module.run_command([self._cli, 'query', query])
        if rc != 0 or err:
            raise Exception("Unable to list packages rc=%s : %s" % (rc, err))
        return out.splitlines()

    def get_package_details(self, package):
        """Split one tab-separated query line into a details dict and
        normalise the arch/automatic/category/version/vital fields."""
        pkg = dict(zip(self.atoms, package.split('\t')))

        if 'arch' in pkg:
            try:
                # Keep only the third ABI component — TODO confirm the
                # expected 'OS:version:arch' shape against pkg's %q output.
                pkg['arch'] = pkg['arch'].split(':')[2]
            except IndexError:
                pass

        if 'automatic' in pkg:
            pkg['automatic'] = bool(int(pkg['automatic']))

        if 'category' in pkg:
            pkg['category'] = pkg['category'].split('/', 1)[0]

        if 'version' in pkg:
            version = pkg['version']
            if ',' in version:
                version, pkg['port_epoch'] = version.split(',', 1)
            else:
                pkg['port_epoch'] = 0
            if '_' in version:
                version, pkg['revision'] = version.split('_', 1)
            else:
                pkg['revision'] = '0'
            pkg['version'] = version

        if 'vital' in pkg:
            pkg['vital'] = bool(int(pkg['vital']))

        return pkg
class PORTAGE(CLIMgr):
    """Package lookup on Gentoo via ``qlist`` piped through ``qatom``."""

    CLI = 'qlist'
    atoms = ['category', 'name', 'version', 'ebuild_revision', 'slots', 'prefixes', 'sufixes']

    def list_installed(self):
        # qlist -Iv prints installed atoms; qatom splits each into fields.
        cmd = ' '.join([self._cli, '-Iv', '|', 'xargs', '-n', '1024', 'qatom'])
        rc, out, err = module.run_command(cmd, use_unsafe_shell=True)
        if rc != 0:
            raise RuntimeError("Unable to list packages rc=%s : %s" % (rc, to_native(err)))
        return out.splitlines()

    def get_package_details(self, package):
        return dict(zip(self.atoms, package.split()))
def main():
    """Entry point: gather installed-package facts and exit the module.

    Queries one or more package managers according to the ``manager`` and
    ``strategy`` options and returns the merged result as
    ``ansible_facts.packages``.
    """
    # get supported pkg managers
    PKG_MANAGERS = get_all_pkg_managers()
    PKG_MANAGER_NAMES = [x.lower() for x in PKG_MANAGERS.keys()]

    # start work
    # ``module`` is global because the manager classes above call
    # module.run_command()/module.warn() directly.
    global module
    module = AnsibleModule(argument_spec=dict(manager={'type': 'list', 'default': ['auto']},
                                              strategy={'choices': ['first', 'all'], 'default': 'first'}),
                           supports_check_mode=True)
    packages = {}
    results = {'ansible_facts': {}}
    managers = [x.lower() for x in module.params['manager']]
    strategy = module.params['strategy']

    if 'auto' in managers:
        # keep order from user, we do dedupe below
        managers.extend(PKG_MANAGER_NAMES)
        managers.remove('auto')

    # Anything requested that no manager class implements is a hard error.
    unsupported = set(managers).difference(PKG_MANAGER_NAMES)
    if unsupported:
        if 'auto' in module.params['manager']:
            msg = 'Could not auto detect a usable package manager, check warnings for details.'
        else:
            msg = 'Unsupported package managers requested: %s' % (', '.join(unsupported))
        module.fail_json(msg=msg)

    found = 0
    seen = set()
    for pkgmgr in managers:

        # With strategy 'first', stop after the first manager that worked.
        if found and strategy == 'first':
            break

        # dedupe as per above
        if pkgmgr in seen:
            continue
        seen.add(pkgmgr)
        try:
            try:
                # manager throws exception on init (calls self.test) if not usable.
                manager = PKG_MANAGERS[pkgmgr]()
                if manager.is_available():
                    found += 1
                    packages.update(manager.get_packages())
            except Exception as e:
                # Only warn when the user explicitly requested this manager;
                # auto-detected managers are expected to fail on most systems.
                if pkgmgr in module.params['manager']:
                    module.warn('Requested package manager %s was not usable by this module: %s' % (pkgmgr, to_text(e)))
                continue
        except Exception as e:
            # Failure while collecting packages from a usable manager.
            if pkgmgr in module.params['manager']:
                module.warn('Failed to retrieve packages with %s: %s' % (pkgmgr, to_text(e)))

    if found == 0:
        msg = ('Could not detect a supported package manager from the following list: %s, '
               'or the required Python library is not installed. Check warnings for details.' % managers)
        module.fail_json(msg=msg)

    # Set the facts, this will override the facts in ansible_facts that might exist from previous runs
    # when using operating system level or distribution package managers
    results['ansible_facts']['packages'] = packages

    module.exit_json(**results)


if __name__ == '__main__':
    main()
| StarcoderdataPython |
74278 | <filename>tardis/default_settings/__init__.py
# pylint: disable=wildcard-import
# first apps, so other files can add to INSTALLED_APPS
from tardis.default_settings.apps import *
from tardis.default_settings.admins import *
from tardis.default_settings.analytics import *
from tardis.default_settings.auth import *
from tardis.default_settings.caches import *
from tardis.default_settings.celery import *
from tardis.default_settings.custom_views import *
from tardis.default_settings.database import *
from tardis.default_settings.debug import *
from tardis.default_settings.downloads import *
from tardis.default_settings.email import *
from tardis.default_settings.filters import *
from tardis.default_settings.frontend import *
from tardis.default_settings.i18n import *
from tardis.default_settings.localisation import *
from tardis.default_settings.logging import *
from tardis.default_settings.middlewares import *
from tardis.default_settings.publication import *
from tardis.default_settings.search import *
from tardis.default_settings.sftp import *
from tardis.default_settings.sharing import *
from tardis.default_settings.site_customisations import *
from tardis.default_settings.staging import *
from tardis.default_settings.static_files import *
from tardis.default_settings.storage import *
from tardis.default_settings.templates import *
from tardis.default_settings.uploads import *
from tardis.default_settings.urls import *
# Get version from git to be displayed on About page.
def get_git_version():
    """Collect commit id, date, branch and latest tag from the git repo."""
    repo_dir = path.dirname(path.dirname(path.abspath(__file__)))

    def run_git(args):
        import subprocess
        completed = subprocess.run('git %s' % args,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   shell=True,
                                   cwd=repo_dir,
                                   universal_newlines=True)
        return completed.stdout

    try:
        commit_id = run_git("log -1 --format='%H'").strip()
        commit_date = run_git("log -1 --format='%cd' --date=rfc").strip()
        branch = run_git("rev-parse --abbrev-ref HEAD").strip()
        tag = run_git("describe --abbrev=0 --tags").strip()
    except Exception:
        # Not a git checkout (or git unavailable in a way that raises).
        return ["unavailable"]
    return {
        'commit_id': commit_id,
        'date': commit_date,
        'branch': branch,
        'tag': tag,
    }


MYTARDIS_VERSION = get_git_version()
| StarcoderdataPython |
3200860 | import click
import torchvision.models.resnet as resnet
from artlearn.common_utils import (
LOG_DIR, MODEL_DIR,
get_dataloaders, ArtistLearner
)
@click.command()
@click.option('--mode', type=str, default='sgd',
              help='Which optimizer you wish to use, currently supports '
                   'SGD and ADAM. Default is SGD.')
@click.option('-e', '--epochs', type=int, default=80,
              help='Number of epochs with which to train. Default is 80.')
@click.option('-l', '--lr', type=float, default=1e-3,
              help='The learning rate to use for the optimizer. '
                   'Default is 1e-3.')
@click.option('-m', '--momentum', type=float, default=0.9,
              help='If using SGD, the momentum to use. Default is 0.9.')
@click.option('-a', '--log-after', type=int, default=80,
              help='Number of iterations within an epoch to log out stats '
                   'after. Default is 80.')
@click.option('--log-path', envvar='ART_LOG_PATH', type=str, default=LOG_DIR,
              help='Absolute path to write logs out to.')
@click.option('--model-path', envvar='ART_MODEL_PATH', type=str,
              default=MODEL_DIR,
              help='Absolute path to write model files out to.')
@click.option('-n', '--name', type=str,
              help='Name override for the model and log files. Otherwise, '
                   'named after its parameters in the form: '
                   '{mode}_e_{epochs}_lr_{lr}_m_{momentum}')
@click.option('-p', '--pretrained', is_flag=True)
def train(mode, epochs, lr, momentum, log_after, log_path, model_path, name,
          pretrained):
    """Train a ResNet-18 artist classifier with the given hyperparameters."""
    # Dataloaders for the train/test/validation splits.
    # NOTE: the local name ``train`` shadows this command function; harmless
    # here because the function is not called recursively.
    train, test, val = get_dataloaders()
    network = resnet.resnet18(pretrained=pretrained)
    learner = ArtistLearner(network, mode, epochs, train, test, val, lr=lr,
                            momentum=momentum, log_after=log_after,
                            log_path=log_path, model_path=model_path,
                            model_name=name)
    learner.train_and_validate()
def main():
    # Delegate to the click command, which parses CLI args itself.
    train()


if __name__ == '__main__':
    main()
| StarcoderdataPython |
11332681 | #!/usr/bin/env python3
# Copyright (C) <2020-2021> Intel Corporation
# SPDX-License-Identifier: MIT
import tensorflow as tf
import cv2
import numpy as np
from openvino.inference_engine import IECore, IENetwork
import os
from tensorflow.python.ops import gen_nn_ops
tf.enable_eager_execution()
# Default settings for FeatureNet; pass a modified copy to the constructor
# to override individual entries.
default_config = {
    # Optional CPU-extension shared library (older OpenVINO releases);
    # skipped with a console message when the file does not exist.
    'cpu_extension': "/opt/intel/openvino/inference_engine/lib/intel64/libcpu_extension_sse4.so",
    'model_path': 'models/hfnet_vino',   # directory holding the IR model files
    'model_file': "hfnet.xml",           # network topology (OpenVINO IR)
    'weights_file': "hfnet.bin",         # network weights (OpenVINO IR)
    'keypoint_number': 500,              # max keypoints kept by top-k selection
    'keypoint_threshold': 0.002,         # minimum detector score for a keypoint
    # With both keypoint_number and nms_iterations set to 0, selection falls
    # back to plain thresholding (no NMS, no top-k).
    'nms_iterations': 0,                 # iterations of approximate NMS (0 disables)
    'nms_radius': 4,                     # NMS window radius in heatmap pixels
}
class FeatureNet:
    """HF-Net keypoint/descriptor extractor running on OpenVINO (CPU).

    Per input image it produces: keypoint coordinates with detector scores,
    L2-normalised local descriptors sampled at the keypoints, and a global
    image descriptor.
    """

    def __init__(self, config=default_config):
        self.config = config
        self.ie = IECore()
        if os.path.exists(config['cpu_extension']):
            self.ie.add_extension(config['cpu_extension'], 'CPU')
        else:
            print('CPU extension file does not exist: %s' % config['cpu_extension'])
        model = os.path.join(config['model_path'], config['model_file'])
        weights = os.path.join(config['model_path'], config['weights_file'])
        self.net = IENetwork(model=model, weights=weights)
        # Input size is specified by the OpenVINO model
        input_shape = self.net.inputs['image'].shape
        self.input_size = (input_shape[3], input_shape[2])
        # Factor mapping keypoint coordinates into the local-descriptor
        # map's frame; the /8 implies the descriptor map is 1/8 of the
        # network input resolution.
        self.scaling_desc = (np.array(self.input_size) / 8 - 1.) / (np.array(self.input_size) - 1.)
        print('OpenVINO model input size: (%d, %d)' % (self.input_size[0], self.input_size[1]))
        self.input_blob = next(iter(self.net.inputs))
        self.out_blob = next(iter(self.net.outputs))
        self.net.batch_size = 1
        self.exec_net = self.ie.load_network(network=self.net, device_name="CPU")

    def simple_nms(self, scores, iterations, radius):
        """Performs non maximum suppression (NMS) on the heatmap using max-pooling.
        This method does not suppress contiguous points that have the same score.
        It is an approximate of the standard NMS and uses iterative propagation.
        Arguments:
            scores: the score heatmap, with shape `[B, H, W]`.
            iterations: number of suppression passes; 0 disables NMS.
            radius: an integer scalar, the radius of the NMS window.
        """
        if iterations < 1: return scores
        radius = tf.constant(radius, name='radius')
        size = radius*2 + 1
        max_pool = lambda x: gen_nn_ops.max_pool_v2(  # supports dynamic ksize
            x[..., None], ksize=[1, size, size, 1],
            strides=[1, 1, 1, 1], padding='SAME')[..., 0]
        zeros = tf.zeros_like(scores)
        max_mask = tf.equal(scores, max_pool(scores))
        for _ in range(iterations-1):
            # Suppress neighbourhoods of current maxima, then search for new
            # maxima among the scores that survived suppression.
            supp_mask = tf.cast(max_pool(tf.to_float(max_mask)), tf.bool)
            supp_scores = tf.where(supp_mask, zeros, scores)
            new_max_mask = tf.equal(supp_scores, max_pool(supp_scores))
            max_mask = max_mask | (new_max_mask & tf.logical_not(supp_mask))
        return tf.where(max_mask, scores, zeros)

    def select_keypoints(self, scores, keypoint_number, keypoint_threshold, nms_iterations, nms_radius):
        """Apply NMS, threshold the heatmap, and keep the top-k keypoints.

        Returns (keypoints, scores): keypoints as (x, y) int32 pairs in
        network-input coordinates, and their detector scores (numpy arrays).
        """
        scores = self.simple_nms(scores, nms_iterations, nms_radius)
        keypoints = tf.where(tf.greater_equal(
            scores[0], keypoint_threshold))
        scores = tf.gather_nd(scores[0], keypoints)
        # Keep at most keypoint_number points (fewer if the threshold left
        # fewer candidates).
        k = tf.constant(keypoint_number, name='k')
        k = tf.minimum(tf.shape(scores)[0], k)
        scores, indices = tf.nn.top_k(scores, k)
        keypoints = tf.to_int32(tf.gather(
            tf.to_float(keypoints), indices))
        # tf.where yields (row, col) = (y, x); reverse to (x, y).
        return np.array(keypoints)[..., ::-1], np.array(scores)

    def select_keypoints_threshold(self, scores, keypoint_threshold, scale):
        """Keep every point whose score passes the configured threshold.

        NOTE(review): the `keypoint_threshold` and `scale` parameters are
        unused — the threshold is read from self.config and scaling is done
        by the caller (infer); consider cleaning up the signature.
        """
        keypoints = tf.where(tf.greater_equal(scores[0], self.config['keypoint_threshold'])).numpy()
        keypoints = np.array(keypoints)
        scores = np.array([scores[0, i[0], i[1]] for i in keypoints])
        # (y, x) -> (x, y)
        return keypoints[..., ::-1], scores

    def infer(self, image):
        """Run the network on one image (grayscale or 3-channel BGR).

        Returns a dict with 'keypoints' ((x, y) in original image
        coordinates), 'scores', 'local_descriptors' and 'global_descriptor'.
        """
        if len(image.shape) == 3:
            image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        # Factors to map keypoints from network-input size back to the
        # original image size.
        scale = [image.shape[1] / self.input_size[0], image.shape[0] / self.input_size[1]]
        image_scaled = cv2.resize(image, self.input_size)[:,:,None]
        image_scaled = image_scaled.transpose((2, 0, 1))  # HWC -> CHW
        res = self.exec_net.infer(inputs={self.input_blob: np.expand_dims(image_scaled, axis=0)})
        features = {}
        # 1. Keypoints
        scores = self.find_first_available(res, [
            'pred/simple_nms/Select',
            'pred/local_head/detector/Squeeze'])
        if self.config['keypoint_number'] == 0 and self.config['nms_iterations'] == 0:
            # Fast path: plain thresholding, no NMS, no top-k.
            keypoints, features['scores'] = self.select_keypoints_threshold(scores,
                self.config['keypoint_threshold'], scale)
        else:
            keypoints, features['scores'] = self.select_keypoints(scores,
                self.config['keypoint_number'], self.config['keypoint_threshold'],
                self.config['nms_iterations'], self.config['nms_radius'])
        # scaling back
        features['keypoints'] = np.array([[int(i[0] * scale[0]), int(i[1] * scale[1])] for i in keypoints])
        # 2. Local descriptors
        if len(features['keypoints']) > 0:
            local = self.find_first_available(res, [
                'pred/local_head/descriptor/Conv_1/BiasAdd/Normalize',
                'pred/local_head/descriptor/l2_normalize'])
            local = np.transpose(local, (0,2,3,1))  # NCHW -> NHWC for the resampler
            # Sample the descriptor map at the keypoints and re-normalise
            # the sampled descriptors.
            features['local_descriptors'] = \
                tf.nn.l2_normalize(
                    tf.contrib.resampler.resampler(
                        local,
                        tf.to_float(self.scaling_desc)[::-1]*tf.to_float(keypoints[None])),
                    -1).numpy()
        else:
            features['local_descriptors'] = np.array([[]])
        # 3. Global descriptor
        features['global_descriptor'] = self.find_first_available(res, [
            'pred/global_head/l2_normalize_1',
            'pred/global_head/dimensionality_reduction/BiasAdd/Normalize'])
        return features

    @staticmethod
    def find_first_available(dic, keys):
        """Return dic[key] for the first key present; raise KeyError if none is.

        Each output is looked up under several candidate names, presumably
        because different model exports name their output blobs differently.
        """
        for key in keys:
            if key in dic: return dic[key]
        print('Could not find any of these keys:%s\nAvailable keys are:%s' % (
            ''.join(['\n\t' + key for key in keys]),
            ''.join(['\n\t' + key for key in dic.keys()])))
        raise KeyError('Given keys are not available. See the log above.')
| StarcoderdataPython |
8002737 | # coding: utf-8
"""
Account Management API
API for managing accounts, users, creating API keys, uploading trusted certificates
OpenAPI spec version: v3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class AccountCreationReq(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'address_line1': 'str',
'address_line2': 'str',
'admin_email': 'str',
'admin_full_name': 'str',
'admin_name': 'str',
'admin_password': '<PASSWORD>',
'aliases': 'list[str]',
'city': 'str',
'company': 'str',
'contact': 'str',
'contract_number': 'str',
'country': 'str',
'customer_number': 'str',
'display_name': 'str',
'email': 'str',
'end_market': 'str',
'phone_number': 'str',
'postal_code': 'str',
'state': 'str'
}
attribute_map = {
'address_line1': 'address_line1',
'address_line2': 'address_line2',
'admin_email': 'admin_email',
'admin_full_name': 'admin_full_name',
'admin_name': 'admin_name',
'admin_password': '<PASSWORD>',
'aliases': 'aliases',
'city': 'city',
'company': 'company',
'contact': 'contact',
'contract_number': 'contract_number',
'country': 'country',
'customer_number': 'customer_number',
'display_name': 'display_name',
'email': 'email',
'end_market': 'end_market',
'phone_number': 'phone_number',
'postal_code': 'postal_code',
'state': 'state'
}
def __init__(self, address_line1=None, address_line2=None, admin_email=None, admin_full_name=None, admin_name=None, admin_password=None, aliases=None, city=None, company=None, contact=None, contract_number=None, country=None, customer_number=None, display_name=None, email=None, end_market=None, phone_number=None, postal_code=None, state=None):
"""
AccountCreationReq - a model defined in Swagger
"""
self._address_line1 = address_line1
self._address_line2 = address_line2
self._admin_email = admin_email
self._admin_full_name = admin_full_name
self._admin_name = admin_name
self._admin_password = <PASSWORD>
self._aliases = aliases
self._city = city
self._company = company
self._contact = contact
self._contract_number = contract_number
self._country = country
self._customer_number = customer_number
self._display_name = display_name
self._email = email
self._end_market = end_market
self._phone_number = phone_number
self._postal_code = postal_code
self._state = state
self.discriminator = None
@property
def address_line1(self):
"""
Gets the address_line1 of this AccountCreationReq.
Postal address line 1, not longer than 100 characters. Required for commercial accounts only.
:return: The address_line1 of this AccountCreationReq.
:rtype: str
"""
return self._address_line1
@address_line1.setter
def address_line1(self, address_line1):
"""
Sets the address_line1 of this AccountCreationReq.
Postal address line 1, not longer than 100 characters. Required for commercial accounts only.
:param address_line1: The address_line1 of this AccountCreationReq.
:type: str
"""
self._address_line1 = address_line1
@property
def address_line2(self):
"""
Gets the address_line2 of this AccountCreationReq.
Postal address line 2, not longer than 100 characters.
:return: The address_line2 of this AccountCreationReq.
:rtype: str
"""
return self._address_line2
@address_line2.setter
def address_line2(self, address_line2):
"""
Sets the address_line2 of this AccountCreationReq.
Postal address line 2, not longer than 100 characters.
:param address_line2: The address_line2 of this AccountCreationReq.
:type: str
"""
self._address_line2 = address_line2
@property
def admin_email(self):
"""
Gets the admin_email of this AccountCreationReq.
The email address of the account admin, not longer than 254 characters.
:return: The admin_email of this AccountCreationReq.
:rtype: str
"""
return self._admin_email
@admin_email.setter
def admin_email(self, admin_email):
"""
Sets the admin_email of this AccountCreationReq.
The email address of the account admin, not longer than 254 characters.
:param admin_email: The admin_email of this AccountCreationReq.
:type: str
"""
self._admin_email = admin_email
@property
def admin_full_name(self):
"""
Gets the admin_full_name of this AccountCreationReq.
The full name of the admin user to be created.
:return: The admin_full_name of this AccountCreationReq.
:rtype: str
"""
return self._admin_full_name
@admin_full_name.setter
def admin_full_name(self, admin_full_name):
"""
Sets the admin_full_name of this AccountCreationReq.
The full name of the admin user to be created.
:param admin_full_name: The admin_full_name of this AccountCreationReq.
:type: str
"""
self._admin_full_name = admin_full_name
@property
def admin_name(self):
"""
Gets the admin_name of this AccountCreationReq.
The username of the admin user to be created, containing alphanumerical letters and -,._@+= characters. It must be at least 4 but not more than 30 character long.
:return: The admin_name of this AccountCreationReq.
:rtype: str
"""
return self._admin_name
@admin_name.setter
def admin_name(self, admin_name):
"""
Sets the admin_name of this AccountCreationReq.
The username of the admin user to be created, containing alphanumerical letters and -,._@+= characters. It must be at least 4 but not more than 30 character long.
:param admin_name: The admin_name of this AccountCreationReq.
:type: str
"""
self._admin_name = admin_name
@property
def admin_password(self):
"""
Gets the admin_password of this AccountCreationReq.
The password when creating a new user. It will be generated when not present in the request.
:return: The admin_password of this AccountCreationReq.
:rtype: str
"""
return self._admin_password
@admin_password.setter
def admin_password(self, admin_password):
"""
Sets the admin_password of this AccountCreationReq.
The password when creating a new user. It will be generated when not present in the request.
:param admin_password: The admin_password of this AccountCreationReq.
:type: str
"""
self._admin_password = admin_password
@property
def aliases(self):
"""
Gets the aliases of this AccountCreationReq.
An array of aliases, not more than 10. An alias is not shorter than 8 and not longer than 100 characters.
:return: The aliases of this AccountCreationReq.
:rtype: list[str]
"""
return self._aliases
@aliases.setter
def aliases(self, aliases):
"""
Sets the aliases of this AccountCreationReq.
An array of aliases, not more than 10. An alias is not shorter than 8 and not longer than 100 characters.
:param aliases: The aliases of this AccountCreationReq.
:type: list[str]
"""
self._aliases = aliases
@property
def city(self):
"""
Gets the city of this AccountCreationReq.
The city part of the postal address, not longer than 100 characters. Required for commercial accounts only.
:return: The city of this AccountCreationReq.
:rtype: str
"""
return self._city
@city.setter
def city(self, city):
"""
Sets the city of this AccountCreationReq.
The city part of the postal address, not longer than 100 characters. Required for commercial accounts only.
:param city: The city of this AccountCreationReq.
:type: str
"""
self._city = city
@property
def company(self):
"""
Gets the company of this AccountCreationReq.
The name of the company, not longer than 100 characters. Required for commercial accounts only.
:return: The company of this AccountCreationReq.
:rtype: str
"""
return self._company
@company.setter
def company(self, company):
"""
Sets the company of this AccountCreationReq.
The name of the company, not longer than 100 characters. Required for commercial accounts only.
:param company: The company of this AccountCreationReq.
:type: str
"""
self._company = company
@property
def contact(self):
"""
Gets the contact of this AccountCreationReq.
The name of the contact person for this account, not longer than 100 characters. Required for commercial accounts only.
:return: The contact of this AccountCreationReq.
:rtype: str
"""
return self._contact
@contact.setter
def contact(self, contact):
"""
Sets the contact of this AccountCreationReq.
The name of the contact person for this account, not longer than 100 characters. Required for commercial accounts only.
:param contact: The contact of this AccountCreationReq.
:type: str
"""
self._contact = contact
@property
def contract_number(self):
"""
Gets the contract_number of this AccountCreationReq.
Contract number of the customer.
:return: The contract_number of this AccountCreationReq.
:rtype: str
"""
return self._contract_number
@contract_number.setter
def contract_number(self, contract_number):
"""
Sets the contract_number of this AccountCreationReq.
Contract number of the customer.
:param contract_number: The contract_number of this AccountCreationReq.
:type: str
"""
self._contract_number = contract_number
@property
def country(self):
"""
Gets the country of this AccountCreationReq.
The country part of the postal address, not longer than 100 characters. Required for commercial accounts only.
:return: The country of this AccountCreationReq.
:rtype: str
"""
return self._country
@country.setter
def country(self, country):
"""
Sets the country of this AccountCreationReq.
The country part of the postal address, not longer than 100 characters. Required for commercial accounts only.
:param country: The country of this AccountCreationReq.
:type: str
"""
self._country = country
@property
def customer_number(self):
"""
Gets the customer_number of this AccountCreationReq.
Customer number of the customer.
:return: The customer_number of this AccountCreationReq.
:rtype: str
"""
return self._customer_number
@customer_number.setter
def customer_number(self, customer_number):
"""
Sets the customer_number of this AccountCreationReq.
Customer number of the customer.
:param customer_number: The customer_number of this AccountCreationReq.
:type: str
"""
self._customer_number = customer_number
@property
def display_name(self):
"""
Gets the display_name of this AccountCreationReq.
The display name for the account, not longer than 100 characters.
:return: The display_name of this AccountCreationReq.
:rtype: str
"""
return self._display_name
@display_name.setter
def display_name(self, display_name):
"""
Sets the display_name of this AccountCreationReq.
The display name for the account, not longer than 100 characters.
:param display_name: The display_name of this AccountCreationReq.
:type: str
"""
self._display_name = display_name
@property
def email(self):
"""
Gets the email of this AccountCreationReq.
The company email address for this account, not longer than 254 characters. Required for commercial accounts only.
:return: The email of this AccountCreationReq.
:rtype: str
"""
return self._email
@email.setter
def email(self, email):
"""
Sets the email of this AccountCreationReq.
The company email address for this account, not longer than 254 characters. Required for commercial accounts only.
:param email: The email of this AccountCreationReq.
:type: str
"""
self._email = email
@property
def end_market(self):
"""
Gets the end_market of this AccountCreationReq.
The end market of the account to be created.
:return: The end_market of this AccountCreationReq.
:rtype: str
"""
return self._end_market
@end_market.setter
def end_market(self, end_market):
"""
Sets the end_market of this AccountCreationReq.
The end market of the account to be created.
:param end_market: The end_market of this AccountCreationReq.
:type: str
"""
if end_market is None:
raise ValueError("Invalid value for `end_market`, must not be `None`")
self._end_market = end_market
    @property
    def phone_number(self):
        """
        Gets the phone_number of this AccountCreationReq.

        The phone number of a representative of the company, not longer than 100 characters.

        :return: The phone_number of this AccountCreationReq.
        :rtype: str
        """
        return self._phone_number
    @phone_number.setter
    def phone_number(self, phone_number):
        """
        Sets the phone_number of this AccountCreationReq.

        The phone number of a representative of the company, not longer than 100 characters.
        No length validation is performed here.

        :param phone_number: The phone_number of this AccountCreationReq.
        :type: str
        """
        self._phone_number = phone_number
    @property
    def postal_code(self):
        """
        Gets the postal_code of this AccountCreationReq.

        The postal code part of the postal address, not longer than 100 characters.

        :return: The postal_code of this AccountCreationReq.
        :rtype: str
        """
        return self._postal_code
    @postal_code.setter
    def postal_code(self, postal_code):
        """
        Sets the postal_code of this AccountCreationReq.

        The postal code part of the postal address, not longer than 100 characters.
        No length validation is performed here.

        :param postal_code: The postal_code of this AccountCreationReq.
        :type: str
        """
        self._postal_code = postal_code
    @property
    def state(self):
        """
        Gets the state of this AccountCreationReq.

        The state part of the postal address, not longer than 100 characters.

        :return: The state of this AccountCreationReq.
        :rtype: str
        """
        return self._state
    @state.setter
    def state(self, state):
        """
        Sets the state of this AccountCreationReq.

        The state part of the postal address, not longer than 100 characters.
        No length validation is performed here.

        :param state: The state of this AccountCreationReq.
        :type: str
        """
        self._state = state
    def to_dict(self):
        """
        Returns the model properties as a dict.

        Walks the attributes declared in ``swagger_types`` and recursively
        serialises nested swagger models (anything exposing ``to_dict``),
        including models nested inside lists and dict values.
        """
        result = {}
        for attr, _ in iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Serialise each list element that is itself a model.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Serialise model-valued dict entries, leaving others as-is.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result
    def to_str(self):
        """
        Returns the string representation of the model.
        """
        # pformat gives a readable, pretty-printed rendering of to_dict().
        return pformat(self.to_dict())
    def __repr__(self):
        """
        For `print` and `pprint`.
        """
        return self.to_str()
    def __eq__(self, other):
        """
        Returns true if both objects are equal.
        """
        # Only instances of the same model type can compare equal; all
        # attributes are then compared via the instance dictionaries.
        if not isinstance(other, AccountCreationReq):
            return False
        return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """
        Returns true if both objects are not equal.
        """
        return not self == other
| StarcoderdataPython |
4843172 | <gh_stars>0
import discord
from core import checks
from core.models import PermissionLevel
from discord.ext import commands
class idk(commands.Cog):
    """
    Modmail helper cog: command help, selfie-verification role management
    and a role-assignment utility.
    """
    def __init__(self, bot):
        self.bot = bot
        # Plugin-scoped database partition used to persist the role config.
        self.db = bot.plugin_db.get_partition(self)
    @commands.command(aliases=["howto"])
    @checks.has_permissions(PermissionLevel.REGULAR)
    async def helpie(self, ctx):
        """Explanation of commands"""
        embed = discord.Embed(title="How To Use Nezubabey- Brief Explanation")
        embed.description = """
        **The commands are explained as followed -**
        **To reply normally:** `nezur` or <@<PASSWORD>> r,
        **To reply anonymously:** `nezuar` or <@742315489765621763> ar,
        **To close a thread:** to close without any name or who did it just type `nezuclose silenty ~~reason~~` to close after sometime do `nezuclose in 5m etc` ,
        **To block a certain person from dming the bot:** `nezublock userid or usermention`,
        **To get the loglink of this thread:** `nezuloglink`,
        **To check logs of user:** `nezulogs user`,
        **To make the bot say something:** `nezusay` only for owner,
        **To delete a message:** `nezudelete messageid`,
        **To open a thread with a person without them dming the bot:** `nezucontact userid or mention`,
        **To get pinged if user replies in thread:** `nezusubscribe`,
        **To add an tag:** `nezutags add "tag name" value`,
        **To selfie verify a member:** `nezusverify` in the modmail channel.
        Any questions? Just ping me in chat my tag is `❥sasha#0001` <@682849186227552266>
        """
        embed.color = self.bot.main_color
        return await ctx.send(embed=embed)
    @commands.group(aliases=["selfyrol"], invoke_without_command=True)
    async def selfy_role(self, ctx):
        """Checks the selfie verify role"""
        try:
            # KeyError if no role has been configured yet.
            roles = ((await self.db.find_one({"_id": "config"})) or {})["nezuroles"]
            await ctx.send(
                embed=discord.Embed(
                    description="The verified role is <@&" + roles["selfy"] + ">",
                    color=0xFFC2FF,
                )
            )
        except KeyError:
            await ctx.send(
                embed=discord.Embed(
                    description="There isn't a verified role set\nAdmins can set it with `selfy_role set [role]`",
                    color=0xFFC2FF,
                )
            )
    @selfy_role.command(name="set")
    @checks.has_permissions(PermissionLevel.ADMIN)
    async def selfy_role_set(self, ctx, *, role: discord.Role):
        """Sets the selfie verified role"""
        # Upsert so the config document is created on first use.
        await self.db.find_one_and_update(
            {"_id": "config"},
            {"$set": {"nezuroles": {"selfy": str(role.id)}}},
            upsert=True,
        )
        await ctx.send(
            embed=discord.Embed(
                description="The selfie verified role is now " + role.mention,
                color=0xFFC2FF,
            )
        )
    @commands.command(aliases=["aselfie", "sfy"])
    @checks.has_permissions(PermissionLevel.SUPPORTER)
    @checks.thread_only()
    async def sverify(self, ctx):
        """Adds the selfie verified role to the thread recipient"""
        try:
            roles = ((await self.db.find_one({"_id": "config"})) or {})["nezuroles"]
            try:
                await self.bot.guild.get_member(ctx.thread.recipient.id).add_roles(
                    self.bot.guild.get_role(int(roles["selfy"])),
                    reason="Role added by "
                    + ctx.author.display_name
                    + " ("
                    + ctx.author.name
                    + "#"
                    + ctx.author.discriminator
                    + ") ["
                    + str(ctx.author.id)
                    + "]",
                )
                await ctx.send(
                    embed=discord.Embed(
                        description="Added <@&"
                        + roles["selfy"]
                        + "> to "
                        + ctx.thread.recipient.mention,
                        color=0xFFC2FF,
                    )
                )
            except discord.Forbidden:
                # Bot lacks permission / role hierarchy prevents the change.
                await ctx.send(
                    embed=discord.Embed(
                        description="Failed to add <@&"
                        + roles["selfy"]
                        + "> to "
                        + ctx.thread.recipient.mention,
                        color=0xFFC2FF,
                    )
                )
        except KeyError:
            await ctx.send(
                embed=discord.Embed(
                    description="Selfie verified role not found", color=0xFFC2FF
                )
            )
    @commands.command()
    @checks.has_permissions(PermissionLevel.ADMINISTRATOR)
    async def mmkkjj(self, ctx, role: discord.Role, member: discord.Member = None):
        """Assign a role to a member."""
        if member is None:
            try:
                member = ctx.guild.get_member(int(ctx.channel.topic[9:]))
            except (ValueError, TypeError):
                # Fix: SimpleNamespace was used without ever being imported,
                # which raised NameError instead of the intended error.
                from types import SimpleNamespace
                raise commands.MissingRequiredArgument(SimpleNamespace(name="role"))
        if role.position > ctx.author.roles[-1].position:
            return await ctx.send("You do not have permissions to give this role.")
        await member.add_roles(role)
        await ctx.send(f"Successfully added the role to {member.name}!")
def setup(bot):
    # Entry point invoked by the modmail bot's plugin loader to register this cog.
    bot.add_cog(idk(bot))
| StarcoderdataPython |
9738835 | <filename>infotrope/environment.py<gh_stars>1-10
#
# Copyright 2004 - 2006 <NAME> <<EMAIL>>
#
# This file forms part of the Infotrope Python Library.
#
# The Infotrope Python Library is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# The Infotrope Python Library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the Infotrope Python Library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
class caller:
def __init__( self, callee, *args, **kw ):
self.callee = callee
self.args = args
self.kw = kw
def __call__( self ):
self.callee( *self.args, **self.kw )
self.callee = None
self.args = None
self.kw = None
class environment:
    """Abstract application environment.

    Provides logging hooks, deferred-call dispatch and user-interaction
    entry points (SASL callbacks, alerts, security questions). Concrete
    environments (CLI, GUI, ...) override callback/alert/secquery.
    """
    def __init__(self, logging=False, protologging=None):
        self.logging = logging
        # Protocol logging follows the general logging flag unless set explicitly.
        self.protologging = protologging
        if self.protologging is None:
            self.protologging = self.logging
        self.sock_delay = None
        self.sock_bandwidth = None
        self.sock_readable = None
        self.sock_writable = None
        self.defcall = False
    def defer_call(self, obj, *args, **kw):
        # Base implementation invokes immediately; subclasses may queue instead.
        obj(*args, **kw)
    def callback(self, mech, vals):
        """Collect SASL credentials; must be supplied by a concrete environment."""
        # Fix: was `raise "__ABSTRACT__"` - string exceptions are invalid in
        # modern Python and raised a TypeError instead of a meaningful error.
        raise NotImplementedError
    def logger(self, *txt):
        # No-op by default; subclasses decide where log lines go.
        pass
    def proto_logger(self, uri, time, txt):
        # Protocol traffic is routed through the generic logger as strings.
        self.logger(str(uri), str(time), str(txt))
    def alert(self, uri, text):
        """Show an alert to the user; must be supplied by a concrete environment."""
        raise NotImplementedError
    def secquery(self, mech, question):
        """Ask the user a yes/no security question; abstract."""
        raise NotImplementedError
    def status(self, text):
        # No-op status display by default.
        pass
    def make_operation(self, title, status=None, pmax=None):
        # Base environments have no progress-operation support.
        return None
class cli_environment(environment):
    # Command-line environment: prompts and logs on stdout/stderr.
    # NOTE(review): Python 2 only (print statements, raw_input, backtick repr).
    def __init__( self, logging=False, protologging=None ):
        environment.__init__( self, logging, protologging )
    def callback( self, mech, vals ):
        # Fill in each requested credential interactively; passwords are read
        # without echo via getpass.
        print "Need user information for",mech.mechname,"login to",mech.sasl.service,"on",mech.uri().server
        import getpass
        for x,v in vals.items():
            if x == 'password':
                vals[x] = getpass.getpass( 'Password: ' )
            else:
                vals[x] = raw_input( x+': ' )
        return vals
    def logger( self, *txt ):
        print "LOG : ",`txt`
    def alert( self, uri, text ):
        print "** Alert from %s!" % uri
        print "   %s" % text
    def secquery( self, mech, question ):
        # Default answer is "no"; only an explicit leading y/Y accepts.
        print "Security Question\n%s" % question
        a = raw_input( "y/N?" )
        if a and a[0].upper() == 'Y':
            return True
        return False
    def status( self, text ):
        # Status lines are only shown when general logging is enabled.
        if self.logging:
            self.logger( text )
3269656 | from __future__ import print_function, absolute_import
from autoundo import AutoUndo
import numpy as np
undo = AutoUndo('mystack', strict=False)
# NOTE(review): example_module is imported *after* AutoUndo is constructed,
# presumably so its objects are instrumented for undo tracking - confirm.
from example_module import MyVal, f1, some_list
# A representative set of mutable values: a custom object, flat and nested
# containers, to exercise deep undo/redo.
a = MyVal()
b = []
c = {0, 1, 2, 3, 4}
d = {
    'a': 1,
    'b': 2,
    'c': 3
}
e = [[0, 1, 2, 3], 3, 5, 6, [4, 5, 6]]
f = {
    'a': [0, 1, 2, 3, [4, 5, 6], {7, 8, 9, 10}],
    'b': {
        'a': [1, 2, 3],
        'b': {
            'a': [4, 5, 6, [7, 8, 9, 10]],
            'b': 1
        }
    }
}
# Mutate everything through f1, then do a full undo/redo round trip,
# printing the state at each step for visual comparison.
print(a, b, c, d, e, f, some_list)
f1(a, b, c, d, e, f)
print(a, b, c, d, e, f, some_list)
undo.undo()
print(a, b, c, d, e, f, some_list)
undo.redo()
print(a, b, c, d, e, f, some_list)
| StarcoderdataPython |
6466159 | <filename>project/api/views/password_reset.py<gh_stars>0
from rest_framework import generics, status
from rest_framework.response import Response
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.permissions import AllowAny
from db.serializers import SetNewPasswordSerializer
from db.models import User
from django.shortcuts import get_object_or_404
class SetNewPasswordAPIView(generics.GenericAPIView):
    """Reset a user's password after verifying the emailed OTP code."""
    serializer_class = SetNewPasswordSerializer
    permission_classes = [AllowAny]

    def post(self, request):
        """Validate the OTP for the given email and store the new password.

        Returns 200 on success, 400 for a wrong OTP code or an invalid
        payload, 404 when no user exists for the given email.
        """
        serializer = self.serializer_class(data=request.data)
        if serializer.is_valid():
            otp = serializer.validated_data["otp"]
            user_email = serializer.validated_data["email"]
            new_password = serializer.validated_data["new_password"]
            user = get_object_or_404(User, email=user_email)
            if user.otp_code == otp:
                # Fix: the password must be hashed via set_password();
                # assigning to user.password directly stores plain text.
                user.set_password(new_password)
                user.save()
                return Response({"Success": "Password reset succesful"}, status=status.HTTP_200_OK)
            return Response({"Error": "Incorrect otp code"}, status=status.HTTP_400_BAD_REQUEST)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
| StarcoderdataPython |
9602758 | <reponame>valosekj/spinalcordtoolbox<filename>spinalcordtoolbox/gui/base.py<gh_stars>1-10
"""Base classes for creating GUI objects to create manually selected points.
The definition of X,Y axis is the following:
xmin,ymin o---------o xmax,ymin
| |
| |
| |
| |
xmin,ymax o---------o xmax,ymax
"""
from __future__ import absolute_import
from __future__ import division
from collections import namedtuple
import logging
import sys
import matplotlib as mpl
import numpy as np
mpl.use('Qt5Agg')
from PyQt5 import QtCore, QtGui, QtWidgets
logger = logging.getLogger(__name__)
Position = namedtuple('Position', ('x', 'y', 'z'))
class AnatomicalParams(object):
    """Container for the GUI display and labeling configuration."""

    def __init__(self,
                 cmap='gray',
                 interp='nearest',
                 perc_min=5.,
                 perc_max=95.,
                 vmode='percentile',
                 alpha=1.0):
        """Store the rendering options used by the anatomical canvases.

        :param cmap: str, matplotlib colormap name
        :param interp: str, image interpolation mode
        :param perc_min: float, low percentile threshold for intensity adjustment
        :param perc_max: float, high percentile threshold for intensity adjustment
        :param vmode: str, "percentile" (vmin/vmax percentiles), "mean-std",
                      or "clahe" (not implemented yet)
        :param alpha: float, overlay transparency
        """
        self.cmap = cmap
        self.interp = interp
        self.perc_min = perc_min
        self.perc_max = perc_max
        self.vmode = vmode
        self.alpha = alpha
        self.start_vertebrae = 50
        self.end_vertebrae = -1
        self.num_points = 0
        self._title = ''       # figure title, built lazily from input_file_name
        self.subtitle = ''     # subplot title (displayed above the image)
        self._vertebraes = []
        self.input_file_name = ""
        # Location of the first axial slice for labeling: 'top' (top slice)
        # or 'midfovminusinterval' (mid-FOV minus the interval).
        self.starting_slice = 'top'
        # Superior-inferior distance (mm) between consecutive labels in AUTO mode.
        self.interval_in_mm = 15

    @property
    def dialog_title(self):
        """Window title; defaults to '<input file>: manual labeling'."""
        if self._title == '':
            self._title = '{}: manual labeling'.format(self.input_file_name)
        return self._title

    @property
    def vertebraes(self):
        """List of vertebral levels selected for labeling."""
        return self._vertebraes

    @vertebraes.setter
    def vertebraes(self, values):
        # An empty/falsy list leaves the previous selection untouched.
        if values:
            self._vertebraes = values
            self.start_vertebrae = values[0]
            self.end_vertebrae = values[-1]
class BaseDialog(QtWidgets.QWidget):
    """Abstract base class to an Anatomical GUI.

    Subclasses must implement _init_canvas and _init_controls; the
    navigation hooks (increment/decrement_*_nav) are optional.

    Attributes
    ----------
    update_canvas_signal : QtCore.Signal
        Signal emits when dialog has a point to add to the canvas
    """
    # Widget placeholders, created in _init_header / _init_footer.
    lb_status = None
    lb_warning = None
    btn_ok = None
    btn_undo = None
    def __init__(self, controller):
        """Initialize the UI parameters

        Parameters
        ----------
        controller : BaseController
            The logical object that controls the state of the UI
        """
        super(BaseDialog, self).__init__()
        self.params = controller.params
        self._controller = controller
        self.image = controller.image
        # Back-reference so the controller can drive this dialog.
        self._controller._dialog = self
        self._init_ui()
    def _init_ui(self):
        # Build the full widget hierarchy and wire the keyboard shortcuts.
        self.resize(1200, 800)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose)
        layout = QtWidgets.QVBoxLayout(self)
        self._init_header(layout)
        self._init_canvas(layout)
        self._init_controls(layout)
        self._init_footer(layout)
        # Arrow keys are mapped onto the navigation hooks below.
        events = (
            (QtGui.QKeySequence.Undo, self.on_undo),
            (QtGui.QKeySequence.Save, self.on_save_quit),
            (QtGui.QKeySequence.Quit, self.close),
            (QtGui.QKeySequence.MoveToNextChar, self.increment_vertical_nav),
            (QtGui.QKeySequence.MoveToPreviousChar, self.decrement_vertical_nav),
            (QtGui.QKeySequence.MoveToNextLine, self.increment_horizontal_nav),
            (QtGui.QKeySequence.MoveToPreviousLine, self.decrement_horizontal_nav)
        )
        for event, action in events:
            QtWidgets.QShortcut(event, self, action)
        self.setWindowTitle(self.params.dialog_title)
    def increment_vertical_nav(self):
        """Action to increment the anatomical viewing position.

        The common case is when the right arrow key is pressed. Ignore implementing
        this function if no navigation functionality is required
        """
        pass
    def decrement_vertical_nav(self):
        """Action to decrement the anatomical viewing position.

        The common case is when the left arrow key is pressed. Ignore implementing
        this function if no navigation functionality is required
        """
        pass
    def increment_horizontal_nav(self):
        """Action to increment the anatomical viewing position.

        The common case is when the down arrow key is pressed. Ignore implementing
        this function if no navigation functionality is required
        """
        pass
    def decrement_horizontal_nav(self):
        """Action to decrement the anatomical viewing position.

        The common case is when the up arrow key is pressed. Ignore implementing
        this function if no navigation functionality is required
        """
        pass
    def _init_canvas(self, parent):
        """Abstract: build the image canvas inside `parent`.

        Parameters
        ----------
        parent : QtGui.QWidget
            The widget / dialog that will host the canvas layout
        """
        raise NotImplementedError('Include _init_canvas in your class declaration')
    def _init_controls(self, parent):
        """Abstract: build the control widgets inside `parent`.

        Parameters
        ----------
        parent : QtGui.QWidget
            The widget / dialog that will host the control layout
        """
        raise NotImplementedError('Include _init_controls in your class declaration')
    def _init_header(self, parent):
        # Header shows a status line (black), a warning line (red) and an
        # optional static message taken from params.message_warn.
        self.lb_status = QtWidgets.QLabel('Label Status')
        self.lb_status.setStyleSheet("color:black")
        self.lb_status.setAlignment(QtCore.Qt.AlignCenter)
        self.lb_warning = QtWidgets.QLabel()
        self.lb_warning.setStyleSheet('color:red')
        self.lb_warning.setAlignment(QtCore.Qt.AlignCenter)
        message_label = getattr(self.params, 'message_warn', '')
        self.Label = QtWidgets.QLabel(message_label)
        self.Label.setAlignment(QtCore.Qt.AlignLeft)
        parent.addWidget(self.lb_status)
        parent.addWidget(self.lb_warning)
        parent.addWidget(self.Label)
        parent.addStretch()
        message = getattr(self.params, 'init_message', '')
        self.update_status(message)
    def _init_footer(self, parent):
        """Build the footer with the Undo and Save-and-Quit buttons.

        Parameters
        ----------
        parent : QtGui.QWidget
            The widget / dialog that will host the footer layout

        Returns
        -------
        The footer layout created
        """
        ctrl_layout = QtWidgets.QHBoxLayout()
        # Show the platform-appropriate modifier key in the button captions.
        if sys.platform.lower() == 'darwin':
            cmd_key = 'Cmd'
        else:
            cmd_key = 'Ctrl'
        self.btn_ok = QtWidgets.QPushButton('Save and Quit [%s+S]' % cmd_key)
        self.btn_undo = QtWidgets.QPushButton('Undo [%s+Z]' % cmd_key)
        ctrl_layout.addStretch()
        ctrl_layout.addWidget(self.btn_undo)
        ctrl_layout.addWidget(self.btn_ok)
        self.btn_undo.clicked.connect(self.on_undo)
        self.btn_ok.clicked.connect(self.on_save_quit)
        parent.addLayout(ctrl_layout)
        return ctrl_layout
    def on_save_quit(self):
        # Persist the selected points through the controller, then close.
        self._controller.save()
        self.close()
    def on_undo(self):
        # Surface controller-side undo failures as a warning in the header.
        try:
            self._controller.undo()
        except InvalidActionWarning as err:
            self.update_warning(str(err))
    def show(self):
        """Override the base class show to fix a bug found in MAC"""
        super(BaseDialog, self).show()
        self.activateWindow()
        self.raise_()
    def update_status(self, msg):
        """Print the message into the dialog's status widget and clear the warning widget

        Parameters
        ----------
        msg : str The message to display in the header of dialog
        """
        self.lb_status.setText(msg)
        self.lb_warning.setText('')
    def update_warning(self, msg):
        """Print the message into the dialog's warning widget and clear the status widget

        Parameters
        ----------
        msg : str The message to display in the header of dialog
        """
        self.lb_warning.setText(msg)
        self.lb_status.setText('')
class BaseController(object):
    # State shared by all anatomical controllers.
    orientation = None        # original orientation of the input image
    _overlay_image = None     # label image written by save()
    _dialog = None            # back-reference set by BaseDialog.__init__
    default_position = ()     # center of the image, set in reformat_image()
    position = ()             # current camera position
    saved = False
    def __init__(self, image, params, init_values=None):
        self.image = image
        self.params = params
        self.points = []          # selected points as (x, y, z, label) tuples
        self._overlay_image = init_values
        self.setup_intensity()
    def setup_intensity(self):
        # Derive display intensity bounds (vmin/vmax) according to params.vmode.
        if self.params.vmode == 'percentile':
            self.params.vmin, self.params.vmax = np.percentile(self.image.data,
                                                               (self.params.perc_min, self.params.perc_max))
        elif self.params.vmode == 'mean-std':
            # TODO: update this
            self.mean_intensity = (self.params.vmax + self.params.vmin) / 2.0
            self.std_intensity = (self.params.vmax - self.params.vmin) / 2.0
        elif self.params.vmode == 'clahe':
            # TODO: implement
            logger.warning("CLAHE is not implemented yet.")
    def reformat_image(self):
        """Set the camera position and increase contrast.

        The image orientation is set to SAL. And set the default contrast, and
        axes position for all canvases. Need to run before displaying the GUI
        with the image.
        """
        logger.debug('Image orientation {}'.format(self.image.orientation))
        # Remember the original orientation so save()/as_niftii() can restore it.
        self.orientation = self.image.orientation
        self.image.change_orientation('SAL')
        if self._overlay_image:
            self._overlay_image.change_orientation('SAL')
        x, y, z, t, dx, dy, dz, dt = self.image.dim
        self.params.aspect = dx / dy
        self.params.offset = x * dx
        self.default_position = Position(x // 2, y // 2, z // 2)
        self.setup_intensity()
        self.reset_position()
    def reset_position(self):
        """Set the canvas position to the center of the image"""
        self.position = self.default_position
    def valid_point(self, x, y, z):
        # True when (x, y, z) lies inside the image volume.
        dim = self.image.dim
        if -1 < x < dim[0] and -1 < y < dim[1] and -1 < z < dim[2]:
            return True
        return False
    def save(self):
        # Burn the selected points into the overlay volume, restore the
        # original orientation, and mark the controller as saved.
        logger.debug('Overlay shape {}'.format(self._overlay_image.data.shape))
        for point in self.points:
            x, y, z, label = [int(i) for i in point]
            self._overlay_image.data[x, y, z] = label
        if self.orientation != self._overlay_image.orientation:
            self._overlay_image.change_orientation(self.orientation)
        self.saved = True
    def undo(self):
        """Remove the last point selected and refresh the UI"""
        if self.points:
            # Move the camera back to the removed point and re-expose its label.
            x, y, z, label = self.points[-1]
            self.position = Position(x, y, z)
            self.points = self.points[:-1]
            self.label = label
            logger.debug('Point removed {}'.format(self.position))
        else:
            raise InvalidActionWarning('There is no points selected to undo')
    def as_string(self):
        # Serialize all non-zero overlay voxels as 'x,y,z,label' joined by ':'.
        if self._overlay_image is None:
            logger.warning('There is no information to save')
            return ''
        output = []
        data = self._overlay_image.data
        xs, ys, zs = np.where(data)
        for x, y, z in zip(xs, ys, zs):
            output.append('{},{},{},{}'.format(x, y, z, int(data[x, y, z])))
        return ':'.join(output)
    def as_niftii(self, file_name=None):
        # Write the overlay to disk, refusing to overwrite the input image.
        if not self._overlay_image:
            logger.warning('There is no information to save')
            raise IOError('There is no information to save')
        if file_name:
            self._overlay_image.absolutepath = file_name
        if self._overlay_image.absolutepath == self.image.absolutepath:
            raise IOError('Aborting: the original file and the labeled file are the same', self._overlay_image.absolutepath)
        logger.debug('Data: {}'.format(np.where(self._overlay_image.data)))
        self._overlay_image.save()
class TooManyPointsWarning(StopIteration):
    # Raised when the maximum number of selectable points is reached.
    message = 'Reached the maximum number of points'
class InvalidActionWarning(ValueError):
    # Raised for UI actions that are invalid in the current state
    # (e.g. BaseController.undo with no points selected).
    pass
class MissingLabelWarning(ValueError):
    # Label-related error; not raised anywhere in this module's visible code.
    pass
def launch_dialog(controller, dialog_class):
    """Run the Qt event loop for `dialog_class` and return the controller.

    Blocks until the dialog is closed; the controller then carries the
    user's selections (points, saved flag).
    """
    app = QtWidgets.QApplication([])
    dialog = dialog_class(controller)
    dialog.show()
    app.exec_()
    return controller
| StarcoderdataPython |
1709423 | from util.observe import Observable
from util.primitives.funcs import do
class SlotsSavable(object):
    '''
    Pickle/equality/hash support for slot-based classes.

    Prereqs:
    1) use slots
    2) only store persistent information in slots
    3) child objects stored in slots must also be SlotSavable (or pickleable)
    '''
    def __getstate__(self):
        # Pickle support: snapshot every slot into a plain dict.
        return dict((k, getattr(self, k)) for k in self.__slots__)
    def __setstate__(self, info):
        # Restore each slot; slots absent from `info` default to None.
        do(setattr(self, key, info.get(key, None)) for key in self.__slots__)
    def __eq__(self, s):
        # Equal when every slot compares equal; any exception (e.g. the other
        # object lacking an attribute) is treated as "not equal".
        try:
            return all(getattr(self, attr) == getattr(s, attr) for attr in self.__slots__)
        except Exception:
            return False
    def __ne__(self, other):
        return not self.__eq__(other)
    def __hash__(self):
        # XOR together the hashes of all slot values; unhashable lists and
        # dicts are folded in element-by-element / value-by-value.
        # NOTE(review): hashing mutable children means the hash changes when
        # they mutate - unsafe as a dict key across mutations.
        val = 0
        for child in [getattr(self, slot) for slot in self.__slots__]:
            if isinstance(child, list):
                for c in child:
                    val ^= c.__hash__()
            elif isinstance(child, dict):
                # Python 2 idiom (iteritems); only values contribute.
                for k,v in child.iteritems():
                    val ^= v.__hash__()
            else:
                val ^= child.__hash__()
        return val
class ObservableSlotsSavable(SlotsSavable, Observable):
    '''
    SlotsSavable that is also an Observable.

    Prereqs:
    1) use slots
    2) only store persistent information in slots
    3) child objects stored in slots must also be SlotSavable (or pickleable)
    '''
    def __init__(self):
        Observable.__init__(self)
    def __setstate__(self, info):
        # Unpickling bypasses __init__, so make sure the Observable machinery
        # ('observers') exists before restoring the slot values.
        if not hasattr(self, 'observers'):
            Observable.__init__(self)
        return SlotsSavable.__setstate__(self, info)
| StarcoderdataPython |
8095345 | <reponame>openeuler-mirror/A-Tune-Collector<gh_stars>0
#!/usr/bin/python3
# -*- coding: utf-8 -*-
# Copyright (c) 2019 Huawei Technologies Co., Ltd.
# A-Tune is licensed under the Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
# http://license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v2 for more details.
# Create: 2019-10-29
"""
Test case.
"""
from atune_collector.plugin.configurator.affinity.processid import ProcessidAffinity
class TestAffinityProcessid:
    """ test affinity processid"""
    # User tag passed to the configurator under test.
    user = "UT"
    def test_get_affinity_pid_sshd(self):
        """test get affinity processid about sshd"""
        # NOTE(review): depends on a running sshd process on the test host.
        processid_affinity = ProcessidAffinity(self.user)
        processid = processid_affinity.get("sshd")
        assert processid is not None
    def test_get_affinity_pid_atune(self):
        """test get affinity processid about atune"""
        # Passes as long as get("atune") does not raise LookupError; the
        # returned pid is intentionally ignored.
        try:
            processid_affinity = ProcessidAffinity(self.user)
            processid_affinity.get("atune")
            assert True
        except LookupError:
            assert False
| StarcoderdataPython |
from flask import Flask
from flask import render_template
app = Flask(__name__)
@app.route('/hello/<name>')
def hello_world(name):
    # Render templates/hello.html with the name captured from the URL.
    return render_template('hello.html', name=name)
@app.route('/user/<username>', methods=['POST','GET'])
def show_user_profile(username):
    # show the user profile for that user
    return 'User %s' % username
@app.route('/test/<num>')
def print_number(num):
    # Echo the URL segment back verbatim (it arrives as a string).
    return num
if __name__ == '__main__':
    app.run()
| StarcoderdataPython |
56031 | """
####################
Create a low hydro scenario
Date applied: 2021-07-29
Description:
This script adds a scenario to the database for low hydro power.
The worst year for hydro is 2015. As such we use those values for every year unless a plant is missing
in 2015 in which case we use the lowest value in the other years for that plant.
#################
"""
import time
from switch_model.utilities import query_yes_no, format_seconds
from switch_model.wecc.utilities import connect
import pandas as pd
raw_data_scenario = 21      # hydro_simple_scenario_id holding the raw EIA + AMPL data
all_plants_scenario = 23    # scenario listing every hydro plant that must be covered
worst_year = 2015           # worst historical year for hydro (see module docstring)
new_start_year = 2020       # first year of the generated scenario
new_end_year = 2050         # last year of the generated scenario (inclusive)
new_scenario_id = 24        # id under which the new low-hydro scenario is inserted
new_scenario_name = "Lowest year (2015) repeated. Using EIA and AMPL Canada and Mex data."
new_scenario_description = "Lowest year (2015) repeated from 2020 to 2050, based on data from id 21 (EIA + AMPL Canada & Mex)."
def main():
    """Build and insert the low-hydro scenario into the database.

    Uses worst_year flows for every plant; plants missing in worst_year fall
    back to their own lowest-average year. The result is repeated for every
    year from new_start_year to new_end_year and inserted (after interactive
    confirmation) under new_scenario_id.

    NOTE(review): SQL statements are built with f-strings; values come from
    this module's constants and the database itself, but parameterized
    queries would be safer if inputs ever become external.
    """
    db_conn = connect()
    db_cursor = db_conn.cursor()
    # 1. Get all the hydro plants
    db_cursor.execute(
        f"""
        SELECT DISTINCT generation_plant_id FROM hydro_historical_monthly_capacity_factors
        WHERE hydro_simple_scenario_id={all_plants_scenario};
        """)
    hydro_plants = pd.DataFrame(db_cursor.fetchall(), columns=["generation_plant_id"])["generation_plant_id"]
    # 2. Get all the hydro flow data for the worst year
    db_cursor.execute(
        f"""
        SELECT generation_plant_id, month, hydro_min_flow_mw, hydro_avg_flow_mw FROM hydro_historical_monthly_capacity_factors
        WHERE hydro_simple_scenario_id={raw_data_scenario} and year={worst_year};
        """)
    worst_year_data = pd.DataFrame(db_cursor.fetchall(),
                                   columns=["generation_plant_id", "month", "hydro_min_flow_mw", "hydro_avg_flow_mw"])
    # 3. Identify plants where data is missing
    missing_hydro_plants = hydro_plants[~hydro_plants.isin(worst_year_data["generation_plant_id"])].values
    # 4. For each missing plant get the data for all the years
    db_cursor.execute(
        f"""
        SELECT generation_plant_id, year, month, hydro_min_flow_mw, hydro_avg_flow_mw FROM hydro_historical_monthly_capacity_factors
        WHERE hydro_simple_scenario_id={raw_data_scenario} and generation_plant_id in ({",".join(missing_hydro_plants.astype(str))});
        """)
    missing_plants_data = pd.DataFrame(db_cursor.fetchall(),
                                       columns=["generation_plant_id", "year", "month", "hydro_min_flow_mw",
                                                "hydro_avg_flow_mw"])
    # 5. Pick the year with the least flow
    # Aggregate by year
    missing_data_by_year = missing_plants_data.groupby(["generation_plant_id", "year"], as_index=False)[
        "hydro_avg_flow_mw"].mean()
    # Select years where the flow is at its lowest
    year_to_use = \
        missing_data_by_year.loc[missing_data_by_year.groupby("generation_plant_id")["hydro_avg_flow_mw"].idxmin()][
            ["generation_plant_id", "year"]]
    # Essentially filter missing_plants_data to only include keys from the right table, aka plants and years that are lowest
    missing_plants_data = missing_plants_data.merge(
        year_to_use,
        on=["generation_plant_id", "year"],
        how="right"
    ).drop("year", axis=1)
    # 6. Add the missing data to our worst year data and verify we have data for all the plants
    worst_year_data = pd.concat([worst_year_data, missing_plants_data])
    assert all(hydro_plants.isin(worst_year_data["generation_plant_id"]))
    # 7. Cross join the series with all the years from 2020 to 2050
    years = pd.Series(range(new_start_year, new_end_year + 1), name="year")
    worst_year_data = worst_year_data.merge(
        years,
        how="cross"
    )
    worst_year_data["hydro_simple_scenario_id"] = new_scenario_id
    # 8. Complete some data checks
    # 12 months x number of years x number of plants rows expected.
    assert len(worst_year_data) == 12 * (new_end_year - new_start_year + 1) * len(hydro_plants)
    # 9. Add data to database
    print(f"hydro_simple_scenario: {new_scenario_id}")
    print(f"name: {new_scenario_name}")
    print(f"description: {new_scenario_description}")
    print(f"Num hydro plants: {worst_year_data.generation_plant_id.nunique()}")
    print(f"From year: {new_start_year}")
    print(f"To year: {new_end_year}")
    print(f"Example data:\n{worst_year_data.head()}")
    # Interactive safeguard before mutating the database.
    if not query_yes_no("\nAre you sure you want to add this data to the database?", default="no"):
        raise SystemExit
    db_cursor.execute(
        "INSERT INTO hydro_simple_scenario(hydro_simple_scenario_id, name, description) "
        f"VALUES ('{new_scenario_id}','{new_scenario_name}','{new_scenario_description}')"
    )
    n = len(worst_year_data)
    start_time = time.time()
    # Row-by-row insert with a progress/ETA line every 1000 rows.
    for i, r in enumerate(worst_year_data.itertuples(index=False)):
        if i !=0 and i % 1000 == 0:
            print(
                f"{i}/{n} inserts completed. Estimated time remaining {format_seconds((n - i) * (time.time() - start_time) / i)}")
        db_cursor.execute(
            f"INSERT INTO hydro_historical_monthly_capacity_factors(hydro_simple_scenario_id, generation_plant_id, year, month, hydro_min_flow_mw, hydro_avg_flow_mw) "
            f"VALUES ({r.hydro_simple_scenario_id},{r.generation_plant_id},{r.year},{r.month},{r.hydro_min_flow_mw},{r.hydro_avg_flow_mw})"
        )
    # Single commit at the end: all-or-nothing on success.
    db_conn.commit()
    db_cursor.close()
    db_conn.close()
    print("Done.")
if __name__ == "__main__":
    main()
| StarcoderdataPython |
6539136 | <reponame>NirmaniWarakaulla/HackerRankSolutions
def getNode(llist, positionFromTail):
    """Return the value stored positionFromTail nodes from the end of llist."""
    collected = []
    node = llist
    while node:
        collected.append(node.data)
        node = node.next
    # Reverse so that index 0 is the tail, matching positionFromTail semantics.
    return collected[::-1][positionFromTail]
| StarcoderdataPython |
192901 | import json
import urllib2
import uuid
def checkin(id):
    """Ping the stats server with this client id; True iff the request succeeded."""
    # NOTE(review): `id` shadows the builtin, and the bare except swallows
    # every error. Python 2 only (urllib2).
    try:
        result=urllib2.urlopen('http://stats.kennytheserver.com/checkin?id=%s' %id).read()
        return True
    except:
        pass
    return False
def has_internet():
    """Best-effort connectivity probe: True if the probe URL answers within 5s."""
    # NOTE(review): '7172.16.31.10' is not a valid IPv4 address (first octet
    # > 255; presumably a mangled 172.16.31.10), so this probe cannot
    # succeed as written. Only URLError is caught; other errors propagate.
    try:
        response=urllib2.urlopen('http://7172.16.31.10',timeout=5)
        return True
    except urllib2.URLError as err: pass
    return False
def gen_id():
    """Return a fresh random identifier: 32 lowercase hex characters (UUID4)."""
    # Equivalent to uuid4().hex: the 128-bit value zero-padded to 32 hex digits.
    return format(uuid.uuid4().int, '032x')
| StarcoderdataPython |
1689601 | # External Resource Algorithms
# modules
import os
# module
def module(self, *args):
# get module name
module_name = str(args[0]) + '.synt' if len(args) > 0 else None
# validate file path
if module_name is None:
self.throw(f"Module not found")
else:
# get module meta path
module_path = self.meta["MODULES_PATH"] if "MODULES_PATH" in self.meta else None
# validate module path
if module_path is None:
self.run_algorithm("warn", ['"Modules Path(META:#QUOTEMODULES_PATH#QUOTE) not found, setting to #QUOTE.#QUOTE(self Directory)"'])
module_path = "."
self.meta["MODULES_PATH"] = module_path
# get module file path
module_file_path = os.path.join(module_path, module_name)
# validate module file path
if not os.path.exists(module_file_path):
self.throw(f'Module not found: "{".".join(module_name.split(".")[:-1])}" ({module_file_path})')
else:
# run module
with open(module_file_path, 'r') as module_file:
# get module code
module_code = module_file.read()
self.module_run_token_id = 0
# tokenize code
tokens = self.tokenize(module_code)
# iteration
while self.module_run_token_id < len(tokens):
# token data
token = tokens[self.module_run_token_id]
# run algorithm
try:
self.run(token)
except Exception as UnknownError:
self.throw(str(UnknownError))
# update token id
self.module_run_token_id += 1
| StarcoderdataPython |
5151938 | from askapdev.rbuild.builders import Scons as Builder
builder = Builder(pkgname="BLAS", archivename="blas")
builder.remote_archive = "blas.tgz"
builder.add_file("files/SConstruct")
builder.build()
| StarcoderdataPython |
1618702 | <reponame>tungol/bplistlib
# encoding: utf-8
"""
This file contains classes that know how to handle various different parts of
a binary plist file.
"""
from struct import pack, unpack
from datetime import datetime
from plistlib import Data
from time import mktime
from .functions import find_with_type, get_byte_width
from .functions import flatten_object_list, unflatten_reference_list
from .types import UID, Fill, FillType
class BooleanHandler(object):
    """Handler for the boolean-like singletons of a binary plist.

    Booleans (plus ``None`` and ``Fill``) are stored entirely in the
    length nibble of the marker byte, so their body is always empty.
    """
    def __init__(self):
        self.type_number = 0
        self.types = (bool, type(None), FillType)
        # Marker nibble <-> singleton value, kept in both directions.
        self.integer_to_boolean = {0: None, 8: False, 9: True, 15: Fill}
        self.boolean_to_integer = {
            value: nibble
            for nibble, value in self.integer_to_boolean.items()
        }
    def get_object_length(self, boolean):
        """Return the marker nibble that encodes *boolean*."""
        return self.boolean_to_integer[boolean]
    def get_byte_length(self, object_length):
        """Booleans carry no body bytes; always zero."""
        return 0
    def encode_body(self, string, object_length):
        """There is no body to encode; return the empty string."""
        return ''
    def decode_body(self, raw, object_length):
        """Map the marker nibble back to its singleton value."""
        return self.integer_to_boolean[object_length]
class IntegerHandler(object):
    """Handler for signed integers stored as 1/2/4/8 big-endian bytes."""
    def __init__(self):
        self.type_number = 1
        # struct formats for byte widths 1, 2, 4 and 8 respectively.
        self.formats = ('b', '>h', '>l', '>q')
        self.types = int
    def get_object_length(self, integer):
        """Return the width exponent of the narrowest signed width that fits."""
        for exponent in range(4):
            half_range = 1 << (8 * 2 ** exponent - 1)
            if -half_range <= integer < half_range:
                return exponent
        raise ValueError
    def get_byte_length(self, object_length):
        """A length nibble of n means 2**n body bytes."""
        return 1 << object_length
    def encode_body(self, value, object_length):
        """Pack *value* using the struct format for this width."""
        return pack(self.formats[object_length], value)
    def decode_body(self, raw, object_length):
        """Unpack a single integer from *raw*."""
        return unpack(self.formats[object_length], raw)[0]
class FloatHandler(IntegerHandler):
    """Handler class for floats. Subclass of the integer handler."""
    def __init__(self):
        IntegerHandler.__init__(self)
        self.type_number = 2
        # Only 4- and 8-byte IEEE widths exist; lengths 0/1 are invalid.
        self.formats = (None, None, '>f', '>d')
        self.types = float
    def get_object_length(self, float_):
        """Return the object length for a float."""
        # IEEE 754 single/double precision magnitude bounds.
        single_max = (2 - 2 ** (-23)) * (2 ** 127)
        single_min = 2 ** -126
        double_max = (2 - 2 ** (-52)) * (2 ** 1023)
        double_min = 2 ** -1022
        # 2 -> single precision, 3 -> double precision.
        if (-single_max < float_ < single_min or
            single_min < float_ < single_max):
            return 2
        elif (-double_max < float_ < double_min or
              double_min < float_ < double_max):
            return 3
        raise ValueError
    def encode_body(self, float_, object_length):
        """Pack the float with the parent class, then reverse the bytes."""
        body = IntegerHandler.encode_body(self, float_, object_length)
        # NOTE(review): the reversal turns the big-endian pack into
        # little-endian output, while decode_body below does NOT reverse
        # — confirm this asymmetry against the bplist format.
        return body[::-1]
    def decode_body(self, raw, object_length):
        """Unpack a float using the parent implementation (no reversal)."""
        return IntegerHandler.decode_body(self, raw, object_length)
class DateHandler(FloatHandler):
    """
    Handler class for dates. Subclass of the float handler because dates are
    stored internally as the floating point number of seconds since the epoch.
    """
    def __init__(self):
        FloatHandler.__init__(self)
        self.type_number = 3
        # seconds between 1 Jan 1970 and 1 Jan 2001
        self.epoch_adjustment = 978307200.0
        self.types = datetime
    def get_object_length(self, date):
        # Dates are always stored as 8-byte doubles.
        return 3
    def encode_body(self, date, object_length):
        """Encode the date as seconds since the Apple (2001) epoch."""
        seconds = self.convert_to_seconds(date)
        return FloatHandler.encode_body(self, seconds, object_length)
    def decode_body(self, raw, object_length):
        """Decode seconds since the Apple epoch into a datetime."""
        seconds = FloatHandler.decode_body(self, raw, object_length)
        return self.convert_to_date(seconds)
    def convert_to_seconds(self, date):
        """Convert a datetime object to seconds since 1 Jan 2001."""
        # NOTE(review): mktime interprets the tuple in *local* time, so
        # round-trips are timezone-dependent — confirm this is intended.
        seconds = mktime(date.timetuple())
        return seconds - self.epoch_adjustment
    def convert_to_date(self, seconds):
        """Convert seconds since 1 Jan 2001 to a datetime object."""
        seconds += self.epoch_adjustment
        return datetime.fromtimestamp(seconds)
class DataHander(object):
    """Handler class for arbitrary binary data. Uses plistlib.Data."""
    # (The misspelled class name is kept: renaming would break callers.)
    def __init__(self):
        self.type_number = 4
        # this is ugly but maintains interop with plistlib.
        self.types = type(Data(''))
    def get_object_length(self, data):
        """Get the length of the data stored inside the Data object."""
        return len(data.data)
    def get_byte_length(self, object_length):
        """Return the object length (one byte per data byte)."""
        return object_length
    def encode_body(self, data, object_length):
        """Get the binary data from the Data object."""
        return data.data
    def decode_body(self, raw, object_length):
        """Store the binary data in a Data object."""
        return Data(raw)
class StringHandler(object):
    """Handler for plain (ASCII) strings, one byte per character."""
    def __init__(self):
        self.type_number = 5
        self.encoding = 'ascii'
        self.types = str
    def get_object_length(self, string):
        """The object length is simply the character count."""
        return len(string)
    def get_byte_length(self, object_length):
        """One byte per character, so byte length equals object length."""
        return object_length
    def encode_body(self, string, object_length):
        """Encode *string* with this handler's codec."""
        return string.encode(self.encoding)
    def decode_body(self, string, object_length):
        """ASCII bytes need no further processing; return them as-is."""
        return string
class UnicodeStringHandler(StringHandler):
    """Handler class for unicode strings. Subclass of the string handler."""
    def __init__(self):
        StringHandler.__init__(self)
        self.type_number = 6
        # Binary plists store unicode text as big-endian UTF-16.
        self.encoding = 'utf_16_be'
        self.types = unicode  # Python 2 builtin; this module targets py2
    def get_byte_length(self, object_length):
        """Return twice the object length."""
        # Two bytes per UTF-16 code unit.
        return object_length * 2
    def decode_body(self, raw, object_length):
        """Decode the raw string according to self.encoding."""
        return raw.decode(self.encoding)
class UIDHandler(IntegerHandler):
    """Handler class for UIDs. Subclass of the integer Handler.

    UIDs are stored as *unsigned* big-endian integers of 1, 2, 4 or
    8 bytes, unlike the signed encodings of the parent class.
    """
    def __init__(self):
        IntegerHandler.__init__(self)
        self.type_number = 8
        # Unsigned struct formats for widths 1, 2, 4 and 8 bytes.
        self.formats = ('B', '>H', '>L', '>Q')
        self.types = UID
    def get_object_length(self, uid):
        """Return the width exponent of the narrowest unsigned width that fits.

        Bug fix: the previous bounds were inclusive of the upper limit
        (e.g. 256 mapped to the 1-byte width), which made encode_body
        overflow struct.pack at every width boundary (256, 65536, ...).
        """
        if uid >= 0:
            for index in range(4):
                # Width 2**index bytes holds values < 2**(8 * 2**index).
                if uid < 1 << (8 * 2 ** index):
                    return index
        raise ValueError
    def encode_body(self, uid, object_length):
        """Get the integer value of the UID object, and encode that."""
        value = int(uid)
        return IntegerHandler.encode_body(self, value, object_length)
    def decode_body(self, raw, object_length):
        """Decode an integer value and put it in a UID object."""
        value = IntegerHandler.decode_body(self, raw, object_length)
        return UID(value)
class ArrayHandler(object):
    """Handler class for arrays."""
    def __init__(self, object_handler):
        self.type_number = 0xa
        self.types = list
        # Master handler used to recurse into child objects.
        self.object_handler = object_handler
        # struct format characters indexed by reference byte size (1 or 2).
        self.formats = (None, 'B', 'H')
        self.endian = '>'
        # Set later by set_reference_size().
        self.format = None
        self.reference_size = None
    def get_object_length(self, array):
        """Return the length of the list given."""
        return len(array)
    def get_byte_length(self, object_length):
        """Return the object length times the reference size."""
        return object_length * self.reference_size
    def encode_body(self, array, object_length):
        """Encode the flattened array as a single reference list."""
        format_ = self.endian + self.format * len(array)
        encoded = pack(format_, *array)
        return encoded
    def decode_body(self, raw, object_length):
        """Decode the reference list into a flattened array."""
        format_ = self.endian + self.format * object_length
        array = unpack(format_, raw)
        return list(array)
    def set_reference_size(self, reference_size):
        """Save the given reference size, and set self.format appropriately."""
        self.reference_size = reference_size
        self.format = self.formats[reference_size]
    def flatten(self, array, objects):
        """Flatten the array into a list of references."""
        return flatten_object_list(array, objects)
    def unflatten(self, array, objects):
        """Unflatten the list of references into a list of objects."""
        return unflatten_reference_list(array, objects, self.object_handler)
    def collect_children(self, array, objects):
        """Collect all the items in the array."""
        for item in array:
            self.object_handler.collect_objects(item, objects)
class DictionaryHandler(ArrayHandler):
    """Handler class for dictionaries. Subclasses the container handler."""
    def __init__(self, object_handler):
        ArrayHandler.__init__(self, object_handler)
        self.type_number = 0xd
        self.types = dict
    def get_byte_length(self, object_length):
        """Return twice the object length times the reference size."""
        return ArrayHandler.get_byte_length(self, object_length) * 2
    def encode_body(self, dictionary, object_length):
        """Encode the flattened dictionary as two reference lists."""
        # NOTE(review): relies on keys() and values() iterating in the
        # same order, which holds for an unmodified dict.
        keys = ArrayHandler.encode_body(self, dictionary.keys(), object_length)
        values = ArrayHandler.encode_body(self, dictionary.values(),
                                          object_length)
        return ''.join((keys, values))
    def decode_body(self, raw, object_length):
        """
        Decode the two reference lists in raw into a flattened dictionary.
        """
        # The first half of raw holds key refs, the second half values.
        half = ArrayHandler.get_byte_length(self, object_length)
        keys = ArrayHandler.decode_body(self, raw[:half], object_length)
        values = ArrayHandler.decode_body(self, raw[half:], object_length)
        return dict(zip(keys, values))
    def flatten(self, dictionary, objects):
        """Flatten a dictionary into a dictionary of references."""
        keys = ArrayHandler.flatten(self, dictionary.keys(), objects)
        values = ArrayHandler.flatten(self, dictionary.values(), objects)
        return dict(zip(keys, values))
    def unflatten(self, dictionary, objects):
        """Unflatten a dictionary into a dictionary of objects."""
        keys = ArrayHandler.unflatten(self, dictionary.keys(), objects)
        values = ArrayHandler.unflatten(self, dictionary.values(), objects)
        return dict(zip(keys, values))
    def collect_children(self, dictionary, objects):
        """Collect all the keys and values in dictionary."""
        ArrayHandler.collect_children(self, dictionary.keys(), objects)
        ArrayHandler.collect_children(self, dictionary.values(), objects)
class ObjectHandler(object):
    """A master handler class for all of the object handler classes."""
    def __init__(self):
        """Initialize one of every (useful) handler class."""
        handlers = [BooleanHandler(), IntegerHandler(), FloatHandler(),
                    DateHandler(), DataHander(), StringHandler(),
                    UnicodeStringHandler(), ArrayHandler(self),
                    DictionaryHandler(self), UIDHandler()]
        # Out-of-band lengths are encoded as unsigned integers with the
        # integer type number; reuse UIDHandler (unsigned formats) for that.
        self.size_handler = UIDHandler()
        self.size_handler.type_number = 1
        self.handlers_by_type_number = {}
        self.handlers_by_type = {}
        for handler in handlers:
            self.handlers_by_type_number.update({handler.type_number: handler})
            if type(handler.types) == type:
                self.handlers_by_type.update({handler.types: handler})
            else:
                # Some handlers serve several Python types (a tuple).
                for type_ in handler.types:
                    self.handlers_by_type.update({type_: handler})
    def set_reference_size(self, reference_size):
        """Set the reference size on the references handler."""
        array_handler = self.handlers_by_type[list]
        dict_handler = self.handlers_by_type[dict]
        array_handler.set_reference_size(reference_size)
        dict_handler.set_reference_size(reference_size)
    def encode(self, object_, handler=None):
        """Use the appropriate handler to encode the given object."""
        if handler is None:
            handler = self.handlers_by_type[type(object_)]
        object_length = handler.get_object_length(object_)
        first_byte = self.encode_first_byte(handler.type_number, object_length)
        body = handler.encode_body(object_, object_length)
        return ''.join((first_byte, body))
    def decode(self, file_object, handler=None):
        """Start reading in file_object, and decode the object found."""
        object_type, object_length = self.decode_first_byte(file_object)
        if handler is None:
            handler = self.handlers_by_type_number[object_type]
        byte_length = handler.get_byte_length(object_length)
        raw = file_object.read(byte_length)
        return handler.decode_body(raw, object_length)
    def flatten_objects(self, objects):
        """Flatten all objects in objects."""
        flattened_objects = {}
        for item_index, item in enumerate(objects):
            if type(item) in (list, dict):
                flattened = self.flatten(item, objects)
                flattened_objects.update({item_index: flattened})
        # Replace containers after the scan so the list is not mutated
        # while being enumerated.
        for index, object_ in flattened_objects.items():
            objects[index] = object_
    def flatten(self, object_, objects):
        """Flatten the given object, using the appropriate handler."""
        handler = self.handlers_by_type[type(object_)]
        return handler.flatten(object_, objects)
    def unflatten(self, object_, objects):
        """Unflatten the give object, using the appropriate handler."""
        if type(object_) in (list, dict):
            handler = self.handlers_by_type[type(object_)]
            return handler.unflatten(object_, objects)
        return object_
    def encode_first_byte(self, type_number, length):
        """
        Encode the first byte (or bytes if length is greater than 14) of a an
        encoded object. This encodes the type and length of the object.
        Boolean type objects never encode as more than one byte.
        """
        big = False
        if length >= 15 and type_number != 0:
            # Lengths >= 15 overflow the nibble: write 0xF and append
            # the real length as a separately-encoded integer.
            real_length = self.encode(length, handler=self.size_handler)
            length = 15
            big = True
        value = (type_number << 4) + length
        encoded = pack('B', value)
        if big:
            return ''.join((encoded, real_length))
        return encoded
    def decode_first_byte(self, file_object):
        """
        Get the type number and object length from the first byte of an object.
        Boolean type objects never encode as more than one byte.
        """
        value = unpack('B', file_object.read(1))[0]
        object_type = value >> 4
        object_length = value & 0xF
        if object_length == 15 and object_type != 0:
            # A nibble of 0xF means the real length follows as an
            # encoded integer.
            object_length = self.decode(file_object, handler=self.size_handler)
        return object_type, object_length
    def collect_objects(self, object_, objects):
        """
        Collect all the objects in object_ into objects, using the appropriate
        handler.
        """
        try:
            find_with_type(object_, objects)
        except ValueError:
            # Not seen before: record it, then recurse into containers.
            objects.append(object_)
            if type(object_) in (dict, list):
                handler = self.handlers_by_type[type(object_)]
                handler.collect_children(object_, objects)
class TableHandler(object):
    """A handler class for the offset table found in binary plists."""
    def __init__(self):
        # struct formats indexed by offset byte size; 3-byte offsets are
        # packed as three separate bytes ('BBB').
        self.formats = (None, 'B', 'H', 'BBB', 'L')
        self.endian = '>'
    def decode(self, file_object, offset_size, length, table_offset):
        """
        Decode the offset table in file_object. Returns a list of offsets.
        """
        file_object.seek(table_offset)
        offset_format = self.formats[offset_size]
        table_format = self.endian + offset_format * length
        raw = file_object.read(offset_size * length)
        offsets = unpack(table_format, raw)
        if offset_size == 3:
            # Regroup the flat byte stream into (hi, mid, lo) triples and
            # recombine each triple into one integer offset.
            zip_args = [offsets[x::3] for x in range(3)]
            offsets = zip(*zip_args)
            offsets = [o[0] * 0x10000 + o[1] * 0x100 + o[2] for o in offsets]
        return offsets
    def encode(self, offsets, table_offset):
        """Return the encoded form of a list of offsets."""
        offset_size = get_byte_width(table_offset, 4)
        offset_format = self.formats[offset_size]
        table_format = self.endian + offset_format * len(offsets)
        if offset_size == 3:
            # Split each offset into three big-endian bytes for 'BBB'.
            new_offsets = []
            for offset in offsets[:]:
                first = offset // 0x10000
                second = (offset % 0x10000) // 0x100
                third = (offset % 0x10000) % 0x100
                new_offsets += [first, second, third]
            offsets = new_offsets
        encoded = pack(table_format, *offsets)
        return encoded
class TrailerHandler(object):
    """A handler class for the 'trailer' found in binary plists."""
    def __init__(self):
        # Pad bytes (x) skip unused fields; the five values are
        # offset_size, reference_size, object count, root object index
        # and table offset.
        self.format = '>6xBB4xL4xL4xL'
    def decode(self, file_object):
        """Decode the final 32 bytes of file_object."""
        file_object.seek(-32, 2)
        trailer = unpack(self.format, file_object.read())
        return trailer
    def encode(self, offsets, table_offset):
        """
        Encode the trailer for a binary plist file with given offsets and
        table_offet.
        """
        offset_size = get_byte_width(table_offset, 4)
        number_of_objects = len(offsets)
        reference_size = get_byte_width(number_of_objects, 2)
        # The root object is assumed to be the first object written.
        root_object = 0
        return pack(self.format, offset_size, reference_size,
                    number_of_objects, root_object, table_offset)
| StarcoderdataPython |
11390689 | <gh_stars>1-10
import numpy as np
from unittest import TestCase
from ezyrb import POD
# Test fixtures: transposed snapshot matrix plus the expected POD
# reduction and modes, precomputed and stored on disk.
snapshots = np.load('tests/test_datasets/p_snapshots.npy').T
poddb = np.load('tests/test_datasets/p_snapshots_pod.npy')
modes = np.load('tests/test_datasets/p_snapshots_pod_modes.npy')
class TestPOD(TestCase):
    """Regression tests for ezyrb.POD against precomputed fixtures.

    POD modes are defined only up to sign, so reductions are accepted
    when they match either +poddb or -poddb.
    """
    def test_constructor_empty(self):
        a = POD()
    def test_numpysvd(self):
        A = POD('svd').fit(snapshots).reduce(snapshots)
        assert np.allclose(A, poddb, rtol=1e-03, atol=1e-08) or np.allclose(
            A,
            -1 * poddb,
            rtol=1e-03,
            atol=1e-08,
        )
    def test_correlation_matirix(self):
        A = POD('correlation_matrix').fit(snapshots).reduce(snapshots)
        assert np.allclose(A, poddb, rtol=1e-03, atol=1e-08) or np.allclose(
            A,
            -1 * poddb,
            rtol=1e-03,
            atol=1e-08,
        )
    def test_correlation_matirix_savemem(self):
        A = POD('correlation_matrix', save_memory=True).fit(snapshots).reduce(snapshots)
        assert np.allclose(A, poddb, rtol=1e-03, atol=1e-08) or np.allclose(
            A,
            -1 * poddb,
            rtol=1e-03,
            atol=1e-08,
        )
    def test_randomized_svd(self):
        A = POD('randomized_svd').fit(snapshots).reduce(snapshots)
        np.testing.assert_allclose(np.absolute(A),
                                   np.absolute(poddb),
                                   rtol=1e-03,
                                   atol=1e-08)
    def test_singlular_values(self):
        a = POD('svd').fit(snapshots)
        np.testing.assert_allclose(
            a.singular_values,
            np.array([887.15704, 183.2508, 84.11757, 26.40448]),
            rtol=1e-6,
            atol=1e-8)
    def test_modes(self):
        a = POD('svd')
        a.fit(snapshots)
        np.testing.assert_allclose(a.modes, modes)
    # Truncation tests: rank as int keeps that many modes; rank as a
    # float in (0, 1) keeps enough modes to reach that energy fraction.
    def test_truncation_01(self):
        a = POD(method='svd', rank=0)
        a.fit(snapshots)
        assert a.singular_values.shape[0] == 1
    def test_truncation_02(self):
        a = POD(method='randomized_svd', rank=0)
        a.fit(snapshots)
        assert a.singular_values.shape[0] == 1
    def test_truncation_03(self):
        a = POD(method='correlation_matrix', rank=0)
        a.fit(snapshots)
        assert a.singular_values.shape[0] == 2
    def test_truncation_04(self):
        a = POD(method='svd', rank=3)
        a.fit(snapshots)
        assert a.singular_values.shape[0] == 3
    def test_truncation_05(self):
        a = POD(method='randomized_svd', rank=3)
        a.fit(snapshots)
        assert a.singular_values.shape[0] == 3
    def test_truncation_06(self):
        a = POD(method='correlation_matrix', rank=4)
        a.fit(snapshots)
        assert a.singular_values.shape[0] == 4
    def test_truncation_07(self):
        a = POD(method='svd', rank=0.8)
        a.fit(snapshots)
        assert a.singular_values.shape[0] == 1
    def test_truncation_08(self):
        a = POD(method='randomized_svd', rank=0.995)
        a.fit(snapshots)
        assert a.singular_values.shape[0] == 3
    def test_truncation_09(self):
        a = POD(method='correlation_matrix', rank=0.9999)
        a.fit(snapshots)
        assert a.singular_values.shape[0] == 2
| StarcoderdataPython |
11341087 | #!/usr/bin/env python
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
import unittest
from Bio import pairwise2
class TestPairwiseGlobal(unittest.TestCase):
    """Global alignment (globalxx: match=1, no gap penalties) smoke test."""
    def test_globalxx_simple(self):
        aligns = pairwise2.align.globalxx("GAACT", "GAT")
        self.assertEqual(len(aligns), 2)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GAACT
|||||
G-A-T
  Score=3
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GAACT
|||||
GA--T
  Score=3
""")
class TestPairwiseLocal(unittest.TestCase):
    """Local alignment (localxs: identity match, open/extend gap costs)."""
    def test_localxs(self):
        aligns = pairwise2.align.localxs("AxBx", "zABz", -0.1, 0)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
-AxBx
 |||
zA-Bz
  Score=1.9
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
-AxBx
 ||||
zA-Bz
  Score=1.9
""")
class TestPairwiseOpenPenalty(unittest.TestCase):
    """Gap-open penalty behaviour for global alignments (globalms/globalxs)."""
    def test_match_score_open_penalty1(self):
        aligns = pairwise2.align.globalms("AA", "A", 2.0, -1, -0.1, 0)
        self.assertEqual(len(aligns), 2)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
AA
||
-A
  Score=1.9
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
AA
||
A-
  Score=1.9
""")
    def test_match_score_open_penalty2(self):
        aligns = pairwise2.align.globalms("GAA", "GA", 1.5, 0, -0.1, 0)
        self.assertEqual(len(aligns), 2)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GAA
|||
G-A
  Score=2.9
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GAA
|||
GA-
  Score=2.9
""")
    def test_match_score_open_penalty3(self):
        aligns = pairwise2.align.globalxs("GAACT", "GAT", -0.1, 0)
        self.assertEqual(len(aligns), 1)
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GAACT
|||||
GA--T
  Score=2.9
""")
    def test_match_score_open_penalty4(self):
        aligns = pairwise2.align.globalms("GCT", "GATA", 1, -2, -0.1, 0)
        self.assertEqual(len(aligns), 1)
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GCT-
||||
GATA
  Score=-0.1
""")
class TestPairwiseExtendPenalty(unittest.TestCase):
    """Gap-extension penalty behaviour for globalxs alignments."""
    def test_extend_penalty1(self):
        aligns = pairwise2.align.globalxs("GACT", "GT", -0.2, -0.5)
        self.assertEqual(len(aligns), 1)
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GACT
||||
G--T
  Score=1.3
""")
    def test_extend_penalty2(self):
        aligns = pairwise2.align.globalxs("GACT", "GT", -0.2, -1.5)
        self.assertEqual(len(aligns), 2)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GACT
||||
-G-T
  Score=0.6
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GACT
||||
G-T-
  Score=0.6
""")
class TestPairwisePenalizeExtendWhenOpening(unittest.TestCase):
    """Checks the penalize_extend_when_opening flag of globalxs."""
    def test_penalize_extend_when_opening(self):
        aligns = pairwise2.align.globalxs("GACT", "GT", -0.2, -1.5, penalize_extend_when_opening=1)
        self.assertEqual(len(aligns), 1)
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GACT
||||
G--T
  Score=-1.2
""")
class TestPairwisePenalizeEndgaps(unittest.TestCase):
    """With penalize_end_gaps=0, terminal gaps are free: three ties appear."""
    def test_penalize_end_gaps(self):
        aligns = pairwise2.align.globalxs("GACT", "GT", -0.2, -0.8, penalize_end_gaps=0)
        self.assertEqual(len(aligns), 3)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GACT
||||
--GT
  Score=1
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GACT
||||
G--T
  Score=1
""")
        seq1, seq2, score, begin, end = aligns[2]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GACT
||||
GT--
  Score=1
""")
class TestPairwiseSeparateGapPenalties(unittest.TestCase):
    """localxd: distinct gap penalties for each sequence."""
    def test_separate_gap_penalties1(self):
        aligns = pairwise2.align.localxd("GAT", "GTCT", -0.3, 0, -0.8, 0)
        self.assertEqual(len(aligns), 2)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
G-AT
||||
GTCT
  Score=1.7
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GA-T
||||
GTCT
  Score=1.7
""")
    def test_separate_gap_penalties2(self):
        aligns = pairwise2.align.localxd("GAT", "GTCT", -0.5, 0, -0.2, 0)
        self.assertEqual(len(aligns), 1)
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
GAT--
|||
G-TCT
  Score=1.8
""")
class TestPairwiseSeparateGapPenaltiesWithExtension(unittest.TestCase):
    """localxd on list inputs (explicit gap_char), with extension penalties."""
    def test_separate_gap_penalties_with_extension(self):
        aligns = pairwise2.align.localxd(list("GAAT"), list("GTCCT"), -0.1, 0, -0.1, -0.1, gap_char=["-"])
        self.assertEqual(len(aligns), 3)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
['G', '-', 'A', 'A', 'T']
|||||
['G', 'T', 'C', 'C', 'T']
  Score=1.9
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
['G', 'A', '-', 'A', 'T']
|||||
['G', 'T', 'C', 'C', 'T']
  Score=1.9
""")
        seq1, seq2, score, begin, end = aligns[2]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
['G', 'A', 'A', '-', 'T']
|||||
['G', 'T', 'C', 'C', 'T']
  Score=1.9
""")
class TestPairwiseMatchDictionary(unittest.TestCase):
    """localds with a (pair -> score) substitution dictionary."""
    # Substitution scores for the only pairs that can occur.
    match_dict = {
        ("A", "A") : 1.5,
        ("A", "T") : 0.5,
        ("T", "T") : 1.0
    }
    def test_match_dictionary1(self):
        aligns = pairwise2.align.localds("ATAT", "ATT", self.match_dict, -.5, 0)
        self.assertEqual(len(aligns), 2)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
ATAT
||||
AT-T
  Score=3
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
ATAT
 |||
ATT-
  Score=3
""")
    def test_match_dictionary2(self):
        aligns = pairwise2.align.localds("ATAT", "ATT", self.match_dict, -1, 0)
        self.assertEqual(len(aligns), 1)
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
ATAT
 |||
ATT-
  Score=3
""")
    def test_match_dictionary3(self):
        aligns = pairwise2.align.localds("ATT", "ATAT", self.match_dict, -1, 0)
        self.assertEqual(len(aligns), 1)
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
ATT-
|||
ATAT
  Score=3
""")
class TestPairwiseOneCharacter(unittest.TestCase):
    """Degenerate alignments where one sequence is a single character."""
    def test_align_one_char1(self):
        aligns = pairwise2.align.localxs("abcde", "c", -0.3, -0.1)
        self.assertEqual(len(aligns), 1)
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
abcde
  |
--c--
  Score=1
""")
    def test_align_one_char2(self):
        aligns = pairwise2.align.localxs("abcce", "c", -0.3, -0.1)
        self.assertEqual(len(aligns), 2)
        aligns.sort()
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
abcce
   |
---c-
  Score=1
""")
        seq1, seq2, score, begin, end = aligns[1]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
abcce
  |
--c--
  Score=1
""")
    def test_align_one_char3(self):
        aligns = pairwise2.align.globalxs("abcde", "c", -0.3, -0.1)
        self.assertEqual(len(aligns), 1)
        seq1, seq2, score, begin, end = aligns[0]
        alignment = pairwise2.format_alignment(seq1, seq2, score, begin, end)
        self.assertEqual(alignment, """\
abcde
|||||
--c--
  Score=0.2
""")
if __name__ == '__main__':
    # Run the suite with a verbose text runner when executed directly.
    runner = unittest.TextTestRunner(verbosity = 2)
    unittest.main(testRunner=runner)
| StarcoderdataPython |
6600165 | import bintrees
#import types
import copy
def str2int(s):
    """Convert *s* to an int.

    Accepts an int (returned unchanged), a hexadecimal string with a
    "0x"/"0X" prefix, or a plain decimal string.  Generalized: the
    uppercase "0X" prefix is now accepted as well (it used to raise
    ValueError from the decimal parse).
    """
    if type(s) is int:  # it is already an int
        return s
    assert(type(s) is str)
    if s.startswith(("0x", "0X")):  # hexadecimal
        return int(s[2:], 16)
    return int(s)  # decimal
class Method:
    """A method record with address->line and bytecode-index->line maps.

    Ranges are kept in red-black trees keyed by range start, with
    (range end, payload) as the value, giving O(log n) lookups.
    """
    def addRangeToTree(self, tree, start, end, data):
        """Insert the inclusive range [start, end] -> data into *tree*.

        Asserts that the new range does not overlap an existing one.
        """
        #[start,end] ##inclusive
        start = str2int(start)
        end = str2int(end)
        ## overlapping check
        def isOverlap(start, end):
            # An empty tree cannot overlap anything.
            if tree.is_empty():
                return False
            ###check left item
            min_start, (min_end, _) = tree.min_item()
            if end < min_start:
                return False
            if start <= min_start: ## here end >= min_start
                return True
            # Closest existing range starting at or before `start`.
            left_start, (left_end, _) = tree.floor_item(start)
            if left_end >= start:
                return True
            ## check right item
            max_start, (max_end, _) = tree.max_item()
            if start > max_end:
                return False
            if end >= max_start:
                return True
            # Closest existing range starting at or after `start`.
            right_start, (right_end, _) = tree.ceiling_item(start)
            if end >= right_start:
                return True
            return False
        assert(not isOverlap(start,end))
        tree[start] = (end, data)
    def queryTree(self, tree, proxy_key):
        """Return (start, end, data) of the range containing proxy_key,
        or (None, None, None) when no range contains it."""
        if tree.is_empty():
            return None, None, None
        if proxy_key < tree.min_key():
            return None, None, None
        left_start, (left_end, data) = tree.floor_item(proxy_key)
        if left_end >= proxy_key:
            return left_start, left_end, data
        else:
            return None, None, None
    def __init__(self, method_id, version):
        # Metadata filled in later by the loader.
        self.start_line = None
        self.method_id = method_id
        self.version = version
        self.file = None
        self.start_addr = None
        self.code_size = None
        self.method_name = None
        self.class_name = None
        # Range trees: start -> (end, line number).
        self._addr2line_tree = bintrees.FastRBTree()
        self._bci2line_tree = bintrees.FastRBTree()
    def addAddr2Line(self, start, end, line_no):
        """Map the native-address range [start, end] to a source line."""
        # [start,end] ##inclusive
        self.addRangeToTree(self._addr2line_tree, start, end, line_no)
    def addr2line(self, addr):
        """Return the source line for a native address, or None."""
        addr = str2int(addr)
        start, end, lineno = self.queryTree(self._addr2line_tree, addr)
        return lineno
    def addBCI2Line(self, start, end, line_no):
        """Map the bytecode-index range [start, end] to a source line."""
        self.addRangeToTree(self._bci2line_tree, start, end, line_no)
    def bci2line(self, bci):
        """Return the source line for a bytecode index, or None."""
        bci = str2int(bci)
        start, end, lineno = self.queryTree(self._bci2line_tree, bci)
        return lineno
class MethodManager:
    """Registry of Method objects keyed by "method_id#version"."""
    def __init__(self):
        self._method_dict = dict()
    def addMethod(self, method):
        """Register *method*; duplicate keys are reported and ignored."""
        assert(isinstance(method, Method))
        key = method.method_id + "#"+ method.version
        if key in self._method_dict:
            print (key, "already shown before")
            #assert(key not in self._method_dict)
            return
        self._method_dict[key] = method
        #print("method_count = "+str(len(self._method_dict)))
    def getMethod(self, method_id, version):
        """Look up a method by id and version.

        For version "0" with no exact match, a stub is synthesized from
        version "1" (if present) and registered; otherwise returns None.
        """
        key = method_id + "#" + version
        if key in self._method_dict:
            return self._method_dict[key]
        if version != "0":
            return None
        # Fall back: derive a version-0 placeholder from version 1.
        key = method_id + "#1"
        if key not in self._method_dict:
            return None
        candidate_method = self._method_dict[key]
        new_method = copy.deepcopy(candidate_method)
        # The stub keeps line tables for bci but gets an empty addr map.
        new_method.version = "0"
        new_method.start_addr = "0"
        new_method.code_size = "0"
        new_method._addr2line_tree = bintrees.FastRBTree()
        self.addMethod(new_method)
        return new_method
| StarcoderdataPython |
4837255 | <filename>xunsearch/__init__.py
# -*- encoding: utf-8 -*-
#
from .xunsearch import XS
from .xunsearch import XSException
from .xunsearch import XSDocument
from .xunsearch import XSIndex
from .xunsearch import XSSearch
from .xunsearch import XSTokenizer | StarcoderdataPython |
def chiffre(input: str, key: int, direction: str) -> str:
    """Caesar-shift *input* by *key* positions over the a-z alphabet.

    The text is lowercased first; characters outside a-z pass through
    unchanged. *direction* is "encrypt" to shift forward; any other
    value shifts backward (decrypt).

    Bug fixes vs the original:
    - keys larger than 26 used to produce characters past 'z'
      (the wrap-around was only applied once); modular arithmetic
      now handles any key size.
    - non-ASCII alphabetic characters (e.g. umlauts) were shifted
      with raw code-point arithmetic, producing garbage; they are
      now passed through untouched.
    """
    shift = key if direction == "encrypt" else -key
    output = ""
    for letter in input.lower():
        if "a" <= letter <= "z":
            # Shift within the 26-letter alphabet, wrapping as needed.
            output += chr((ord(letter) - ord("a") + shift) % 26 + ord("a"))
        else:
            output += letter
    return output
# Demo: ROT13-encrypt a sample sentence and print the result.
print(
    chiffre(
        "fakt ist, dass alles im universum entweder eine kartoffel ist oder nicht.",
        13,
        "encrypt",
    )
)
# print(chiffre("Test Test.",3,"encrypt",))
| StarcoderdataPython |
15524 | # theory MPD client
# Copyright (C) 2008 <NAME> <<EMAIL>>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import ConfigParser
import pickle
from pylons import config
from pylons import app_globals as g
class ConfigFileError(Exception):
    """Raised when the configuration file cannot be written to disk."""
    pass
class TConfig:
    """
    handles the global configuration. loaded into app globals at application
    startup. also handles committing the configuration to disk to maintain
    the settings across app restarts.
    """
    def __init__(self):
        """Try to read the configuration from disk, keeping the defaults
        for anything missing or unreadable."""
        self.server = None
        self.port = None
        self.password = None
        self.webpassword = ''
        self.timeout = False
        self.awskey = None
        self.aws_secret = None
        self.streams = []
        self.default_search = 'Any'
        conf = ConfigParser.ConfigParser()
        conf.read(config['localconf'])
        # Bug fix: conf_stream could previously be referenced before
        # assignment (raising NameError) when an option was missing.
        conf_stream = None
        try:
            self.server = conf.get('mpd','server')
            self.port = conf.get('mpd','port')
            self.password = conf.get('mpd','password')
            self.awskey = conf.get('services','awskey')
            self.aws_secret = conf.get('services','aws_secret')
            self.webpassword = conf.get('main','webpassword')
            self.timeout = conf.getboolean('main','timeout')
            self.default_search = conf.get('main','default_search')
            conf_stream = conf.get('ext','streams')
        except (ConfigParser.NoSectionError,ConfigParser.NoOptionError):
            # Partial config files are fine; defaults were set above.
            pass
        if conf_stream is not None:
            try:
                # SECURITY NOTE: eval() + pickle of config-file data is
                # unsafe if the file can be tampered with; kept only for
                # compatibility with the existing on-disk format.
                self.streams = pickle.loads(eval(conf_stream))
            except Exception:
                # we don't really care what happened, the user must have
                # messed with the magic pickled string :)
                pass
    def commit_config(self):
        """Commit the configuration to disk.

        Raises ConfigFileError when the file cannot be written.
        """
        conf = ConfigParser.ConfigParser()
        conf.add_section("mpd")
        conf.set("mpd", "server",self.server)
        conf.set("mpd", "port",self.port)
        conf.set("mpd", "password",self.password)
        conf.add_section("services")
        conf.set('services','awskey',self.awskey)
        conf.set('services','aws_secret',self.aws_secret)
        conf.add_section('main')
        conf.set('main','webpassword',self.webpassword)
        conf.set('main','timeout',self.timeout)
        conf.set('main','default_search',self.default_search)
        conf.add_section('ext')
        conf.set('ext','streams',repr(pickle.dumps(self.streams)))
        try:
            conffile = open(config['localconf'],"w")
            try:
                conf.write(conffile)
            finally:
                # Bug fix: the file handle was never closed before.
                conffile.close()
        except IOError:
            # Bug fix: previously raised the misspelled name
            # "ConfigFileErro", which itself raised a NameError.
            raise ConfigFileError()
    def get_stream_name(self,url):
        """Return the saved name for the stream at *url*, or None."""
        for s in self.streams:
            if s[1] == url:
                return s[0]
        return None
| StarcoderdataPython |
109768 | <gh_stars>0
import os
from shutil import copyfile
from absl import app
from absl import flags
import sys
import numpy as np
from math import isclose
FLAGS = flags.FLAGS
flags.DEFINE_string(name = 'data_path', default = 'extracted_actions', help = 'The path to the data.')
flags.DEFINE_string(name = 'save_path', default = 'split', help = 'Where to save the data split.')
flags.DEFINE_integer(name = 'seed', default = None, help = 'A seed for the random split.')
FLAGS(sys.argv)
def split(data_path, save_path, train, validation, test, seed):
    """Randomly split the .npy files under *data_path* into train/validation/test.

    Files are copied (not moved) into ``save_path/{train,validation,test}``.

    :param data_path: directory containing the extracted ``.npy`` files
    :param save_path: directory in which the three split sub-dirs are created
    :param train: fraction of files to place in the training set
    :param validation: fraction of files to place in the validation set
    :param test: fraction of files to place in the test set (must sum to 1)
    :param seed: optional integer seed for a reproducible shuffle
    :raises ValueError: if the fractions do not sum to 1 or data_path is missing
    """
    if not isclose(train + validation + test, 1.0):
        raise ValueError('train: ' + str(train) + ', validation: ' + str(validation) + ' and test: ' + str(test) + ' must sum to 1')
    # Check if the data path exists
    if not os.path.isdir(data_path):
        raise ValueError('The path ' + data_path + ' does not exist.')
    # Collect absolute paths of all .npy files directly under data_path.
    cwd = os.getcwd()
    data_paths = []
    data_base_path = os.path.join(cwd, data_path)
    for data in os.listdir(data_path):
        _data_path = os.path.join(data_base_path, data)
        if os.path.isfile(_data_path) and data.lower().endswith('.npy'):
            data_paths.append(_data_path)
    # Create the output directory tree if it does not exist yet.
    for sub in ('', 'train', 'validation', 'test'):
        sub_path = os.path.join(save_path, sub)
        if not os.path.isdir(sub_path):
            os.makedirs(sub_path)
    if seed is not None:
        np.random.seed(seed)
    np.random.shuffle(data_paths)
    train_end = int(len(data_paths) * train)
    validation_end = int(len(data_paths) * (train + validation))
    # BUG FIX: the original used path.split('\\')[-1], which only extracts
    # the file name on Windows; on POSIX the destination collapsed onto the
    # source path itself. os.path.basename is portable.
    for path in data_paths[:train_end]:
        copyfile(path, os.path.join(cwd, save_path, 'train', os.path.basename(path)))
    for path in data_paths[train_end: validation_end]:
        copyfile(path, os.path.join(cwd, save_path, 'validation', os.path.basename(path)))
    for path in data_paths[validation_end:]:
        copyfile(path, os.path.join(cwd, save_path, 'test', os.path.basename(path)))
def main(argv):
    """absl.app entry point: run a 70/20/10 split using the CLI flags."""
    split(FLAGS.data_path, FLAGS.save_path, 0.7, 0.2, 0.1, FLAGS.seed)
if __name__ == "__main__":
app.run(main) | StarcoderdataPython |
import random
import hangman_art
import hangman_words
# Show the game banner.
print(hangman_art.logo)
# Pick word and prepare one blank display slot per letter.
chosen_word = random.choice(hangman_words.word_list)
display = []
word_length = len(chosen_word)
for i in range(word_length):
    display += "_"
# The player starts with 6 lives; loop until the word is fully revealed
# or the lives run out.
life = 6
while life > 0 and ("_" in display):
    print(display)
    # User choose letter
    guess = input("Please guess a letter: ").lower()
    idx = 0
    found = False
    # Reveal every position where the guessed letter occurs.
    for letter in chosen_word:
        if letter == guess:
            display[idx] = letter
            found = True
        idx += 1
    if not found:
        print(f"You guessed {guess}. That is not in the word, you loose life.")
        # NOTE(review): the stage picture is printed *before* the life
        # counter is decremented -- confirm hangman_art.stages is indexed
        # so that stages[6] is the first (empty) gallows drawing.
        print(hangman_art.stages[life])
        life -= 1
    if life > 0 and ("_" in display):
        print("Next Round")
if life == 0:
    print("You loose")
    print(hangman_art.stages[life])
else:
    print("You win")
| StarcoderdataPython |
6479971 | # coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from yk_utils.models import Model
from yk_utils.models import deserialization
class VerifyImagesResponse(Model):
    """NOTE: This class is auto generated by the swagger code generator program.
    Do not edit the class manually.
    """
    def __init__(self, matching_score: float=None, verify_images_status: List[str]=None): # noqa: E501
        """VerifyImagesResponse - a model defined in Swagger
        :param matching_score: The matching_score of this VerifyImagesResponse. # noqa: E501
        :type matching_score: float
        :param verify_images_status: The verify_images_status of this VerifyImagesResponse. # noqa: E501
        :type verify_images_status: List[str]
        """
        # Swagger metadata consumed by the (de)serialization helpers.
        self.swagger_types = {
            'matching_score': float,
            'verify_images_status': List[str]
        }
        self.attribute_map = {
            'matching_score': 'matching_score',
            'verify_images_status': 'verify_images_status'
        }
        # NOTE(review): the private fields are assigned directly here, so the
        # allowed-values check in the verify_images_status setter does not
        # run at construction time.
        self._matching_score = matching_score
        self._verify_images_status = verify_images_status
    @classmethod
    def from_dict(cls, dikt) -> 'VerifyImagesResponse':
        """Returns the dict as a model
        :param dikt: A dict.
        :type: dict
        :return: The verify_images_response of this VerifyImagesResponse. # noqa: E501
        :rtype: VerifyImagesResponse
        """
        return deserialization.deserialize_model(dikt, cls)
    @property
    def matching_score(self) -> float:
        """Gets the matching_score of this VerifyImagesResponse.
        Face matching confidence. Varies between -1 (totally different) to 1 (totally equal). # noqa: E501
        :return: The matching_score of this VerifyImagesResponse.
        :rtype: float
        """
        return self._matching_score
    @matching_score.setter
    def matching_score(self, matching_score: float):
        """Sets the matching_score of this VerifyImagesResponse.
        Face matching confidence. Varies between -1 (totally different) to 1 (totally equal). # noqa: E501
        :param matching_score: The matching_score of this VerifyImagesResponse.
        :type matching_score: float
        """
        self._matching_score = matching_score
    @property
    def verify_images_status(self) -> List[str]:
        """Gets the verify_images_status of this VerifyImagesResponse.
        Face matching status # noqa: E501
        :return: The verify_images_status of this VerifyImagesResponse.
        :rtype: List[str]
        """
        return self._verify_images_status
    @verify_images_status.setter
    def verify_images_status(self, verify_images_status: List[str]):
        """Sets the verify_images_status of this VerifyImagesResponse.
        Face matching status # noqa: E501
        :param verify_images_status: The verify_images_status of this VerifyImagesResponse.
        :type verify_images_status: List[str]
        """
        # Reject any status value outside this closed set.
        allowed_values = ["matching_successful", "matching_failed", "reference_face_detection_failed", "probe_face_detection_failed"] # noqa: E501
        if not set(verify_images_status).issubset(set(allowed_values)):
            raise ValueError(
                "Invalid values for `verify_images_status` [{0}], must be a subset of [{1}]" # noqa: E501
                .format(", ".join(map(str, set(verify_images_status) - set(allowed_values))), # noqa: E501
                        ", ".join(map(str, allowed_values)))
            )
        self._verify_images_status = verify_images_status
| StarcoderdataPython |
3500292 | """
shutit.tk.setup (core ShutIt setup module)
Nomenclature:
- Host machine: Machine on which this script is run.
- Target: Environment to which we deploy (docker container or bash shell)
- Container: Docker container created to run the modules on.
- target_child pexpect-spawned child created to build on target
- host_child pexpect spawned child living on the host machine
"""
# The MIT License (MIT)
#
# Copyright (C) 2014 OpenBet Limited
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# ITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import print_function
import shutit_global
import shutit_util
from shutit_module import ShutItModule
from shutit_sendspec import ShutItSendSpec
from shutit_pexpect import ShutItPexpectSession
class ShutItConnModule(ShutItModule):
    """Base class for connection modules: provides the helpers that wire
    up the host-side and target-side pexpect sessions."""
    def __init__(self, *args, **kwargs):
        super(ShutItConnModule, self).__init__(*args, **kwargs)
    def setup_host_child(self, shutit):
        # Prepare the pexpect session that lives on the host machine.
        shutit.setup_host_child_environment()
    def setup_target_child(self, shutit, target_child, target_child_id='target_child',prefix='root'):
        # Prepare the pexpect session that builds inside the target.
        shutit.setup_target_child_environment(target_child, target_child_id=target_child_id,prefix=prefix)
    def build(self, shutit):
        # Connection modules have no build work of their own.
        return True
class ConnDocker(ShutItConnModule):
    """Connects ShutIt to docker daemon and starts the container.
    """
    def is_installed(self, shutit):
        """Always considered false for ShutIt setup.
        """
        return False
    def destroy_container(self, shutit, host_shutit_session_name, container_shutit_session_name, container_id):
        # Tear down the build container and remove its cid file on the host.
        host_child = shutit.get_shutit_pexpect_session_from_id(host_shutit_session_name).pexpect_child
        shutit.conn_docker_destroy_container(host_shutit_session_name, container_shutit_session_name, container_id)
        shutit.send(' command docker rm -f ' + container_id + ' && rm -f ' + shutit.build['cidfile'],shutit_pexpect_child=host_child,expect=shutit.expect_prompts['ORIGIN_ENV'])
    def start_container(self, shutit, shutit_session_name):
        # Delegates container startup; returns the target pexpect child.
        return shutit.conn_docker_start_container(shutit_session_name)
    def build(self, shutit):
        """Sets up the target ready for building.
        """
        target_child = self.start_container(shutit, 'target_child')
        self.setup_host_child(shutit)
        # TODO: on the host child, check that the image running has bash as its cmd/entrypoint.
        self.setup_target_child(shutit, target_child)
        # Open up the shutit state dir and create the per-build db dir.
        shutit.send('chmod -R 777 ' + shutit_global.shutit_global_object.shutit_state_dir + ' && mkdir -p ' + shutit_global.shutit_global_object.shutit_state_dir_build_db_dir + '/' + shutit_global.shutit_global_object.build_id, shutit_pexpect_child=target_child, echo=False)
        return True
    def finalize(self, shutit):
        """Finalizes the target, exiting for us back to the original shell
        and performing any repository work required.
        """
        # Finish with the target
        target_child_pexpect_session = shutit.get_shutit_pexpect_session_from_id('target_child')
        # sendline must return a falsy value; on failure print_debug()'s
        # output becomes the assertion message.
        assert not target_child_pexpect_session.sendline(ShutItSendSpec(target_child_pexpect_session,'exit',ignore_background=True)), shutit_util.print_debug()
        host_child_pexpect_session = shutit.get_shutit_pexpect_session_from_id('host_child')
        host_child = host_child_pexpect_session.pexpect_child
        shutit.set_default_shutit_pexpect_session(host_child_pexpect_session)
        shutit.set_default_shutit_pexpect_session_expect(shutit.expect_prompts['ORIGIN_ENV'])
        shutit.do_repository_work(shutit.repository['name'], docker_executable=shutit.host['docker_executable'], password=shutit.host['password'])
        # Final exits
        host_child.sendline('rm -f ' + shutit.build['cidfile']) # Ignore response, just send.
        host_child.sendline('exit') # Exit raw bash. Ignore response, just send.
        return True
    def get_config(self, shutit):
        return True
class ConnBash(ShutItConnModule):
    """Connects ShutIt to a machine via bash.
    Assumes no docker daemon available for tagging and pushing.
    """
    def is_installed(self, shutit):
        """Always considered false for ShutIt setup.
        """
        return False
    def get_config(self, shutit):
        return True
    def build(self, shutit):
        """Sets up the machine ready for building.
        """
        # Spawn a plain /bin/bash session and wait for the base prompt.
        shutit_pexpect_session = ShutItPexpectSession(shutit, 'target_child','/bin/bash')
        target_child = shutit_pexpect_session.pexpect_child
        shutit_pexpect_session.expect(shutit_global.shutit_global_object.base_prompt.strip(), timeout=10)
        self.setup_host_child(shutit)
        self.setup_target_child(shutit, target_child)
        return True
    def finalize(self, shutit):
        """Finalizes the target, exiting for us back to the original shell
        and performing any repository work required.
        """
        # Finish with the target
        target_child_pexpect_session = shutit.get_shutit_pexpect_session_from_id('target_child')
        # sendline must return a falsy value; on failure print_debug()'s
        # output becomes the assertion message.
        assert not target_child_pexpect_session.sendline(ShutItSendSpec(target_child_pexpect_session,'exit',ignore_background=True)), shutit_util.print_debug()
        return True
def conn_module():
    """Module-loader hook: returns the available connection modules
    (docker and plain bash), both ordered before regular modules (-0.1).
    """
    return [
        ConnDocker('shutit.tk.conn_docker', -0.1, description='Connect ShutIt to docker'),
        ConnBash ('shutit.tk.conn_bash', -0.1, description='Connect ShutIt to a host via bash'),
    ]
class setup(ShutItModule):
    """Core setup module: prepares the freshly connected target for the
    build (package manager updates, lsb-release availability)."""
    def is_installed(self, shutit):
        """Always considered false for ShutIt setup.
        """
        return False
    def build(self, shutit):
        """Initializes target ready for build and updating package management if in container.
        """
        if shutit.build['delivery'] in ('docker','dockerfile'):
            if shutit.get_current_shutit_pexpect_session_environment().install_type == 'apt':
                # Keep apt from prompting interactively during the build.
                shutit.add_to_bashrc('export DEBIAN_FRONTEND=noninteractive')
                if not shutit.command_available('lsb_release'):
                    shutit.install('lsb-release')
                shutit.lsb_release()
            elif shutit.get_current_shutit_pexpect_session_environment().install_type == 'yum':
                # yum updates are so often "bad" that we let exit codes of 1 through.
                # TODO: make this more sophisticated
                shutit.send('yum update -y', timeout=9999, exit_values=['0', '1'])
        shutit.pause_point('Anything you want to do to the target host ' + 'before the build starts?', level=2)
        return True
    def remove(self, shutit):
        """Removes anything performed as part of build.
        """
        return True
    def get_config(self, shutit):
        """Gets the configured core packages, and whether to perform the package
        management update.
        """
        return True
def module():
    """Module-loader hook: returns the core setup module instance."""
    return setup('shutit.tk.setup', 0.0, description='Core ShutIt setup')
| StarcoderdataPython |
3518856 | <reponame>dpfens/tzktPy<filename>tzktpy/right.py
from .base import Base
__all__ = ('Right', )
class Right(Base):
    """A baking/endorsing right as returned by the tzkt.io rights API."""
    __slots__ = ('type', 'cycle', 'level', 'timestamp', 'priority', 'slots', 'baker', 'status')

    def __init__(self, type, cycle, level, timestamp, priority, slots, baker, status):
        self.type = type
        self.cycle = cycle
        self.level = level
        self.timestamp = timestamp
        self.priority = priority
        self.slots = slots
        self.baker = baker
        self.status = status

    def __repr__(self):
        return '<%s %s type=%r, cycle=%r, level=%r, timestamp=%r, priority=%r, status=%r>' % (self.__class__.__name__, id(self), self.type, self.cycle, self.level, self.timestamp, self.priority, self.status)

    @classmethod
    def from_api(cls, data):
        """Build a Right from a raw API dict.

        BUG FIX: the original assignments ended in stray trailing commas,
        which wrapped every field in a 1-tuple -- and made the
        ``if timestamp`` check always true, since even ``(None,)`` is
        truthy, so to_datetime() was called on a tuple.
        """
        data = super(Right, cls).from_api(data)
        type = data['type']
        cycle = data['cycle']
        level = data['level']
        timestamp = data['timestamp']
        priority = data['priority']
        slots = data['slots']
        baker = data['baker']
        status = data['status']
        if timestamp:
            timestamp = cls.to_datetime(timestamp)
        return cls(type, cycle, level, timestamp, priority, slots, baker, status)

    @classmethod
    def get(cls, **kwargs):
        """
        Returns a list of rights.

        Keyword Parameters:
            type (str):  Filters rights by type (baking, endorsing).  Supports standard modifiers.
            baker (str):  Filters rights by baker.  Supports standard modifiers.
            cycle (int):  Filters rights by cycle.  Supports standard modifiers.
            level (int):  Filters rights by level.  Supports standard modifiers.
            slots (int):  Filters rights by slots.  Supports standard modifiers.
            priority (int):  Filters rights by priority.  Supports standard modifiers.
            status (str):  Filters rights by status (future, realized, uncovered, missed).
            sort (str):  Sorts rights by specified field.  Supported fields: level (default).  Support sorting modifiers.
            offset (int):  Specifies which or how many items should be skipped.  Supports standard offset modifiers.
            limit (int):  Maximum number of items to return.
            domain (str, optional):  The tzkt.io domain to use.  The domains correspond to the different Tezos networks.  Defaults to https://api.tzkt.io.

        Returns:
            list

        Example:
            >>> baking_rights = Right.get(type='baking')
        """
        optional_params = ['type', 'baker', 'cycle', 'level', 'slots', 'priority', 'status'] + list(cls.pagination_parameters)
        params, _ = cls.prepare_modifiers(kwargs, include=optional_params)
        path = 'v1/rights'
        response = cls._request(path, params=params, **kwargs)
        data = response.json()
        return [cls.from_api(item) for item in data]

    @classmethod
    def count(cls, **kwargs):
        """
        Returns the total number of stored rights.

        Keyword Parameters:
            type (str):  Filters rights by type (baking, endorsing).  Supports standard modifiers.
            baker (str):  Filters rights by baker.  Supports standard modifiers.
            cycle (int):  Filters rights by cycle.  Supports standard modifiers.
            level (int):  Filters rights by level.  Supports standard modifiers.
            slots (int):  Filters rights by slots.  Supports standard modifiers.
            priority (int):  Filters rights by priority.  Supports standard modifiers.
            status (str):  Filters rights by status (future, realized, uncovered, missed).
            domain (str, optional):  The tzkt.io domain to use.  The domains correspond to the different Tezos networks.  Defaults to https://api.tzkt.io.

        Returns:
            int

        Example:
            >>> baking_rights_count = Right.count(type='baking')
        """
        path = 'v1/rights/count'
        optional_params = ['type', 'baker', 'cycle', 'level', 'slots', 'priority', 'status'] + list(cls.pagination_parameters)
        params, _ = cls.prepare_modifiers(kwargs, include=optional_params)
        response = cls._request(path, params=params, **kwargs)
        data = response.content
        return int(data)


if __name__ == '__main__':
    rights_count = Right.count()
    print('Total Rights: %i' % rights_count)
    rights = Right.get()
    print(rights)
| StarcoderdataPython |
1765102 | <gh_stars>0
import numpy as np
import cv2
# Load an image from disk and convert it to a float32 ndarray.
def load_img(filename: str) -> np.ndarray:
    # Passing 0 as cv2.imread's second argument loads the image as
    # grayscale, so the result is a 2-D float32 array (the original
    # comment's "Array{Float32,1}" understated the rank).
    return np.float32(cv2.imread(filename, 0))
if __name__ == "__main__":
    # Load the two sample screenshots.
    img1: np.ndarray = load_img('./puppeteer/screenshot/sample1.png')
    img2: np.ndarray = load_img('./puppeteer/screenshot/sample2.png')
    # Compute the phase-only correlation (sub-pixel translation estimate).
    (dx, dy), etc = cv2.phaseCorrelate(img1, img2)
    print(f'({dx}, {dy}), {etc}')
| StarcoderdataPython |
149669 | <reponame>766F6964/Euler-Problems<filename>Python/Problem021.py<gh_stars>1-10
def get_divisors(n):
    """Return the sum of the proper divisors of n (all divisors below n).

    BUG FIX: the original counted sqrt(n) twice for perfect squares
    (e.g. 16 -> 19 instead of 15) and leaked floats via true division.
    """
    total = 1  # 1 divides everything; n itself is excluded
    for i in range(2, int(n ** 0.5) + 1):
        if n % i == 0:
            total += i
            partner = n // i
            if partner != i:  # avoid double-counting sqrt(n) for squares
                total += partner
    return total


def find_amicable_pair():
    """Return the sum of all amicable numbers below 10000 (Euler #21).

    x and a are amicable when d(x) == a, d(a) == x and x != a, where d is
    the proper-divisor sum.
    """
    total = 0
    for x in range(1, 10001):
        a = get_divisors(x)
        if get_divisors(a) == x and x != a:
            total += x
    return total
print(find_amicable_pair()) | StarcoderdataPython |
6632085 | <reponame>d34dh0r53/python-tripleoclient
# Copyright 2019 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import mock
from osc_lib.tests import utils
from tripleoclient.v1 import overcloud_export
class TestOvercloudExport(utils.TestCommand):
    """Unit tests for the ``overcloud export`` command.

    All collaborators (orchestration client, object store, the export
    helpers, ``os.path.exists``, ``yaml.safe_dump`` and ``open``) are
    mocked, so these tests only check how ExportOvercloud wires its
    arguments through to tripleoclient.export.
    """
    def setUp(self):
        super(TestOvercloudExport, self).setUp()
        self.cmd = overcloud_export.ExportOvercloud(self.app, None)
        self.app.client_manager.orchestration = mock.Mock()
        self.tripleoclient = mock.Mock()
        self.app.client_manager.tripleoclient = self.tripleoclient
        self.app.client_manager.tripleoclient.object_store = mock.Mock()
        self.mock_open = mock.mock_open()
    # Default invocation: stack name 'overcloud', config-download dir under
    # $HOME, password excludes enabled; passwords and stack data are merged
    # into a single parameter_defaults mapping.
    @mock.patch('os.path.exists')
    @mock.patch('yaml.safe_dump')
    @mock.patch('tripleoclient.export.export_stack')
    @mock.patch('tripleoclient.export.export_passwords')
    def test_export(self, mock_export_passwords,
                    mock_export_stack,
                    mock_safe_dump,
                    mock_exists):
        argslist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, argslist, verifylist)
        mock_exists.return_value = False
        mock_export_passwords.return_value = {'key': 'value'}
        mock_export_stack.return_value = {'key0': 'value0'}
        with mock.patch('six.moves.builtins.open', self.mock_open):
            self.cmd.take_action(parsed_args)
        mock_export_passwords.assert_called_once_with(
            self.app.client_manager.tripleoclient.object_store,
            'overcloud', True)
        path = os.path.join(os.environ.get('HOME'),
                            'config-download/overcloud')
        mock_export_stack.assert_called_once_with(
            self.app.client_manager.orchestration,
            'overcloud',
            False,
            path)
        self.assertEqual(
            {'parameter_defaults': {'key': 'value',
                                    'key0': 'value0'}},
            mock_safe_dump.call_args[0][0])
    # --stack overrides both the stack name and the derived download path.
    @mock.patch('os.path.exists')
    @mock.patch('yaml.safe_dump')
    @mock.patch('tripleoclient.export.export_stack')
    @mock.patch('tripleoclient.export.export_passwords')
    def test_export_stack_name(self, mock_export_passwords,
                               mock_export_stack,
                               mock_safe_dump,
                               mock_exists):
        argslist = ['--stack', 'foo']
        verifylist = [('stack', 'foo')]
        parsed_args = self.check_parser(self.cmd, argslist, verifylist)
        mock_exists.return_value = False
        with mock.patch('six.moves.builtins.open', self.mock_open):
            self.cmd.take_action(parsed_args)
        mock_export_passwords.assert_called_once_with(
            self.app.client_manager.tripleoclient.object_store,
            'foo', True)
        path = os.path.join(os.environ.get('HOME'),
                            'config-download/foo')
        mock_export_stack.assert_called_once_with(
            self.app.client_manager.orchestration,
            'foo',
            False,
            path)
    # An explicit --config-download-dir bypasses the $HOME-derived path.
    @mock.patch('os.path.exists')
    @mock.patch('yaml.safe_dump')
    @mock.patch('tripleoclient.export.export_stack')
    @mock.patch('tripleoclient.export.export_passwords')
    def test_export_stack_name_and_dir(self, mock_export_passwords,
                                       mock_export_stack,
                                       mock_safe_dump, mock_exists):
        argslist = ['--stack', 'foo',
                    '--config-download-dir', '/tmp/bar']
        verifylist = [('stack', 'foo'),
                      ('config_download_dir', '/tmp/bar')]
        parsed_args = self.check_parser(self.cmd, argslist, verifylist)
        mock_exists.return_value = False
        with mock.patch('six.moves.builtins.open', self.mock_open):
            self.cmd.take_action(parsed_args)
        mock_export_passwords.assert_called_once_with(
            self.app.client_manager.tripleoclient.object_store,
            'foo', True)
        mock_export_stack.assert_called_once_with(
            self.app.client_manager.orchestration,
            'foo',
            False,
            '/tmp/bar')
    # --no-password-excludes flips the excludes flag passed to
    # export_passwords to False.
    @mock.patch('os.path.exists')
    @mock.patch('yaml.safe_dump')
    @mock.patch('tripleoclient.export.export_stack')
    @mock.patch('tripleoclient.export.export_passwords')
    def test_export_no_excludes(self, mock_export_passwords,
                                mock_export_stack,
                                mock_safe_dump, mock_exists):
        argslist = ['--stack', 'foo',
                    '--config-download-dir', '/tmp/bar',
                    '--no-password-excludes']
        verifylist = [('stack', 'foo'),
                      ('config_download_dir', '/tmp/bar'),
                      ('no_password_excludes', True)]
        parsed_args = self.check_parser(self.cmd, argslist, verifylist)
        mock_exists.return_value = False
        with mock.patch('six.moves.builtins.open', self.mock_open):
            self.cmd.take_action(parsed_args)
        mock_export_passwords.assert_called_once_with(
            self.app.client_manager.tripleoclient.object_store,
            'foo', False)
        mock_export_stack.assert_called_once_with(
            self.app.client_manager.orchestration,
            'foo',
            False,
            '/tmp/bar')
| StarcoderdataPython |
1992474 |
import random
import torch
import omnifig as fig
@fig.AutoComponent('cyclegan-buffer')
class ReplayBuffer:
    """Bounded buffer of previously seen tensors (CycleGAN-style replay).

    push_and_pop stores incoming images while the buffer fills and, once
    full, randomly swaps incoming images for stored older ones.
    """
    def __init__(self, max_size=50):
        assert max_size > 0, "Empty buffer or trying to create a black hole. Be careful."
        self.max_size = max_size
        self.data = []
    def push_and_pop(self, data):
        # For each image in the incoming batch (data.data):
        #   - while the buffer is not full: store it and return it as-is;
        #   - otherwise, with probability 0.5, replace a randomly chosen
        #     stored image with it and return a clone of the old image,
        #     else return the incoming image directly.
        # The pieces are re-assembled into one batch via torch.cat.
        to_return = []
        for element in data.data:
            element = torch.unsqueeze(element, 0)
            if len(self.data) < self.max_size:
                self.data.append(element)
                to_return.append(element)
            else:
                if random.uniform(0, 1) > 0.5:
                    i = random.randint(0, self.max_size - 1)
                    to_return.append(self.data[i].clone())
                    self.data[i] = element
                else:
                    to_return.append(element)
        return torch.cat(to_return)
@fig.AutoComponent('cyclegan-scheduler-lambda')
class LambdaLR:
    """Callable mapping an epoch number to a multiplicative factor:
    1.0 until ``decay_start_epoch``, then a linear ramp down to 0.0 at
    ``n_epochs``.  ``offset`` shifts the epoch counter (e.g. on resume).
    """
    def __init__(self, n_epochs, offset, decay_start_epoch):
        assert (n_epochs - decay_start_epoch) > 0, "Decay must start before the training session ends!"
        self.n_epochs = n_epochs
        self.offset = offset
        self.decay_start_epoch = decay_start_epoch
    def __str__(self):
        return f'Cycle-GAN-Lambda(n_epochs={self.n_epochs}, offset={self.offset}, decay={self.decay_start_epoch})'
    def __call__(self, epoch):
        # max(0, ...) keeps the factor at exactly 1.0 before decay begins.
        return 1.0 - max(0, epoch + self.offset - self.decay_start_epoch) / (self.n_epochs - self.decay_start_epoch)
| StarcoderdataPython |
3303073 | """
<NAME>, ПИ19-4
Задания 1-12
"""
import random
from itertools import permutations
class Task1(object):
    """
    Reads a list of numbers given in one input line.  For every element
    the sum of its two neighbours is produced; for the elements at either
    end, the element on the opposite end of the list counts as one
    neighbour.  E.g. the input "1 3 5 6 10" yields "13 6 9 15 7".
    If only one number is given, that number itself is the result.
    """
    def __init__(self):
        out_text = "Введите список чисел через запятую -> "
        self.input_l = [int(x) for x in input(out_text).split(",")]
        self.processing()
        print(self.result)
    def processing(self):
        # Builds self.result: a list of neighbour sums, or the single
        # element itself when the input has length 1.
        out_l = []
        l = self.input_l
        if len(l) == 1:
            self.result = l[0]
        else:
            # The first element pairs the last element with l[1].
            out_l.append(l[-1] + l[1])
            print(l[0], "пары: ", l[-1], l[1])
            for i in range(1, len(l) - 1):
                print(l[i], "пары: ", l[i - 1], l[i + 1])
                out_l.append(l[i - 1] + l[i + 1])
            # The last element pairs the second-to-last with the first.
            out_l.append(l[-2] + l[0])
            print(l[-1], "пары: ", l[-2], l[0])
            self.result = out_l
class Task2(object):
    """
    Reads a list of numbers from one input line and prints, on one line,
    the values that occur more than once.  Printed values are unique and
    follow the order of their first appearance.
    Example input: 4 8 0 3 4 2 0 3
    """
    def __init__(self):
        prompt = "Введите список чисел через запятую -> "
        self.input_l = [int(token) for token in input(prompt).split(",")]
        self.processing()
        print(self.result)
    def processing(self):
        # Count occurrences, then keep only the values seen more than once
        # (dict preserves first-appearance order).
        counts = {}
        for value in self.input_l:
            counts[value] = counts.get(value, 0) + 1
        repeated = [value for value, count in counts.items() if count > 1]
        self.result = "Повторяющиеся значения:\n" + "".join(
            str(value) + " " for value in repeated
        )
class Task3(object):
    """
    Processes a rectangular N x M matrix of random two-digit integers.
    Given an integer H, reports for every column whether it contains at
    least one occurrence of H (and how many times it occurs).
    """
    def __init__(self):
        try:
            self.n = int(input("Введите количество строк N в матрице -> "))
            self.m = int(input("Введите количество столбцов M в матрице -> "))
        except:
            print("Ошибка ввода данных")
            return
        self.matrix_gen()
        self.element_search()
    def matrix_gen(self):
        # Fill an n x m matrix with random values in [10, 99] and print it.
        m = [[random.randint(10, 99) for c in range(self.m)] for r in range(self.n)]
        print("Исходная матрица:")
        for e in m:
            print(e)
        self.matrix = m
    def element_search(self):
        # Ask for H, then count its occurrences per column and report.
        d = {}
        try:
            number = int(input("Введите число H для поиска по столбцам -> "))
        except:
            print("Ошибка ввода данных")
            return
        # One counter per column, initialised to zero.
        for i in range(len(self.matrix[0])):
            d[i] = 0
        for i in range(len(self.matrix[0])):
            for j in range(len(self.matrix)):
                if self.matrix[j][i] == number:
                    d[i] += 1
        for k, v in d.items():
            if v == 0:
                print("Столбец №" + str(k + 1) + " - значений нет")
            else:
                print(
                    "Столбец №"
                    + str(k + 1)
                    + " - повторение значения "
                    + str(v)
                    + " раз(а)"
                )
class Task4(object):
    """
    An n x n matrix is entered from the keyboard.  Determines whether the
    matrix is symmetric (equal to its transpose).
    """
    def __init__(self):
        try:
            self.n = int(input("Введите размерность матрицы -> "))
        except:
            print("Ошибка ввода данных")
            return
        self.matrix_input()
        self.symmetry_detect()
    def check_digit(self, e):
        # Convert the input token to int when possible, else keep as str.
        try:
            return int(e)
        except:
            return e
    # TODO: rewrite as a one-liner (comprehension)
    def matrix_input(self):
        # Read n*n elements one at a time, row by row.
        l = []
        for i in range(self.n):
            l.append([])
            for j in range(self.n):
                l[i].append(
                    self.check_digit(
                        input("Введите элемент [" + str(i) + "][" + str(j) + "] ->")
                    )
                )
        print("\nИсходная матрица:")
        for e in l:
            print(e)
        self.out_l = l
    def symmetry_detect(self):
        # Symmetric iff l[i][j] == l[j][i] for every i, j.
        d = {
            True: "Список является симметричным",
            False: "Список НЕ является симметричным",
        }
        l = self.out_l
        flag = True
        for i in range(len(l)):
            for j in range(len(l)):
                if l[i][j] != l[j][i]:
                    flag = False
        print(d[flag])
class Task5(object):
    """
    A list is entered from the keyboard.  Determines whether two elements
    can be removed so that the remaining list is ordered.
    NOTE(review): duplicates are discarded via set() below, and set order
    is arbitrary -- the "ordered" check therefore runs on a de-duplicated,
    possibly reordered list; confirm this matches the intended task.
    """
    def __init__(self):
        self.l = list(
            set(
                [
                    self.check_digit(e)
                    for e in input("Введите элементы списка через запятую -> ").split(
                        ","
                    )
                ]
            )
        )
        self.processing()
    def check_digit(self, e):
        # Convert the token to int when possible, else keep as str.
        try:
            return int(e)
        except:
            return e
    def processing(self):
        # Try every ordered pair of elements; remove both and check whether
        # the remainder is already sorted (equals its sorted copy).
        this_list = self.l
        perm = permutations(this_list, 2)
        for e in list(perm):
            print(e)
            buf_list = this_list[:]
            buf_list.remove(e[0])
            buf_list.remove(e[1])
            buf_list1 = buf_list[:]
            buf_list1.sort()
            if buf_list1 == buf_list:
                print("Удалили элементы", e[0], "и", e[1], "\nПолучили:", buf_list)
                break
class Task6(object):
    """
    Reads a comma-separated list from the keyboard and reports how many
    distinct values it contains.
    """
    def __init__(self):
        self.processing()
    def check_digit(self, e):
        # Convert to int when possible, otherwise keep the raw string.
        try:
            return int(e)
        except:
            return e
    def processing(self):
        prompt = "Введите элементы списка через запятую ->"
        tokens = input(prompt).split(",")
        distinct = {self.check_digit(token) for token in tokens}
        print("Уникальных значений в списке:", len(distinct))
class Task7(object):
    """
    Reads a list from the keyboard and removes every element whose value
    already occurred among the previous elements, printing the result.
    """
    def __init__(self):
        self.processing()
    def check_digit(self, e):
        # Convert to int when possible, otherwise keep the raw string.
        try:
            return int(e)
        except:
            return e
    def processing(self):
        # NOTE: set() does not guarantee the original input order.
        prompt = "Введите элементы списка через запятую ->"
        values = [self.check_digit(token) for token in input(prompt).split(",")]
        deduplicated = list(set(values))
        print("Список без повторных значений: ", deduplicated)
class Task8(object):
    """
    The user enters a list of books, which is sorted alphabetically
    (case-insensitively).  A new book is then inserted so that the list
    stays ordered.
    """
    def __init__(self):
        self.add_values()
        self.add_new_value()
    def add_values(self):
        """Read the initial book list, trim leading spaces and sort it."""
        books_list = input("Введите книги через запятую -> ").split(",")
        # Tolerate "a, b, c" style input: drop spaces left after each
        # comma.  (The original indexed book[0] and crashed on an empty
        # token; lstrip also handles multiple leading spaces.)
        books_list = [book.lstrip(" ") for book in books_list]
        self.books_list = sorted(books_list, key=str.lower)
        print("Введенный list:\n" + str(self.books_list))
    def add_new_value(self):
        """Ask for one more title and merge it into the sorted list."""
        self.new_book = input(
            "Введите название книги для добавления в существующий список ->"
        )
        self.add_book_to_list()
    def add_book_to_list(self):
        """Insert self.new_book at its case-insensitive alphabetical spot.

        Returns the merged list (also printed).  BUG FIX: the original
        never initialised ``index`` when the new book sorted after every
        existing title, raising NameError.
        """
        buf_list = [e.lower() for e in self.books_list]
        input_element = self.new_book.lower()
        index = len(buf_list)  # default: append at the end
        for i in range(len(buf_list)):
            if buf_list[i] > input_element:
                index = i
                break
        print("Индекс для вставки:", index)
        out_list = self.books_list[:index] + [self.new_book] + self.books_list[index:]
        print("Результирующий list:\n" + str(out_list))
        return out_list
# Driver function
# list = [1, 2, 4]
# n = 3
# print(insert(list, n))
class Task9(object):
    """A list of random integers is generated.  Sort in ascending order only:
    a) the positive numbers;
    b) the elements at even positions in the list.
    """

    def __init__(self):
        try:
            n = int(input("Введите размерность списка ->"))  # NOTE(review): bare except below hides the specific failure; ValueError is what int() raises here
        except:
            print("Что-то пошло не так при вводе данных")
            return
        self.l = [random.randint(-10, 10) for _ in range(n)]
        print("Исходная матрица:\n", self.l)
        # NOTE(review): a_processing mutates self.l in place (``matrix`` below
        # is an alias, not a copy), so b_processing then operates on the list
        # whose positives are ALREADY sorted — confirm this chaining is
        # intended rather than two independent transformations of the input.
        self.a_processing()
        self.b_processing()
        print("Упорядочьте по возрастанию только положительные числа:\n", self.a_l)
        print(
            "Упорядочьте по возрастанию только элементы с четными порядковыми номерами в списке:\n",
            self.b_l,
        )

    def a_processing(self):
        """Sort only the positive elements of self.l in place, leaving every
        other element at its original position; result also stored in self.a_l."""
        buf_list = []
        matrix = self.l  # alias: writes below mutate self.l itself
        # Collect the positive values...
        for i in range(len(matrix)):
            if matrix[i] > 0:
                buf_list.append(matrix[i])
        buf_list.sort()
        index = 0
        # ...and write them back, sorted, into the positions they came from.
        for i in range(len(matrix)):
            if matrix[i] > 0:
                matrix[i] = buf_list[index]
                index += 1
        self.a_l = matrix

    def b_processing(self):
        """Sort only the elements at even indexes of self.l in place; result
        also stored in self.b_l (self.l has already been changed by a_processing)."""
        buf_list = []
        matrix = self.l  # alias: same underlying list object as self.l
        for i in range(len(matrix)):
            if i % 2 == 0:
                buf_list.append(matrix[i])
        buf_list.sort()
        index = 0
        for i in range(len(matrix)):
            if i % 2 == 0:
                matrix[i] = buf_list[index]
                index += 1
        self.b_l = matrix
class Task10(object):
    """Two lists are entered by the user; determine whether they contain the
    same set of elements."""

    def __init__(self):
        self.l1 = []
        self.l2 = []
        self.input_data()
        self.comparator()

    def input_data(self):
        """Ask for the size of each list, then fill both element by element
        from standard input."""
        try:
            n1 = int(input("Введите размерность списка №1 ->"))
            n2 = int(input("Введите размерность списка №2 ->"))
        except:
            print("Что-то пошло не так при вводе данных")
            return
        print("*Заполение списка №1*")
        for idx in range(n1):
            self.l1.append(input("Введите элемент списка №" + str(idx) + " -> "))
        print("*Заполение списка №2*")
        for idx in range(n2):
            self.l2.append(input("Введите элемент списка №" + str(idx) + " -> "))

    def comparator(self):
        """Print whether the two lists represent equal sets of values."""
        if set(self.l1) == set(self.l2):
            print("Множества списокв совпадают")
        else:
            print("Множества списков НЕ совпадают")
class Task11(object):
    """Given a list, append after each element the part of the (result) list
    that precedes it."""

    def __init__(self):
        self.l = input("Введите элементы списка через запятую -> ").split(",")
        self.processing()
        print(self.result)

    def processing(self):
        """Build self.result: for input [a, b, c] the result is
        [a, b, a, c, a, b, a] — each subsequent item is followed by a copy
        of everything accumulated before it."""
        items = self.l
        acc = [items[0]]
        for item in items[1:]:
            # Appending ``item`` and then replaying the previous prefix
            # doubles the accumulator around the new element.
            acc = acc + [item] + acc
        self.result = acc
class Task12(object):
    """List elements hold the characters of a sentence.  Replace every
    occurrence of the word 'itmathrepetitor' with 'silence'."""

    def __init__(self):
        self.list = list(
            input("Введите строку для замены 'itmathrepetitor' на 'silence' -> ")
        )
        self.sub_list = list("itmathrepetitor")
        self.replace_list = list("silence")
        self.processing()

    def get_sublist_index(self):
        """Find the first occurrence of self.sub_list inside self.list.

        Returns (found, start, end); start and end are 0 when not found.
        """
        needle = self.sub_list
        haystack = self.list
        width = len(needle)
        probe = needle[0] if needle else []
        pos = -1
        while True:
            try:
                # Jump straight to the next candidate start via list.index.
                pos = haystack.index(probe, pos + 1)
            except ValueError:
                return False, 0, 0
            if haystack[pos:pos + width] == needle:
                return True, pos, pos + width

    def processing(self):
        """Splice self.replace_list over each match in place, printing the
        list before and after and each replacement span."""
        print("Список до замены:\n" + str(self.list))
        while True:
            found, start, end = self.get_sublist_index()
            if not found:
                break
            print("Замена подсписка по индексам", start, end)
            # Slice assignment deletes the match and inserts the replacement
            # in a single step.
            self.list[start:end] = self.replace_list
        print("Список после замены:\n" + str(self.list))
def main():
    """Ask for a task number on stdin and instantiate the matching Task
    class; each task runs itself from its constructor."""
    tasks = {
        "1": Task1,
        "2": Task2,
        "3": Task3,
        "4": Task4,
        "5": Task5,
        "6": Task6,
        "7": Task7,
        "8": Task8,
        "9": Task9,
        "10": Task10,
        "11": Task11,
        "12": Task12,
    }
    choice = input("Введите номер задания ->")
    task_cls = tasks.get(choice)
    if task_cls is None:
        print("Такого номера нет!")
    else:
        task_cls()
# Entry point: run the interactive task picker only when executed as a script.
if __name__ == "__main__":
    main()
| StarcoderdataPython |
6542626 | from django.apps import AppConfig
class CumploApiConfig(AppConfig):
    """Django application configuration for the internal API app."""
    name = 'internal_api'  # Python path of the application this config belongs to
1923880 | <reponame>mfonism/us-pycon-2019-tutorial
from setuptools import setup
# Minimal packaging manifest: distribution "proj" containing the single
# package directory ``proj``.
setup(name="proj", packages=["proj"])
| StarcoderdataPython |
3453563 | <filename>news/tests/test_api_views.py<gh_stars>0
from django.core.files.uploadedfile import SimpleUploadedFile
from django.urls import reverse
from django.test import TestCase, Client, override_settings
from django.contrib.auth.models import User, Permission, Group
from django.utils import timezone
import json
from datetime import date, timedelta
from rest_framework.test import APIRequestFactory, APITestCase, APIClient
from rest_framework_jwt.settings import api_settings
from accounts.models import UserProfile
from ..models import KnowledgeCategory, DocumentF, DocQuestion, DocFile, \
NewsFile, DocumentF, News, NotificationReadFlag
from ..api.views import KnowledgeListAPIView, DocQuestionListAPIView, \
UserQuestionCreateAPIView, DocFileCreateAPIView, NewsFileCreateAPIView, \
DocumentFViewSet, NewsViewSet
# Shorthand aliases for the configured JWT payload/encode callables so the
# token helper below produces tokens the same way the login endpoint does.
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
def get_token(user):
    """Build and sign a JWT for *user*, exactly as the login flow would."""
    return jwt_encode_handler(jwt_payload_handler(user))
class TestKnowledgeListAPIView(APITestCase):
    """Permission checks and location/departament filtering of the
    knowledge-base list endpoint."""

    @classmethod
    def setUpTestData(cls):
        """Three users (user1 and user3 hold every permission via a group),
        two categories, profiles, and one document targeted at PZN/sal."""
        cls.test_user1 = User.objects.create_user(
            username='testuser1', password='<PASSWORD>')
        cls.test_user2 = User.objects.create_user(
            username='testuser2', password='<PASSWORD>')
        cls.test_user3 = User.objects.create_user(
            username='testuser3', password='<PASSWORD>')
        newgroup = Group.objects.create(name='testgroup')
        for each in Permission.objects.all():
            newgroup.permissions.add(each)
        cls.test_user1.groups.add(newgroup)
        cls.test_user3.groups.add(newgroup)
        cls.test_category = KnowledgeCategory.objects.create(
            title='Test Category')
        cls.test_category_2 = KnowledgeCategory.objects.create(
            title='Test Category 2')
        test_user1_userprofile = UserProfile.objects.create(
            user=cls.test_user1,
            name='Test User1',
            telephone='11',
            email='<EMAIL>',
            employee_id='2',
            departament='sal',
            location='WAW'
        )
        test_user1_userprofile.save()
        test_user2_userprofile = UserProfile.objects.create(
            user=cls.test_user2,
            name='Test User2',
            telephone='222222222',
            email='<EMAIL>',
            employee_id='3',
            departament='sal',
            location='PZN'
        )
        test_user2_userprofile.save()
        test_user3_userprofile = UserProfile.objects.create(
            user=cls.test_user3,
            name='<NAME>',
            telephone='222222222',
            email='<EMAIL>',
            employee_id='3',
            departament='sal',
            location='PZN'
        )
        test_user3_userprofile.save()
        cls.test_document = DocumentF.objects.create(
            title="test title",
            body='test body',
            author=cls.test_user1,
            target_location="PZN",
            target_departament="sal",
            date_created='2021-07-10T18:11:11.055162Z'
        )
        cls.factory = APIRequestFactory()

    def test_GET_if_no_permission(self):
        """A user without the group permissions gets 403."""
        user = self.test_user2
        token = get_token(user)
        request = self.factory.get(
            '/news/api/knowledge/', HTTP_AUTHORIZATION='JWT ' + token)
        view = KnowledgeListAPIView.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 403)

    def test_GET_if_has_permission(self):
        """A privileged user gets 200."""
        view = KnowledgeListAPIView.as_view()
        user = self.test_user1
        token = get_token(user)
        request = self.factory.get(
            '/news/api/knowledge/', HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        self.assertEqual(response.status_code, 200)

    def test_view_queryset_response(self):
        """user1 (location WAW) sees both categories but no PZN-targeted docs."""
        user = self.test_user1
        view = KnowledgeListAPIView.as_view()
        token = get_token(user)
        request = self.factory.get(
            '/news/api/knowledge/', HTTP_AUTHORIZATION='JWT ' + token, format='json')
        response = view(request)
        response.render()
        expected_response = [{'id': 1, 'title': 'Test Category', 'docs': [], 'files': []}, {
            'id': 2, 'title': 'Test Category 2', 'docs': [], 'files': []}]
        self.assertEqual(json.loads(response.content), expected_response)

    def test_view_queryset_filtering(self):
        """user3 (location PZN, departament sal) additionally sees the targeted document."""
        user = self.test_user3
        view = KnowledgeListAPIView.as_view()
        token = get_token(user)
        request = self.factory.get(
            '/news/api/knowledge/', HTTP_AUTHORIZATION='JWT ' + token, format='json')
        response = view(request)
        response.render()
        expected_response = [{'id': 1, 'title': 'Test Category', 'docs': [], 'files': []},
                             {'id': 2, 'title': 'Test Category 2', 'docs': [{'id': 1, 'title': 'test title', 'date_created': '2021-07-10T18:11:11.055162Z'}], 'files': []}]
        self.assertEqual(json.loads(response.content), expected_response)
class TestDocQuestionListAPIView(APITestCase):
    """CRUD permission checks, search, and audience filtering of the FAQ
    (DocQuestion) endpoint."""

    @classmethod
    def setUpTestData(cls):
        """Three users (user1/user3 are 'Managers' with every permission),
        one untargeted question and one targeted at PZN/sal."""
        cls.test_user1 = User.objects.create_user(
            username='testuser1', password='<PASSWORD>')
        cls.test_user2 = User.objects.create_user(
            username='testuser2', password='<PASSWORD>')
        cls.test_user3 = User.objects.create_user(
            username='testuser3', password='<PASSWORD>')
        newgroup = Group.objects.create(name='Managers')
        for each in Permission.objects.all():
            newgroup.permissions.add(each)
        cls.test_user1.groups.add(newgroup)
        cls.test_user3.groups.add(newgroup)
        cls.test_category = KnowledgeCategory.objects.create(
            title='Test Category')
        cls.test_category_2 = KnowledgeCategory.objects.create(
            title='Test Category 2')
        test_user1_userprofile = UserProfile.objects.create(
            user=cls.test_user1,
            name='<NAME>',
            telephone='11',
            email='<EMAIL>',
            employee_id='2',
            departament='sal',
            location='WAW'
        )
        test_user1_userprofile.save()
        test_user2_userprofile = UserProfile.objects.create(
            user=cls.test_user2,
            name='<NAME>',
            telephone='222222222',
            email='<EMAIL>',
            employee_id='3',
            departament='sal',
            location='PZN'
        )
        test_user2_userprofile.save()
        test_user3_userprofile = UserProfile.objects.create(
            user=cls.test_user3,
            name='<NAME>',
            telephone='222222222',
            email='<EMAIL>',
            employee_id='3',
            departament='sal',
            location='PZN'
        )
        test_user3_userprofile.save()
        cls.test_question = DocQuestion.objects.create(
            title="test title",
            body='test body',
            author=cls.test_user1,
            answer="test answer",
            category=cls.test_category,
        )
        cls.test_question2 = DocQuestion.objects.create(
            title="test title2",
            body='test body2',
            author=cls.test_user1,
            answer="test answer2",
            category=cls.test_category,
            target_location="PZN",
            target_departament="sal",
        )
        cls.factory = APIRequestFactory()

    def test_GET_if_no_permission(self):
        """A user without manager permissions gets 403."""
        user = self.test_user2
        token = get_token(user)
        request = self.factory.get(
            '/news/api/faq/', HTTP_AUTHORIZATION='JWT ' + token)
        view = DocQuestionListAPIView.as_view()
        response = view(request)
        self.assertEqual(response.status_code, 403)

    def test_GET_if_has_permission(self):
        """A manager gets 200."""
        view = DocQuestionListAPIView.as_view()
        user = self.test_user1
        token = get_token(user)
        request = self.factory.get(
            '/news/api/faq/', HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        self.assertEqual(response.status_code, 200)

    def test_view_queryset_response(self):
        """user1 (WAW) sees only the untargeted question."""
        user = self.test_user1
        view = DocQuestionListAPIView.as_view()
        token = get_token(user)
        request = self.factory.get(
            '/news/api/faq/', HTTP_AUTHORIZATION='JWT ' + token, format='json')
        response = view(request)
        response.render()
        expected_response = [{'id': 1, 'title': 'test title', 'body': 'test body', 'answer': 'test answer',
                              'target_departament': 'non', 'target_location': 'non', 'category': 1}]
        self.assertEqual(json.loads(response.content), expected_response)

    def test_view_queryset_search(self):
        """The ?q= search matches the second question's title."""
        user = self.test_user3
        view = DocQuestionListAPIView.as_view()
        token = get_token(user)
        request = self.factory.get(
            '/news/api/faq/?q=title2', HTTP_AUTHORIZATION='JWT ' + token, format='json')
        response = view(request)
        response.render()
        expected_response = [{'id': 2, 'title': 'test title2', 'body': 'test body2', 'answer': 'test answer2',
                              'target_departament': 'sal', 'target_location': 'PZN', 'category': 1}]
        self.assertEqual(json.loads(response.content), expected_response)

    def test_view_queryset_filtering(self):
        """user1 sees only the untargeted question; user3 (PZN/sal) sees both."""
        user = self.test_user1
        view = DocQuestionListAPIView.as_view()
        token = get_token(user)
        request = self.factory.get(
            '/news/api/faq/', HTTP_AUTHORIZATION='JWT ' + token, format='json')
        response = view(request)
        response.render()
        expected_response = [{'id': 1, 'title': 'test title', 'body': 'test body', 'answer': 'test answer',
                              'target_departament': 'non', 'target_location': 'non', 'category': 1}]
        self.assertEqual(json.loads(response.content), expected_response)
        user3 = self.test_user3
        token = get_token(user3)
        request = self.factory.get(
            '/news/api/faq/', HTTP_AUTHORIZATION='JWT ' + token, format='json')
        response = view(request)
        response.render()
        expected_response = [{'id': 2, 'title': 'test title2', 'body': 'test body2', 'answer': 'test answer2',
                              'target_departament': 'sal', 'target_location': 'PZN', 'category': 1},
                             {'id': 1, 'title': 'test title', 'body': 'test body', 'answer': 'test answer',
                              'target_departament': 'non', 'target_location': 'non', 'category': 1}]
        self.assertEqual(json.loads(response.content), expected_response)

    def test_view_object_creation(self):
        """A manager can POST a new question (201)."""
        user = self.test_user1
        view = DocQuestionListAPIView.as_view()
        token = get_token(user)
        data = {
            'title': "test title",
            'body': 'test body',
            'answer': "test answer",
            'category': 1,
        }
        request = self.factory.post(
            '/news/api/faq/', data, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        expected_response = {'id': 3, 'title': 'test title', 'body': 'test body',
                             'answer': 'test answer', 'target_departament': 'non', 'target_location': 'non', 'category': 1}
        self.assertEqual(json.loads(response.content), expected_response)
        self.assertEqual(DocQuestion.objects.count(), 3)
        self.assertEqual(response.status_code, 201)

    def test_view_object_creation_no_permission(self):
        """A non-manager POST is rejected (403) and creates nothing."""
        user = self.test_user2
        view = DocQuestionListAPIView.as_view()
        token = get_token(user)
        data = {
            'title': "test title",
            'body': 'test body',
            'answer': "test answer",
            'category': 1,
        }
        request = self.factory.post(
            '/news/api/faq/', data, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        expected_response = {
            'detail': 'You do not have permission to perform this action.'}
        self.assertEqual(json.loads(response.content), expected_response)
        self.assertEqual(DocQuestion.objects.count(), 2)
        self.assertEqual(response.status_code, 403)

    def test_view_object_update(self):
        """A manager can PATCH an existing question."""
        user = self.test_user1
        view = DocQuestionListAPIView.as_view()
        token = get_token(user)
        data = {
            'title': "test title",
            'body': 'test body',
            'answer': "test answer update",
            'category': 1,
        }
        request = self.factory.patch(
            '/news/api/faq/?pk=2', data, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        expected_response = {'id': 2, 'title': 'test title', 'body': 'test body',
                             'answer': 'test answer update', 'target_departament': 'sal', 'target_location': 'PZN', 'category': 1}
        self.assertEqual(json.loads(response.content), expected_response)
        self.assertEqual(response.status_code, 200)

    def test_view_object_update_no_permission(self):
        """A non-manager PATCH is rejected (403)."""
        user = self.test_user2
        view = DocQuestionListAPIView.as_view()
        token = get_token(user)
        data = {
            'title': "test title",
            'body': 'test body',
            'answer': "test answer update",
            'category': 1,
        }
        request = self.factory.patch(
            '/news/api/faq/?pk=2', data, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        self.assertEqual(response.status_code, 403)

    def test_view_object_delete(self):
        """A manager can DELETE a question (204)."""
        user = self.test_user1
        view = DocQuestionListAPIView.as_view()
        token = get_token(user)
        request = self.factory.delete(
            '/news/api/faq/?pk=2', HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        self.assertEqual(DocQuestion.objects.count(), 1)
        self.assertEqual(response.status_code, 204)

    def test_view_object_delete_no_permission(self):
        """A non-manager DELETE is rejected (403) and removes nothing."""
        user = self.test_user2
        view = DocQuestionListAPIView.as_view()
        token = get_token(user)
        request = self.factory.delete(
            '/news/api/faq/?pk=2', HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        self.assertEqual(DocQuestion.objects.count(), 2)
        self.assertEqual(response.status_code, 403)
class TestUserQuestionCreateAPIView(APITestCase):
    """Creation of user-submitted questions via the user-question endpoint."""

    @classmethod
    def setUpTestData(cls):
        """One manager user with a profile and one knowledge category."""
        cls.test_user1 = User.objects.create_user(
            username='testuser1', password='<PASSWORD>')
        newgroup = Group.objects.create(name='Managers')
        for each in Permission.objects.all():
            newgroup.permissions.add(each)
        cls.test_user1.groups.add(newgroup)
        cls.test_category = KnowledgeCategory.objects.create(
            title='Test Category')
        test_user1_userprofile = UserProfile.objects.create(
            user=cls.test_user1,
            name='Test User1',
            telephone='11',
            email='<EMAIL>',
            employee_id='2',
            departament='sal',
            location='WAW'
        )
        test_user1_userprofile.save()
        cls.factory = APIRequestFactory()

    def test_view_object_creation(self):
        """POSTing a question returns 201 and persists one DocQuestion."""
        user = self.test_user1
        view = UserQuestionCreateAPIView.as_view()
        token = get_token(user)
        data = {
            'title': "test title",
            'body': 'test body',
            'category': 1,
        }
        request = self.factory.post(
            '/news/api/userquestion/', data, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        expected_response = {'title': 'test title', 'body': 'test body'}
        self.assertEqual(json.loads(response.content), expected_response)
        self.assertEqual(DocQuestion.objects.count(), 1)
        self.assertEqual(response.status_code, 201)
class TestDocFileCreateAPIView(APITestCase):
    """Upload permissions for knowledge-base document files."""

    @classmethod
    def setUpTestData(cls):
        """A manager (user1), a plain user (user2), profiles and a category."""
        cls.test_user1 = User.objects.create_user(
            username='testuser1', password='<PASSWORD>')
        newgroup = Group.objects.create(name='Managers')
        for each in Permission.objects.all():
            newgroup.permissions.add(each)
        cls.test_user1.groups.add(newgroup)
        cls.test_user2 = User.objects.create_user(
            username='testuser2', password='<PASSWORD>')
        test_user2_userprofile = UserProfile.objects.create(
            user=cls.test_user2,
            name='<NAME>',
            telephone='222222222',
            email='<EMAIL>',
            employee_id='3',
            departament='sal',
            location='PZN'
        )
        test_user2_userprofile.save()
        cls.test_category = KnowledgeCategory.objects.create(
            title='Test Category')
        test_user1_userprofile = UserProfile.objects.create(
            user=cls.test_user1,
            name='Test User1',
            telephone='11',
            email='<EMAIL>',
            employee_id='2',
            departament='sal',
            location='WAW'
        )
        test_user1_userprofile.save()
        cls.factory = APIRequestFactory()

    def test_view_object_creation_no_permission(self):
        """A non-manager upload is rejected (403) and stores nothing."""
        user = self.test_user2
        view = DocFileCreateAPIView.as_view()
        token = get_token(user)
        files = {
            'file': SimpleUploadedFile(
                "test_file.pdf",
                b"these are the file contents!"),
            'title': 'test title',
            'category': 1
        }
        request = self.factory.post(
            '/news/api/uploaddocfile/', data=files, HTTP_AUTHORIZATION='JWT ' + token, format='multipart')
        response = view(request)
        response.render()
        self.assertEqual(DocFile.objects.count(), 0)
        self.assertEqual(response.status_code, 403)

    def test_view_object_creation(self):
        """A manager upload succeeds (201) and stores one DocFile."""
        user = self.test_user1
        view = DocFileCreateAPIView.as_view()
        token = get_token(user)
        files = {
            'file': SimpleUploadedFile(
                "test_file.pdf",
                b"these are the file contents!"),
            'title': 'test title',
            'category': 1
        }
        request = self.factory.post(
            '/news/api/uploaddocfile/', data=files, HTTP_AUTHORIZATION='JWT ' + token, format='multipart')
        response = view(request)
        response.render()
        self.assertEqual(DocFile.objects.count(), 1)
        self.assertEqual(response.status_code, 201)
class TestNewsFileCreateAPIView(APITestCase):
    """Upload permissions for news attachment files."""

    @classmethod
    def setUpTestData(cls):
        """A manager (user1), a plain user (user2), profiles and a category."""
        cls.test_user1 = User.objects.create_user(
            username='testuser1', password='<PASSWORD>')
        newgroup = Group.objects.create(name='Managers')
        for each in Permission.objects.all():
            newgroup.permissions.add(each)
        cls.test_user2 = User.objects.create_user(
            username='testuser2', password='<PASSWORD>')
        test_user2_userprofile = UserProfile.objects.create(
            user=cls.test_user2,
            name='Test User2',
            telephone='222222222',
            email='<EMAIL>',
            employee_id='3',
            departament='sal',
            location='PZN'
        )
        test_user2_userprofile.save()
        cls.test_user1.groups.add(newgroup)
        cls.test_category = KnowledgeCategory.objects.create(
            title='Test Category')
        test_user1_userprofile = UserProfile.objects.create(
            user=cls.test_user1,
            name='Test User1',
            telephone='11',
            email='<EMAIL>',
            employee_id='2',
            departament='sal',
            location='WAW'
        )
        test_user1_userprofile.save()
        cls.factory = APIRequestFactory()

    def test_view_object_creation(self):
        """A manager upload succeeds (201) and stores one NewsFile."""
        user = self.test_user1
        view = NewsFileCreateAPIView.as_view()
        token = get_token(user)
        files = {
            'file': SimpleUploadedFile(
                "test_file.pdf",
                b"these are the file contents!"),
        }
        request = self.factory.post(
            '/news/api/uploadnewsfile/', data=files, HTTP_AUTHORIZATION='JWT ' + token, format='multipart')
        response = view(request)
        response.render()
        self.assertEqual(NewsFile.objects.count(), 1)
        self.assertEqual(response.status_code, 201)

    def test_view_object_creation_no_permission(self):
        """A non-manager upload is rejected (403) and stores nothing."""
        user = self.test_user2
        view = NewsFileCreateAPIView.as_view()
        token = get_token(user)
        files = {
            'file': SimpleUploadedFile(
                "test_file.pdf",
                b"these are the file contents!"),
        }
        request = self.factory.post(
            '/news/api/uploadnewsfile/', data=files, HTTP_AUTHORIZATION='JWT ' + token, format='multipart')
        response = view(request)
        response.render()
        self.assertEqual(NewsFile.objects.count(), 0)
        self.assertEqual(response.status_code, 403)
class TestDocumentFViewSet(APITestCase):
    """Access control for the documents viewset list action."""

    @classmethod
    def setUpTestData(cls):
        """A manager (user1) and a plain user (user2), both with profiles."""
        cls.test_user1 = User.objects.create_user(
            username='testuser1', password='<PASSWORD>')
        cls.test_user2 = User.objects.create_user(
            username='testuser2', password='<PASSWORD>')
        newgroup = Group.objects.create(name='Managers')
        for each in Permission.objects.all():
            newgroup.permissions.add(each)
        cls.test_user1.groups.add(newgroup)
        test_user1_userprofile = UserProfile.objects.create(
            user=cls.test_user1,
            name='<NAME>',
            telephone='11',
            email='<EMAIL>',
            employee_id='2',
            departament='sal',
            location='WAW'
        )
        test_user1_userprofile.save()
        test_user2_userprofile = UserProfile.objects.create(
            user=cls.test_user2,
            name='<NAME>',
            telephone='222222222',
            email='<EMAIL>',
            employee_id='3',
            departament='sal',
            location='PZN'
        )
        test_user2_userprofile.save()
        cls.factory = APIRequestFactory()

    def test_GET_if_no_permission(self):
        """A non-manager listing documents gets 403."""
        user = self.test_user2
        token = get_token(user)
        view = DocumentFViewSet.as_view({'get': 'list'})
        uri = reverse('news-api:documents-list')
        request = self.factory.get(
            uri, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        self.assertEqual(response.status_code, 403)

    def test_GET_if_has_permission(self):
        """A manager listing documents gets 200."""
        view = DocumentFViewSet.as_view({'get': 'list'})
        uri = reverse('news-api:documents-list')
        user = self.test_user1
        token = get_token(user)
        request = self.factory.get(
            uri, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        self.assertEqual(response.status_code, 200)
class TestNewsViewSet(APITestCase):
    """Queryset filtering and the publish action of the news viewset."""

    @classmethod
    def setUpTestData(cls):
        """Users: user1/user3 are Managers; user2 only has view/change-news
        permissions.  Three news items: two published (one targeted at
        PZN/sal) and one left unpublished."""
        cls.test_user1 = User.objects.create_user(
            username='testuser1', password='<PASSWORD>')
        cls.test_user2 = User.objects.create_user(
            username='testuser2', password='<PASSWORD>')
        cls.test_user3 = User.objects.create_user(
            username='testuser3', password='<PASSWORD>')
        newgroup = Group.objects.create(name='Managers')
        for each in Permission.objects.all():
            newgroup.permissions.add(each)
        permission = Permission.objects.get(name="Can view news")
        permission_publish = Permission.objects.get(name="Can change news")
        cls.test_user1.groups.add(newgroup)
        cls.test_user3.groups.add(newgroup)
        cls.test_user2.user_permissions.add(permission)
        cls.test_user2.user_permissions.add(permission_publish)
        cls.test_category = KnowledgeCategory.objects.create(
            title='Test Category')
        cls.test_category_2 = KnowledgeCategory.objects.create(
            title='Test Category 2')
        test_user1_userprofile = UserProfile.objects.create(
            user=cls.test_user1,
            name='Test User1',
            telephone='11',
            email='<EMAIL>',
            employee_id='2',
            departament='sal',
            location='WAW'
        )
        test_user1_userprofile.save()
        test_user2_userprofile = UserProfile.objects.create(
            user=cls.test_user2,
            name='Test User2',
            telephone='222222222',
            email='<EMAIL>',
            employee_id='3',
            departament='sal',
            location='PZN'
        )
        test_user2_userprofile.save()
        test_user3_userprofile = UserProfile.objects.create(
            user=cls.test_user3,
            name='Test User3',
            telephone='222222222',
            email='<EMAIL>',
            employee_id='3',
            departament='sal',
            location='PZN'
        )
        test_user3_userprofile.save()
        test_news = News.objects.create(
            title="test title",
            body='test body',
            author=cls.test_user1,
            target_location="PZN",
            target_departament="sal",
        )
        test_news2 = News.objects.create(
            title="test title",
            body='test body',
            author=cls.test_user1,
        )
        # The third item (pk=3) intentionally stays unpublished; it is
        # published later by test_view_publish_action.
        News.objects.create(
            title="test title",
            body='test body',
            author=cls.test_user1,
        )
        test_news.publish()
        test_news2.publish()
        cls.factory = APIRequestFactory()

    def test_view_queryset_response(self):
        """Managers see unpublished items too; user2 (PZN) sees the targeted
        published item instead."""
        view = NewsViewSet.as_view({'get': 'list'})
        uri = reverse('news-api:news-list')
        user = self.test_user1
        token = get_token(user)
        request = self.factory.get(
            uri, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        self.assertEqual(response.status_code, 200)
        payload = json.loads(response.content)  # decode once, assert twice
        self.assertEqual(payload[0]['id'], 2)
        self.assertEqual(payload[1]['id'], 3)
        user = self.test_user2
        token = get_token(user)
        request = self.factory.get(
            uri, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        payload = json.loads(response.content)
        self.assertEqual(payload[0]['id'], 2)
        self.assertEqual(payload[1]['id'], 1)

    def test_view_publish_action(self):
        """Publishing pk=3 via the custom action makes it visible to user2."""
        view = NewsViewSet.as_view({'get': 'list'})
        uri = reverse('news-api:news-list')
        user = self.test_user1
        token = get_token(user)
        request = self.factory.get(
            uri, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request)
        response.render()
        self.assertEqual(len(json.loads(response.content)), 2)
        view = NewsViewSet.as_view({'get': 'publish'},
                                   detail=True)
        uri_publish = reverse('news-api:news-publish', kwargs={'pk': '3'})
        request_publish = self.factory.get(
            uri_publish, HTTP_AUTHORIZATION='JWT ' + token)
        response = view(request_publish, pk=3)
        response.render()
        user = self.test_user2
        token = get_token(user)
        request = self.factory.get(
            uri, HTTP_AUTHORIZATION='JWT ' + token)
        view = NewsViewSet.as_view({'get': 'list'})
        response = view(request)
        response.render()
        self.assertEqual(len(json.loads(response.content)), 3)
| StarcoderdataPython |
1614113 | # -*- coding: utf-8 -*-
import argparse
import logging
import random
from collections import Counter
import math
import numpy as np
import pandas as pd
import torch
from pytorch_lightning import Trainer
from pytorch_lightning.callbacks import ModelCheckpoint, EarlyStopping
from pytorch_lightning.core.lightning import LightningModule
from torch.utils.data import DataLoader, Dataset
from torch.utils.tensorboard import SummaryWriter
from transformers.optimization import AdamW, get_cosine_schedule_with_warmup
from transformers import PreTrainedTokenizerFast, GPT2LMHeadModel
# --- Command-line interface -------------------------------------------------
parser = argparse.ArgumentParser(description='Simsimi based on KoGPT-2')
parser.add_argument('--chat',
                    action='store_true',
                    default=False,
                    help='response generation on given user input')
parser.add_argument('--sentiment',
                    type=str,
                    default='0',
                    help='sentiment for system. 0 is neutral, 1 is negative, 2 is positive.')
parser.add_argument('--model_params',
                    type=str,
                    default='model_chp/model_-last.ckpt',
                    help='model binary for starting chat')
parser.add_argument('--train',
                    action='store_true',
                    default=False,
                    help='for training')
# Root logger at INFO so the one-time dataset example logging is visible.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
# Special tokens used to mark up a dialogue turn for the language model.
U_TKN = '<usr>'  # prefix of the user utterance
S_TKN = '<sys>'  # prefix of the system response
BOS = '</s>'
EOS = '</s>'
MASK = '<unused0>'  # placed at label positions that must not drive the loss
SENT = '<unused1>'  # separator between the question and the sentiment tag
PAD = '<pad>'
# Shared subword tokenizer, loaded once at import time.
TOKENIZER = PreTrainedTokenizerFast.from_pretrained("skt/kogpt2-base-v2",
            bos_token=BOS, eos_token=EOS, unk_token='<unk>',
            pad_token=PAD, mask_token=MASK)
class CharDataset(Dataset):
    """Chat dataset: turns (Q, A, label) rows into fixed-length id sequences
    for causal-LM fine-tuning.

    Each item is a tuple ``(token_ids, mask, labels_ids)``, all padded to
    ``max_len``:
      token_ids  -- <usr> Q <sent> label <sys> A </s>, right-padded
      mask       -- 0 over the question part and padding, 1 over the answer
      labels_ids -- question positions replaced by the mask token, then the
                    answer tokens (shifted past the <sys> marker)
    """

    def __init__(self, chats, max_len=32):
        # ``chats`` is indexed with .iloc and column names 'Q'/'A'/'label'
        # below — a pandas DataFrame (or compatible) is expected.
        self._data = chats
        self.first = True  # log the first processed example once, for sanity
        self.q_token = U_TKN
        self.a_token = S_TKN
        self.sent_token = SENT
        self.bos = BOS
        self.eos = EOS
        self.mask = MASK
        self.pad = PAD
        self.max_len = max_len
        self.tokenizer = TOKENIZER

    def __len__(self):
        return len(self._data)

    def __getitem__(self, idx):
        turn = self._data.iloc[idx]
        q = turn['Q']
        a = turn['A']
        sentiment = str(turn['label'])
        q_toked = self.tokenizer.tokenize(self.q_token + q + \
                                          self.sent_token + sentiment)
        q_len = len(q_toked)
        a_toked = self.tokenizer.tokenize(self.a_token + a + self.eos)
        a_len = len(a_toked)
        # If the pair does not fit, truncate the answer; if the question
        # alone overflows, keep only its last max_len/2 tokens first.
        if q_len + a_len > self.max_len:
            a_len = self.max_len - q_len
            if a_len <= 0:
                q_toked = q_toked[-(int(self.max_len/2)):]
                q_len = len(q_toked)
                a_len = self.max_len - q_len
                assert a_len > 0
            a_toked = a_toked[:a_len]
            a_len = len(a_toked)
            assert a_len == len(a_toked), f'{a_len} ==? {len(a_toked)}'
        # [mask, mask, ...., mask, ..., <bos>,..A.. <eos>, <pad>....]
        labels = [
            self.mask,
        ] * q_len + a_toked[1:]
        if self.first:
            # One-time debug dump of a fully processed example.
            logging.info("contexts : {}".format(q))
            logging.info("toked ctx: {}".format(q_toked))
            logging.info("response : {}".format(a))
            logging.info("toked response : {}".format(a_toked))
            logging.info('labels {}'.format(labels))
            self.first = False
        mask = [0] * q_len + [1] * a_len + [0] * (self.max_len - q_len - a_len)
        labels_ids = self.tokenizer.convert_tokens_to_ids(labels)
        while len(labels_ids) < self.max_len:
            labels_ids += [self.tokenizer.pad_token_id]
        token_ids = self.tokenizer.convert_tokens_to_ids(q_toked + a_toked)
        while len(token_ids) < self.max_len:
            token_ids += [self.tokenizer.pad_token_id]
        return (token_ids, np.array(mask),
                labels_ids)
class KoGPT2Chat(LightningModule):
    def __init__(self, hparams, **kwargs):
        """Wrap the pretrained KoGPT-2 LM-head model as a Lightning module.

        ``hparams`` must provide lr, warmup_ratio and max_epochs — they are
        read by configure_optimizers below.
        """
        super(KoGPT2Chat, self).__init__()
        self.hparams = hparams  # NOTE(review): direct assignment is deprecated in newer PyTorch Lightning (save_hyperparameters) — confirm the pinned version accepts it
        self.neg = -1e18  # effectively -inf: fills logits at positions excluded from the loss
        self.kogpt2 = GPT2LMHeadModel.from_pretrained('skt/kogpt2-base-v2')
        self.loss_function = torch.nn.CrossEntropyLoss(reduction='none')  # per-token loss; averaged manually over the mask
    @staticmethod
    def add_model_specific_args(parent_parser):
        """Append model hyper-parameter flags to *parent_parser* and return
        the extended parser."""
        # add model specific args
        parser = argparse.ArgumentParser(parents=[parent_parser], add_help=False)
        # NOTE(review): the help strings below state defaults (32, 96) that do
        # not match the actual default= values (100, 32) — confirm which set
        # is intended and align them.
        parser.add_argument('--max-len',
                            type=int,
                            default=100,
                            help='max sentence length on input (default: 32)')
        parser.add_argument('--batch-size',
                            type=int,
                            default=32,
                            help='batch size for training (default: 96)')
        parser.add_argument('--lr',
                            type=float,
                            default=5e-5,
                            help='The initial learning rate')
        parser.add_argument('--warmup_ratio',
                            type=float,
                            default=0.1,
                            help='warmup ratio')
        return parser
    def forward(self, inputs):
        """Run the language model on a batch of token ids and return the
        per-position logits (``output.logits``)."""
        # (batch, seq_len, hiddens)
        output = self.kogpt2(inputs, return_dict=True)
        return output.logits
    def training_step(self, batch, batch_idx):
        """One training step: cross-entropy restricted (via masking) to the
        answer tokens; logs loss and a perplexity estimate.

        ``batch`` is (token_ids, mask, label) as produced by CharDataset.
        """
        token_ids, mask, label = batch
        out = self(token_ids)
        # Broadcast the (batch, seq) answer mask across the vocab dimension.
        mask_3d = mask.unsqueeze(dim=2).repeat_interleave(repeats=out.shape[2], dim=2)
        # Outside the answer, all logits become the same huge negative
        # constant, so those positions produce no useful gradient.
        mask_out = torch.where(mask_3d == 1, out, self.neg * torch.ones_like(out))
        loss = self.loss_function(mask_out.transpose(2, 1), label)
        # NOTE(review): loss.sum()/ppl.sum() run over ALL positions (the
        # reduction is 'none'), while the divisor counts only answer tokens —
        # masked positions add a constant offset; confirm this is intended.
        loss_avg = loss.sum() / mask.sum()
        ppl=torch.exp(loss)
        ppl_avg = ppl.sum() / mask.sum()
        self.log('train_loss', loss_avg, on_step=True, on_epoch=True)
        self.log('train_ppl', ppl_avg, on_step=True, on_epoch=True)
        tensorboard_logs = {'train_loss':loss_avg,'train_ppl':ppl_avg}
        return {'loss':loss_avg, 'train_ppl':ppl_avg, 'log':tensorboard_logs}
    def training_epoch_end(self, outputs):
        """Average the per-step loss/perplexity and push epoch-level scalars
        to TensorBoard."""
        avg_loss = torch.stack([x['loss'] for x in outputs]).mean()
        avg_ppl = torch.stack([x['train_ppl'] for x in outputs]).mean()
        self.logger.experiment.add_scalar("Loss/Train",avg_loss,self.current_epoch)
        self.logger.experiment.add_scalar("PPL/Train",avg_ppl,self.current_epoch)
def validation_step(self, batch, batch_idx):
token_ids, mask, label = batch
out = self(token_ids)
mask_3d = mask.unsqueeze(dim=2).repeat_interleave(repeats=out.shape[2], dim=2)
mask_out = torch.where(mask_3d == 1, out, self.neg * torch.ones_like(out))
val_loss = self.loss_function(mask_out.transpose(2, 1), label)
loss_avg = val_loss.sum() / mask.sum()
ppl=torch.exp(val_loss)
ppl_avg = ppl.sum() / mask.sum()
self.log('val_loss', loss_avg, on_step=True, on_epoch=True)
self.log('val_ppl', ppl_avg, on_step=True, on_epoch=True)
tensorboard_logs = {'val_loss':loss_avg,'val_ppl':ppl_avg}
# return loss_avg
return {'loss':loss_avg, 'val_ppl':ppl_avg, 'log':tensorboard_logs}
def validation_epoch_end(self, outputs):
avg_loss = torch.stack([x['loss'] for x in outputs]).mean()
avg_ppl = torch.stack([x['val_ppl'] for x in outputs]).mean()
self.logger.experiment.add_scalar("Loss/Val",avg_loss,self.current_epoch)
self.logger.experiment.add_scalar("PPL/Val",avg_ppl,self.current_epoch)
def configure_optimizers(self):
# Prepare optimizer
param_optimizer = list(self.named_parameters())
no_decay = ['bias', 'LayerNorm.bias', 'LayerNorm.weight']
optimizer_grouped_parameters = [
{'params': [p for n, p in param_optimizer if not any(nd in n for nd in no_decay)], 'weight_decay': 0.01},
{'params': [p for n, p in param_optimizer if any(nd in n for nd in no_decay)], 'weight_decay': 0.0}
]
optimizer = AdamW(optimizer_grouped_parameters,
lr=self.hparams.lr, correct_bias=False)
# warm up lr
num_train_steps = len(self.train_dataloader()) * self.hparams.max_epochs
num_warmup_steps = int(num_train_steps * self.hparams.warmup_ratio)
scheduler = get_cosine_schedule_with_warmup(
optimizer,
num_warmup_steps=num_warmup_steps, num_training_steps=num_train_steps)
lr_scheduler = {'scheduler': scheduler, 'name': 'cosine_schedule_with_warmup',
'monitor': 'loss', 'interval': 'step',
'frequency': 1}
return [optimizer], [lr_scheduler]
def _collate_fn(self, batch):
data = [item[0] for item in batch]
mask = [item[1] for item in batch]
label = [item[2] for item in batch]
return torch.LongTensor(data), torch.LongTensor(mask), torch.LongTensor(label)
def train_dataloader(self):
data = pd.read_csv('data/SDRW_long.csv')
train_len = int(0.9*len(data))
data = data.loc[0:train_len]
self.train_set = CharDataset(data, max_len=self.hparams.max_len)
train_dataloader = DataLoader(
self.train_set, batch_size=self.hparams.batch_size, num_workers=2,
shuffle=True, collate_fn=self._collate_fn)
return train_dataloader
def val_dataloader(self):
data = pd.read_csv('data/SDRW_long.csv')
train_len = int(0.9*len(data))
data = data.loc[train_len:]
self.val_set = CharDataset(data, max_len=self.hparams.max_len)
val_dataloader = DataLoader(
self.val_set, batch_size=self.hparams.batch_size, num_workers=2,
shuffle=True, collate_fn=self._collate_fn)
return val_dataloader
def generate(self,input_ids,max_length=40, do_sample=True, repetition_penalty=2.0):
output = self.kogpt2.generate(input_ids,
max_length=max_length,
do_sample=do_sample,
repetition_penalty=repetition_penalty)
return output
def chat(self,m, sent='0'):
tok = TOKENIZER
with torch.no_grad():
while 1:
q = input('user > ').strip()
if q == 'quit':
break
input_ids = torch.LongTensor(tok.encode(U_TKN + q + SENT + sent + S_TKN)).unsqueeze(dim=0)
outputs = m.generate(input_ids,
max_length=60,
do_sample=True,
repetition_penalty=2.0)
a = tok.decode(outputs[0], skip_special_tokens=True).split('0')[1:][0].strip()
if a.count('.') == 0:
idx = a.rfind(' ')
a = a.replace(a[idx:], ".")
else:
a = a.split('.')[0] + '.' + a.split('.')[1] + '...메에'
print("Wagle > {}".format(a.strip()))
# Extend the CLI parser (created earlier in this file) with model-specific
# flags and the standard Lightning Trainer flags, then parse once at import.
parser = KoGPT2Chat.add_model_specific_args(parser)
parser = Trainer.add_argparse_args(parser)
args = parser.parse_args()
logging.info(args)

if __name__ == "__main__":
    if args.train:
        # Keep the 3 best checkpoints by training loss, plus the latest one.
        checkpoint_callback = ModelCheckpoint(
            dirpath='model_chp',
            filename='{epoch:02d}-{train_loss:.2f}',
            verbose=True,
            save_top_k=3,
            save_last=True,
            monitor='train_loss',
            mode='min',
            prefix='model_'
        )
        # python train_torch.py --train --gpus 1 --max_epochs 3
        model = KoGPT2Chat(args)
        model.train()
        # trainer = Trainer(resume_from_checkpoint='model_chp/model_-last.ckpt', gpus=[0], checkpoint_callback=checkpoint_callback, gradient_clip_val=1.0)
        trainer = Trainer.from_argparse_args(
            args,
            checkpoint_callback=checkpoint_callback, gradient_clip_val=1.0)
        trainer.fit(model)
        logging.info('best model path {}'.format(checkpoint_callback.best_model_path))
        # after training init cuda
        # torch.cuda.init()  # initialize CUDA
        # torch.cuda.empty_cache()  # release CUDA cache memory that is no longer in use
    if args.chat:
        # Load the trained checkpoint and drop into the interactive console loop.
        model = KoGPT2Chat.load_from_checkpoint(args.model_params)
        model.chat(m=model)
| StarcoderdataPython |
3572087 | from rest_framework import serializers
from main.models import Ingrediente,Sandwich,Pedido
class IngredienteSerializer(serializers.ModelSerializer):
    """Expose every field of the Ingrediente model through the REST API."""

    class Meta:
        model = Ingrediente
        fields = '__all__'
class SandwichSerializer(serializers.ModelSerializer):
    """Expose every field of the Sandwich model through the REST API."""

    class Meta:
        model = Sandwich
        fields = '__all__'
# class Sand_IngSerializer(serializers.ModelSerializer):
# class Meta:
# model = Sand_Ing
# fields = '__all__'
class PedidoSerializer(serializers.ModelSerializer):
    """Expose every field of the Pedido model through the REST API."""

    class Meta:
        model = Pedido
        fields = '__all__'
# class Ped_SandSerializer(serializers.ModelSerializer):
# class Meta:
# model = Ped_Sand
# fields = '__all__'
| StarcoderdataPython |
3570762 | from .policy import ImageClassificationPolicy
| StarcoderdataPython |
1941876 | <gh_stars>10-100
from factory.declarations import LazyAttribute, Sequence, SubFactory
from factory.django import DjangoModelFactory
from roster.factories import StudentFactory
from exams.models import ExamAttempt, PracticeExam
class ExamFactory(DjangoModelFactory):
    """Build PracticeExam fixtures with sequentially numbered exams."""

    class Meta:
        model = PracticeExam

    family = 'Waltz'
    # Auto-incrementing exam number starting at 1.
    number = Sequence(lambda n: n + 1)
    is_test = False
class ExamAttemptFactory(DjangoModelFactory):
    """Build an ExamAttempt whose guesses copy the quiz's correct answers.

    Every guess mirrors the corresponding answer on the generated quiz,
    while `score` is left at 0 — presumably scoring happens elsewhere
    (TODO confirm against the grading code).
    """

    class Meta:
        model = ExamAttempt

    student = SubFactory(StudentFactory)
    quiz = SubFactory(ExamFactory)
    score = 0
    # Each guess is resolved lazily from the attempt's own quiz instance.
    guess1 = LazyAttribute(lambda o: o.quiz.answer1)
    guess2 = LazyAttribute(lambda o: o.quiz.answer2)
    guess3 = LazyAttribute(lambda o: o.quiz.answer3)
    guess4 = LazyAttribute(lambda o: o.quiz.answer4)
    guess5 = LazyAttribute(lambda o: o.quiz.answer5)
| StarcoderdataPython |
9642142 | <reponame>matan-h/friendly<filename>tests/unit/test_run.py
"""Tests for run(), used as a program launcher from an editor
"""
from io import StringIO
import friendly
from contextlib import redirect_stdout
def test_run_error_en():
    """friendly.run on a NameError script yields the full English explanation."""
    friendly.run(
        "../name_error.py",
        include="explain",  # comprehensive
        console=False,
        redirect="capture",
    )
    result = friendly.get_output()
    # Remove the installed exception hook so later tests start clean.
    friendly.uninstall()
    assert "The similar name `pi` was found in the local scope." in result
def test_run_error_fr():
    """Same scenario in French, with only the 'why' part of the explanation."""
    friendly.run(
        "../name_error.py",
        lang="fr",
        include="why",  # more restricted than the English test
        console=False,
        redirect="capture",
    )
    result = friendly.get_output()
    # Restore global state (language and exception hook) for subsequent tests.
    friendly.set_lang('en')
    friendly.uninstall()
    assert "Le nom semblable `pi` a été trouvé dans la portée locale." in result
def test_run_get_mod_dict():
    """Ensure that we capture the dict of the module that was executed
    with no exception raised.
    """
    # Capture the script's stdout so the printed sum can be checked too.
    file_capture = StringIO()
    with redirect_stdout(file_capture):
        mod_dict = friendly.run(
            "tests/adder.py",  # run from where pytest is run
            console=False,
            args=("1", "2.5", "3")
        )
    # run() returns the executed module's namespace when nothing was raised.
    assert "total" in mod_dict
    assert mod_dict["total"] == 6.5
    assert "The sum is 6.5." in file_capture.getvalue()
| StarcoderdataPython |
1814157 | #!/usr/bin/env python
# Copyright (c) 2011 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Script to synchronise the naclports mirror of upstream archives.
This script verifies that the URL for every package is mirrored on
Google Cloud Storage. If it finds missing URLs it downloads them to
the local machine and then pushes them up using gsutil.
gsutil is required the run this script and if any mirroring operations are
required then the correct gsutil credentials will be required.
"""
import optparse
import os
import shlex
import subprocess
import sys
import urllib
import urlparse
import naclports
# Absolute directory containing this script (used to locate the ports root).
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
# Google Cloud Storage bucket path that mirrors the upstream archives.
MIRROR_GS = 'gs://naclports/mirror'
def main(args):
    """Ensure every package's upstream archive is present on the GCS mirror.

    Lists the mirror bucket once up front, then for each package: downloads
    any missing archive from upstream and pushes it with gsutil.  With
    --check the script only reports the first missing archive and exits 1;
    with --dry-run the gsutil upload command is printed instead of run.
    Returns 0 on success.  (Python 2 script — note the print statements.)
    """
    parser = optparse.OptionParser()
    parser.add_option('-n', '--dry-run', action='store_true',
                      help="Don't actually upload anything")
    parser.add_option('--check', action='store_true',
                      help="Verify that the mirror is up-to-date.")
    options, _ = parser.parse_args(args)
    # NOTE(review): ports_root is never used below — confirm it can be removed.
    ports_root = os.path.dirname(SCRIPT_DIR)

    # Single bucket listing; membership is tested by archive basename only.
    listing = subprocess.check_output(['gsutil', 'ls', MIRROR_GS])
    listing = listing.splitlines()
    listing = [os.path.basename(l) for l in listing]

    def CheckMirror(package):
        # Mirror one package's archive if it is not already in the bucket.
        basename = package.GetArchiveFilename()
        if not basename:
            # Package has no downloadable archive; nothing to mirror.
            return
        if basename in listing:
            # already mirrored
            return
        if options.check:
            print 'update_mirror: Archive missing from mirror: %s' % basename
            sys.exit(1)
        # Download upstream URL
        package.Download(mirror=False)
        # Upload to gs
        url = '%s/%s' % (MIRROR_GS, basename)
        print "Uploading to mirror: %s" % url
        cmd = ['gsutil', 'cp', '-a', 'public-read', package.DownloadLocation(), url]
        if options.dry_run:
            print cmd
        else:
            subprocess.check_call(cmd)

    for package in naclports.PackageIterator():
        CheckMirror(package)
    return 0
if __name__ == '__main__':
    try:
        sys.exit(main(sys.argv[1:]))
    except naclports.Error as e:
        # Report tool-level errors on stderr and exit with a failure status.
        sys.stderr.write('%s\n' % e)
        sys.exit(-1)
| StarcoderdataPython |
65656 | from __future__ import annotations
from custom_components.magic_lights.const import DOMAIN
from custom_components.magic_lights.magicbase.share import get_magic
import logging
from typing import TYPE_CHECKING
from homeassistant.core import Context
_LOGGER = logging.getLogger(__name__)
if TYPE_CHECKING:
from custom_components.magic_lights.data_structures.living_space import Pipe, Zone
def create_async_call(pipe: Pipe) -> callable:
    """Return an async service-call wrapper bound to *pipe*.

    The returned coroutine function runs every modifier on the pipe (each
    may rewrite the domain, service, and service data), skips the call
    entirely when the target entity is disabled in the pipe's zone, and
    otherwise dispatches it through async_call_service.
    """
    async def async_call(domain: str, service: str, service_data: dict):
        # Each modifier gets a chance to rewrite the call before dispatch.
        for modifier in pipe.modifiers:
            domain, service, service_data = modifier.update(
                domain, service, service_data
            )
        if disabled_entity(pipe, service_data):
            # Target entity is disabled for this zone: drop the call silently.
            return
        return await async_call_service(domain, service, service_data)

    return async_call
def disabled_entity(pipe: Pipe, service_data: dict) -> bool:
    """Return True when *service_data* targets an entity disabled in the pipe's zone.

    Calls that carry no "entity_id" key, or whose entity is not listed in
    the zone's disabled entities, are never considered disabled.
    """
    if "entity_id" not in service_data:
        return False
    target = service_data["entity_id"]
    if target not in pipe.scene.zone.disabled_entities:
        return False
    _LOGGER.debug("Entity %s disabled... skipping update.", target)
    return True
async def async_call_service(domain: str, service: str, service_data: dict):
    """Dispatch a Home Assistant service call tagged with this integration's context.

    Returns whatever hass.services.async_call returns for the dispatched call.
    """
    _LOGGER.debug("Updating state: %s", service_data)
    # Tag the call with a DOMAIN context so resulting state changes can be
    # traced back to this integration.
    context = Context(None, DOMAIN)
    magic = get_magic()
    return await magic.hass.services.async_call(
        domain, service, service_data, context=context
    )
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.