text string | size int64 | token_count int64 |
|---|---|---|
def fac(n):
    """Return the factorial of *n*, computed recursively (n! with 0! = 1! = 1)."""
    return 1 if n in (0, 1) else n * fac(n - 1)
def sum_of_the_factorial_of_their_digits(n):
    """Return the sum of the factorials of n's decimal digits.

    Relies on the module-level ``fac_dic`` cache of 0!..9!.
    """
    return sum(fac_dic[int(digit)] for digit in str(n))
def main():
    """Yield every factorion in [10, 2540160].

    2540161 is a safe exclusive bound: 7 * 9! = 2540160, and beyond that a
    number's digit-factorial sum can no longer reach the number itself.
    """
    for candidate in range(10, 2540161):
        if sum_of_the_factorial_of_their_digits(candidate) == candidate:
            yield candidate
if __name__ == "__main__":
    # Fix: the original used `global fac_dic` here, which is a no-op at module
    # scope; dropped. fac_dic caches 0!..9! so per-digit lookups are O(1).
    fac_dic = {n: fac(n) for n in range(10)}
    answer = list(main())
    print(answer)
| 505 | 214 |
import torch
import torch.nn.functional as F
import torchaudio
import numpy as np
from scipy.signal import get_window
from librosa.util import pad_center, tiny
from librosa.filters import window_sumsquare
from librosa.filters import mel as librosa_mel_fn
def get_mel_basis(sampling_rate=22050, filter_length=1024, n_mel_channels=80, mel_fmin=0.0, mel_fmax=8000.0):
    """Build the mel filterbank as a float32 tensor of shape (n_mels, 1 + n_fft/2).

    Fix: librosa >= 0.10 made ``librosa.filters.mel`` keyword-only, so the
    original positional call raised a TypeError there; the keyword names below
    are accepted by older librosa versions as well.
    """
    mel_basis = librosa_mel_fn(
        sr=sampling_rate, n_fft=filter_length, n_mels=n_mel_channels,
        fmin=mel_fmin, fmax=mel_fmax)  # shape=(n_mels, 1 + n_fft/2)
    return torch.from_numpy(mel_basis).float()
def dynamic_range_compression(x, C=1, clip_val=1e-5):
    """Log-compress a (magnitude) spectrogram.

    PARAMS
    ------
    C: compression factor
    clip_val: floor applied before the log so log(0) can never occur
    """
    clamped = torch.clamp(x, min=clip_val)
    return torch.log(clamped * C)
def dynamic_range_decompression(x, C=1):
    """Invert dynamic_range_compression (exp, then undo the C scaling).

    PARAMS
    ------
    C: compression factor used to compress
    """
    expanded = torch.exp(x)
    return expanded / C
class Inverse(torch.nn.Module):
    """Inverse STFT implemented as a transposed 1-D convolution.

    Rebuilds a time-domain signal from magnitude and phase spectrograms using
    a precomputed pseudo-inverse Fourier basis (optionally windowed).
    Fixes vs. the original: ``is not None`` comparisons, keyword ``size=`` for
    ``pad_center`` (keyword-only in librosa >= 0.10), removal of the deprecated
    ``torch.autograd.Variable`` wrapper, and a stray trailing colon in the
    final slice.
    """

    def __init__(self, filter_length=800, hop_length=200, win_length=800,
                 window='hann'):
        super(Inverse, self).__init__()
        self.filter_length = filter_length
        self.hop_length = hop_length
        self.win_length = win_length
        self.window = window
        scale = filter_length / hop_length
        # Real/imag rows of the DFT matrix, truncated to the onesided spectrum.
        fourier_basis = np.fft.fft(np.eye(filter_length))
        cutoff = int((filter_length / 2 + 1))
        fourier_basis = np.vstack([np.real(fourier_basis[:cutoff, :]),
                                   np.imag(fourier_basis[:cutoff, :])])
        forward_basis = torch.FloatTensor(fourier_basis[:, None, :])
        inverse_basis = torch.FloatTensor(
            np.linalg.pinv(scale * fourier_basis).T[:, None, :])
        if window is not None:
            assert filter_length >= win_length
            # get window and zero center pad it to filter_length
            fft_window = get_window(window, win_length, fftbins=True)
            fft_window = pad_center(fft_window, size=filter_length)
            fft_window = torch.from_numpy(fft_window).float()
            # window the bases
            forward_basis *= fft_window
            inverse_basis *= fft_window
        self.register_buffer('forward_basis', forward_basis.float())
        self.register_buffer('inverse_basis', inverse_basis.float())

    def forward(self, magnitude, phase):
        """Reconstruct the waveform for the given magnitude/phase spectrograms.

        :param magnitude: (batch, 1 + n_fft/2, frames) magnitude spectrogram
        :param phase: phase angles with the same shape as ``magnitude``
        :return: reconstructed signal, padding of filter_length/2 trimmed from
            both ends
        """
        recombine_magnitude_phase = torch.cat(
            [magnitude * torch.cos(phase), magnitude * torch.sin(phase)], dim=1)
        # Buffers are plain tensors; the deprecated Variable wrapper was a no-op.
        inverse_transform = F.conv_transpose1d(
            recombine_magnitude_phase,
            self.inverse_basis,
            stride=self.hop_length,
            padding=0)
        if self.window is not None:
            window_sum = window_sumsquare(
                self.window, magnitude.size(-1), hop_length=self.hop_length,
                win_length=self.win_length, n_fft=self.filter_length,
                dtype=np.float32)
            # remove modulation effects (only where the window envelope is
            # numerically non-zero)
            approx_nonzero_indices = torch.from_numpy(
                np.where(window_sum > tiny(window_sum))[0])
            window_sum = torch.from_numpy(window_sum).to(inverse_transform.device)
            inverse_transform[:, :, approx_nonzero_indices] /= \
                window_sum[approx_nonzero_indices]
            # scale by hop ratio
            inverse_transform *= float(self.filter_length) / self.hop_length
        half = int(self.filter_length / 2)
        inverse_transform = inverse_transform[:, :, half:]
        inverse_transform = inverse_transform[:, :, :-half]
        return inverse_transform
def griffin_lim(magnitudes, inverse, n_iters=30, filter_length=1024, hop_length=256, win_length=1024,):
"""
PARAMS
------
magnitudes: spectrogram magnitudes
stft_fn: STFT class with transform (STFT) and inverse (ISTFT) methods
"""
angles = np.angle(np.exp(2j * np.pi * np.random.rand(*magnitudes.size())))
angles = angles.astype(np.float32)
angles = torch.autograd.Variable(torch.from_numpy(angles))
signal = inverse(magnitudes, angles).squeeze(1)
for i in range(n_iters):
stft = torch.stft(signal, n_fft=filter_length, hop_length=hop_length,
win_length=win_length, window=torch.hann_window(win_length))
real = stft[:, :, :, 0]
imag = stft[:, :, :, 1]
angles = torch.autograd.Variable(
torch.atan2(imag.data, real.data))
signal = inverse(magnitudes, angles).squeeze(1)
return signal
def mel2wav(mel_outputs, n_iters=30, filter_length=1024, hop_length=256, win_length=1024, n_mel_channels=80, sampling_rate=22050, mel_fmin=0.0, mel_fmax=8000.0):
    """Convert a log-compressed mel spectrogram back to a waveform (Griffin-Lim).

    :param mel_outputs: compressed mel spectrogram, batch-first
    :return: the reconstructed audio as a numpy array

    Fixes vs. the original: reuses the shared ``get_mel_basis`` helper (which
    also carries the librosa >= 0.10 keyword-argument fix) and drops the
    deprecated ``torch.autograd.Variable`` wrapper.
    """
    mel_decompress = dynamic_range_decompression(mel_outputs)
    mel_decompress = mel_decompress.transpose(1, 2).data.cpu()
    mel_basis = get_mel_basis(
        sampling_rate, filter_length, n_mel_channels, mel_fmin, mel_fmax)  # (n_mels, 1 + n_fft/2)
    spec_from_mel_scaling = 1000
    # (frames, n_mels) @ (n_mels, n_freq) -> (frames, n_freq)
    spec_from_mel = torch.mm(mel_decompress[0], mel_basis)
    spec_from_mel = spec_from_mel.transpose(0, 1).unsqueeze(0)
    spec_from_mel = spec_from_mel * spec_from_mel_scaling
    inverse = Inverse(filter_length=filter_length,
                      hop_length=hop_length, win_length=win_length)
    # [:, :, :-1] drops the last column, preserving the original behavior.
    audio = griffin_lim(spec_from_mel[:, :, :-1], inverse, n_iters,
                        filter_length=filter_length, hop_length=hop_length,
                        win_length=win_length)
    audio = audio.squeeze()
    audio = audio.cpu().numpy()
    return audio
class STFT(torch.nn.Module):
    """Mel-spectrogram front end: STFT -> mel projection -> log compression."""

    def __init__(self, filter_length=1024, hop_length=256, win_length=1024,
                 n_mel_channels=80, sampling_rate=22050, mel_fmin=0.0,
                 mel_fmax=8000.0):
        super(STFT, self).__init__()
        self.n_mel_channels = n_mel_channels
        self.sampling_rate = sampling_rate
        self.filter_length = filter_length
        self.hop_length = hop_length
        self.win_length = win_length
        mel_basis = get_mel_basis(
            sampling_rate, filter_length, n_mel_channels, mel_fmin, mel_fmax)  # shape=(n_mels, 1 + n_fft/2)
        self.register_buffer('mel_basis', mel_basis)

    def spectral_normalize(self, magnitudes):
        """Apply dynamic-range (log) compression to the magnitudes."""
        return dynamic_range_compression(magnitudes)

    def spectral_de_normalize(self, magnitudes):
        """Invert :meth:`spectral_normalize`."""
        return dynamic_range_decompression(magnitudes)

    def mel_spectrogram(self, y):
        """Compute a log-compressed mel spectrogram.

        :param y: waveform batch, float values in [-1, 1]
        :return: compressed mel spectrogram

        Fix: ``torch.stft`` now requires ``return_complex``; ``spec.abs()``
        equals sqrt(real**2 + imag**2) of the removed 4-D layout.
        """
        assert torch.min(y.data) >= -1
        assert torch.max(y.data) <= 1
        spec = torch.stft(y, n_fft=self.filter_length,
                          hop_length=self.hop_length,
                          win_length=self.win_length,
                          window=torch.hann_window(self.win_length,
                                                   device=y.device),
                          return_complex=True)
        magnitudes = spec.abs().data
        mel_output = torch.matmul(self.mel_basis, magnitudes)
        mel_output = self.spectral_normalize(mel_output)
        return mel_output
def load_wav(full_path, resample_rate=True, resample_rate_value=22500):
    """Load an audio file, optionally resampling it to ``resample_rate_value``.

    :param full_path: path of the audio file
    :param resample_rate: when True, resample if the file's rate differs
    :param resample_rate_value: target sample rate
        (NOTE(review): 22500 looks like a typo for 22050 — confirm before
        changing, callers may rely on the current default)
    :return: (first channel tensor, actual sample rate of the returned data)

    Fixes vs. the original: the duplicated dead ``return`` is gone, and when
    no resampling happens the file's *actual* rate is returned instead of the
    requested one.
    """
    data, sampling_rate = torchaudio.load(full_path)
    if resample_rate and resample_rate_value != sampling_rate:
        resampler = torchaudio.transforms.Resample(sampling_rate, resample_rate_value)
        data = resampler(data)
        return data[0], resample_rate_value
    return data[0], sampling_rate
| 7,638 | 2,698 |
"""This module defines the ndk_cc_toolchain_config rule.
This file is based on the `external/androidndk/cc_toolchain_config.bzl` file produced by the
built-in `android_ndk_repository` Bazel rule[1], which was used to build the SkCMS repository up
until this revision[2].
The paths in this file point to locations inside the expanded Android NDK ZIP file (found at
external/android_ndk), and must be updated every time we upgrade to a new Android NDK version.
[1] https://github.com/bazelbuild/bazel/blob/4710ef82ce34572878e07c52e83a0144d707f140/src/main/java/com/google/devtools/build/lib/bazel/rules/android/AndroidNdkRepositoryFunction.java#L422
[2] https://skia.googlesource.com/skcms/+/30c8e303800c256febb03a09fdcda7f75d119b1b/WORKSPACE#22
"""
load("@bazel_tools//tools/build_defs/cc:action_names.bzl", "ACTION_NAMES")
load(
"@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl",
"feature",
"flag_group",
"flag_set",
"tool_path",
"with_feature_set",
)
load("download_toolchains.bzl", "NDK_PATH")
# Supported CPUs.
_ARMEABI_V7A = "armeabi-v7a"
_ARM64_V8A = "arm64-v8a"

# Every compile-style action the compile-flag features below apply to.
_all_compile_actions = [
    ACTION_NAMES.c_compile,
    ACTION_NAMES.cpp_compile,
    ACTION_NAMES.linkstamp_compile,
    ACTION_NAMES.assemble,
    ACTION_NAMES.preprocess_assemble,
    ACTION_NAMES.cpp_header_parsing,
    ACTION_NAMES.cpp_module_compile,
    ACTION_NAMES.cpp_module_codegen,
    ACTION_NAMES.clif_match,
    ACTION_NAMES.lto_backend,
]

# Every link-style action the link-flag features below apply to.
_all_link_actions = [
    ACTION_NAMES.cpp_link_executable,
    ACTION_NAMES.cpp_link_dynamic_library,
    ACTION_NAMES.cpp_link_nodeps_dynamic_library,
]
def _get_default_compile_flags(cpu):
    """Returns the default compile flags for the given CPU.

    The flags pin the Android API level (29), the clang target triple, the
    legacy GCC toolchain location inside the NDK, and arch-specific headers.
    Fails for unsupported CPUs.
    """
    if cpu == _ARMEABI_V7A:
        return [
            "-D__ANDROID_API__=29",
            "-isystem",
            NDK_PATH + "/sysroot/usr/include/arm-linux-androideabi",
            "-target",
            "armv7-none-linux-androideabi",
            "-march=armv7-a",
            "-mfloat-abi=softfp",
            "-mfpu=vfpv3-d16",
            "-gcc-toolchain",
            NDK_PATH + "/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64",
            "-fpic",
            "-no-canonical-prefixes",
            "-Wno-invalid-command-line-argument",
            "-Wno-unused-command-line-argument",
            "-funwind-tables",
            "-fstack-protector-strong",
            "-fno-addrsig",
            "-Werror=return-type",
            "-Werror=int-to-pointer-cast",
            "-Werror=pointer-to-int-cast",
            "-Werror=implicit-function-declaration",
        ]
    if cpu == _ARM64_V8A:
        return [
            "-gcc-toolchain",
            NDK_PATH + "/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64",
            "-target",
            "aarch64-none-linux-android",
            "-fpic",
            "-isystem",
            NDK_PATH + "/sysroot/usr/include/aarch64-linux-android",
            "-D__ANDROID_API__=29",
            "-no-canonical-prefixes",
            "-Wno-invalid-command-line-argument",
            "-Wno-unused-command-line-argument",
            "-funwind-tables",
            "-fstack-protector-strong",
            "-fno-addrsig",
            "-Werror=return-type",
            "-Werror=int-to-pointer-cast",
            "-Werror=pointer-to-int-cast",
            "-Werror=implicit-function-declaration",
        ]
    fail("Unknown CPU: " + cpu)
def _get_default_link_flags(cpu):
    """Returns the default link flags for the given CPU.

    Points the linker at the arch-specific libc++ libraries inside the NDK
    and enables standard hardening flags (RELRO, GC of unused sections).
    Fails for unsupported CPUs.
    """
    if cpu == _ARMEABI_V7A:
        return [
            "-target",
            "armv7-none-linux-androideabi",
            "-gcc-toolchain",
            NDK_PATH + "/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64",
            "-L",
            NDK_PATH + "/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a",
            "-no-canonical-prefixes",
            "-Wl,-z,relro",
            "-Wl,--gc-sections",
        ]
    if cpu == _ARM64_V8A:
        return [
            "-gcc-toolchain",
            NDK_PATH + "/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64",
            "-target",
            "aarch64-none-linux-android",
            "-L",
            NDK_PATH + "/sources/cxx-stl/llvm-libc++/libs/arm64-v8a",
            "-no-canonical-prefixes",
            "-Wl,-z,relro",
            "-Wl,--gc-sections",
        ]
    fail("Unknown CPU: " + cpu)
def _get_default_dbg_flags(cpu):
    """Returns the extra compiler flags applied to dbg (debug) builds."""
    dbg_flags = {
        _ARMEABI_V7A: ["-g", "-fno-strict-aliasing", "-O0", "-UNDEBUG"],
        _ARM64_V8A: ["-O0", "-g", "-UNDEBUG"],
    }
    if cpu not in dbg_flags:
        fail("Unknown CPU: " + cpu)
    return dbg_flags[cpu]
def _get_default_opt_flags(cpu):
    """Returns the extra compiler flags applied to opt (optimized) builds."""
    opt_flags = {
        _ARMEABI_V7A: ["-mthumb", "-Os", "-g", "-DNDEBUG"],
        _ARM64_V8A: ["-O2", "-g", "-DNDEBUG"],
    }
    if cpu not in opt_flags:
        fail("Unknown CPU: " + cpu)
    return opt_flags[cpu]
def _get_toolchain_identifier(cpu):
    """Returns the unique toolchain identifier for the given CPU."""
    identifiers = {
        _ARMEABI_V7A: "ndk-armeabi-v7a-toolchain",
        _ARM64_V8A: "ndk-arm64-v8a-toolchain",
    }
    if cpu not in identifiers:
        fail("Unknown CPU: " + cpu)
    return identifiers[cpu]
def _get_target_system_name(cpu):
    """Returns the GNU target triple for the given CPU."""
    system_names = {
        _ARMEABI_V7A: "arm-linux-androideabi",
        _ARM64_V8A: "aarch64-linux-android",
    }
    if cpu not in system_names:
        fail("Unknown CPU: " + cpu)
    return system_names[cpu]
def _get_builtin_sysroot(cpu):
    """Returns the NDK platform sysroot for the given CPU (API level 29)."""
    arch_dirs = {
        _ARMEABI_V7A: "arch-arm",
        _ARM64_V8A: "arch-arm64",
    }
    if cpu not in arch_dirs:
        fail("Unknown CPU: " + cpu)
    return NDK_PATH + "/platforms/android-29/" + arch_dirs[cpu]
def _get_tool_paths(cpu):
    """Returns the tool_path entries for the given CPU.

    The cc_common.create_cc_toolchain_config_info function expects tool paths
    to point to files under the directory in which it is invoked. This means
    we cannot directly reference tools under external/android_ndk. The
    solution is to use "trampoline" scripts that pass through any command-line
    arguments to the NDK binaries under external/android_sdk.

    Improvement: both CPU branches were near-identical 40-line blocks differing
    only in the binutils prefix; the list is now generated from that prefix.
    """
    if cpu == _ARMEABI_V7A:
        prefix = "arm-linux-androideabi"
    elif cpu == _ARM64_V8A:
        prefix = "aarch64-linux-android"
    else:
        fail("Unknown CPU: " + cpu)

    # Tools that do not follow the "<prefix>-<tool>.sh" trampoline pattern:
    # the C preprocessor and compiler both go through the clang trampoline,
    # and gcov is deliberately stubbed out.
    overrides = {
        "cpp": "trampolines/clang.sh",
        "gcc": "trampolines/clang.sh",
        "gcov": "/bin/false",
    }
    names = ["ar", "cpp", "dwp", "gcc", "gcov", "ld", "nm", "objcopy", "objdump", "strip"]
    return [
        tool_path(
            name = name,
            path = overrides.get(name, "trampolines/%s-%s.sh" % (prefix, name)),
        )
        for name in names
    ]
def _ndk_cc_toolchain_config_impl(ctx):
    """Rule implementation: assembles the CcToolchainConfigInfo for ctx.attr.cpu.

    Combines the per-CPU compile/link flags with compilation-mode specific
    flags (fastbuild/dbg/opt), the trampoline tool paths, and the builtin
    sysroot for the selected Android ABI.
    """
    default_compile_flags = _get_default_compile_flags(ctx.attr.cpu)
    # Include paths for libc++, its ABI library, the NDK support headers and
    # the unified sysroot headers; applied to every compile action, unfiltered.
    unfiltered_compile_flags = [
        "-isystem",
        NDK_PATH + "/sources/cxx-stl/llvm-libc++/include",
        "-isystem",
        NDK_PATH + "/sources/cxx-stl/llvm-libc++abi/include",
        "-isystem",
        NDK_PATH + "/sources/android/support/include",
        "-isystem",
        NDK_PATH + "/sysroot/usr/include",
    ]
    default_link_flags = _get_default_link_flags(ctx.attr.cpu)
    # NOTE(review): this injects a single empty-string argument on fastbuild
    # compiles; presumably a placeholder — confirm it is intentional.
    default_fastbuild_flags = [""]
    default_dbg_flags = _get_default_dbg_flags(ctx.attr.cpu)
    default_opt_flags = _get_default_opt_flags(ctx.attr.cpu)
    # Marker features for the three compilation modes; Bazel enables exactly
    # one of them, which the with_feature_set clauses below key off.
    opt_feature = feature(name = "opt")
    fastbuild_feature = feature(name = "fastbuild")
    dbg_feature = feature(name = "dbg")
    supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True)
    supports_pic_feature = feature(name = "supports_pic", enabled = True)
    static_link_cpp_runtimes_feature = feature(name = "static_link_cpp_runtimes", enabled = True)
    # Base compile flags plus mode-specific additions.
    default_compile_flags_feature = feature(
        name = "default_compile_flags",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = default_compile_flags)],
            ),
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = default_fastbuild_flags)],
                with_features = [with_feature_set(features = ["fastbuild"])],
            ),
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = default_dbg_flags)],
                with_features = [with_feature_set(features = ["dbg"])],
            ),
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = default_opt_flags)],
                with_features = [with_feature_set(features = ["opt"])],
            ),
        ],
    )
    default_link_flags_feature = feature(
        name = "default_link_flags",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_link_actions,
                flag_groups = [flag_group(flags = default_link_flags)],
            ),
        ],
    )
    # Expands flags passed via --copt / per-target copts.
    user_compile_flags_feature = feature(
        name = "user_compile_flags",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [
                    flag_group(
                        flags = ["%{user_compile_flags}"],
                        iterate_over = "user_compile_flags",
                        expand_if_available = "user_compile_flags",
                    ),
                ],
            ),
        ],
    )
    # Passes --sysroot to both compile and link actions when Bazel supplies it.
    sysroot_feature = feature(
        name = "sysroot",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_compile_actions + _all_link_actions,
                flag_groups = [
                    flag_group(
                        flags = ["--sysroot=%{sysroot}"],
                        expand_if_available = "sysroot",
                    ),
                ],
            ),
        ],
    )
    unfiltered_compile_flags_feature = feature(
        name = "unfiltered_compile_flags",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = unfiltered_compile_flags)],
            ),
        ],
    )
    features = [
        default_compile_flags_feature,
        default_link_flags_feature,
        supports_dynamic_linker_feature,
        supports_pic_feature,
        static_link_cpp_runtimes_feature,
        fastbuild_feature,
        dbg_feature,
        opt_feature,
        user_compile_flags_feature,
        sysroot_feature,
        unfiltered_compile_flags_feature,
    ]
    # Directories Bazel treats as "builtin" for include-path validation.
    cxx_builtin_include_directories = [
        NDK_PATH + "/toolchains/llvm/prebuilt/linux-x86_64/lib64/clang/9.0.9/include",
        "%sysroot%/usr/include",
        NDK_PATH + "/sysroot/usr/include",
    ]
    # https://bazel.build/rules/lib/cc_common#create_cc_toolchain_config_info
    return cc_common.create_cc_toolchain_config_info(
        ctx = ctx,
        toolchain_identifier = _get_toolchain_identifier(ctx.attr.cpu),
        host_system_name = "local",
        target_system_name = _get_target_system_name(ctx.attr.cpu),
        target_cpu = ctx.attr.cpu,
        target_libc = "local",
        compiler = "clang9.0.9",
        abi_version = ctx.attr.cpu,
        abi_libc_version = "local",
        features = features,
        tool_paths = _get_tool_paths(ctx.attr.cpu),
        cxx_builtin_include_directories = cxx_builtin_include_directories,
        builtin_sysroot = _get_builtin_sysroot(ctx.attr.cpu),
    )
# Rule exposing the NDK C++ toolchain configuration. The only attribute,
# `cpu`, selects between the two supported Android ABIs; everything else
# (flags, tool paths, sysroot) is derived from it in the implementation.
ndk_cc_toolchain_config = rule(
    implementation = _ndk_cc_toolchain_config_impl,
    attrs = {
        "cpu": attr.string(
            mandatory = True,
            values = [_ARMEABI_V7A, _ARM64_V8A],
            doc = "Target CPU.",
        )
    },
    provides = [CcToolchainConfigInfo],
)
| 13,771 | 4,528 |
import re
from http import cookiejar
from urllib import request, parse
from bs4 import BeautifulSoup
from VirtualJudgeSpider import Config
from VirtualJudgeSpider.Config import Problem, Spider, Result
from VirtualJudgeSpider.OJs.BaseClass import Base
class HDU(Base):
    """Spider for the HDU online judge (acm.hdu.edu.cn).

    Handles login, problem scraping, code submission and result polling.
    All pages are decoded as gb18030 (a superset of the site's gb2312).

    Fixes vs. the original: the @staticmethod declared with a stray ``self``
    parameter (which made ``instance.home_page_url()`` raise TypeError), bare
    ``except:`` clauses narrowed to ``except Exception:``, a ``try/finally``
    whose ``return`` inside ``finally`` swallowed *all* exceptions, a
    mojibake'd ``&notice`` in the login URL, and implicit ``None`` fall-through
    returns made explicit.
    """

    def __init__(self):
        self.code_type = 'gb18030'
        # Cookie-aware opener so the login session persists across requests.
        self.cj = cookiejar.CookieJar()
        self.opener = request.build_opener(request.HTTPCookieProcessor(self.cj))

    @staticmethod
    def home_page_url():
        """Return the judge's home page URL."""
        url = 'http://acm.hdu.edu.cn/'
        return url

    def check_login_status(self):
        """Return True iff the current session is logged in."""
        url = 'http://acm.hdu.edu.cn/'
        try:
            with self.opener.open(url) as fin:
                website_data = fin.read().decode(self.code_type)
            # The logout link is only rendered for authenticated sessions.
            if re.search(r'userloginex\.php\?action=logout', website_data) is not None:
                return True
        except Exception:
            return False
        return False

    def login_webside(self, *args, **kwargs):
        """Log in with kwargs['account']; returns True on success."""
        if self.check_login_status():
            return True
        login_page_url = 'http://acm.hdu.edu.cn/'
        # Fix: the original URL contained the mojibake "¬ice=0" — the HTML
        # entity rendering of "&notice=0".
        login_link_url = 'http://acm.hdu.edu.cn/userloginex.php?action=login&cid=0&notice=0'
        post_data = parse.urlencode(
            {'username': kwargs['account'].get_username(),
             'userpass': kwargs['account'].get_password()})
        try:
            # Warm up the session cookie, then POST the credentials.
            self.opener.open(login_page_url)
            req = request.Request(url=login_link_url, data=post_data.encode(self.code_type),
                                  headers=Config.custom_headers)
            self.opener.open(req)
            return self.check_login_status()
        except Exception:
            return False

    def get_problem(self, *args, **kwargs):
        """Scrape problem kwargs['pid']; returns a Problem, or
        Problem.PROBLEM_NOT_FOUND on any scrape failure."""
        url = 'http://acm.hdu.edu.cn/showproblem.php?pid=' + str(kwargs['pid'])
        problem = Problem()
        try:
            website_data = Spider.get_data(url, self.code_type)
            problem.remote_id = kwargs['pid']
            problem.remote_url = url
            problem.remote_oj = 'HDU'
            problem.title = re.search(r'color:#1A5CC8\'>([\s\S]*?)</h1>', website_data).group(1)
            problem.time_limit = re.search(r'(\d* MS)', website_data).group(1)
            problem.memory_limit = re.search(r'/(\d* K)', website_data).group(1)
            problem.special_judge = re.search(r'color=red>Special Judge</font>', website_data) is not None
            problem.description = re.search(r'>Problem Description</div>[\s\S]*?panel_content>([\s\S]*?)</div>',
                                            website_data).group(1)
            problem.input = re.search(r'>Input</div>[\s\S]*?panel_content>([\s\S]*?)</div>', website_data).group(1)
            problem.output = re.search(r'>Output</div>[\s\S]*?panel_content>([\s\S]*?)</div>', website_data).group(1)
            match_group = re.search(r'>Sample Input</div>[\s\S]*?panel_content>([\s\S]*?)</div', website_data)
            input_data = ''
            if match_group:
                # Strip an optional leading "<pre><div ...>" wrapper.
                input_data = re.search(r'(<pre><div[\s\S]*?>)?([\s\S]*)', match_group.group(1)).group(2)
            output_data = ''
            match_group = re.search(r'>Sample Output</div>[\s\S]*?panel_content>([\s\S]*?)</div', website_data)
            if match_group:
                output_data = re.search(r'(<pre><div[\s\S]*?>)?([\s\S]*)', match_group.group(1)).group(2)
                if re.search('<div', output_data):
                    output_data = re.search(r'([\s\S]*?)<div', output_data).group(1)
            problem.sample = [
                {'input': input_data,
                 'output': output_data}]
            match_group = re.search(r'>Author</div>[\s\S]*?panel_content>([\s\S]*?)</div>', website_data)
            if match_group:
                problem.author = match_group.group(1)
            match_group = re.search(r'<i>Hint</i>[\s\S]*?/div>[\s]*([\s\S]+?)</div>', website_data)
            if match_group:
                problem.hint = match_group.group(1)
        except Exception:
            return Problem.PROBLEM_NOT_FOUND
        return problem

    def submit_code(self, *args, **kwargs):
        """Submit kwargs['code'] for kwargs['pid'] in kwargs['language']."""
        if self.login_webside(*args, **kwargs) is False:
            return False
        try:
            code = kwargs['code']
            language = kwargs['language']
            pid = kwargs['pid']
            url = 'http://acm.hdu.edu.cn/submit.php?action=submit'
            post_data = parse.urlencode({'check': '0', 'language': language, 'problemid': pid, 'usercode': code})
            req = request.Request(url=url, data=post_data.encode(self.code_type), headers=Config.custom_headers)
            response = self.opener.open(req)
            response.read().decode(self.code_type)
            return True
        except Exception:
            return False

    def find_language(self, *args, **kwargs):
        """Return a {value: label} dict of the judge's submission languages.

        On a scrape failure the (possibly partial) dict collected so far is
        returned, matching the original best-effort behavior.
        """
        if self.login_webside(*args, **kwargs) is False:
            return None
        url = 'http://acm.hdu.edu.cn/submit.php'
        languages = {}
        try:
            with self.opener.open(url) as fin:
                data = fin.read().decode(self.code_type)
            soup = BeautifulSoup(data, 'lxml')
            options = soup.find('select', attrs={'name': 'language'}).find_all('option')
            for option in options:
                languages[option.get('value')] = option.string
        except Exception:
            # Fix: the original used try/finally with `return` inside finally,
            # which silently swallowed every exception (even KeyboardInterrupt).
            pass
        return languages

    def get_result(self, *args, **kwargs):
        """Fetch the latest status row for kwargs['account'] on kwargs['pid']."""
        account = kwargs.get('account')
        pid = kwargs.get('pid')
        url = 'http://acm.hdu.edu.cn/status.php?first=&pid=' + pid + '&user=' + account.username + '&lang=0&status=0'
        return self.get_result_by_url(url=url)

    def get_result_by_rid(self, rid):
        """Fetch the status row for run id ``rid``."""
        url = 'http://acm.hdu.edu.cn/status.php?first=' + rid + '&pid=&user=&lang=0&status=0'
        return self.get_result_by_url(url=url)

    def get_result_by_url(self, url):
        """Parse the first row of the status table at ``url`` into a Result.

        Returns an empty Result when the page cannot be fetched or parsed.
        """
        result = Result()
        try:
            with request.urlopen(url) as fin:
                data = fin.read().decode(self.code_type)
            soup = BeautifulSoup(data, 'lxml')
            line = soup.find('table', attrs={'class': 'table_text'}).find('tr', attrs={'align': 'center'}).find_all(
                'td')
            if line is not None:
                result.origin_run_id = line[0].string
                result.verdict = line[2].string
                result.execute_time = line[4].string
                result.execute_memory = line[5].string
            return result
        except Exception:
            pass
        return result

    def get_class_name(self):
        """Name identifying this OJ implementation."""
        return str('HDU')

    def is_waiting_for_judge(self, verdict):
        """True while the verdict is still non-final."""
        return verdict in ['Queuing', 'Compiling', 'Running']

    def check_status(self):
        """True iff the judge's home page is reachable and looks healthy."""
        url = 'http://acm.hdu.edu.cn/'
        try:
            with request.urlopen(url, timeout=5) as fin:
                data = fin.read().decode(self.code_type)
            if re.search(r'<H1>Welcome to HDU Online Judge System</H1>', data):
                return True
        except Exception:
            return False
        return False
| 7,149 | 2,239 |
''' 입력 '''
# NOTE: the bare ''' 입력 ''' ("input") strings above/below are inert string
# literals used as section markers in the original; kept byte-identical.
n = int(input())  # size of the (n x n) map
square_map = []
for i in range(n):
    # Each row arrives as a string of digits, e.g. "0110", split into ints.
    square_map.append(list(map(int, input())))
''' 입력 '''
_house_count = 0  # houses counted in the component currently being explored
house = []        # per-complex house counts
bundle = 0        # number of house complexes found
def dfx(x, y):
    """Flood-fill the complex containing (x, y).

    Marks visited houses with 2 and accumulates their count in the
    module-level ``_house_count``. Returns True iff (x, y) started a new
    complex (i.e. was an unvisited house).
    """
    global _house_count
    if not (0 <= x < n and 0 <= y < n):
        return False
    if square_map[x][y] != 1:
        return False
    square_map[x][y] = 2  # mark as visited
    _house_count += 1
    # Same neighbor order as the original: left, right, down, up.
    for dx, dy in ((0, -1), (0, 1), (1, 0), (-1, 0)):
        dfx(x + dx, y + dy)
    return True
# Scan every cell; each successful dfx() call flood-fills one whole complex.
for i in range(n):
    for j in range(n):
        if dfx(i, j):
            house.append(_house_count)
            _house_count = 0  # reset for the next complex
            bundle += 1
# Output: number of complexes, then their sizes in ascending order.
print(bundle)
for i in sorted(house):
    print(i)
"""
Attachment File which implements the Hiven Attachment type and its methods
(endpoints)
---
Under MIT License
Copyright © 2020 - 2021 Luna Klatzer
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
# Used for type hinting and not having to use annotations for the objects
from __future__ import annotations
import logging
# Only importing the Objects for the purpose of type hinting and not actual use
from typing import TYPE_CHECKING
from .hiven_type_schemas import AttachmentSchema, get_compiled_validator
from ..base_types import DataClassObject
from ..utils import log_type_exception
if TYPE_CHECKING:
from .. import HivenClient
logger = logging.getLogger(__name__)
__all__ = ['Attachment']
class Attachment(DataClassObject):
    """ Represents a Hiven Message Attachment containing a file """
    # JSON schema and its compiled validator shared by all instances.
    _json_schema: dict = AttachmentSchema
    json_validator = get_compiled_validator(_json_schema)

    @log_type_exception('Attachment')
    def __init__(self, data: dict, client: HivenClient):
        """
        Represents a Hiven Message Attachment containing a file

        :param data: Data that should be used to create the object
        :param client: The HivenClient
        """
        super().__init__()
        # Fields are read with .get() so missing keys simply yield None.
        self._filename = data.get('filename')
        self._media_url = data.get('media_url')
        self._raw = data.get('raw')
        self._client = client

    @classmethod
    def format_obj_data(cls, data: dict) -> dict:
        """
        Validates the data and appends data if it is missing that would be
        required for the creation of an instance.

        :param data: Data that should be validated and used to form the object
        :return: The modified dictionary, which can then be used to create a
         new class instance
        """
        # Fold the whole payload into 'raw' so the original input is preserved
        # alongside the parsed fields (top-level keys win over existing 'raw').
        data['raw'] = {**data.pop('raw', {}), **data}
        return cls.validate(data)

    @property
    def filename(self) -> str | None:
        """ Name of the file (None when absent from the payload) """
        return getattr(self, '_filename', None)

    @property
    def media_url(self) -> str | None:
        """ Media-url to access the file (None when absent) """
        return getattr(self, '_media_url', None)

    @property
    def raw(self) -> dict | None:
        """ The raw data dictionary received over the Swarm """
        # Different files have different attribs
        return getattr(self, '_raw', None)
| 3,329 | 972 |
from django.shortcuts import render
from .models import Task
from rest_framework import serializers
from rest_framework.response import Response
from rest_framework.decorators import api_view
# Create your views here.
class TaskSerializer(serializers.ModelSerializer):
    """Full serializer for Task objects, including the fetched response data."""

    class Meta:
        """Binds the serializer to the Task model and selects its fields."""
        model = Task
        fields = (
            'id',
            'url',
            'status',
            'response_content',
            'response_http_status',
            'response_body',
        )
class TaskSerializerResult(serializers.ModelSerializer):
    """Minimal serializer exposing only a Task's id (used for create responses)."""

    class Meta:
        """Binds the serializer to the Task model, id field only."""
        model = Task
        fields = ('id',)
@api_view(("POST",))
def send(request):
if request.method == "POST":
task = Task.objects.create(url=request.data.get("url"))
return Response(TaskSerializerResult(task).data)
else:
return Response({"error": "Bad request."})
@api_view(("GET", ))
def result(request):
if request.method == "GET":
task_id = request.GET.get("id", False)
if task_id:
task = Task.objects.filter(id = task_id).first()
print(task)
if task:
return Response(TaskSerializer(task).data)
else:
task = Task.objects.all().order_by('-id')[:10]
print(task)
return Response(TaskSerializer(task, many = True).data)
else:
return Response({"status": "Bad id"})
else:
return Response({"status": "Bad request"})
@api_view(("GET",))
def start_tasks(request):
Task.objects.all().update(status=0)
return Response({"status": "all task gets status New, and will updating every 2 min in case it's still new"})
| 1,530 | 539 |
# -*- coding: utf-8 -*-
"""
This file contains all the settings that defines the development server.
SECURITY WARNING: don't run with debug turned on in production!
"""
import logging
from typing import List
from server.settings.components.common import INSTALLED_APPS, MIDDLEWARE
# Setting the development status:
DEBUG = True  # development only — never enable in production (see module docstring)

# Static files:
# https://docs.djangoproject.com/en/1.11/ref/settings/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS: List[str] = []  # no extra static directories in development
| 471 | 158 |
import sys
import json
from random import choice, random
import time
from pyaidoop_graphql_client.api import Client
def main(argv):
    """Query robot01's status via the pyaidoop GraphQL client.

    :param argv: sys.argv; argv[1] (the workspace name) is accepted but
        currently unused by the calls below.

    Fixes vs. the original: no longer crashes with IndexError when run
    without arguments, and the robot status is fetched once instead of
    issuing two identical requests.
    """
    workspace_name = argv[1] if len(argv) > 1 else None  # kept for future use
    client = Client("http://localhost:3000", "system")
    client.signin("admin@hatiolab.com", "admin")
    # client.robot_go_home(name='robot01')
    # client.robot_task_moveby(
    #     name='robot01', pose={'x': 0.0, 'y': 0.01, 'z': 0.0, 'u': 0.0, 'v': 0.0, 'w': 0.0})
    status = client.get_robot_status(name="robot01")
    print(status)
    print(status["moveFinished"])
if __name__ == "__main__":
    # Entry point: optionally takes the workspace name as the first CLI argument.
    main(sys.argv)
| 605 | 252 |
#!/bin/env python3
"""
Extracts all metro and RER stations from an OSM dump.
"""
import xml.etree.cElementTree as ET
import argparse
import csv
from math import radians, cos, sin, asin, sqrt
class Station(object):
    """A train station aggregated from one or more OSM nodes.

    Nodes scraped with the same name are treated as one station (see merge),
    so equality and hashing are based on the name only.
    """

    def __init__(self, name, osm_id, lat, lon, accessible=False):
        self._name = name
        self._osm_ids = {int(osm_id)}
        self._lat = lat
        self._lon = lon
        self._accessible = accessible

    @property
    def name(self):
        """Name of the station."""
        return self._name

    @property
    def osm_ids(self):
        """Set of OpenStreetMap node IDs belonging to this station."""
        return self._osm_ids

    @property
    def lat(self):
        """Latitude of the station."""
        return self._lat

    @property
    def lon(self):
        """Longitude of the station."""
        return self._lon

    @property
    def accessible(self):
        """True if the station is accessible."""
        return self._accessible

    def distance(self, other):
        """
        Calculate the great circle distance in kilometers between two points
        on the earth (specified in decimal degrees), via the haversine formula.
        """
        # convert decimal degrees to radians
        lon1, lat1, lon2, lat2 = [radians(x) for x in
                                  [self.lon, self.lat, other.lon, other.lat]]
        # haversine formula
        dlon = lon2 - lon1
        dlat = lat2 - lat1
        a = sin(dlat / 2) ** 2 + cos(lat1) * cos(lat2) * sin(dlon / 2) ** 2
        c = 2 * asin(sqrt(a))
        r = 6371.0  # Radius of earth in kilometers. Use 3956 for miles
        return c * r

    def merge(self, other):
        """Absorb the OSM ids of ``other`` into this station."""
        self._osm_ids.update(other.osm_ids)

    @staticmethod
    def from_node(node):
        """Creates a Station from an XML node in OSM format.

        Idiom fix: uses find() instead of findall() plus a length check.
        """
        name_tag = node.find("./tag[@k='name']")
        name = name_tag.get("v") if name_tag is not None else None
        osm_id = node.get("id")
        lat = float(node.get("lat"))
        lon = float(node.get("lon"))
        return Station(name, osm_id, lat, lon)

    def __repr__(self):
        return "Station(%s)" % (self.name)

    def __eq__(self, other):
        # Equality by name only: distinct OSM nodes of one station compare equal.
        if isinstance(other, Station):
            return self.name == other.name
        return False

    def __ne__(self, other):
        return not self.__eq__(other)

    def __hash__(self):
        # Consistent with __eq__: the repr embeds only the name.
        return hash(self.__repr__())
def extract_stations_from_dump(dump_path):
    """Extract a list of |Station|s from an XML dump.

    Nodes sharing a name are merged into a single Station, then the
    hand-maintained MERGE_STATIONS table is applied on top.
    """
    root = ET.parse(dump_path).getroot()
    stations_by_name = {}
    for node in root.findall('./node'):
        station = Station.from_node(node)
        existing = stations_by_name.get(station.name)
        if existing is None:
            stations_by_name[station.name] = station
        else:
            existing.merge(station)
    return merge_osm_stations(stations_by_name.values())
# Hand-maintained table of OSM node ids that belong to the same physical
# station but carry different names in the dump: receiver id -> ids to fold in.
MERGE_STATIONS = {
    26824135: [27371889, 1309031698, 1308998006],  # Gare de Lyon
    1731763794: [241928557],  # Nation
    3533789791: [3542631493],  # Saint Lazare
    243496033: [1731763792],  # Etoile
    3574677130: [1785132453],  # Pont du Garigliano
    3586000197: [137533248],  # La Défense
    269296749: [241926523],  # Marne la Vallée Chessy
    225119209: [3530909557, 1882558198],  # CDG 2
    3531066587: [1883637808],  # La Fraternelle - Rungis
    255687197: [2367372622],  # Issy Val de Seine
    264778142: [2799009872],  # Porte de la Villette
}

def merge_osm_stations(stations):
    """Collapse stations declared identical by MERGE_STATIONS.

    Fixes vs. the original: ids absent from the dump no longer crash the
    merge (the original dereferenced a None receiver / merged a None
    duplicate), and removal is done by object identity instead of fragile
    index bookkeeping.
    """
    stations = list(stations)

    def find_station(osm_id):
        # Linear scan; station counts are small enough for this to be fine.
        for station in stations:
            if osm_id in station.osm_ids:
                return station
        return None

    for receiver_id, ids_to_merge in MERGE_STATIONS.items():
        receiver = find_station(receiver_id)
        if receiver is None:
            continue  # receiver not present in this dump
        for id_to_merge in ids_to_merge:
            duplicate = find_station(id_to_merge)
            if duplicate is None or duplicate is receiver:
                continue
            receiver.merge(duplicate)
            stations.remove(duplicate)
    return stations
def extract_accessible_stations(csv_filepath):
    """Extracts stations from a csv file listing accessible stations.

    Each row is (name, _, lat, lon, osm_id); the resulting Stations are
    flagged accessible=True.
    """
    with open(csv_filepath) as handle:
        return [
            Station(row[0], row[4], float(row[2]), float(row[3]), True)
            for row in csv.reader(handle)
        ]
def merge_stations(all_stations, accessible_stations):
    """Merge two lists of stations.

    For every OSM station, prefer the matching accessible-station record
    (matched by shared OSM ids); otherwise keep the OSM station itself when
    it has a name. Prints the number of matches (kept for CLI parity).
    """
    merged_stations = []
    merged_count = 0
    for station1 in all_stations:
        found = False
        for station2 in accessible_stations:
            if station1.osm_ids.intersection(station2.osm_ids):
                merged_stations.append(station2)
                found = True
                merged_count += 1
                # BUG FIX: stop at the first match so a station cannot be
                # appended (and counted) more than once.
                break
        if not found and station1.name:
            merged_stations.append(station1)
    print(merged_count)
    return merged_stations
def print_to_csv(stations, output_path="full-list.csv"):
    """Print a list of stations to CSV.

    Args:
        stations: iterable of Station-like objects with name, osm_ids, lat,
            lon and accessible attributes.
        output_path: destination file; default kept for backward
            compatibility with existing callers.
    """
    # BUG FIX: newline='' is required by the csv module so the writer
    # controls line endings itself (avoids blank lines on Windows).
    with open(output_path, "w", newline="") as writer:
        csvwriter = csv.writer(writer)
        csvwriter.writerow(
            ["name", "osm_id", "latitude", "longitude", "accessible"])
        for station in stations:
            csvwriter.writerow(
                [station.name, station.osm_ids, station.lat, station.lon, station.accessible])
def _parse_args():
"""Define and parse command-line arguments."""
parser = argparse.ArgumentParser(description='Extract station information.')
parser.add_argument('--osm_dump', type=str,
help='Path of the OSM dump containing train stations')
parser.add_argument('--accessible_csv', type=str,
help='Path to the list of accessible stations (CSV)')
return parser.parse_args()
def _main():
    """Script entry-point: extract stations, merge in accessibility, export."""
    args = _parse_args()
    osm_stations = extract_stations_from_dump(args.osm_dump)
    accessible = extract_accessible_stations(args.accessible_csv)
    print_to_csv(merge_stations(osm_stations, accessible))
# Allow use both as an importable module and as a command-line script.
if __name__ == '__main__':
    _main()
| 6,275 | 2,161 |
# note : this does not create the link between the map and the world. It only spawns the robots.
# Please make sure to go back and manually add the path to the bitmap file
# Emits a LaTeX \subfloat grid (two figures per row) for test_<i>_<j>.png.
# NOTE(review): assumes ../new_results/ already exists — confirm, or the
# open() below raises FileNotFoundError.
file_name = 'plots.txt'
counter = 1
# Context manager guarantees the file is closed even if a write fails.
with open("../new_results/" + file_name, "w+") as f:
    for i in range(1, 10):
        for j in range(1, 6):
            # BUG FIX: raw string — the original non-raw literal relied on
            # invalid escapes ("\s", "\i", "\l"), which are deprecated and a
            # future SyntaxError; the trailing newline is appended explicitly.
            f.write(r'\subfloat{\includegraphics[width=0.5\linewidth]{figures/test_%d_%d.png}}' % (i, j) + '\n')
            if counter % 2 == 0:
                # End the row after every second subfloat.
                f.write(r'\\ ')
            counter += 1
| 493 | 173 |
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2018-2019 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Entitlement formatter."""
from aquilon.aqdb.model import (
EntitlementArchetypeGrnMap,
EntitlementArchetypeUserMap,
EntitlementClusterGrnMap,
EntitlementClusterUserMap,
EntitlementGrnGrnMap,
EntitlementGrnUserMap,
EntitlementHostGrnMap,
EntitlementHostUserMap,
EntitlementOnArchetype,
EntitlementOnCluster,
EntitlementOnGrn,
EntitlementOnHost,
EntitlementOnHostEnvironment,
EntitlementOnLocation,
EntitlementOnPersonality,
EntitlementPersonalityGrnMap,
EntitlementPersonalityUserMap,
EntitlementToGrn,
EntitlementToUser,
EntitlementType,
)
from aquilon.worker.formats.formatters import ObjectFormatter
class EntitlementTypeFormatter(ObjectFormatter):
    """Raw formatter for EntitlementType objects."""

    def format_raw(self, entit_type, indent="", embedded=True,
                   indirect_attrs=True):
        """Render the entitlement type as indented human-readable lines."""
        out = ['{}Entitlement type: {}'.format(indent, entit_type.name)]
        grn_state = 'enabled' if entit_type.to_grn else 'disabled'
        out.append('{}  To GRN: {}'.format(indent, grn_state))
        if entit_type.to_user_types:
            # De-duplicate the user-type names before sorting them.
            names = sorted({m.user_type.name
                            for m in entit_type.to_user_types})
            out.append('{}  To User Types: {}'.format(
                indent, ', '.join(names)))
        if entit_type.comments:
            out.append('{}  Comments: {}'.format(
                indent, entit_type.comments))
        return '\n'.join(out)
ObjectFormatter.handlers[EntitlementType] = EntitlementTypeFormatter()
class EntitlementFormatter(ObjectFormatter):
    """Raw/protobuf formatter shared by every entitlement mapping class."""

    def format_raw(self, entit, indent="", embedded=True, indirect_attrs=True):
        """Render one entitlement as indented human-readable lines."""
        details = [indent + 'Entitlement: {}'.format(entit.type.name)]
        # Grantee ("to" side): either a GRN or a typed user.
        if isinstance(entit, EntitlementToGrn):
            details.append(indent + '  To {0:c}: {0.grn}'.format(entit.grn))
        elif isinstance(entit, EntitlementToUser):
            details.append(indent + '  To {type} {0:c}: {0.name}'.format(
                entit.user, type=entit.user.type.name.title()))
        # Primary target ("on" side): at most one branch applies.
        if isinstance(entit, EntitlementOnHost):
            details.append(
                indent +
                '  On {0:c}: {0.hardware_entity.primary_name.fqdn.fqdn}'
                .format(entit.host))
        elif isinstance(entit, EntitlementOnCluster):
            details.append(indent + '  On {0:c}: {0.name}'.format(entit.cluster))
        elif isinstance(entit, EntitlementOnPersonality):
            details.append(indent + '  On {0:c}: {0.name}'.format(entit.personality))
        elif isinstance(entit, EntitlementOnArchetype):
            details.append(indent + '  On {0:c}: {0.name}'.format(entit.archetype))
        elif isinstance(entit, EntitlementOnGrn):
            details.append(indent + '  On {0:c}: {0.grn}'.format(entit.target_grn))
        # Optional extra scoping, orthogonal to the target branch above.
        if isinstance(entit, EntitlementOnHostEnvironment):
            details.append(indent + '  On {0:c}: {0.name}'.format(entit.host_environment))
        if isinstance(entit, EntitlementOnLocation):
            details.append(indent + '  On {0:c}: {0.name}'.format(entit.location))
        return '\n'.join(details)

    def fill_proto(self, entit, skeleton, embedded=True, indirect_attrs=True):
        """Populate the protobuf skeleton, mirroring format_raw's branches."""
        skeleton.type = entit.type.name
        # Grantee.
        if isinstance(entit, EntitlementToGrn):
            skeleton.eonid = entit.grn.eon_id
        elif isinstance(entit, EntitlementToUser):
            self.redirect_proto(entit.user, skeleton.user,
                                indirect_attrs=False)
        # Primary target.
        if isinstance(entit, EntitlementOnHost):
            self.redirect_proto(entit.host, skeleton.host,
                                indirect_attrs=False)
        elif isinstance(entit, EntitlementOnCluster):
            self.redirect_proto(entit.cluster, skeleton.cluster,
                                indirect_attrs=False)
        elif isinstance(entit, EntitlementOnPersonality):
            self.redirect_proto(entit.personality, skeleton.personality,
                                indirect_attrs=False)
        elif isinstance(entit, EntitlementOnArchetype):
            self.redirect_proto(entit.archetype, skeleton.archetype,
                                indirect_attrs=False)
        elif isinstance(entit, EntitlementOnGrn):
            skeleton.target_eonid = entit.target_grn.eon_id
        # Optional extra scoping.
        if isinstance(entit, EntitlementOnHostEnvironment):
            skeleton.host_environment = entit.host_environment.name
        if isinstance(entit, EntitlementOnLocation):
            self.redirect_proto(entit.location, skeleton.location,
                                indirect_attrs=False)
# Register the shared EntitlementFormatter for every concrete entitlement
# mapping class (each grantee x target combination).
for cls in [
        EntitlementArchetypeGrnMap,
        EntitlementArchetypeUserMap,
        EntitlementClusterGrnMap,
        EntitlementClusterUserMap,
        EntitlementGrnGrnMap,
        EntitlementGrnUserMap,
        EntitlementHostGrnMap,
        EntitlementHostUserMap,
        EntitlementPersonalityGrnMap,
        EntitlementPersonalityUserMap,
]:
    ObjectFormatter.handlers[cls] = EntitlementFormatter()
| 5,615 | 1,769 |
from types import SimpleNamespace
import pytest
import threading
from py4web.core import Fixture
# Shared scratchpad: per-thread results plus the order workers started in.
result = {'seq': []}
def run_thread(func, *a):
    """Build (without starting) a Thread that will run ``func(*a)``."""
    worker = threading.Thread(target=func, args=a)
    return worker
class Foo(Fixture):
    """Minimal Fixture exposing one request-local attribute, ``bar``."""

    def on_request(self):
        # Fresh per-request storage; Fixture routes _safe_local per thread.
        self._safe_local = SimpleNamespace()

    @property
    def bar(self):
        """Request-local value previously stored by the setter."""
        return self._safe_local.a

    @bar.setter
    def bar(self, value):
        self._safe_local.a = value
# Single module-level fixture instance shared by all tests and threads.
foo = Foo()
def before_request():
    # Reset py4web's per-request fixture context, as the framework would do
    # at the start of each incoming request.
    Fixture.__init_request_ctx__()
@pytest.fixture
def init_foo():
    """Yield a helper that simulates one request storing a value in foo.

    The optional events let the caller pause the worker between writing
    and reading, to interleave threads deterministically.
    """
    def init(key, a, evnt_done=None, evnt_play=None):
        result['seq'].append(key)
        before_request()
        foo.on_request()
        foo.bar = a
        if evnt_done:
            evnt_done.set()      # signal: value stored
        if evnt_play:
            evnt_play.wait()     # park until the test releases us
        result[key] = foo.bar
        return foo
    return init
def test_fixtute_local_storage(init_foo):
    """End-to-end check that Fixture storage is thread-local.

    Choreography: t1 is the main thread; t2 is paused between writing and
    reading its value; t3 runs fully while t2 is parked. t2's later read
    must still see its own value, untouched by t3.
    """
    assert init_foo('t1', 'a1') is foo
    evnt_done = threading.Event()
    evnt_play = threading.Event()
    t2 = run_thread(init_foo, 't2', 'a2', evnt_done, evnt_play)
    t3 = run_thread(init_foo, 't3', 'a3', None, None)
    t2.start()
    evnt_done.wait()  # t2 has stored 'a2' and is parked on evnt_play
    t3.start()
    t3.join()         # t3 writes/reads 'a3' in its own context while t2 waits
    evnt_play.set()   # release t2 so it reads back its thread-local value
    t2.join()
    assert foo.bar == 'a1'
    assert result['t2'] == 'a2'
    assert result['t3'] == 'a3'
    assert ','.join(result['seq']) == 't1,t2,t3'
def test_fixtute_error():
    """Accessing a fixture attribute before on_request() must raise a
    RuntimeError whose message carries py4web's diagnostic hint."""
    before_request()
    # attempt to access _safe_local prop without on_request-call
    with pytest.raises(RuntimeError) as err:
        foo.bar
    assert 'py4web hint' in err.value.args[0]
    assert 'Foo object' in err.value.args[0]
| 1,618 | 599 |
# uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\objects\components\object_inventory_component.py
# Compiled at: 2020-10-06 03:00:48
# Size of source mod 2**32: 16791 bytes
from animation.posture_manifest import AnimationParticipant
from event_testing.resolver import DoubleObjectResolver
from objects.components import componentmethod, types
from objects.components.get_put_component_mixin import GetPutComponentMixin
from objects.components.inventory import InventoryComponent
from objects.components.inventory_enums import InventoryType
from objects.components.inventory_item_trigger import ItemStateTrigger
from objects.components.inventory_owner_tuning import InventoryTuning
from objects.components.state import ObjectStateValue
from objects.object_enums import ItemLocation, ResetReason
from objects.system import create_object
from postures.posture_specs import PostureSpecVariable
from sims4.tuning.tunable import TunableList, TunableReference, TunableEnumEntry, Tunable, OptionalTunable, TunableTuple
from statistics.statistic import Statistic
import services, sims4.resources
logger = sims4.log.Logger('Inventory', default_owner='tingyul')
class ObjectInventoryComponent(GetPutComponentMixin, InventoryComponent, component_name=types.INVENTORY_COMPONENT):
    """Tunable inventory component attached to non-Sim game objects.

    NOTE(review): this module is uncompyle6 output; a few methods below
    contain decompiler control-flow artifacts (annotated inline) rather
    than hand-guessed "fixes" — the original source is the authority.
    """
    # Interactions offered by every visible object inventory.
    DEFAULT_OBJECT_INVENTORY_AFFORDANCES = TunableList(TunableReference(description='\n        Affordances for all object inventories.\n        ',
      manager=(services.get_instance_manager(sims4.resources.Types.INTERACTION))))
    # Designer-facing tuning; keys map one-to-one onto __init__ parameters.
    FACTORY_TUNABLES = {'description':'\n        Generate an object inventory for this object\n        ',
     'inventory_type':TunableEnumEntry(description='\n        Inventory Type must be set for the object type you add this for.\n        ',
       tunable_type=InventoryType,
       default=InventoryType.UNDEFINED,
       invalid_enums=(
      InventoryType.UNDEFINED, InventoryType.SIM)),
     'visible':Tunable(description='\n        If this inventory is visible to player.',
       tunable_type=bool,
       default=True),
     'starting_objects':TunableList(description='\n        Objects in this list automatically populate the inventory when its\n        owner is created. Currently, to keep the game object count down, an\n        object will not be added if the object inventory already has\n        another object of the same type.',
       tunable=TunableReference(manager=(services.definition_manager()),
       description='Objects to populate inventory with.',
       pack_safe=True)),
     'purchasable_objects':OptionalTunable(description='\n        If this list is enabled, an interaction to buy the purchasable\n        objects through a dialog picker will show on the inventory object.\n        \n        Example usage: a list of books for the bookshelf inventory.\n        ',
       tunable=TunableTuple(show_description=Tunable(description='\n        Toggles whether the object description should show in the \n        purchase picker.\n        ',
       tunable_type=bool,
       default=False),
       objects=TunableList(description='\n        A list of object definitions that can be purchased.\n        ',
       tunable=TunableReference(manager=(services.definition_manager()),
       description='')))),
     'purge_inventory_state_triggers':TunableList(description='\n        Trigger the destruction of all inventory items if the inventory owner hits\n        any of the tuned state values.\n        \n        Only considers state-values present at and after zone-load finalize (ignores\n        default values that change during load based on state triggers, for example). \n        ',
       tunable=ObjectStateValue.TunableReference(description='\n        The state value of the owner that triggers inventory item destruction.\n        ')),
     'score_contained_objects_for_autonomy':Tunable(description='\n        Whether or not to score for autonomy any objects contained in this object.',
       tunable_type=bool,
       default=True),
     'item_state_triggers':TunableList(description="\n        The state triggers to modify inventory owner's state value based on\n        inventory items states.\n        ",
       tunable=ItemStateTrigger.TunableFactory()),
     'allow_putdown_in_inventory':Tunable(description="\n        This inventory allows Sims to put objects away into it, such as books\n        or other carryables. Ex: mailbox has an inventory but we don't want\n        Sims putting away items in the inventory.",
       tunable_type=bool,
       default=True),
     'test_set':OptionalTunable(description='\n        If enabled, the ability to pick up items from and put items in this\n        object is gated by this test.\n        ',
       tunable=TunableReference(manager=(services.get_instance_manager(sims4.resources.Types.SNIPPET)),
       class_restrictions=('TestSetInstance', ))),
     'count_statistic':OptionalTunable(description='\n        A statistic whose value will be the number of objects in this\n        inventory. It will automatically be added to the object owning this\n        type of component.\n        ',
       tunable=Statistic.TunableReference()),
     'return_owned_objects':Tunable(description="\n        If enabled, inventory objects will return to their household\n        owner's inventory when this object is destroyed off lot. This is\n        because build buy can undo actions on lot and cause object id\n        collisions.\n        \n        We first consider the closest instanced Sims, and finally move to\n        the household inventory if we can't move to a Sim's inventory.\n        ",
       tunable_type=bool,
       default=False),
     '_use_top_item_tooltip':Tunable(description="\n        If checked, this inventory would use the top item's tooltip as its\n        own tooltip. \n        ",
       tunable_type=bool,
       default=False)}

    def __init__(self, owner, inventory_type, visible, starting_objects, purchasable_objects, purge_inventory_state_triggers, score_contained_objects_for_autonomy, item_state_triggers, allow_putdown_in_inventory, test_set, count_statistic, return_owned_objects, _use_top_item_tooltip, **kwargs):
        """Store the factory-tuned values; parameters mirror FACTORY_TUNABLES."""
        (super().__init__)(owner, **kwargs)
        self._inventory_type = inventory_type
        self.visible = visible
        self.starting_objects = starting_objects
        self.purchasable_objects = purchasable_objects
        self.purge_inventory_state_triggers = purge_inventory_state_triggers
        self.score_contained_objects_for_autonomy = score_contained_objects_for_autonomy
        self.item_state_triggers = item_state_triggers
        self.allow_putdown_in_inventory = allow_putdown_in_inventory
        self.test_set = test_set
        self.count_statistic = count_statistic
        self.return_owned_objects = return_owned_objects
        self._use_top_item_tooltip = _use_top_item_tooltip

    @property
    def inventory_type(self):
        # Tuned InventoryType for this component.
        return self._inventory_type

    @property
    def default_item_location(self):
        # Items created for this component live in the object-inventory slot.
        return ItemLocation.OBJECT_INVENTORY

    @componentmethod
    def get_inventory_access_constraint(self, sim, is_put, carry_target, use_owner_as_target_for_resolver=False):
        """Build the posture constraint for accessing this inventory.

        When requested, surface/target animation participants are resolved
        to the owning object instead of the interaction's own target.
        """
        if use_owner_as_target_for_resolver:

            def constraint_resolver(animation_participant, default=None):
                # Map surface/target participants onto the inventory owner.
                if animation_participant in (AnimationParticipant.SURFACE, PostureSpecVariable.SURFACE_TARGET,
                 AnimationParticipant.TARGET, PostureSpecVariable.INTERACTION_TARGET):
                    return self.owner
                return default

        else:
            constraint_resolver = None
        return self._get_access_constraint(sim, is_put, carry_target, resolver=constraint_resolver)

    @componentmethod
    def get_inventory_access_animation(self, *args, **kwargs):
        # Thin passthrough to the GetPutComponentMixin animation helper.
        return (self._get_access_animation)(*args, **kwargs)

    @property
    def should_score_contained_objects_for_autonomy(self):
        # Tuned flag: whether autonomy may consider contained objects.
        return self.score_contained_objects_for_autonomy

    @property
    def use_top_item_tooltip(self):
        # Tuned flag: surface the top item's tooltip as the owner's tooltip.
        return self._use_top_item_tooltip

    def _get_inventory_count_statistic(self):
        # Optional Statistic tracking the number of contained objects.
        return self.count_statistic

    def on_add(self):
        # Instantiate the tuned item-state triggers before the base setup.
        for trigger in self.item_state_triggers:
            self.add_state_trigger(trigger(self))
        super().on_add()

    def on_reset_component_get_interdependent_reset_records(self, reset_reason, reset_records):
        # When the owner is destroyed off the active lot (and the zone is not
        # shutting down), return owned items to their household so build/buy
        # undo cannot cause object-id collisions.
        if reset_reason == ResetReason.BEING_DESTROYED:
            if not services.current_zone().is_zone_shutting_down:
                if not self.is_shared_inventory:
                    if self.return_owned_objects:
                        if not self.owner.is_on_active_lot():
                            household_manager = services.household_manager()
                            # Copy first: moving items mutates the inventory.
                            objects_to_transfer = list(iter(self))
                            for obj in objects_to_transfer:
                                household_id = obj.get_household_owner_id()
                                if household_id is not None:
                                    household = household_manager.get(household_id)
                                    if household is not None:
                                        household.move_object_to_sim_or_household_inventory(obj)
        super().on_reset_component_get_interdependent_reset_records(reset_reason, reset_records)

    def on_post_bb_fixup(self):
        # After build/buy fixup, seed the tuned starting objects.
        self._add_starting_objects()

    def _add_starting_objects(self):
        # Create one instance per tuned definition, skipping definitions the
        # inventory already contains (keeps total object count down).
        for definition in self.starting_objects:
            if self.has_item_with_definition(definition):
                continue
            new_object = create_object(definition, loc_type=(ItemLocation.OBJECT_INVENTORY))
            if new_object is None:
                logger.error('Failed to create object {}', definition)
                continue
            new_object.set_household_owner_id(self.owner.get_household_owner_id())
            if not self.player_try_add_object(new_object):
                logger.error('Failed to add object {} to inventory {}', new_object, self)
                new_object.destroy(source=(self.owner), cause='Failed to add starting object to inventory.')
                continue

    def component_interactable_gen(self):
        # This component itself contributes interactions.
        yield self

    def component_super_affordances_gen(self, **kwargs):
        # Hidden inventories expose no affordances to the player.
        if self.visible:
            for affordance in self.DEFAULT_OBJECT_INVENTORY_AFFORDANCES:
                yield affordance

    def _can_access(self, sim):
        # Gate access on the optional tuned test set (sim vs. owner resolver).
        if self.test_set is not None:
            resolver = DoubleObjectResolver(sim, self.owner)
            result = self.test_set(resolver)
            if not result:
                return False
        return True

    @componentmethod
    def can_access_for_pickup(self, sim):
        # Pickup additionally requires the owner not to be in any of the
        # globally-tuned invalid access states.
        if not self._can_access(sim):
            return False
        if any((self.owner.state_value_active(value) for value in InventoryTuning.INVALID_ACCESS_STATES)):
            return False
        return True

    @componentmethod
    def can_access_for_putdown(self, sim):
        # True when Sims may stash carryables into this inventory.
        if not self.allow_putdown_in_inventory:
            return False
        else:
            # NOTE(review): decompiler artifact — `or False` is redundant and
            # the trailing `return True` below is unreachable; the effective
            # behavior is simply `return self._can_access(sim)`.
            return self._can_access(sim) or False
        return True

    def _check_state_value_for_purge(self, state_value):
        # Is this owner state value one of the tuned purge triggers?
        return state_value in self.purge_inventory_state_triggers

    def _purge_inventory_from_state_change(self, new_value):
        # Purge the inventory when the owner enters a purge-trigger state.
        if not self._check_state_value_for_purge(new_value):
            return
        else:
            current_zone = services.current_zone()
            if current_zone is None:
                return
            # NOTE(review): decompiler artifact — this `return ... or None`
            # makes the purge call below unreachable; the original source most
            # likely read `if not current_zone.zone_spin_up_service.is_finished:
            # return`, so that purge_inventory() ran once spin-up finished.
            return current_zone.zone_spin_up_service.is_finished or None
        self.purge_inventory()

    def on_state_changed(self, state, old_value, new_value, from_init):
        # Ignore state changes during initialization per the tuning contract
        # (only post-load state values should trigger a purge).
        if self.purge_inventory_state_triggers:
            if not from_init:
                self._purge_inventory_from_state_change(new_value)

    def _purge_inventory_from_load_finalize(self):
        # At zone-load finalize, purge if any active owner state is a trigger.
        owner_state_component = self.owner.state_component
        if owner_state_component is None:
            logger.error('Attempting to purge an inventory based on state-triggers but the owner ({}) has no state component. Purge fails.', self.owner)
            return
        for active_state_value in owner_state_component.values():
            if self._check_state_value_for_purge(active_state_value):
                self.purge_inventory()
                return

    def on_finalize_load(self):
        # Evaluate purge triggers once the zone has fully loaded.
        if self.purge_inventory_state_triggers:
            self._purge_inventory_from_load_finalize()
from __future__ import absolute_import, unicode_literals
from django.core.management import BaseCommand, CommandError
from sqs_consumer.worker.service import WorkerService
class Command(BaseCommand):
    """Management command that consumes tasks from one or more SQS queues."""
    help = 'Command to process tasks from one or more SQS queues'

    def add_arguments(self, parser):
        """Register the --queues/-q comma-separated queue list option."""
        parser.add_argument('--queues', '-q',
                            dest='queue_names',
                            help='Name of queues to process, separated by commas')

    def handle(self, *args, **options):
        """Validate options and hand the queue names to the worker service.

        Raises:
            CommandError: if --queues was not supplied.
        """
        if not options['queue_names']:
            raise CommandError('Queue names (--queues) not specified')
        # BUG FIX: strip() instead of rstrip() so leading whitespace after
        # commas ("a, b" -> " b") is removed as well.
        queue_names = [queue_name.strip() for queue_name in options['queue_names'].split(',')]
        WorkerService().process_queues(queue_names)
| 786 | 214 |
import numpy as np
def indep_array(start, finish, num_steps):
    """Return ``num_steps`` geometrically spaced values from start to finish.

    Computes x[i] = start * r**i with r = (finish/start)**(1/(num_steps-1)),
    so x[0] == start and x[-1] ~= finish.

    Args:
        start: first (nonzero) value of the sequence.
        finish: last value of the sequence.
        num_steps: number of samples (>= 1).

    Returns:
        1-D float64 numpy array of length num_steps.
    """
    if num_steps == 1:
        # BUG FIX: the original divided by (num_steps - 1) and raised
        # ZeroDivisionError here; the only sensible sample is the start point.
        return np.array([float(start)])
    ratio = (finish / start) ** (1. / (num_steps - 1.))
    # Vectorized form of the original per-element loop.
    return start * ratio ** np.arange(num_steps)
| 212 | 82 |
# coding: utf-8
from the import expect
from filesystem import paths
from filesystem.paths import root
class TestFilesystemPaths:
    """Pin the constants exported by filesystem.paths.

    First half checks the relative path constants; second half checks that
    each root.* constant is exactly ROOT + the matching relative path.
    """
    def setup_method(self):
        # No per-test state needed; present for pytest symmetry.
        pass
    # relative paths
    def test_that_root_exists(self):
        expect(paths.ROOT).to.be.NOT.empty
    def test_that_app_is_correct(self):
        expect(paths.APP).to.be.eq('/app')
    def test_that_app_http_is_correct(self):
        expect(paths.APP_HTTP).to.be.eq('/app/http')
    def test_that_app_http_controllers_is_correct(self):
        expect(paths.APP_HTTP_CONTROLLERS).to.be.eq('/app/http/controllers')
    def test_that_app_http_middleware_is_correct(self):
        expect(paths.APP_HTTP_MIDDLEWARE).to.be.eq('/app/http/middleware')
    def test_that_app_providers_is_correct(self):
        expect(paths.APP_PROVIDERS).to.be.eq('/app/providers')
    def test_that_bootstrap_is_correct(self):
        expect(paths.BOOTSTRAP).to.be.eq('/bootstrap')
    def test_that_bootstrap_cache_is_correct(self):
        expect(paths.BOOTSTRAP_CACHE).to.be.eq('/bootstrap/cache')
    def test_that_config_is_correct(self):
        expect(paths.CONFIG).to.be.eq('/config')
    def test_that_databases_is_correct(self):
        expect(paths.DATABASES).to.be.eq('/databases')
    def test_that_databases_migrations_is_correct(self):
        expect(paths.DATABASES_MIGRATIONS).to.be.eq('/databases/migrations')
    def test_that_resources_is_correct(self):
        expect(paths.RESOURCES).to.be.eq('/resources')
    def test_that_resources_templates_is_correct(self):
        expect(paths.RESOURCES_TEMPLATES).to.be.eq('/resources/templates')
    def test_that_resources_snippets_is_correct(self):
        expect(paths.RESOURCES_SNIPPETS).to.be.eq('/resources/snippets')
    def test_that_routes_is_correct(self):
        expect(paths.ROUTES).to.be.eq('/routes')
    def test_that_storage_is_correct(self):
        expect(paths.STORAGE).to.be.eq('/storage')
    def test_that_storage_compiled_is_correct(self):
        expect(paths.STORAGE_COMPILED).to.be.eq('/storage/compiled')
    def test_that_storage_static_is_correct(self):
        expect(paths.STORAGE_STATIC).to.be.eq('/storage/static')
    def test_that_storage_uploads_is_correct(self):
        expect(paths.STORAGE_UPLOADS).to.be.eq('/storage/uploads')
    def test_that_tests_is_correct(self):
        expect(paths.TESTS).to.be.eq('/tests')
    # root paths: each root.X must equal ROOT + the relative paths.X
    def test_that_root_app_is_correct(self):
        expect(root.APP).to.be.eq(paths.ROOT + paths.APP)
    def test_that_root_app_http_is_correct(self):
        expect(root.APP_HTTP).to.be.eq(paths.ROOT + paths.APP_HTTP)
    def test_that_root_app_http_controllers_is_correct(self):
        expect(root.APP_HTTP_CONTROLLERS).to.be.eq(paths.ROOT + paths.APP_HTTP_CONTROLLERS)
    def test_that_root_app_http_middleware_is_correct(self):
        expect(root.APP_HTTP_MIDDLEWARE).to.be.eq(paths.ROOT + paths.APP_HTTP_MIDDLEWARE)
    def test_that_root_app_providers_is_correct(self):
        expect(root.APP_PROVIDERS).to.be.eq(paths.ROOT + paths.APP_PROVIDERS)
    def test_that_root_bootstrap_is_correct(self):
        expect(root.BOOTSTRAP).to.be.eq(paths.ROOT + paths.BOOTSTRAP)
    def test_that_root_bootstrap_cache_is_correct(self):
        expect(root.BOOTSTRAP_CACHE).to.be.eq(paths.ROOT + paths.BOOTSTRAP_CACHE)
    def test_that_root_config_is_correct(self):
        expect(root.CONFIG).to.be.eq(paths.ROOT + paths.CONFIG)
    def test_that_root_databases_is_correct(self):
        expect(root.DATABASES).to.be.eq(paths.ROOT + paths.DATABASES)
    def test_that_root_databases_migrations_is_correct(self):
        expect(root.DATABASES_MIGRATIONS).to.be.eq(paths.ROOT + paths.DATABASES_MIGRATIONS)
    def test_that_root_resources_is_correct(self):
        expect(root.RESOURCES).to.be.eq(paths.ROOT + paths.RESOURCES)
    def test_that_root_resources_templates_is_correct(self):
        expect(root.RESOURCES_TEMPLATES).to.be.eq(paths.ROOT + paths.RESOURCES_TEMPLATES)
    def test_that_root_resources_snippets_is_correct(self):
        expect(root.RESOURCES_SNIPPETS).to.be.eq(paths.ROOT + paths.RESOURCES_SNIPPETS)
    def test_that_root_routes_is_correct(self):
        expect(root.ROUTES).to.be.eq(paths.ROOT + paths.ROUTES)
    def test_that_root_storage_is_correct(self):
        expect(root.STORAGE).to.be.eq(paths.ROOT + paths.STORAGE)
    def test_that_root_storage_compiled_is_correct(self):
        expect(root.STORAGE_COMPILED).to.be.eq(paths.ROOT + paths.STORAGE_COMPILED)
    def test_that_root_storage_static_is_correct(self):
        expect(root.STORAGE_STATIC).to.be.eq(paths.ROOT + paths.STORAGE_STATIC)
    def test_that_root_storage_uploads_is_correct(self):
        expect(root.STORAGE_UPLOADS).to.be.eq(paths.ROOT + paths.STORAGE_UPLOADS)
    def test_that_root_tests_is_correct(self):
        expect(root.TESTS).to.be.eq(paths.ROOT + paths.TESTS)
| 4,890 | 1,807 |
from openpharmacophore.databases.zinc import get_zinc_urls, discretize_values
import pytest
@pytest.mark.parametrize("subset,mol_weight,logp,format", [
    ("Drug-Like", None, None, "smi"),
    (None, (250, 350), (-1, 1), "smi"),
    (None, (365, 415), (1.5, 2.25), "smi"),
    ("Drug-Like", None, None, "sdf"),
    (None, (200, 300), (-1, 2), "sdf"),
])
def test_download_ZINC2D_smiles(subset, mol_weight, logp, format):
    """Check the URL lists built for various ZINC subsets/property ranges.

    BUG FIX: the branch conditions compared against "Drug-like" while the
    parametrization passes "Drug-Like", so the drug-like assertions never
    executed and those cases passed without checking anything.
    """
    url_list = get_zinc_urls(
        subset=subset,
        mw_range=mol_weight,
        logp_range=logp,
        file_format=format,
    )
    if format == "smi":
        base_url = "http://files.docking.org/2D/"
        if subset == "Drug-Like":
            assert len(url_list) == 90 * 4 * 2
            assert url_list[0] == base_url + "BA/BAAA.smi"
            assert url_list[-1] == base_url + "JJ/JJEB.smi"
        elif mol_weight == (250, 350):
            assert len(url_list) == 12 * 4 * 2
            assert url_list[0] == base_url + "BA/BAAA.smi"
            assert url_list[-1] == base_url + "EC/ECEB.smi"
        elif mol_weight == (365, 415):
            assert len(url_list) == 12 * 4 * 2
            assert url_list[0] == base_url + "EC/ECAA.smi"
            assert url_list[-1] == base_url + "HE/HEEB.smi"
    else:
        base_url = "http://files.docking.org/3D/"
        if subset == "Drug-Like":
            assert len(url_list) == 19420
            assert url_list[0] == base_url + "JJ/EDRP/JJEDRP.xaa.sdf.gz"
            assert url_list[-1] == base_url + "AB/AAMM/ABAAMM.xaa.sdf.gz"
        elif mol_weight == (200, 300):
            assert len(url_list) == 3720
            assert url_list[0] == base_url + "AA/AAML/AAAAML.xaa.sdf.gz"
            assert url_list[-1] == base_url + "DC/EDRP/DCEDRP.xaa.sdf.gz"
@pytest.mark.parametrize("value,lower", [
    (230, True),
    (484, False),
    (600, True)
])
def test_discretize_values(value, lower):
    """Values snap to the bin edge below (lower=True) or above (lower=False);
    values beyond the last edge clamp to it."""
    bins = [200, 250, 300, 325, 350, 375, 400, 425, 450, 500, 550]
    new_value = discretize_values(value=value, bins=bins, name="Test", lower=lower)
    expected = {230: 200, 484: 500, 600: 550}[value]
    assert new_value == expected
import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import odeint
from scipy.optimize import minimize
import pandas as pd
# generate data file from TCLab or get sample data file from:
# http://apmonitor.com/pdc/index.php/Main/ArduinoEstimation2
# Import data file
# Column 1 = time (t)
# Column 2 = input (u)
# Column 3 = output (yp)
# NOTE(review): the 3-column comment above looks stale — the code below reads
# five columns (time, Q1, Q2, T1, T2); confirm against the generated file.
data = np.loadtxt('data.txt',delimiter=',',skiprows=1)
# extract data columns (.T on 1-D slices is a no-op, kept as-is)
t = data[:,0].T
Q1 = data[:,1].T
Q2 = data[:,2].T
T1meas = data[:,3].T
T2meas = data[:,4].T
# number of time points
ns = len(t)
# define energy balance model
def heat(x, t, Q1, Q2, p):
    """Two-node nonlinear energy balance for the TCLab heaters.

    Args:
        x: [T1, T2] current temperatures in degC.
        t: time (unused; required by odeint's signature).
        Q1, Q2: heater outputs in %.
        p: (U, alpha1, alpha2) — heat transfer coefficient and heater gains.

    Returns:
        [dT1/dt, dT2/dt] in degC per second.
    """
    U, alpha1, alpha2 = p
    # Fixed physical parameters.
    Ta = 23 + 273.15        # ambient temperature, K
    m = 4.0 / 1000.0        # mass, kg
    Cp = 0.5 * 1000.0       # heat capacity, J/kg-K
    A = 10.0 / 100.0**2     # surface area, m^2
    As = 2.0 / 100.0**2     # shared (coupling) area, m^2
    eps = 0.9               # emissivity
    sigma = 5.67e-8         # Stefan-Boltzmann constant
    # Convert states to Kelvin.
    T1 = x[0] + 273.15
    T2 = x[1] + 273.15
    # Heat exchanged between the two heaters (convective + radiative).
    convective = U * As * (T2 - T1)
    radiative = eps * sigma * As * (T2**4 - T1**4)
    # Nonlinear energy balances (ambient loss + coupling + heater input).
    dT1dt = (1.0 / (m * Cp)) * (U * A * (Ta - T1)
                                + eps * sigma * A * (Ta**4 - T1**4)
                                + convective + radiative
                                + alpha1 * Q1)
    dT2dt = (1.0 / (m * Cp)) * (U * A * (Ta - T2)
                                + eps * sigma * A * (Ta**4 - T2**4)
                                - convective - radiative
                                + alpha2 * Q2)
    return [dT1dt, dT2dt]
def simulate(p):
    """Integrate the energy-balance model over the measured time grid.

    Uses the module-level t, Q1, Q2 and the measured initial temperatures;
    the heater inputs are held constant across each interval.

    Returns:
        (len(t), 2) array of simulated [T1, T2].
    """
    trajectory = np.zeros((len(t), 2))
    trajectory[0, 0] = T1meas[0]
    trajectory[0, 1] = T2meas[0]
    state = trajectory[0]
    for k in range(len(t) - 1):
        segment = odeint(heat, state, [t[k], t[k + 1]], args=(Q1[k], Q2[k], p))
        state = segment[-1]
        trajectory[k + 1] = state
    return trajectory
# define objective
def objective(p):
    """Sum of squared relative errors between model and both measurements."""
    Tp = simulate(p)
    return sum(
        ((Tp[k, 0] - T1meas[k]) / T1meas[k]) ** 2
        + ((Tp[k, 1] - T2meas[k]) / T2meas[k]) ** 2
        for k in range(len(t))
    )
# Parameter initial guess
U = 10.0 # Heat transfer coefficient (W/m^2-K)
alpha1 = 0.0100 # Heat gain 1 (W/%)
alpha2 = 0.0075 # Heat gain 2 (W/%)
p0 = [U,alpha1,alpha2]
# show initial objective
print('Initial SSE Objective: ' + str(objective(p0)))
# optimize parameters
# bounds on variables (keep U and gains in physically plausible ranges)
bnds = ((2.0, 20.0),(0.005,0.02),(0.002,0.015))
solution = minimize(objective,p0,method='SLSQP',bounds=bnds)
p = solution.x
# show final objective
print('Final SSE Objective: ' + str(objective(p)))
# optimized parameter values
U = p[0]
alpha1 = p[1]
alpha2 = p[2]
print('U: ' + str(U))
print('alpha1: ' + str(alpha1))
print('alpha2: ' + str(alpha2))
# calculate model with updated parameters
Ti = simulate(p0)  # trajectory with the initial guess, for comparison
Tp = simulate(p)   # trajectory with the optimized parameters
# Plot results: T1 fit, T2 fit, and heater inputs (time axis in minutes)
plt.figure(1)
plt.subplot(3,1,1)
plt.plot(t/60.0,Ti[:,0],'y:',label=r'$T_1$ initial')
plt.plot(t/60.0,T1meas,'b-',label=r'$T_1$ measured')
plt.plot(t/60.0,Tp[:,0],'r--',label=r'$T_1$ optimized')
plt.ylabel('Temperature (degC)')
plt.legend(loc='best')
plt.subplot(3,1,2)
plt.plot(t/60.0,Ti[:,1],'y:',label=r'$T_2$ initial')
plt.plot(t/60.0,T2meas,'b-',label=r'$T_2$ measured')
plt.plot(t/60.0,Tp[:,1],'r--',label=r'$T_2$ optimized')
plt.ylabel('Temperature (degC)')
plt.legend(loc='best')
plt.subplot(3,1,3)
plt.plot(t/60.0,Q1,'g-',label=r'$Q_1$')
plt.plot(t/60.0,Q2,'k--',label=r'$Q_2$')
plt.ylabel('Heater Output')
plt.legend(loc='best')
plt.xlabel('Time (min)')
plt.show()
| 3,608 | 1,751 |
from math import gcd
# Initial moon positions and velocities (AoC 2019 day 12, part 2).
moons = [(-16, -1, -12), (0, -4, -17), (-11, 11, 0), (2, 2, -6)]
velocities = [(0,0,0),(0,0,0),(0,0,0),(0,0,0)]
# Per-axis state tracking: each key is (pos0..pos3, vel0..vel3) on one axis.
x_positions = set()
y_positions = set()
z_positions = set()
# BUG FIX: the y/z seeds previously mixed in velocity components from other
# axes (velocities[1][2] for y, velocities[1][0] for z).  All velocities are
# zero here so the seed values are unchanged, but the keys are now built
# consistently with the per-axis state they are meant to represent.
x_positions.add((moons[0][0],moons[1][0],moons[2][0],moons[3][0],velocities[0][0],velocities[1][0],velocities[2][0],velocities[3][0]))
y_positions.add((moons[0][1],moons[1][1],moons[2][1],moons[3][1],velocities[0][1],velocities[1][1],velocities[2][1],velocities[3][1]))
z_positions.add((moons[0][2],moons[1][2],moons[2][2],moons[3][2],velocities[0][2],velocities[1][2],velocities[2][2],velocities[3][2]))
# First step index at which each state was seen (step 0 = initial state).
x_sequences = {(moons[0][0],moons[1][0],moons[2][0],moons[3][0],velocities[0][0],velocities[1][0],velocities[2][0],velocities[3][0]): 0}
y_sequences = {(moons[0][1],moons[1][1],moons[2][1],moons[3][1],velocities[0][1],velocities[1][1],velocities[2][1],velocities[3][1]): 0}
z_sequences = {(moons[0][2],moons[1][2],moons[2][2],moons[3][2],velocities[0][2],velocities[1][2],velocities[2][2],velocities[3][2]): 0}
ctr = 0
def step():
    """Advance the simulation one tick, mutating the module-level
    ``moons`` and ``velocities`` lists in place."""
    # Gravity: every other moon pulls each velocity component by +/-1.
    for idx in range(4):
        pos = moons[idx]
        vel = velocities[idx]
        pull = [0, 0, 0]
        for other in moons:
            for axis in range(3):
                # (a > b) - (a < b) is the sign of the difference: -1/0/+1.
                pull[axis] += (other[axis] > pos[axis]) - (other[axis] < pos[axis])
        velocities[idx] = (vel[0] + pull[0], vel[1] + pull[1], vel[2] + pull[2])
    # Velocity: move every moon by its updated velocity.
    for idx in range(4):
        pos = moons[idx]
        vel = velocities[idx]
        moons[idx] = (pos[0] + vel[0], pos[1] + vel[1], pos[2] + vel[2])
x_cycle_length = 0
y_cycle_length = 0
z_cycle_length = 0

def _axis_state(axis):
    """Full simulation state along one axis: 4 positions then 4 velocities."""
    return (moons[0][axis], moons[1][axis], moons[2][axis], moons[3][axis],
            velocities[0][axis], velocities[1][axis], velocities[2][axis],
            velocities[3][axis])

# The three axes evolve independently, so each has its own (short) cycle;
# step until a repeated state has been seen on every axis.
while True:
    ctr += 1
    step()
    state_x = _axis_state(0)
    state_y = _axis_state(1)
    state_z = _axis_state(2)
    # BUG FIX: the y/z membership keys previously mixed in velocity
    # components from the wrong axes (velocities[1][2] for y and
    # velocities[1][0] for z), so the tracked state was not the true
    # per-axis state and could report a bogus cycle length.
    if state_x in x_positions:
        x_cycle_length = ctr - x_sequences[state_x]
    if state_y in y_positions:
        y_cycle_length = ctr - y_sequences[state_y]
    if state_z in z_positions:
        z_cycle_length = ctr - z_sequences[state_z]
    if x_cycle_length != 0 and y_cycle_length != 0 and z_cycle_length != 0:
        break
    x_positions.add(state_x)
    y_positions.add(state_y)
    z_positions.add(state_z)
    x_sequences[state_x] = ctr
    y_sequences[state_y] = ctr
    z_sequences[state_z] = ctr

print('Cycles found:')
print(f'x lasts {x_cycle_length}')
print(f'y lasts {y_cycle_length}')
print(f'z lasts {z_cycle_length}')
print((x_cycle_length,y_cycle_length,z_cycle_length))
def compute_lcm(x, y):
return (x*y)/gcd(x,y)
print(int(compute_lcm(x_cycle_length, int(compute_lcm(y_cycle_length, z_cycle_length)))))
| 4,052 | 1,921 |
import AutoTicketsBot as tBot

# Path where the (possibly updated) YAML configuration is persisted.
configDestination = 'var/config.yml'

# Merge command-line arguments into the stored configuration and persist
# the result; configWrite evidently returns True on a successful write.
args = tBot.addArgs()
config = tBot.configRead(configDestination)
if tBot.configWrite(configDestination, args, config) is True:
    print("Successfully store new config to {}".format(configDestination))

ticketsBot = tBot.AutoTicketsBot(config)
#scheduleBot(ticketsBot, config['Config']['startTime'])
try:
    # Sign in (up to 3 retries), attempt the purchase, then notify the user.
    # waitTime=900 presumably keeps the session alive ~15 minutes before
    # teardown so checkout can be completed manually — TODO confirm units.
    tBot.websiteSignIn(ticketsBot, retryCounter=3)
    tBot.buyTickets(ticketsBot)
    tBot.notifyUser('AutoTicketsBot Notification', 'Got tickets!!!!!')
    tBot.terminateBot(ticketsBot, waitTime=900)
except RuntimeError as e:
    # On a bot-raised failure, shut down immediately and report the error.
    tBot.terminateBot(ticketsBot, waitTime=0)
    print(e)
def get_map_from_input(input_location):
    """Read the tree grid from a file and widen each row by repetition.

    Each line is repeated 200 times horizontally so that steep right-moving
    slopes stay inside the grid (the puzzle pattern repeats to the right).

    Parameters
    ----------
    input_location : str
        Path to the text file containing the grid, one row per line.

    Returns
    -------
    list[str]
        The rows of the grid, each repeated 200 times.
    """
    # `with` guarantees the handle is closed even if read() raises,
    # unlike the original explicit open()/close() pair.
    with open(input_location, 'r') as f:
        input_map = f.read().split('\n')

    lines = len(input_map)
    columns = len(input_map[0])
    print(f"Original map = {lines} x {columns}")

    extended_map = [line * 200 for line in input_map]
    print(
        f"Extended map = {len(extended_map)} x {len(extended_map[0])}")
    return extended_map
def traverse_map_counting_trees(extended_map, right, down):
    """Count trees ('#') hit while sledding down the grid.

    Starting at the top-left corner, move `right` columns and `down` rows
    per step; the grid repeats horizontally, which is handled with a
    modulo on the row width. The original version relied on the caller
    pre-extending the rows and silently undercounted (bare `except` +
    `break`) when a slope ran past the extended width.

    Parameters
    ----------
    extended_map : list[str]
        Grid rows; '#' marks a tree.
    right, down : int
        Horizontal and vertical step sizes.

    Returns
    -------
    int
        Number of '#' squares visited.
    """
    if not extended_map or not extended_map[0]:
        return 0
    width = len(extended_map[0])
    tree_counter = 0
    step = 0
    for i in range(0, len(extended_map), down):
        if extended_map[i][(step * right) % width] == '#':
            tree_counter += 1
        step += 1
    return tree_counter
extended_map = get_map_from_input('input')

# Evaluate each slope exactly once and reuse the counts for the final
# product (the original re-ran all five traversals a second time).
slopes = [(1, 1), (3, 1), (5, 1), (7, 1), (1, 2)]
tree_counts = []
for right, down in slopes:
    count = traverse_map_counting_trees(extended_map, right, down)
    tree_counts.append(count)
    print(f"{right}x{down} => {count}")

total = 1
for count in tree_counts:
    total *= count
print(f"Numbers multiplied = {total}")
import os
import sys
import pytest
from django.contrib.auth import get_user_model
from users.tests.factories import UserFactory
sys.path.append(os.path.join(os.path.dirname(__file__), 'app'))
User = get_user_model()
@pytest.fixture(autouse=True)
def enable_db(db):
    # Autouse fixture: by requesting pytest-django's `db` fixture here,
    # every test in this suite gets database access without needing an
    # explicit `pytest.mark.django_db` marker.
    pass
@pytest.fixture
def user() -> User:
    """Provide a user instance freshly created by UserFactory."""
    return UserFactory()
| 343 | 121 |
import numpy as np
import numba
from numba import jit
@jit(nopython=True)
def distance(a, b):
    """Euclidean distance between two vectors.

    Iterates over min(len(a), len(b)): the original used max(), which
    indexed past the end of the shorter vector and crashed whenever the
    inputs had different lengths. For equal-length inputs the result is
    unchanged.
    """
    d = 0.0
    for i in range(min(len(a), len(b))):
        diff = a[i] - b[i]
        d += diff * diff
    return d ** 0.5
'''
Smallest factor to reach a number composed of digit '1'
Status: Accepted
'''
###############################################################################
def main():
    """Read integers until EOF; for each, print the length of the
    smallest repunit (111...1) divisible by it."""
    while True:
        try:
            target = int(input())
        except EOFError:
            return
        if target == 1:
            print('1')
            continue
        # A repunit divisible by target exists only when target shares no
        # factor with 10.
        assert target % 2 != 0
        assert target % 5 != 0
        # Grow the repunit one digit at a time, tracking only its residue.
        length = 1
        residue = 1 % target
        while residue:
            residue = (residue * 10 + 1) % target
            length += 1
        print(length)

###############################################################################

if __name__ == '__main__':
    main()
| 751 | 191 |
from flask import request, abort
from flask_restful_swagger_3 import Resource, swagger
from flask_jwt_extended import jwt_required, get_jwt_identity, get_jwt
from models.message import Message, MessageCreate
from database.manager import db
from emails import send_new_message
class MessageAPICreate(Resource):
    """REST resource for creating (or amending) event messages."""

    @jwt_required()
    @swagger.doc({
        'tags': ['message'],
        'security': [
            {'BearerAuth': []}
        ],
        'requestBody': {
            'required': True,
            'content': {
                'application/json': {
                    'schema': Message
                }
            }
        },
        'responses': {
            '201': {
                'description': 'Created message',
                'content': {
                    'application/json': {
                        'schema': Message
                    }
                }
            },
            '401': {
                'description': 'Not authenticated'
            },
            '403': {
                'description': 'Update forbidden'
            }
        }
    })
    def post(self):
        """Create a message, or edit the caller's latest message.

        The JSON body is validated with MessageCreate and the JWT identity
        becomes the author. When `editLatest` is true, the newest message
        of the event is updated in place — but only if it belongs to the
        caller; otherwise a new message is inserted and a notification
        email is sent. Returns 201 with a Location header on success.
        """
        args = request.json
        author_id = get_jwt_identity()
        args['author_id'] = author_id
        try:
            # Validate request body with schema model
            message = MessageCreate(**args)
        except ValueError as e:
            abort(400, e.args[0])
        props = None
        # NOTE(review): MessageCreate appears to support dict-style access
        # and deletion — confirm against the model definition.
        editLatest = message['editLatest']
        del message['editLatest']
        if editLatest:
            last_msg = db.get_last_message(message['event_id'])
            # Only the author of the latest message may amend it.
            if last_msg and last_msg['author_id'] == author_id:
                nb = db.edit_message(last_msg['id'], message['comment'], last_msg['author_id'], last_msg['event_id'])
                if nb == 1:
                    last_msg['comment'] = message['comment']
                    props = last_msg
                else:
                    abort(500, 'Error updating comment')
            else:
                abort(403, 'Can only update the latest comment if it is yours')
        else:
            try:
                props = db.insert_message(**message)
            except Exception as e:
                abort(500, e.args[0])
        # Email — only brand-new messages trigger a notification.
        if not editLatest:
            claims = get_jwt()
            author_name = claims['firstname'] + ' ' + claims['lastname']
            send_new_message(author_name, author_id, props['event_id'], props['comment'])
        return Message(**props), 201, {'Location': request.path + '/' + str(props['id'])}
| 2,190 | 688 |
#!/usr/bin/env python3
#
# Copyright (c) 2015, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#*******************************************************************
#
# NAME: dgemm
#
# PURPOSE: This program tests the efficiency with which a dense matrix
# dense multiplication is carried out
#
# USAGE: The program takes as input the matrix order,
# the number of times the matrix-matrix multiplication
# is carried out.
#
# <progname> <# iterations> <matrix order>
#
# The output consists of diagnostics to make sure the
# algorithm worked, and of timing statistics.
#
# HISTORY: Written by Rob Van der Wijngaart, February 2009.
# Converted to Python by Jeff Hammond, February 2016.
# PyOMP support, ave+std_dev by Tim Mattson, May 2021
# *******************************************************************
import sys
from numba import njit
from numba.openmp import openmp_context as openmp
from numba.openmp import omp_set_num_threads, omp_get_thread_num, omp_get_num_threads, omp_get_wtime
import numpy as np
#from time import process_time as timer
#@njit(enable_ssa=False, cache=True) What does "enable_ssa" mean?
@njit(fastmath=True)
def dgemm(iters, order):
    """Time `iters` repetitions of a dense order x order matrix multiply.

    Iteration 0 is an untimed warm-up; iterations 1..iters are timed
    individually so both mean and standard deviation of the per-iteration
    time can be reported. C accumulates across all iterations, which the
    reference checksum accounts for via its (iters+1) factor.
    """
    # ********************************************************************
    # ** Allocate space for the input and transpose matrix
    # ********************************************************************
    print('inside dgemm')
    A = np.zeros((order,order))
    B = np.zeros((order,order))
    C = np.zeros((order,order))
    # Column i of A and B holds the constant value i.
    for i in range(order):
        A[:,i] = float(i)
        B[:,i] = float(i)

    # print(omp_get_num_threads())
    for kiter in range(0,iters+1):
        # Start the clock (and reset the accumulators) after the warm-up
        # iteration 0 has finished compiling/warming caches.
        if kiter==1:
            t0 = omp_get_wtime()
            tSum=0.0
            tsqSum=0.0
        # Parallel ikj triple loop; j and k are private per thread.
        with openmp("parallel for schedule(static) private(j,k)"):
            for i in range(order):
                for k in range(order):
                    for j in range(order):
                        C[i][j] += A[i][k] * B[k][j]
        if kiter>0:
            # Accumulate this iteration's elapsed time and its square
            # (for the standard deviation), then roll the start time.
            tkiter = omp_get_wtime()
            t = tkiter - t0
            tSum = tSum + t
            tsqSum = tsqSum+t*t
            t0 = tkiter

    dgemmAve = tSum/iters
    # Sample standard deviation from sum and sum-of-squares.
    dgemmStdDev = ((tsqSum-iters*dgemmAve*dgemmAve)/(iters-1))**0.5
    print('finished with computations')

    # ********************************************************************
    # ** Analyze and output results.
    # ********************************************************************
    checksum = 0.0;
    for i in range(order):
        for j in range(order):
            checksum += C[i][j];

    # Analytic checksum: sum over all C entries of all iterations.
    ref_checksum = order*order*order
    ref_checksum *= 0.25*(order-1.0)*(order-1.0)
    ref_checksum *= (iters+1)

    epsilon=1.e-8
    if abs((checksum - ref_checksum)/ref_checksum) < epsilon:
        print('Solution validates')
        nflops = 2.0*order*order*order
        # Error propagation of 1/t into the rate's +/- band.
        recipDiff = (1.0/(dgemmAve-dgemmStdDev) - 1.0/(dgemmAve+dgemmStdDev))
        GfStdDev = 1.e-6*nflops*recipDiff/2.0
        print('nflops: ',nflops)
        print('Rate: ',1.e-6*nflops/dgemmAve,' +/- (MF/s): ',GfStdDev)
    else:
        print('ERROR: Checksum = ', checksum,', Reference checksum = ', ref_checksum,'\n')
#        sys.exit("ERROR: solution did not validate")
# ********************************************************************
# read and test input parameters
# ********************************************************************
print('Parallel Research Kernels version ') #, PRKVERSION
print('Python Dense matrix-matrix multiplication: C = A x B')

# Validate command-line arguments: <# iterations> <matrix order>.
if len(sys.argv) != 3:
    print('argument count = ', len(sys.argv))
    sys.exit("Usage: ./dgemm <# iterations> <matrix order>")

itersIn = int(sys.argv[1])
if itersIn < 1:
    sys.exit("ERROR: iterations must be >= 1")

orderIn = int(sys.argv[2])
if orderIn < 1:
    sys.exit("ERROR: order must be >= 1")

print('Number of iterations = ', itersIn)
print('Matrix order = ', orderIn)

dgemm(itersIn, orderIn)
| 5,544 | 1,827 |
# coding:utf-8
# example 17: quick_sort.py
import random
# def quick_sort(array):
# if len(array) <= 1:
# return array
# pivot_idx = 0
# pivot = array[pivot_idx]
# less_part = [num for num in array[pivot_idx + 1:] if num <= pivot]
# great_part = [num for num in array[pivot_idx + 1:] if num > pivot]
# return quick_sort(less_part) + [pivot] + quick_sort(great_part)
# def test_quick_sort():
# import random
# array = [random.randint(1, 100) for _ in range(10)]
# sorted_array = sorted(array)
# my_sorted_array = quick_sort(array)
# assert my_sorted_array == sorted_array
def partition(array, start, stop):  # [start, stop)
    """Partition array[start:stop) around the pivot array[start].

    After the call the pivot sits at the returned index; everything to
    its left is < pivot and everything to its right is >= pivot.
    """
    pivot = array[start]
    lo, hi = start + 1, stop - 1
    while lo <= hi:
        # Skip elements already on the correct (small) side.
        while lo <= hi and array[lo] < pivot:
            lo += 1
        # Skip elements already on the correct (large) side.
        while lo <= hi and pivot <= array[hi]:
            hi -= 1
        if lo < hi:
            array[lo], array[hi] = array[hi], array[lo]
    # hi now marks the last element < pivot; swap the pivot into place.
    array[start], array[hi] = array[hi], array[start]
    return hi
def test_partition():
    """Pivot index checks for a few hand-picked inputs."""
    cases = [
        ([3, 1, 4, 2], 2),
        ([1, 2, 3, 4], 0),
        ([4, 3, 2, 1], 3),
        ([3, 5, 4, 3, 6, 7, 2, 3], 1),
    ]
    for data, expected in cases:
        assert partition(data, 0, len(data)) == expected
def quick_sort_inplace(array, start, stop):  # [start, stop)
    """Sort array[start:stop) in place by recursive partitioning."""
    if stop <= start:
        return
    split = partition(array, start, stop)
    quick_sort_inplace(array, start, split)
    quick_sort_inplace(array, split + 1, stop)
def test_quick_sort_inplace():
    """Random round-trip against the built-in sorted()."""
    data = [random.randint(-100, 100) for _ in range(10)]
    expected = sorted(data)
    quick_sort_inplace(data, 0, len(data))
    assert data == expected
| 1,859 | 699 |
from zope.interface import implements
from epsilon.extime import Time
from axiom.iaxiom import IScheduler
from axiom.item import Item
from axiom.attributes import text
from axiom.test.historic.stubloader import StubbedTest
from xquotient.exmess import _UndeferTask, Message, INBOX_STATUS, CLEAN_STATUS
from xquotient.test.historic.stub_undefertask1to2 import FakeScheduler
from xquotient.test.historic import stub_undefertask1to2
from xquotient.test.util import DummyMessageImplementationMixin
class DummyMessageImplementation(Item, DummyMessageImplementationMixin):
    """
    Satisfy the requirement imposed by this database to have an item with
    this type name.

    This is an extremely terrible hack necessitated by the use of "dummy"
    items in the test package which aren't actually stable. This should be
    avoided as much as possible, since it can easily result in tests which
    have mutually exclusive requirements in order to pass, and at the very
    least impose an excessive maintenance burden as the codebase is updated.

    Do not copy this hack. Do not define new schemas which might eventually
    require it.
    """
    typeName = 'xquotient_test_test_workflow_dummymessageimplementation'

    senderInfo = text(
        doc="""
        The sender as passed by the factory which created this implementation;
        used to provide a sensible implementation of relatedAddresses.
        """,
        default=None, allowNone=True)

    def walk(self):
        """
        Necessary for the tests for upgrading Message to version 6.
        """
        # No parts to walk for the dummy implementation.
        return ()
class UndeferTaskTest(StubbedTest):
    """Upgrade tests for _UndeferTask version 1 -> 2."""

    def setUp(self):
        # Reset the fake scheduler's call log before the stub store is
        # opened, so only upgrade-time (un)scheduling is recorded.
        stub_undefertask1to2.SCHEDULE_LOG = []
        return StubbedTest.setUp(self)

    def getStatuses(self):
        """
        @return: A C{set} of statuses for the deferred message.
        """
        return set(self.store.findFirst(Message).iterStatuses())

    def test_correctScheduling(self):
        """
        Check that the old task has been unscheduled and the new task has been
        scheduled.
        """
        task = self.store.findFirst(_UndeferTask)
        # NOTE: Python 2 idiom — zip() returns a list here, so it can be
        # indexed directly with [0].
        self.assertEqual(list(zip(*stub_undefertask1to2.SCHEDULE_LOG)[0]),
                         ['unschedule', 'schedule'])
        self.assertEqual(stub_undefertask1to2.SCHEDULE_LOG[-1][1], task)
        self.assertNotEqual(stub_undefertask1to2.SCHEDULE_LOG[0][1], task)

    def test_notInInbox(self):
        """
        Test that the deferred message is not in the inbox.
        """
        stats = self.getStatuses()
        self.failIfIn(INBOX_STATUS, stats)

    def test_inAll(self):
        """
        Test that the deferred message does appear in the "all" view.
        """
        stats = self.getStatuses()
        self.failUnlessIn(CLEAN_STATUS, stats)

    def test_notFrozen(self):
        """
        Test that the deferred message is not 'frozen' with
        L{Message.freezeStatus}.
        """
        # NOTE: This is added as documentation, not TDD -- it passes already.
        # Frozen statuses are marked with a leading '.'.
        for status in self.getStatuses():
            self.failIf(status.startswith('.'))
| 3,129 | 918 |
"""
A test file for testing zestimation
The learned file could be downloaded at
[learned_zqso_only_model_outdata_full_dr9q_minus_concordance_norm_1176-1256.mat]
(https://drive.google.com/file/d/1SqAU_BXwKUx8Zr38KTaA_nvuvbw-WPQM/view?usp=sharing)
"""
import os
import re
import time
import numpy as np
from .test_selection import filenames, z_qsos
from gpy_dla_detection.read_spec import read_spec, retrieve_raw_spec
from gpy_dla_detection.zqso_set_parameters import ZParameters
from gpy_dla_detection.zqso_samples import ZSamples
from gpy_dla_detection.zqso_gp import ZGPMAT
def test_zestimation(nspec: int):
    """Run z-estimation for spectrum `nspec`; return (z_map, z_true)."""
    spec_file = filenames[nspec]

    # Download the raw spectrum from SDSS if it is not cached locally.
    if not os.path.exists(spec_file):
        plate, mjd, fiber_id = re.findall(
            r"spec-([0-9]+)-([0-9]+)-([0-9]+).fits", spec_file,
        )[0]
        retrieve_raw_spec(int(plate), int(mjd), int(fiber_id))

    params = ZParameters()
    samples = ZSamples(params)

    wavelengths, flux, noise_variance, pixel_mask = read_spec(spec_file)

    gp = ZGPMAT(
        params,
        samples,
        learned_file="data/dr12q/processed/learned_zqso_only_model_outdata_full_dr9q_minus_concordance_norm_1176-1256.mat",
    )

    tic = time.time()
    gp.inference_z_qso(wavelengths, flux, noise_variance, pixel_mask)
    print("Z True : {:.3g}".format(z_qsos[nspec]))
    toc = time.time()
    print("spent {} mins; {} seconds".format((toc - tic) // 60, (toc - tic) % 60))

    return gp.z_map, z_qsos[nspec]
def test_batch(num_quasars: int = 100):
    """Estimate z for the first `num_quasars` spectra and check accuracy."""
    deltas = np.zeros((num_quasars,))
    for idx in range(num_quasars):
        z_map, z_true = test_zestimation(idx)
        delta = z_map - z_true
        print("[Info] z_diff = z_map - z_true = {:.8g}".format(delta))
        deltas[idx] = delta

    print("[Info] abs(z_diff) < 0.5 = {:.4g}".format(accuracy(deltas, 0.5)))
    print("[Info] abs(z_diff) < 0.05 = {:.4g}".format(accuracy(deltas, 0.05)))

    # we got ~99% accuracy in https://arxiv.org/abs/2006.07343
    # so at least we need to ensure ~98% here
    assert accuracy(deltas, 0.5) > 0.98
def accuracy(z_diff: np.ndarray, z_thresh: float):
    """Fraction of entries in `z_diff` whose magnitude is below `z_thresh`."""
    within_threshold = np.abs(z_diff) < z_thresh
    return within_threshold.sum() / z_diff.shape[0]
| 2,304 | 1,005 |
import pytest
from quart.xml_parser_quart import fusion_vulnerability_dictionaries
# Expected parse of the first sample report: QIDs 1 and 2.
EXPECTED_1 = \
    {u'1': {'category': u'Category 1',
            'consequence': u'Consequence 1',
            'diagnosis': u'Diagnosis 1',
            'hosts': [{'ip': u'1.1.1.1', 'name': 'host1'},
                      {'ip': u'2.2.2.2', 'name': 'host2'}],
            'qid': u'1',
            'severity': 5,
            'solution': u'Solution 1',
            'title': u'Vulnerability Title 1'},
     u'2': {'category': u'Category 2',
            'consequence': u'Consequence 2',
            'diagnosis': u'Diagnosis 2',
            'hosts': [{'ip': u'2.2.2.2', 'name': 'host2'}],
            'qid': u'2',
            'severity': 4,
            'solution': u'Solution 2',
            'title': u'Vulnerability Title 2'}}

# Expected parse of the second sample report: QIDs 2 and 3 (QID 2 overlaps
# with EXPECTED_1 but on a different host).
EXPECTED_2 = \
    {u'2': {'category': u'Category 2',
            'consequence': u'Consequence 2',
            'diagnosis': u'Diagnosis 2',
            'hosts': [{'ip': u'3.3.3.3', 'name': 'host3'}],
            'qid': u'2',
            'severity': 4,
            'solution': u'Solution 2',
            'title': u'Vulnerability Title 2'},
     u'3': {'category': u'Category 3',
            'consequence': u'Consequence 3',
            'diagnosis': u'Diagnosis 3',
            'hosts': [{'ip': u'4.4.4.4', 'name': 'host4'}],
            'qid': u'3',
            'severity': 3,
            'solution': u'Solution 3',
            'title': u'Vulnerability Title 3'}}

# Expected fusion of EXPECTED_1 and EXPECTED_2: QID 2's host lists merge.
EXPECTED_1_2= \
    {u'1': {'category': u'Category 1',
            'consequence': u'Consequence 1',
            'diagnosis': u'Diagnosis 1',
            'hosts': [{'ip': u'1.1.1.1', 'name': 'host1'},
                      {'ip': u'2.2.2.2', 'name': 'host2'}],
            'qid': u'1',
            'severity': 5,
            'solution': u'Solution 1',
            'title': u'Vulnerability Title 1'},
     u'2': {'category': u'Category 2',
            'consequence': u'Consequence 2',
            'diagnosis': u'Diagnosis 2',
            'hosts': [{'ip': u'2.2.2.2', 'name': 'host2'},
                      {'ip': u'3.3.3.3', 'name': 'host3'}],
            'qid': u'2',
            'severity': 4,
            'solution': u'Solution 2',
            'title': u'Vulnerability Title 2'},
     u'3': {'category': u'Category 3',
            'consequence': u'Consequence 3',
            'diagnosis': u'Diagnosis 3',
            'hosts': [{'ip': u'4.4.4.4', 'name': 'host4'}],
            'qid': u'3',
            'severity': 3,
            'solution': u'Solution 3',
            'title': u'Vulnerability Title 3'}}
@pytest.mark.parametrize('dictionary_1, dictionary_2, fusion_dictionary', (
    ({}, {}, {}),
    ({}, EXPECTED_1, EXPECTED_1),
    (EXPECTED_2, {}, EXPECTED_2),
    (EXPECTED_1, EXPECTED_1, EXPECTED_1),
    (EXPECTED_1, EXPECTED_2, EXPECTED_1_2),
))
def test_fusion_vulnerability_dictionaries(dictionary_1, dictionary_2,
                                           fusion_dictionary):
    """Fusing two parsed reports yields the expected merged dictionary,
    including the identity (empty) and idempotent (self-merge) cases."""
    assert fusion_vulnerability_dictionaries(dictionary_1, dictionary_2) ==\
        fusion_dictionary
from poker import poker, kind, two_pair, hand_rank, card_ranks, best_hand
def test():
    """Test cases for the functions in poker program"""
    # Representative hands, strongest to weakest.
    sf = "6C 7C 8C 9C TC".split() # Straight Flush
    fk = "9D 9H 9S 9C 7D".split() # Four of a Kind
    fh = "TD TC TH 7C 7D".split() # Full House
    tp = "5S 5D AC AS KS".split() # Two Pair
    s1 = "AC 2S 3C 4D 5D".split() # A-5 Straight
    s2 = "2S 3C 4D 5D 6S".split() # 2-6 Straight
    ah = "AC 2S 9C 4D 6D".split() # A High
    sh = "7C 2S 6C 3D 5D".split() # 7 High
    assert poker([s1, s2, ah, sh]) == [s2]
    assert poker([s1, ah, sh]) == [s1]
    # kind(n, ranks) returns the rank appearing n times, else None.
    fkranks = card_ranks(fk)
    tpranks = card_ranks(tp)
    assert kind(4, fkranks) == 9
    assert kind(3, fkranks) is None
    assert kind(2, fkranks) is None
    assert kind(1, fkranks) == 7
    assert two_pair(fkranks) is None
    assert two_pair(tpranks) == (14, 5)
    assert poker([sf, fk, fh]) == [sf]
    assert poker([fk, fh]) == [fk]
    # Ties return every winning hand.
    assert poker([fh, fh]) == [fh, fh]
    assert poker([sf]) == [sf]
    assert poker([sf] + 99*[fh]) == [sf]
    assert hand_rank(sf) == (8, 10)
    assert hand_rank(fk) == (7, 9, 7)
    assert hand_rank(fh) == (6, 10, 7)
    return 'tests pass'
# Parenthesized form works as a print statement in Python 2 (single
# argument) and as the print function in Python 3.
print(test())
def test_best_hand():
    """best_hand() must pick the strongest 5-card hand out of 7 cards."""
    test_1 = sorted(best_hand("6C 7C 8C 9C TC 5C JS".split()))
    assert (test_1 == ['6C', '7C', '8C', '9C', 'TC'])
    test_2 = sorted(best_hand("TD TC TH 7C 7D 8C 8S".split()))
    assert (test_2 == ['8C', '8S', 'TC', 'TD', 'TH'])
    test_3 = sorted(best_hand("JD TC TH 7C 7D 7S 7H".split()))
    assert (test_3 == ['7C', '7D', '7H', '7S', 'JD'])
    return 'test_best_hand passes'

# Parenthesized print works in both Python 2 and Python 3 (the original
# `print test_best_hand()` statement is Python 2 only).
print(test_best_hand())
| 1,651 | 781 |
""" base estimator class for megaman """
# Author: James McQueen -- <jmcq@u.washington.edu>
# LICENSE: Simplified BSD https://github.com/mmp2/megaman/blob/master/LICENSE
import numpy as np
from scipy.sparse import isspmatrix
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils.validation import check_array
from ..geometry.geometry import Geometry
# from sklearn.utils.validation import FLOAT_DTYPES
FLOAT_DTYPES = (np.float64, np.float32, np.float16)
class BaseEmbedding(BaseEstimator, TransformerMixin):
    """ Base Class for all megaman embeddings.

    Inherits BaseEstimator and TransformerMixin from sklearn.

    BaseEmbedding creates the common interface to the geometry
    class for all embeddings as well as providing a common
    .fit_transform().

    Parameters
    ----------
    n_components : integer
        number of coordinates for the manifold.
    radius : float (optional)
        radius for adjacency and affinity calculations. Will be overridden if
        either is set in `geom`
    geom : dict or megaman.geometry.Geometry object
        specification of geometry parameters: keys are
        ["adjacency_method", "adjacency_kwds", "affinity_method",
         "affinity_kwds", "laplacian_method", "laplacian_kwds"]

    Attributes
    ----------
    geom_ : a fitted megaman.geometry.Geometry object.
    """
    def __init__(self, n_components=2, radius=None, geom=None):
        self.n_components = n_components
        self.radius = radius
        self.geom = geom

    def _validate_input(self, X, input_type):
        # Raw data must be dense; precomputed adjacency/affinity matrices
        # may arrive in any of the common scipy sparse formats.
        if input_type == 'data':
            sparse_formats = None
        elif input_type in ['adjacency', 'affinity']:
            sparse_formats = ['csr', 'coo', 'lil', 'bsr', 'dok', 'dia']
        else:
            raise ValueError("unrecognized input_type: {0}".format(input_type))
        return check_array(X, dtype=FLOAT_DTYPES, accept_sparse=sparse_formats)

    # # The world is not ready for this...
    # def estimate_radius(self, X, input_type='data', intrinsic_dim=None):
    #     """Estimate a radius based on the data and intrinsic dimensionality
    #
    #     Parameters
    #     ----------
    #     X : array_like, [n_samples, n_features]
    #         dataset for which radius is estimated
    #     intrinsic_dim : int (optional)
    #         estimated intrinsic dimensionality of the manifold. If not
    #         specified, then intrinsic_dim = self.n_components
    #
    #     Returns
    #     -------
    #     radius : float
    #         The estimated radius for the fit
    #     """
    #     if input_type == 'affinity':
    #         return None
    #     elif input_type == 'adjacency':
    #         return X.max()
    #     elif input_type == 'data':
    #         if intrinsic_dim is None:
    #             intrinsic_dim = self.n_components
    #         mean_std = np.std(X, axis=0).mean()
    #         n_features = X.shape[1]
    #         return 0.5 * mean_std / n_features ** (1. / (intrinsic_dim + 6))
    #     else:
    #         raise ValueError("Unrecognized input_type: {0}".format(input_type))

    def fit_geometry(self, X=None, input_type='data'):
        """Inputs self.geom, and produces the fitted geometry self.geom_"""
        # Accept None (defaults), an existing Geometry, or a mapping of
        # Geometry keyword arguments.
        if self.geom is None:
            self.geom_ = Geometry()
        elif isinstance(self.geom, Geometry):
            self.geom_ = self.geom
        else:
            try:
                kwds = dict(**self.geom)
            except TypeError:
                raise ValueError("geom must be a Geometry instance or "
                                 "a mappable/dictionary")
            self.geom_ = Geometry(**kwds)

        # override=False: a radius already set inside `geom` wins.
        if self.radius is not None:
            self.geom_.set_radius(self.radius, override=False)
        # if self.radius == 'auto':
        #     if X is not None and input_type != 'affinity':
        #         self.geom_.set_radius(self.estimate_radius(X, input_type),
        #                               override=False)
        # else:
        #     self.geom_.set_radius(self.radius,
        #                           override=False)

        if X is not None:
            self.geom_.set_matrix(X, input_type)

        return self

    def fit_transform(self, X, y=None, input_type='data'):
        """Fit the model from data in X and transform X.

        Parameters
        ----------
        input_type : string, one of: 'data', 'distance' or 'affinity'.
            The values of input data X. (default = 'data')
        X: array-like, shape (n_samples, n_features)
            Training vector, where n_samples in the number of samples
            and n_features is the number of features.

        If self.input_type is 'distance':

        X : array-like, shape (n_samples, n_samples),
            Interpret X as precomputed distance or adjacency graph
            computed from samples.

        Returns
        -------
        X_new: array-like, shape (n_samples, n_components)
        """
        self.fit(X, y=y, input_type=input_type)
        return self.embedding_

    def transform(self, X, y=None, input_type='data'):
        # Subclasses implement fit(); out-of-sample transform is not
        # supported by the base class.
        raise NotImplementedError("transform() not implemented. "
                                  "Try fit_transform()")
| 5,249 | 1,567 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#
# generated by wxGlade 0.9.4 on Sat Feb 1 19:14:54 2020
#
import wx
from pymorsecode import MorseCode
# begin wxGlade: dependencies
# end wxGlade
# begin wxGlade: extracode
# end wxGlade
class MyFrame(wx.Frame):
    """Main window of the Morse code generator (layout generated by wxGlade)."""

    def __init__(self, *args, **kwds):
        # begin wxGlade: MyFrame.__init__
        kwds["style"] = kwds.get("style", 0) | wx.DEFAULT_FRAME_STYLE
        wx.Frame.__init__(self, *args, **kwds)
        self.SetSize((602, 444))
        self.text_ctrl_txt = wx.TextCtrl(self, wx.ID_ANY, "Enter String", style=wx.TE_MULTILINE | wx.TE_NO_VSCROLL)
        self.spin_ctrl_wpm = wx.SpinCtrl(self, wx.ID_ANY, "15", min=5, max=25)
        self.spin_ctrl_hz = wx.SpinCtrl(self, wx.ID_ANY, "800", min=500, max=1000)
        self.text_ctrl_morsecode = wx.TextCtrl(self, wx.ID_ANY, "Generated Morse Code", style=wx.TE_MULTILINE | wx.TE_NO_VSCROLL | wx.TE_READONLY)
        self.button_clear = wx.Button(self, wx.ID_ANY, "Clear")
        self.button_open = wx.Button(self, wx.ID_ANY, "Open")
        self.button_save = wx.Button(self, wx.ID_ANY, "Save")
        self.button_play = wx.Button(self, wx.ID_ANY, "Play")
        self.button_generate = wx.Button(self, wx.ID_ANY, "Generate")
        self.button_exit = wx.Button(self, wx.ID_ANY, "Exit")
        # Backend engine that does the actual text <-> Morse conversion.
        self.morse = MorseCode("Kayleb Walter")
        print("Starting program")

        # Menu Bar
        self.frame_menubar = wx.MenuBar()
        wxglade_tmp_menu = wx.Menu()
        wxglade_tmp_menu.Append(1, "Open", "")
        self.Bind(wx.EVT_MENU, self.onOpen, id=1)
        wxglade_tmp_menu.Append(2, "Save", "")
        self.Bind(wx.EVT_MENU, self.onSave, id=2)
        wxglade_tmp_menu.AppendSeparator()
        wxglade_tmp_menu.Append(3, "Generate", "")
        self.Bind(wx.EVT_MENU, self.onGenerate, id=3)
        wxglade_tmp_menu.Append(4, "Play", "")
        self.Bind(wx.EVT_MENU, self.onPlay, id=4)
        wxglade_tmp_menu.AppendSeparator()
        wxglade_tmp_menu.Append(5, "Exit", "")
        self.Bind(wx.EVT_MENU, self.onExit, id=5)
        self.frame_menubar.Append(wxglade_tmp_menu, "File")
        self.SetMenuBar(self.frame_menubar)
        # Menu Bar end

        self.__set_properties()
        self.__do_layout()

        self.Bind(wx.EVT_BUTTON, self.onClear, self.button_clear)
        self.Bind(wx.EVT_BUTTON, self.onOpen, self.button_open)
        self.Bind(wx.EVT_BUTTON, self.onSave, self.button_save)
        self.Bind(wx.EVT_BUTTON, self.onPlay, self.button_play)
        self.Bind(wx.EVT_BUTTON, self.onGenerate, self.button_generate)
        self.Bind(wx.EVT_BUTTON, self.onExit, self.button_exit)
        self.Bind(wx.EVT_SPINCTRL, self.onWpm, self.spin_ctrl_wpm)
        self.Bind(wx.EVT_SPINCTRL, self.onHz, self.spin_ctrl_hz)
        # end wxGlade

    def __set_properties(self):
        # begin wxGlade: MyFrame.__set_properties
        self.SetTitle("Morse Code Program")
        self.text_ctrl_txt.SetToolTip("Enter the text you want to convert to morse code in this text box")
        self.text_ctrl_txt.SetFocus()
        self.spin_ctrl_wpm.SetToolTip("Morse Code speed in words per minute")
        self.spin_ctrl_hz.SetToolTip("Audio Frequency for the tone")
        self.text_ctrl_morsecode.SetToolTip("Morse Code will be generated in this text box")
        self.button_clear.SetToolTip("Clears the textboxes")
        self.button_open.SetToolTip("Opens a wave file")
        self.button_save.SetToolTip("Save the morse code audio to a wave file")
        self.button_play.SetToolTip("Play an audio version of the morse code")
        self.button_generate.SetToolTip("Takes a text string and convert it to morse code")
        self.button_exit.SetToolTip("Exit the program")
        # end wxGlade

    def __do_layout(self):
        # begin wxGlade: MyFrame.__do_layout
        sizer_1 = wx.BoxSizer(wx.VERTICAL)
        sizer_2 = wx.BoxSizer(wx.VERTICAL)
        sizer_5 = wx.BoxSizer(wx.HORIZONTAL)
        sizer_6 = wx.StaticBoxSizer(wx.StaticBox(self, wx.ID_ANY, "Actions"), wx.VERTICAL)
        sizer_3 = wx.BoxSizer(wx.HORIZONTAL)
        sizer_4 = wx.StaticBoxSizer(wx.StaticBox(self, wx.ID_ANY, "Options"), wx.VERTICAL)
        label_1 = wx.StaticText(self, wx.ID_ANY, "Morse Code Generator", style=wx.ALIGN_CENTER)
        label_1.SetFont(wx.Font(12, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, 0, ""))
        sizer_2.Add(label_1, 0, wx.EXPAND, 0)
        sizer_3.Add(self.text_ctrl_txt, 3, wx.ALL | wx.EXPAND, 5)
        label_2 = wx.StaticText(self, wx.ID_ANY, "Speed (words/minute):")
        sizer_4.Add(label_2, 0, 0, 0)
        sizer_4.Add(self.spin_ctrl_wpm, 0, 0, 0)
        label_3 = wx.StaticText(self, wx.ID_ANY, "Audio Frequency (Hz):")
        sizer_4.Add(label_3, 0, 0, 0)
        sizer_4.Add(self.spin_ctrl_hz, 0, 0, 0)
        sizer_3.Add(sizer_4, 1, wx.ALL | wx.EXPAND, 3)
        sizer_2.Add(sizer_3, 1, wx.EXPAND, 0)
        sizer_5.Add(self.text_ctrl_morsecode, 3, wx.ALL | wx.EXPAND, 5)
        sizer_6.Add(self.button_clear, 0, wx.ALL | wx.EXPAND, 1)
        sizer_6.Add(self.button_open, 0, wx.ALL | wx.EXPAND, 1)
        sizer_6.Add(self.button_save, 0, wx.ALL | wx.EXPAND, 1)
        sizer_6.Add(self.button_play, 0, wx.ALL | wx.EXPAND, 1)
        sizer_6.Add(self.button_generate, 0, wx.ALL | wx.EXPAND, 1)
        sizer_6.Add(self.button_exit, 0, wx.ALL | wx.EXPAND, 1)
        sizer_5.Add(sizer_6, 1, wx.ALL | wx.EXPAND, 3)
        sizer_2.Add(sizer_5, 1, wx.EXPAND, 0)
        sizer_1.Add(sizer_2, 1, wx.EXPAND, 0)
        self.SetSizer(sizer_1)
        self.Layout()
        # end wxGlade

    def onClear(self, event):  # wxGlade: MyFrame.<event_handler>
        """Empty both text boxes."""
        self.text_ctrl_txt.SetValue("")
        self.text_ctrl_morsecode.SetValue("")

    def onOpen(self, event):  # wxGlade: MyFrame.<event_handler>
        """Load a wave file and translate its audio back into Morse/text."""
        openfile = wx.LoadFileSelector('Open a Wave file', '.wav', 'testmorse.wav', None)
        # Empty string means the user cancelled the dialog.
        if(openfile != ""):
            self.SetTitle("Morse Code Wave File - " + openfile)
            self.text_ctrl_txt.SetValue("Please Wait Generating....")
            self.text_ctrl_morsecode.SetValue("Please Wait Generating....")
            self.morse.sound_info(openfile)
            print("Generating morse code from audio")
            self.morse.sound_to_morse(openfile)
            self.text_ctrl_txt.SetValue(self.morse.morse_text)
            self.text_ctrl_morsecode.SetValue(self.morse.morse_code)
            print("Finished with translating audio to morse code")

    def onSave(self, event):  # wxGlade: MyFrame.<event_handler>
        """Save the current Morse audio to a wave file."""
        savefile = wx.SaveFileSelector('Open a Wave file', '.wav', 'testmorse.wav', None)
        if(savefile != ""):
            self.SetTitle("Morse Code Wave File - " + savefile)
            self.morse.save_wav(savefile)

    def onGenerate(self, event):  # wxGlade: MyFrame.<event_handler>
        """Convert the entered text into Morse code."""
        print("Generating morse code")
        morse_text = self.text_ctrl_txt.GetValue()
        morse_code = self.morse.to_morse(morse_text)
        self.text_ctrl_morsecode.SetValue(self.morse.morse_code)

    def onExit(self, event):  # wxGlade: MyFrame.<event_handler>
        print("Closing program")
        self.Destroy()

    def onPlay(self, event):  # wxGlade: MyFrame.<event_handler>
        """Play the displayed Morse code as audio."""
        print("Playing morse code")
        morse_code = self.text_ctrl_morsecode.GetValue()
        self.morse.play_morse(morse_code)

    def onWpm(self, event):
        # Propagate the new words-per-minute setting to the engine.
        wpm = self.spin_ctrl_wpm.GetValue()
        print("Change the WPM value")
        self.morse.time_period = self.morse.set_time_period(wpm)

    def onHz(self, event):
        # Propagate the new tone frequency to the engine.
        hz = self.spin_ctrl_hz.GetValue()
        print("Change the frequency value")
        self.morse.tone = self.morse.set_tone(hz)
# end of class MyFrame
class MyApp(wx.App):
    def OnInit(self):
        """Create and show the main frame; returning True allows startup."""
        self.frame = MyFrame(None, wx.ID_ANY, "")
        self.SetTopWindow(self.frame)
        self.frame.Show()
        return True

# end of class MyApp

if __name__ == "__main__":
    app = MyApp(0)
    app.MainLoop()
| 8,048 | 3,146 |
"""update user profile definition
Revision ID: d3f96fb8b8e5
Revises: 2b13f89aa1b3
Create Date: 2021-10-18 15:45:33.906745
"""
from alembic import op
import sqlalchemy as sa
from alembic_utils.pg_function import PGFunction
from sqlalchemy import text as sql_text
# revision identifiers, used by Alembic.
revision = 'd3f96fb8b8e5'        # this migration's id
down_revision = '2b13f89aa1b3'   # migration this one applies on top of
branch_labels = None
depends_on = None
# Update user profile to add registrations table and miscellaneous (future) preferences.
def upgrade():
    """Add the registrations_table and misc_preferences JSON columns
    (both nullable) to user_profiles."""
    for column_name in ("registrations_table", "misc_preferences"):
        op.add_column("user_profiles", sa.Column(column_name, sa.JSON(), nullable=True))
def downgrade():
    """Drop the columns added by upgrade(), in reverse order."""
    for column_name in ("misc_preferences", "registrations_table"):
        op.drop_column("user_profiles", column_name)
| 1,032 | 374 |
"""
On an 8 x 8 chessboard, there is one white rook. There also may be empty squares, white bishops, and black pawns. These are given as characters 'R', '.', 'B', and 'p' respectively. Uppercase characters represent white pieces, and lowercase characters represent black pieces.
The rook moves as in the rules of Chess: it chooses one of four cardinal directions (north, east, west, and south), then moves in that direction until it chooses to stop, reaches the edge of the board, or captures an opposite colored pawn by moving to the same square it occupies. Also, rooks cannot move into the same square as other friendly bishops.
Return the number of pawns the rook can capture in one move.
Example 1:
Input: [[".",".",".",".",".",".",".","."],[".",".",".","p",".",".",".","."],[".",".",".","R",".",".",".","p"],[".",".",".",".",".",".",".","."],[".",".",".",".",".",".",".","."],[".",".",".","p",".",".",".","."],[".",".",".",".",".",".",".","."],[".",".",".",".",".",".",".","."]]
Output: 3
Explanation:
In this example the rook is able to capture all the pawns.
Example 2:
Input: [[".",".",".",".",".",".",".","."],[".","p","p","p","p","p",".","."],[".","p","p","B","p","p",".","."],[".","p","B","R","B","p",".","."],[".","p","p","B","p","p",".","."],[".","p","p","p","p","p",".","."],[".",".",".",".",".",".",".","."],[".",".",".",".",".",".",".","."]]
Output: 0
Explanation:
Bishops are blocking the rook to capture any pawn.
Example 3:
Input: [[".",".",".",".",".",".",".","."],[".",".",".","p",".",".",".","."],[".",".",".","p",".",".",".","."],["p","p",".","R",".","p","B","."],[".",".",".",".",".",".",".","."],[".",".",".","B",".",".",".","."],[".",".",".","p",".",".",".","."],[".",".",".",".",".",".",".","."]]
Output: 3
Explanation:
The rook can capture the pawns at positions b5, d6 and f5.
Note:
board.length == board[i].length == 8
board[i][j] is either 'R', '.', 'B', or 'p'
There is exactly one cell with board[i][j] == 'R'
"""
class Solution(object):
    def numRookCaptures(self, board):
        """Count the pawns ('p') the white rook ('R') can capture in one move.

        Scans outward from the rook in each cardinal direction; the first
        non-empty square either is a capturable pawn or (a bishop 'B')
        blocks that direction.

        Fix: the original used Python-2-only ``xrange``, which raises
        NameError under Python 3; replaced with ``range``.

        :type board: List[List[str]]
        :rtype: int
        """
        rows, cols = len(board), len(board[0])
        # Locate the rook (the problem guarantees exactly one 'R').
        ri = rj = 0
        for i in range(rows):
            for j in range(cols):
                if board[i][j] == 'R':
                    ri, rj = i, j
                    break
            else:
                continue
            break
        captures = 0
        for di, dj in ((1, 0), (-1, 0), (0, 1), (0, -1)):
            i, j = ri + di, rj + dj
            while 0 <= i < rows and 0 <= j < cols:
                piece = board[i][j]
                if piece == 'p':
                    captures += 1
                    break
                if piece != '.':
                    # A friendly bishop blocks this direction.
                    break
                i += di
                j += dj
        return captures
# Copyright (c) 2020, Huawei Technologies.All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import numpy as np
import sys
import copy
from common_utils import TestCase, run_tests
from common_device_type import dtypes, instantiate_device_type_tests
from util_test import create_common_tensor
class TestSin(TestCase):
def cpu_op_exec(self, input1):
output = torch.sin(input1)
output = output.numpy()
return output
def npu_op_exec(self, input1):
output = torch.sin(input1)
output = output.to("cpu")
output = output.numpy()
return output
def npu_op_exec_out(self, input1, input2):
torch.sin(input1, out=input2)
output = input2.to("cpu")
output = output.numpy()
return output
def test_sin_common_shape_format(self, device):
shape_format = [
[[np.float32, 0, (5,3)]],
]
for item in shape_format:
cpu_input1, npu_input1 = create_common_tensor(item[0], -10, 10)
cpu_output = self.cpu_op_exec(cpu_input1)
npu_output = self.npu_op_exec(npu_input1)
self.assertRtolEqual(cpu_output, npu_output)
def test_sin_out_common_shape_format(self, device):
shape_format = [
[[np.float16, -1, (4, 3, 128, 128)], [np.float16, -1, (4, 3, 128, 128)]],
[[np.float16, 0, (4, 3, 128, 128)], [np.float16, 0, (10, 3, 64, 128)]],
[[np.float16, 0, (4, 3, 128, 128)], [np.float16, 0, (2, 3, 256, 128)]],
[[np.float32, 0, (4, 3, 128, 128)], [np.float32, 0, (4, 3, 128, 128)]],
[[np.float32, 0, (4, 3, 128, 128)], [np.float32, 0, (8, 3, 64, 128)]],
[[np.float32, -1, (4, 3, 128, 128)], [np.float32, -1, (4, 3, 256, 64)]],
]
for item in shape_format:
cpu_input1, npu_input1 = create_common_tensor(item[0], -10, 10)
cpu_input2, npu_input2 = create_common_tensor(item[0], -10, 10)
cpu_input3, npu_input3 = create_common_tensor(item[1], -10, 10)
if cpu_input1.dtype == torch.float16:
cpu_input1 = cpu_input1.to(torch.float32)
cpu_output = self.cpu_op_exec(cpu_input1)
npu_output_out1 = self.npu_op_exec_out(npu_input1, npu_input2)
npu_output_out2 = self.npu_op_exec_out(npu_input1, npu_input3)
cpu_output = cpu_output.astype(npu_output_out1.dtype)
self.assertRtolEqual(cpu_output, npu_output_out1)
self.assertRtolEqual(cpu_output, npu_output_out2)
# Register the device-parametrized tests; CPU is excluded (NPU-only suite).
instantiate_device_type_tests(TestSin, globals(), except_for='cpu')
if __name__ == "__main__":
    run_tests()
| 3,200 | 1,235 |
# -*- coding: utf-8 -*-
from simmate.website.core_components.filters import (
Structure,
Forces,
Thermodynamics,
Calculation,
)
from simmate.database.base_data_types.dynamics import (
DynamicsRun as DynamicsRunTable,
DynamicsIonicStep as DynamicsIonicStepTable,
)
class DynamicsRun(Structure, Calculation):
    """Filter definition for molecular-dynamics run entries."""

    class Meta:
        model = DynamicsRunTable
        fields = {
            "temperature_start": ["range"],
            "temperature_end": ["range"],
            "time_step": ["range"],
            "nsteps": ["range"],
            **Structure.get_fields(),
            **Calculation.get_fields(),
        }
class DynamicsIonicStep(Structure, Forces, Thermodynamics):
    """Filter definition for individual ionic steps of a dynamics run."""

    class Meta:
        model = DynamicsIonicStepTable
        fields = {
            "number": ["range"],
            "temperature": ["range"],
            **Structure.get_fields(),
            **Thermodynamics.get_fields(),
            **Forces.get_fields(),
        }
| 968 | 282 |
import numpy as np
import pandas as pd
from taller1.models import Userid_Timestamp_Count
class Coseno():
    """Cosine-similarity recommender (user-based and item-based) over
    per-user artist listen counts loaded from the Userid_Timestamp_Count model.

    NOTE(review): methods call siblings as ``Coseno.method(self, ...)``
    rather than ``self.method(...)`` — equivalent, but unidiomatic.
    """

    def recomendacionUsuario(self, usuario_activo):
        """Recommend up to 10 artists taken from the single most
        cosine-similar user to ``usuario_activo``."""
        print("Modelo Coseno Usuario")
        cant = 10  # number of similar users / recommendations returned
        df_mapreduce = Coseno.cargarDatos(self)
        print("df_mapreduce.shape",df_mapreduce.shape)
        # Pivot: rows=users, cols=artists, values=listen counts (0 if missing).
        df_pivot = df_mapreduce.pivot('userid','artist','count').fillna(0)
        print("Pivot.shape=", df_pivot.shape)
        lista_coseno_usuario = Coseno.iterarUsuario(self,df_pivot,usuario_activo)
        print("Termina calculo coseno=",len(lista_coseno_usuario))
        lista_coseno_usuario.sort(key=lambda k:k['coseno'], reverse = True)
        print("Termina ordenar lista coseno")
        usuario_mas_similar = lista_coseno_usuario[0]['usuario_similar']
        print("Usuario mas similar=",usuario_mas_similar)
        lista_recomendacion = Coseno.artistaMasEscuchadoPorUsuario(self,usuario_mas_similar,cant,df_pivot)
        resp = {"lista_coseno_usuario":lista_coseno_usuario[:cant],
                "lista_recomendacion":lista_recomendacion}
        return resp

    def cargarDatos(self):
        """Load (userid, artist, count) rows from the Django model into a
        DataFrame, dropping rows with missing values."""
        #df_mapreduce = pd.read_csv('part-r-00000',sep='\t',names=['userid','artist','count'])
        df_mapreduce = pd.DataFrame(list(Userid_Timestamp_Count.objects.all().values('userid','artist','count')))
        return df_mapreduce.dropna()

    def iterarUsuario(self,df_pivot,usuario_activo):
        """Cosine similarity of the active user's count vector against every
        other user; returns a list of {'usuario_similar', 'coseno'} dicts."""
        v_usuario_activo = df_pivot.loc[usuario_activo].values
        lista_coseno=[]
        for user_evaluado in df_pivot.index.tolist():
            if usuario_activo != user_evaluado:
                object = {}
                object['usuario_similar']=user_evaluado
                v_usuario_evaluado = df_pivot.loc[user_evaluado].values
                object['coseno']=Coseno.cos_sim(self,v_usuario_activo, v_usuario_evaluado)
                lista_coseno.append(object)
        return lista_coseno

    def valorCoseno(self):
        # NOTE(review): ``val`` is undefined here — calling this raises
        # NameError. Looks like a leftover sort-key helper (the sorts use
        # inline lambdas instead). Dead/broken code, kept as-is.
        return val['coseno']

    def artistaMasEscuchadoPorUsuario(self,usuario_evaluado,cant,df_pivot):
        """Top ``cant`` artists for a user, ordered by listen count descending."""
        artistas_escuchados = df_pivot.loc[usuario_evaluado]
        df_r = pd.DataFrame(artistas_escuchados)
        df_r = df_r.sort_values(by=[usuario_evaluado], ascending=False).index.tolist()
        return df_r[:cant]

    def cos_sim(self,a, b):
        # Takes 2 vectors a, b and returns the cosine similarity according
        # to the definition of the dot product.
        dot_product = np.dot(a, b)
        norm_a = np.linalg.norm(a)
        norm_b = np.linalg.norm(b)
        return dot_product / (norm_a * norm_b)

    def recomendacionItem(self,usuario_activo):
        """Item-based variant: rank artists by similarity to the active
        user's single most-listened artist."""
        print("Modelo Coseno Item")
        df_mapreduce = Coseno.cargarDatos(self)
        print("df_mapreduce.shape",df_mapreduce.shape)
        df_pivotA = df_mapreduce.pivot('userid','artist','count').fillna(0)
        print("Usuario Pivot.shape=", df_pivotA.shape)
        artista_activo = Coseno.artistaMasEscuchadoPorUsuario(self,usuario_activo,10,df_pivotA)
        cant = 10
        # Transposed pivot: rows=artists, cols=users.
        df_pivot = df_mapreduce.pivot('artist','userid','count').fillna(0)
        print("Artista Pivot.shape=", df_pivot.shape)
        lista_coseno_artista = Coseno.iterarArtistas(self,df_pivot,artista_activo[:1])
        print("Termina calculo coseno=",len(lista_coseno_artista))
        lista_coseno_artista.sort(key=lambda k:k['coseno'], reverse = True)
        print("Termina ordenar lista coseno")
        resp = {"lista_coseno_artista":lista_coseno_artista[:cant],
                "artista_activo":artista_activo}
        return resp

    def iterarArtistas(self,df_pivot_artista,artista_activo):
        """Cosine similarity of the active artist against every other artist.

        NOTE(review): ``artista_activo`` arrives as a one-element list, so the
        ``!=`` comparison against each (scalar) index value is always True and
        the active artist is never excluded — TODO confirm intent.
        """
        v_artista_activo = df_pivot_artista.loc[artista_activo].values
        lista_coseno=[]
        for artista_evaluado in df_pivot_artista.index.tolist():
            if artista_activo != artista_evaluado:
                object = {}
                object['artista_similar']=artista_evaluado
                v_artista_evaluado = df_pivot_artista.loc[artista_evaluado].values
                object['coseno']=Coseno.cos_sim(self,v_artista_activo, v_artista_evaluado)
                lista_coseno.append(object)
        return lista_coseno
"""Convert Shapefiles to Tecplot plt format
usage:
> python shapefile_to_plt.py shapefile.shp outfile.plt
Necessary modules
-----------------
pyshp
The Python Shapefile Library (pyshp) reads and writes ESRI Shapefiles in pure Python.
https://pypi.python.org/pypi/pyshp
https://www.esri.com/library/whitepapers/pdfs/shapefile.pdf
Description
-----------
This script is used to convert Shapefiles (.shp) to Tecplot plt format.
Users will need to answer a few questions about their shapefile to accurately
import into Tecplot format.
First select a conversion type: Convert to a single zone or one zone per shape.
Next select variable names to use: x/y or lon/lat
Finally, if using one zone per shape, select the column to name the zones
After running the script, append the new plt file to the active frame and match the
variable names.
"""
import sys
import os
import time
import shapefile as sf
import tecplot as tp
from tecplot.constant import *
def create_connectivity_list(shape, element_offset=0):
    """Build the FE-LineSeg connectivity list for one shape.

    Each part of the shape becomes a chain of 2-node line elements connecting
    consecutive point indices. ``element_offset`` shifts every node index,
    letting callers concatenate several shapes into a single zone.

    Fix: the inner element loop previously reused ``i``, shadowing the outer
    part index; the for-iterator semantics made it accidentally work, but it
    is now a distinct variable for clarity and safety.

    :param shape: pyshp shape with ``points`` and ``parts`` attributes
    :param element_offset: offset applied to every node index
    :return: list of (node_a, node_b) index pairs
    """
    num_points = len(shape.points)
    num_parts = len(shape.parts)
    elements = []
    for part in range(num_parts):
        # parts[] holds the index of the first point of each part; the part
        # ends where the next part begins (or at the shape's last point).
        first = shape.parts[part]
        if part < num_parts - 1:
            last = shape.parts[part + 1] - 1
        else:
            last = num_points - 1
        first += element_offset
        last += element_offset
        # Connect each point in the part to its successor.
        for node in range(first, last):
            elements.append((node, node + 1))
    return elements
def convert_to_single_zone(s, zone_name, dataset):
    """Merge every shape in the reader into one FE-LineSeg zone."""
    xs = []
    ys = []
    elements = []
    point_count = 0
    for shape_rec in s.shapeRecords():
        shape = shape_rec.shape
        # Offset this shape's connectivity by the points already collected.
        elements.extend(create_connectivity_list(shape, point_count))
        xs.extend(pt[0] for pt in shape.points)
        ys.extend(pt[1] for pt in shape.points)
        point_count += len(shape.points)
    # With all points and connectivity gathered, add one zone to the dataset.
    zone = dataset.add_fe_zone(ZoneType.FELineSeg, zone_name, point_count, len(elements))
    zone.values(0)[:] = xs
    zone.values(1)[:] = ys
    zone.nodemap[:] = elements
def convert_to_one_zone_per_shape(s, name_index, dataset):
    """Create one FE-LineSeg zone per shape, named from the chosen record column."""
    for shape_rec in s.shapeRecords():
        # The zone name comes from the selected column of the shape's record.
        zone_name = shape_rec.record[name_index]
        if len(zone_name) == 0:
            zone_name = 'NONE'
        shape = shape_rec.shape
        point_count = len(shape.points)
        elements = create_connectivity_list(shape)
        xs = [pt[0] for pt in shape.points]
        ys = [pt[1] for pt in shape.points]
        zone = dataset.add_fe_zone(ZoneType.FELineSeg, zone_name, point_count, len(elements))
        zone.values(0)[:] = xs
        zone.values(1)[:] = ys
        zone.nodemap[:] = elements
        # Progress dots so the user sees activity during long conversions.
        sys.stdout.write('.')
        sys.stdout.flush()
def get_var_names():
    """Ask whether to name the variables x/y or lon/lat; return 0 or 1."""
    print("1 - Use 'x' and 'y'")
    print("2 - Use 'lon' and 'lat'")
    choice = input("Enter your choice for variable names: ")
    return int(choice) - 1
def get_name_index(shape_reader):
    """Show the record columns of the first shape and ask which names zones.

    Returns the 0-based column index chosen by the user.
    """
    first_record = shape_reader.shapeRecords()[0].record
    # fields[1:] skips the first field (presumably pyshp's deletion-flag
    # field — TODO confirm) so fields line up with the record values.
    for position, (field, value) in enumerate(zip(shape_reader.fields[1:], first_record), start=1):
        print(position, "- ", field[0], ": ", value)
    return int(input("Enter the index to use for zone names: ")) - 1
def get_conversion_option(shape_records):
    """Ask whether to build a single zone (1) or one zone per shape (2)."""
    print("1 - Convert to a single zone")
    print("2 - Convert to one zone per shape (%d zones) (this can take a while)" % (len(shape_records)))
    return int(input("Enter your conversion selection: "))
def main(shapefilename, outfilename):
    """Interactively convert a shapefile to a Tecplot .plt file.

    Prompts for the conversion mode and variable names, builds the dataset,
    and saves it. The timer starts after the interactive prompts, so user
    think-time is apparently meant to be excluded from the elapsed time.
    """
    # define index from record for zone name
    s = sf.Reader(shapefilename)
    shape_records = s.shapeRecords()
    conversion_option = get_conversion_option(shape_records)
    if get_var_names() == 0:
        x_var_name = 'x'
        y_var_name = 'y'
    else:
        x_var_name = 'lon'
        y_var_name = 'lat'
    dataset = tp.active_frame().create_dataset("Shapefile", [x_var_name, y_var_name])
    if conversion_option == 1:  # Single Zone
        start = time.time()
        convert_to_single_zone(s, os.path.basename(shapefilename), dataset)
    else:  # One Zone per Shape
        name_index = get_name_index(s)
        start = time.time()
        convert_to_one_zone_per_shape(s, name_index, dataset)
    tp.data.save_tecplot_plt(outfilename)
    print("Elapsed time: ", time.time() - start)
# Guard the CLI entry point so importing this module no longer runs the
# conversion at import time (the original executed unconditionally).
if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage:\nshapefile_to_plt.py shapefile.shp outfile.plt")
    else:
        shapefilename = sys.argv[1]
        outfilename = sys.argv[2]
        main(shapefilename, outfilename)
| 5,646 | 1,778 |
"""When creating factory functions, plain functions are good unless you need to
inherit from a higher level class. If you don't need to inherit, dont use a
class."""
from functools import partial
from .suits import *
from .cards import *
def card(rank, suit):
    """Factory: map a numeric rank (1-13) and a suit to the right Card class.

    :param rank: 1 = ace, 2-10 = number cards, 11-13 = jack/queen/king
    :param suit: suit object passed through to the card constructor
    :raises ValueError: if rank is outside 1-13. (Was a bare ``Exception``;
        ``ValueError`` is more precise and is still caught by any existing
        ``except Exception`` handler.)
    """
    if rank == 1:
        return AceCard('A', suit)
    elif 2 <= rank < 11:
        return NumberCard(str(rank), suit)
    elif 11 <= rank < 14:
        name = {11: "J", 12: "Q", 13: "K"}[rank]
        return FaceCard(name, suit)
    else:
        # The explicit else makes clear which inputs this function rejects.
        raise ValueError("Rank out of range.")
def card_better_elif(rank, suit):
    """Factory variant with one explicit elif per face card.

    :raises ValueError: if rank is outside 1-13. (Was a bare ``Exception``;
        ``ValueError`` is more precise and remains backward compatible with
        ``except Exception`` callers.)
    """
    if rank == 1:
        return AceCard('A', suit)
    elif 2 <= rank < 11:
        return NumberCard(str(rank), suit)
    elif rank == 11:
        return FaceCard("J", suit)
    elif rank == 12:
        return FaceCard("Q", suit)
    elif rank == 13:
        return FaceCard("K", suit)
    else:
        # The explicit else makes clear which inputs this function rejects.
        raise ValueError("Rank out of range.")
def card_mapping(rank, suit):
    """Mapping-based factory. Unmapped ranks default to NumberCard.

    Fix: the original passed the raw integer rank to the constructor, so
    e.g. rank 1 produced ``AceCard(1, suit)`` while every sibling factory
    produces ``AceCard('A', suit)``. Now translates the rank to its display
    string, matching ``card()`` and ``card_functools_mapping()``.
    """
    class_, rank_str = {
        1: (AceCard, 'A'),
        11: (FaceCard, 'J'),
        12: (FaceCard, 'Q'),
        13: (FaceCard, 'K'),
    }.get(rank, (NumberCard, str(rank)))
    return class_(rank_str, suit)
def card_functools_mapping(rank, suit):
    """Factory using functools.partial to pre-bind the rank string."""
    face_builders = {
        1: partial(AceCard, 'A'),
        11: partial(FaceCard, 'J'),
        12: partial(FaceCard, 'Q'),
        13: partial(FaceCard, 'K'),
    }
    builder = face_builders.get(rank, partial(NumberCard, str(rank)))
    return builder(suit)
class CardFactory:
    """Fluent card factory: ``factory.rank(r).suit(s)`` builds one card.

    ``rank()`` stashes the chosen card class and rank string on the instance
    and returns ``self`` so calls can be chained; ``suit()`` finishes the
    construction. This keeps the whole fluent API in one object for
    simplicity (per the original author's note, it mainly demonstrates what
    can be done with collections).
    """

    def rank(self, rank):
        """Remember the card class and rank string for ``rank``; return self."""
        mapping = {
            1: (AceCard, 'A'),
            11: (FaceCard, 'J'),
            12: (FaceCard, 'Q'),
            13: (FaceCard, 'K'),
        }
        self.class_, self.rank_str = mapping.get(rank, (NumberCard, str(rank)))
        return self

    def suit(self, suit):
        """Build the card from the stored class/rank and the given suit."""
        return self.class_(self.rank_str, suit)

    def get_deck(self):
        """Build a full 52-card deck via the fluent interface."""
        suits = (Club, Diamond, Heart, Spade)
        return [self.rank(r + 1).suit(s) for r in range(13) for s in suits]
# All interchangeable factory implementations, e.g. for side-by-side testing.
factory_functions = [card, card_better_elif, card_mapping,
                     card_functools_mapping]
| 2,746 | 871 |
# -*- coding: utf-8 -*-
import unittest
from app.services.spider_qidian import QidianSpider
class TestSpider(unittest.TestCase):
    """Integration tests for QidianSpider.

    NOTE(review): these tests hit the live qidian.com site; the expected
    chapter counts, word counts and timestamps are snapshots of live data
    and will break whenever the site content changes.
    """

    def test_init(self):
        """The spider can be constructed."""
        spider = QidianSpider()
        self.assertTrue(isinstance(spider, QidianSpider))

    def test_query_book_list(self):
        """A 50-per-page listing query returns 50 books."""
        spider = QidianSpider()
        count = spider.queryBookList("https://www.qidian.com/all?orderId=&style=2&pageSize=50&siteid=1&pubflag=0&hiddenField=0&page=1")
        self.assertEqual(count, 50)

    def test_chapter_api(self):
        """The chapter API returns the full list with the expected fields."""
        spider = QidianSpider()
        chapters = spider.queryChapterApi("107580")
        self.assertEqual(len(chapters), 2600)
        self.assertEqual(chapters[2583]["chapterName"], "第十一卷 真仙降临 第两千四百四十六章 飞升仙界(大结局)")
        self.assertEqual(chapters[2583]["xchapterId"], "48169888")
        self.assertEqual(chapters[2583]["wordNumbers"], "3288")
        self.assertEqual(chapters[2583]["updatetime"], "2013-09-23 22:10:37")
        self.assertEqual(chapters[2583]["free"], 0)
        self.assertEqual(chapters[2583]["xbookId"], "107580")

    def test_query_book_info(self):
        """Book info pages parse into the expected field values."""
        spider = QidianSpider()
        book = spider.queryBookInfo("https://book.qidian.com/info/107580")
        self.assertEqual(book["bookName"] , "凡人修仙传")
        self.assertEqual(book["cover"] , "https://qidian.qpic.cn/qdbimg/349573/107580/180")
        self.assertEqual(book["wordNumbers"], "744.75")
        self.assertEqual(book["author"], "忘语")
        self.assertEqual(book["tags"], "")
        self.assertEqual(book["xbookId"], "107580")
        self.assertEqual(book["status"], 0)
        self.assertEqual(book["lastupdate"], "2016-01-05 17:02:39")
        # Second book exercises non-empty tags and a different status value.
        book_b = spider.queryBookInfo("https://book.qidian.com/info/1010626574")
        self.assertEqual(book_b["bookName"] , "无限刷钱系统")
        self.assertEqual(book_b["cover"] , "https://qidian.qpic.cn/qdbimg/349573/1010626574/180")
        self.assertEqual(book_b["wordNumbers"], "89.21")
        self.assertEqual(book_b["author"], "二发凉了")
        self.assertEqual(book_b["tags"], "明星|爆笑|系统流|赚钱")
        self.assertEqual(book_b["xbookId"], "1010626574")
        self.assertEqual(book_b["status"], 1)
        self.assertEqual(book_b["lastupdate"], "2018-03-16 17:17:32")

    def test_query_content(self):
        """A chapter content page parses into the expected metadata.

        NOTE(review): the ``print(content)`` below looks like leftover
        debug output.
        """
        spider = QidianSpider()
        content = spider.queryContent("https://read.qidian.com/chapter/_khZq99sDTj7X4qr8VpWrA2/yocLiS1ZCjPM5j8_3RRvhw2")
        print(content)
        self.assertEqual(content["wordNumbers"], '2707')
        self.assertEqual(content["xchapterId"], 'yocLiS1ZCjPM5j8_3RRvhw2')
        self.assertEqual(content["xbookId"], '1010626574')
| 2,645 | 1,143 |
class Solution:
    def watchedVideosByFriends(self, watchedVideos: List[List[str]], friends: List[List[int]], ID: int, level: int) -> List[str]:
        """Return the videos watched by friends exactly ``level`` hops away,
        ordered by watch frequency, then lexicographically."""
        visited = {ID}
        current = [ID]
        # Walk outward one friendship hop per iteration, up to `level` hops.
        for _ in range(level):
            if not current:
                break
            nxt = []
            for person in current:
                for friend in friends[person]:
                    if friend not in visited:
                        visited.add(friend)
                        nxt.append(friend)
            current = nxt
        freq = collections.Counter(v for person in current for v in watchedVideos[person])
        return sorted(freq, key=lambda video: (freq[video], video))
| 847 | 237 |
from __future__ import annotations
import logging
import socket
import pytest
from .constants import TEST_HOST, TEST_PORT
logger = logging.getLogger("tests.fixtures.client")
class BaseClient:
    """Base for blocking test clients connecting to an app server.

    Carries only a display name, echoed by ``str()`` for log readability.
    """

    name: str

    def __init__(self, name: str):
        self.name = name

    def __str__(self):
        return self.name
class SocketClient(BaseClient):
    """Blocking TCP client with a small buffer for CRLF-delimited reads."""

    socket: socket.socket | None
    buffer: bytes

    def __init__(self, name: str):
        super().__init__(name)
        self.buffer = b""

    def connect(self, host: str, port: int):
        """Open a TCP connection to host:port."""
        logger.debug(f"Socket client {self} connecting")
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket.connect((host, port))
        logger.debug(f"Socket client {self} connected")

    def write(self, raw: bytes):
        """Send every byte of ``raw``; requires an open socket."""
        if self.socket is None:
            raise ValueError("Socket not open")
        logger.debug(f"Socket client {self} writing {raw!r}")
        self.socket.sendall(raw)

    def read(self, len: int = 1024) -> bytes:
        """Receive up to ``len`` bytes from the socket."""
        if self.socket is None:
            raise ValueError("Socket not open")
        raw: bytes = self.socket.recv(len)
        logger.debug(f"Socket client {self} received {raw!r}")
        return raw

    def read_line(self, len: int = 1024) -> bytes:
        """Return one CRLF-terminated line (terminator stripped).

        Reads at most once more from the socket if the buffer does not yet
        contain a full line; leftover bytes stay buffered for the next call.
        """
        if b"\r\n" not in self.buffer:
            self.buffer += self.read(len)
        if b"\r\n" not in self.buffer:
            raise ValueError("Line not found")
        line, _, remainder = self.buffer.partition(b"\r\n")
        self.buffer = remainder
        return line

    def close(self):
        """Close the socket; requires it to be open."""
        if self.socket is None:
            raise ValueError("Socket not open")
        logger.debug(f"Socket client {self} closing")
        self.socket.close()
        logger.debug(f"Socket client {self} closed")
@pytest.fixture
def socket_client_factory(request: pytest.FixtureRequest):
    """Socket client factory fixture.

    Yields a ``connect(name, host, port)`` callable that returns connected
    SocketClients named after the running test; every client created is
    closed during fixture teardown.

    Usage::

        def test_client(app_harness, socket_client_factory):
            app_harness(myapp)
            client = socket_client_factory()
            client.write(b'hello')
            assert client.read() == b'hello'
    """
    created = []

    def connect(name: str | None = None, host: str = TEST_HOST, port: int = TEST_PORT):
        label = request.node.name
        if name is not None:
            label = f"{label}:{name}"
        client = SocketClient(label)
        client.connect(host, port)
        created.append(client)
        return client

    yield connect

    # Teardown: close everything the test opened.
    for client in created:
        client.close()
"""Standard raspberry GPIO access layer.
It defines abstract layer that extends InOutInterface to access all standard
ports on rapsberry pi. It uses RPi.GPIO under the hood.
Thanks to that you have a standardized way of accessing these ports, as well
as any others implementing InOutInterface.
"""
import logging
from base import InOutInterface
from base import get_gpio
from base import Settings
from base import PortListener
from exceptions import InvalidPortNumberError
from port import Port
class PiInterface(InOutInterface):
    """Standard GPIO interface abstraction layer.
    Some examples of raw calls to ports using RPi.GPIO
    GPIO.setmode(GPIO.BOARD) // set usual port numbering
    GPIO.setup(7, GPIO.OUT)
    GPIO.output(7, GPIO.HIGH)
    GPIO.output(7, GPIO.LOW)
    GPIO.cleanup()
    """
    # Physical pin numbers that must never be driven (BOARD numbering).
    _GROUND = (6, 9, 14, 20, 25, 30, 34, 39)
    _POWER_5V = (2, 4)
    _POWER_3V3 = (1, 17)
    _I2C = (3, 5, 27, 28)
    _FORBIDDEN = _GROUND + _POWER_5V + _POWER_3V3 + _I2C
    PULL_UP = 'pull_up'
    PULL_DOWN = 'pull_down'

    def __init__(self):
        """Create Port objects for all 40 pins except the reserved ones."""
        super(PiInterface, self).__init__(40)
        for number in range(1, 41):
            if number not in self._FORBIDDEN:
                self._ports[number] = Port(self, number)
        # Defines the pull up or pull down rezistor for inputs.
        # Possible values are:
        # 1. self.PULL_UP
        # 2. self.PULL_DOWN
        # 3. None (input fluctuating by default)
        self.pull_up_down_rezistor = self.PULL_UP
        self._port_listeners = {}
        self._initialize_ports()

    def __str__(self):
        return 'Raspberry PI GPIO'

    def _validate_port_number(self, port_number):
        """Reject reserved pins with a specific error message per pin class."""
        super(PiInterface, self)._validate_port_number(port_number)
        if port_number in self._GROUND:
            raise InvalidPortNumberError(
                'This port number(%d) is reserved for GROUND.', port_number)
        if port_number in self._POWER_3V3:
            raise InvalidPortNumberError(
                'This port number(%d) is reserved for 3.3V POWER.', port_number)
        if port_number in self._POWER_5V:
            raise InvalidPortNumberError(
                'This port number(%d) is reserved for 5V POWER.', port_number)
        if port_number in self._I2C:
            raise InvalidPortNumberError(
                'This port number(%d) is reserved for I2c.', port_number)
        if port_number in self._FORBIDDEN:
            raise InvalidPortNumberError(
                'This port number(%d) is forbidden to take.', port_number)

    def _gpio_setup(self, port_number, gpio_attr_name):
        """Configure a pin via RPi.GPIO (gpio_attr_name is 'IN' or 'OUT').

        For inputs, the configured pull-up/pull-down resistor setting is
        applied; in no-hardware mode only a warning is logged.
        """
        self._validate_port_number(port_number)
        if Settings.IS_NO_HARDWARE_MODE:
            logging.warning('No hardware mode, no value written')
        else:
            gpio = get_gpio()
            if gpio_attr_name == 'IN':
                # Special case for settings port as input.
                # Pullup or pulldown rezistor should be set here.
                kwargs = {}
                if self.pull_up_down_rezistor == self.PULL_UP:
                    kwargs['pull_up_down'] = gpio.PUD_UP
                elif self.pull_up_down_rezistor == self.PULL_DOWN:
                    kwargs['pull_up_down'] = gpio.PUD_DOWN
                gpio.setup(
                    port_number,
                    getattr(gpio, gpio_attr_name),
                    **kwargs)
            else:
                gpio.setup(port_number, getattr(gpio, gpio_attr_name))

    def _gpio_output(self, port_number, value):
        """Drive an output pin HIGH or LOW (no-op with warning off-hardware)."""
        self._validate_port_number(port_number)
        if Settings.IS_NO_HARDWARE_MODE:
            logging.warning('No hardware mode, no value written')
        else:
            gpio = get_gpio()
            gpio.output(
                port_number,
                gpio.HIGH if value == self.HIGH else gpio.LOW
            )

    def get_value(self, port_number):
        """Read a pin, returning interface-level HIGH/LOW constants.

        In no-hardware mode the simulated value (if any) is returned instead.
        """
        self._validate_port_number(port_number)
        value = self._check_no_hardware_port_value(port_number)
        if value is not None:
            return value
        else:
            gpio = get_gpio()
            value = gpio.input(port_number)
            # logging.debug(
            #     'Read gpio port value (%s): %s',
            #     self.get_port(port_number),
            #     value)
            return self.HIGH if value == gpio.HIGH else self.LOW

    def set_as_input(self, port_number):
        """Configure the pin as an input; returns self for chaining."""
        self._gpio_setup(port_number, 'IN')
        self._in_out_registry[port_number] = self._INPUT
        return self

    def set_as_output(self, port_number):
        """Configure the pin as an output; returns self for chaining."""
        self._gpio_setup(port_number, 'OUT')
        self._in_out_registry[port_number] = self._OUTPUT
        return self

    def set_high(self, port_number):
        """Drive an output pin HIGH; returns self for chaining."""
        self._validate_port_number(port_number)
        self._validate_write_port_number(port_number)
        self._gpio_output(port_number, self.HIGH)
        return self

    def set_low(self, port_number):
        """Drive an output pin LOW; returns self for chaining."""
        self._validate_port_number(port_number)
        self._validate_write_port_number(port_number)
        self._gpio_output(port_number, self.LOW)
        return self

    def add_event(
            self,
            port_number,
            on_rising_callback=None,
            on_falling_callback=None):
        """Adds listening event on given port.
        In this case 2nd argument passed to a callback is a value read
        during callback invocation, which in theory might not be the one
        that actually cause triggering the event.
        """
        if Settings.IS_NO_HARDWARE_MODE:
            logging.warning('No hardware mode, adding read event failed.')
        else:
            # One listener per port; RPi.GPIO edge detection is registered
            # only the first time a callback is added for that port.
            port_listener = self._port_listeners.get(port_number)
            if not port_listener:
                port_listener = _PiPortListener(self.get_port(port_number))
                gpio = get_gpio()
                gpio.add_event_detect(
                    port_number,
                    gpio.BOTH,
                    callback=port_listener.trigger_callbacks,
                    bouncetime=Settings.READ_SWITCH_DEBOUNCE)
                self._port_listeners[port_number] = port_listener
            if on_rising_callback:
                logging.debug(
                    'Adding rising callback for interface (%s) on port %d',
                    self, port_number)
                port_listener.add_rising_callback(on_rising_callback)
            if on_falling_callback:
                logging.debug(
                    'Adding falling callback for interface (%s) on port %d',
                    self, port_number)
                port_listener.add_falling_callback(on_falling_callback)

    def clear_read_events(self, port_number):
        """Remove edge detection and forget the listener for a port."""
        if not Settings.IS_NO_HARDWARE_MODE:
            get_gpio().remove_event_detect(port_number)
        if port_number in self._port_listeners:
            del self._port_listeners[port_number]
class _PiPortListener(PortListener):
    """Port listener that decides rising vs falling by sampling the port."""

    def get_callbacks_to_trigger(self):
        """Return the callbacks matching the port's current level.

        RPi.GPIO reports both edges, so the current value read here decides
        whether the rising or the falling callbacks fire; an empty list is
        returned when nothing is registered.
        """
        if not (self._rising_callbacks or self._falling_callbacks):
            return []
        matched = []
        current_level = self.port.value
        if current_level == InOutInterface.HIGH:
            matched.extend(self._rising_callbacks)
            logging.debug(
                'Event detected on interface (%s) on port (%d). '
                'Type: RISING.',
                self.port.interface,
                self.port.number)
        elif current_level == InOutInterface.LOW:
            matched.extend(self._falling_callbacks)
            logging.debug(
                'Event detected on interface (%s) on port (%d). '
                'Type: FALLING.',
                self.port.interface,
                self.port.number)
        return matched
import discord
import os
import json
from discord.ext import commands, tasks
import time
import asyncio
import random
from discord.utils import MAX_ASYNCIO_SECONDS
##########################################################################
#generalrole = discord.utils.get(ctx.guild.roles, id=661454256251076613)
#logchannel = discord.utils.get(client.get_all_channels(), id = 753619980548833401)
#SERVER INFO
# Hard-coded Discord user IDs (account snowflakes, presumably) of known members.
ownerid = 631441731350691850
chanwoo = 631441731350691850  # NOTE(review): same id as ownerid
yewon = 819734468465786891
saji = 785135229894524959
donggu = 543680309661663233
hanjae = 406822771524501516
mintchocolate = 434328592739074048
csticker = 864745666580316170
dohyun = 652531481767444498
##########################################################################
#USEFUL FUNCTIONS
##########################################################################
def checkidentity(supposeid):
    """Map a Discord user id to the member's short name, or None if unknown.

    Replaces the original if/elif ladder with a single dict lookup; behavior
    is unchanged (``chanwoo`` holds the same id as ``ownerid``, as before).

    :param supposeid: anything int()-convertible holding a user id
    :return: short name string, or None for unrecognized ids
    """
    names_by_id = {
        chanwoo: "chanwoo",
        yewon: "yewon",
        saji: "saji",
        donggu: "donggu",
        hanjae: "hanjae",
        mintchocolate: "mint",
        csticker: "csticker",
        dohyun: "dohyun",
    }
    return names_by_id.get(int(supposeid))
def sendrandom(providedlist, min, max):
    """Pick a random number of items (between ``min`` and ``max`` inclusive),
    uniformly at random with replacement, from ``providedlist``.

    NOTE: the parameter names shadow the builtins ``min``/``max``; kept for
    backward compatibility with keyword callers.

    :param providedlist: non-empty sequence to sample from
    :param min: minimum number of items to return
    :param max: maximum number of items to return
    :return: list of randomly chosen items
    """
    count = random.randint(min, max)
    # random.choice is equivalent to indexing with randrange(0, len(...)).
    return [random.choice(providedlist) for _ in range(count)]
def getlist(sendid, path="ments/ments.json"):
    """Return the list of ments stored under ``sendid`` in the JSON file.

    Fixes: the redundant ``f.close()`` inside the ``with`` block is gone, the
    bare ``except`` around the lookup is replaced by ``dict.get`` (the only
    expected failure was a missing key), the file is read as UTF-8 explicitly
    (the ment texts are Korean), and the path is now an overridable parameter
    with the original value as default — backward compatible.

    :param sendid: user id; converted to str to match the JSON keys
    :param path: location of the ments JSON file
    :return: list of ments, or None when the id has no entry
    """
    with open(path, encoding="utf-8") as f:
        jsondata = json.load(f)
    return jsondata.get(str(sendid))
class ments(commands.Cog):
    """Cog with per-member 'ment' replies and small owner-only utilities."""

    def __init__(self, client):
        self.client = client

    @commands.command(aliases=["테스트"])
    async def test(self, ctx):
        """Reply with a random stored ment for the invoking user."""
        checkme = checkidentity(ctx.author.id)  # NOTE(review): result unused
        #await ctx.message.delete()
        if ctx.author.id == 434328592739074048:
            # Special-cased member (mintchocolate's id) gets a fixed image reply.
            await ctx.send('...나는 모구모구')
            await ctx.send(file=discord.File('image/mogumogu.jpg'))
        else:
            grablist = getlist(ctx.author.id)
            if grablist == None:
                await ctx.send("아직 너는 잘 모르겠는데..")
            else:
                # sendrandom(..., 1, 1) picks exactly one ment.
                herelist = sendrandom(grablist, 1, 1)
                for i in herelist:
                    await ctx.send(i)

    @commands.command()
    async def joinvc(self, ctx):
        """Owner-only: join the caller's current voice channel."""
        if ctx.author.id == ownerid:
            await ctx.message.delete()
            channel = ctx.author.voice.channel
            await channel.connect()

    @commands.command()
    async def leavevc(self, ctx):
        """Owner-only: leave the current voice channel."""
        if ctx.author.id == ownerid:
            await ctx.message.delete()
            await ctx.voice_client.disconnect()

    @commands.command()
    async def sendjson(self, ctx):
        """Owner-only: DM the raw ments.json file to the owner."""
        if ctx.author.id == ownerid:
            await ctx.author.send(file=discord.File('ments/ments.json'))

    @commands.command(aliases=["전송"])
    async def dm(self, ctx, target: discord.Member, *, message):
        """Anonymously-styled DM relay: forward `message` to `target` as an
        embed, and mirror it to a fixed log channel."""
        try:
            await ctx.message.delete()
        except:
            # NOTE(review): bare except — presumably deletion fails in DMs,
            # hence the "use this in a server" reply; TODO narrow the clause.
            await ctx.send("이 명령어는 서버에서 사용해 주세요")
        embed = discord.Embed(
            title = f"📨 메세지가 도착했습니다!",
            description = f"```{message}```\n\n답장해도 보내지지 않으니 직접 그 사람에게 말하세용\n명령어: `?전송 @유저 메세지 내용`",
            color = discord.Color.from_rgb(255,105,180)
        )
        embed.set_footer(text=f"{ctx.author.name}님이 보낸 메세지")
        try:
            await target.send(embed=embed)
        except:
            # NOTE(review): bare except — assumed to mean DMs are disabled.
            await ctx.send(f"{target.mention}, 도착한 메세지가 있었지만 디엠 수신 기능이 꺼져있어 보내지 못하였습니다.")
        # find a channel with an id 879895499338039301 from all the servers
        # the bot is in (the hard-coded log channel)
        channel = discord.utils.get(self.client.get_all_channels(), id = 879895499338039301)
        await channel.send(embed=embed)
def setup(client):
    """Extension entry point used by discord.py: register the ments cog."""
    cog = ments(client)
    client.add_cog(cog)
#First, we import the relevant libraries
import sys
import pandas as pd
from sqlalchemy import create_engine
def load_data(messages_filepath, categories_filepath):
    '''Read the messages and categories CSV files and merge them on ``id``.

    INPUT:
    messages_filepath --> location of the messages data file
    categories_filepath --> location of the categories data file
    OUTPUT:
    df --> DataFrame of messages left-joined with their categories
    '''
    messages = pd.read_csv(messages_filepath)
    categories = pd.read_csv(categories_filepath)
    # Left join keeps every message, matched to its categories row by id.
    return messages.merge(categories, how='left', on='id')
def clean_data(df):
    '''Expand the semicolon-packed ``categories`` column into one 0/1
    integer column per category and drop duplicate rows.

    INPUT:
    df --> merged messages/categories DataFrame (output of load_data)
    OUTPUT:
    df --> DataFrame with one integer indicator column per category
    '''
    # One column per "name-<digit>" token in the packed string.
    split_cats = df["categories"].str.split(";", expand=True)
    # Column names come from the first row, minus the trailing "-<digit>".
    split_cats.columns = [cell[:-2] for cell in split_cats.iloc[0]]
    # Keep only the trailing digit of each token and store it as an int flag.
    for name in split_cats.columns:
        split_cats[name] = split_cats[name].str[-1].astype(int)
    # Swap the packed column for the expanded ones, aligned on the index.
    df = df.drop(['categories'], axis=1)
    df = pd.merge(df, split_cats, left_index=True, right_index=True)
    # "related" occasionally holds a 2; collapse it to 1 like the others.
    df['related'] = df['related'].astype('str').str.replace('2', '1').astype('int')
    # Remove exact duplicate rows produced by the merge.
    return df.drop_duplicates()
def save_data(df, database_filename):
    ''' Write the prepared data to a SQLite database file.

    INPUT:
    df --> a Pandas DataFrame containing the prepared data
    database_filename --> path of the SQLite database file to create/update

    The table ``categorised_messages`` is replaced if it already exists.
    '''
    # Bug fix: the original ignored database_filename and always wrote to
    # the hard-coded DisasterResponse.db.
    engine = create_engine('sqlite:///{}'.format(database_filename))
    df.to_sql('categorised_messages', engine, index=False, if_exists='replace')
def main():
    ''' Command-line ETL entry point: extract, transform and load the data.

    Expects three CLI arguments: messages CSV, categories CSV, and the
    output SQLite database path. Prints usage otherwise.
    '''
    # Guard clause: bail out with usage help when the argument count is wrong.
    if len(sys.argv) != 4:
        print('Please provide the filepaths of the messages and categories '
              'datasets as the first and second argument respectively, as '
              'well as the filepath of the database to save the cleaned data '
              'to as the third argument. \n\nExample: python process_data.py '
              'disaster_messages.csv disaster_categories.csv '
              'DisasterResponse.db')
        return
    messages_filepath, categories_filepath, database_filepath = sys.argv[1:]
    print('Loading data...\n MESSAGES: {}\n CATEGORIES: {}'
          .format(messages_filepath, categories_filepath))
    df = load_data(messages_filepath, categories_filepath)
    print('Cleaning data...')
    df = clean_data(df)
    print('Saving data...\n DATABASE: {}'.format(database_filepath))
    save_data(df, database_filepath)
    print('Cleaned data saved to database!')
# Run the ETL pipeline only when executed as a script.
if __name__ == '__main__':
    main()
import torch
from argparse import ArgumentParser, Namespace
import logging
import os
from collections import namedtuple
import random
import torch
# One step of agent experience, as stored in AgentMemory.
Transition = namedtuple('Transition',
                        ('state', 'action', 'next_state', 'reward', 'done'))
class AgentMemory(object):
    """Fixed-capacity ring buffer of Transition tuples for experience replay."""
    def __init__(self, capacity):
        self.capacity = capacity
        self.reset()
    def push(self, *args):
        """Saves a transition, overwriting the oldest entry when full."""
        if len(self.memory) < self.capacity:
            self.memory.append(None)
        self.memory[self.position] = Transition(*args)
        self.position = (self.position + 1) % self.capacity
    def pushNewObservation(self, observation, reward, done, info, action):
        """Record a (prev_state, action, observation, reward, done) transition.

        The first call only primes the previous state; `info` is unused.
        """
        if self._lastState is None:
            self._lastState = observation
        else:
            next_state = observation
            self.push(self._lastState, action, next_state, reward, done)
            # Bug fix: advance the stored previous state. The original never
            # updated it, so every transition was paired with the very first
            # observation instead of the preceding one.
            self._lastState = observation
    def sample(self, batch_size):
        """Uniformly sample batch_size stored transitions (without replacement)."""
        return random.sample(self.memory, batch_size)
    def last(self):
        """Return the most recently appended slot as a 0- or 1-element list."""
        return [] if len(self.memory) <= 0 else [self.memory[-1]]
    def all(self):
        """Return a shallow copy of the stored transitions."""
        return self.memory[:]
    def reset(self):
        """Clear the buffer, write position and primed previous state."""
        self.memory = []
        self.position = 0
        self._lastState = None
    def __len__(self):
        return len(self.memory)
class AgentModel:
    """Owns an agent's network/optimizer/criterion and persists checkpoints.

    Two checkpoint files are maintained: the latest epoch and the best one
    (by mean reward). Checkpoints are dicts with keys 'epoch', 'arch',
    'state_dict', 'mean_reward' and 'best_mean_reward'.
    """
    _model = None
    _optimizer = None
    _criterion = None
    _loaded = False
    def __init__(self, saveFolder, modelName, logger=None, modelSaveSuffix=""):
        self._model = None
        self._optimizer = None
        self.modelName = modelName
        self.modelPath = os.path.join(saveFolder, modelName + modelSaveSuffix +
                                      ".tar")
        self.bestModelPath = os.path.join(saveFolder, modelName +
                                          modelSaveSuffix + "_best.tar")
        # NOTE(review): -1 as "no reward seen yet" assumes rewards > -1.
        self._maxMeanReward = -1
        if logger is None:
            self.logger = logging.getLogger(modelName + "_" + modelSaveSuffix)
        else:
            self.logger = logger
    def loadModel(self, model, optimizer, criterion):
        """Attach an instantiated model/optimizer/criterion triple."""
        self._model = model
        self._optimizer = optimizer
        self._criterion = criterion
        self._loaded = True
    def loaded(self):
        """True once loadModel() has been called."""
        return self._loaded
    def saveModel(self, epoch, meanReward):
        """Checkpoint the model; also refresh the best checkpoint on a record."""
        if self._model is None:
            self.logger.info("No model to save")
            return
        if meanReward > self._maxMeanReward:
            self._maxMeanReward = meanReward
            torch.save({
                'epoch': epoch + 1,
                'arch': self.modelName,
                # Bug fix: use the same snake_case keys as the regular
                # checkpoint below; loadModelFromFile reads
                # 'best_mean_reward' and would KeyError on the old
                # camelCase keys ('meanReward'/'bestMeanReward').
                'state_dict': self._model.state_dict(),
                'mean_reward': meanReward,
                'best_mean_reward': self._maxMeanReward
            }, self.bestModelPath)
        torch.save({
            'epoch': epoch + 1,
            'arch': self.modelName,
            'state_dict': self._model.state_dict(),
            'mean_reward': meanReward,
            'best_mean_reward': self._maxMeanReward
        }, self.modelPath)
    def loadModelFromFile(self, path):
        """Restore weights and bookkeeping from `path`; no-op when absent."""
        if os.path.isfile(path):
            self.logger.info("=> loading checkpoint '{}'".format(path))
            checkpoint = torch.load(path)
            self.start_epoch = checkpoint['epoch']
            self._maxMeanReward = checkpoint['best_mean_reward']
            self._model.load_state_dict(checkpoint['state_dict'])
            self.logger.info("=> loaded checkpoint '{}' (epoch {})"
                             .format(path, checkpoint['epoch']))
        else:
            self.logger.debug("=> no checkpoint found at '{}'".format(path))
    def modelReport(self):
        """Hook for subclasses/owners to log model internals."""
        pass
class Agent:
    """Base RL agent: owns a replay memory and an AgentModel, and defines
    the template methods (prefixed "_") that concrete agents override."""
    def __init__(self, name, cfg):
        # NOTE(review): self.agentID, self.env and self.name are read below
        # but never assigned in this __init__ — presumably provided by a
        # subclass before this runs; confirm against concrete agents.
        self.modelDataType = cfg.general.use_cuda
        self.saveFolder = cfg.general.save_folder
        self.logger = logging.getLogger(name + "_" + str(self.agentID))
        self.sharedModel = cfg.model.shared
        self.transitionMemory = cfg.model.transition_memory
        self._modelClass = None
        self._memory = AgentMemory(self.transitionMemory)
        self._crtStep = 0
        self._crtEpoch = 0
        self._useCUDA = False
        # Tensor type doubles as the CUDA on/off switch.
        if self.modelDataType:
            self.dtype = torch.cuda.FloatTensor
            self._useCUDA = True
        else:
            self.dtype = torch.FloatTensor
        #Instantiate Agents model
        # Shared mode: all agents reuse one AgentModel cached on the env;
        # otherwise each agent gets its own checkpoint file suffix.
        if self.sharedModel:
            if hasattr(self.env, "_agentsModel"):
                self._modelClass = self.env._agentsModel
            else:
                self._modelClass = AgentModel(self.saveFolder, self.name,
                                              logger=self.logger)
                self.env._agentsModel = self._modelClass
        else:
            self._modelClass = AgentModel(self.saveFolder, self.name,
                                          logger=self.logger,
                                          modelSaveSuffix=str(self.agentID))
    """
    Baseline methods.
    Should not be overridden when extending
    """
    def __post_init__(self):
        # Build the network unless a shared, already-loaded model exists,
        # then cast it to the selected tensor type (CPU or CUDA).
        if not (self.sharedModel and self._modelClass.loaded()):
            self._createLearningArchitecture()
        self._modelClass._model.type(self.dtype)
        self.logger.info("Loaded architecture")
    def act(self, observation, reward, done, is_training):
        # Choose an action, record the transition, then run one optimize step.
        # NOTE(review): the two self-assignments below are no-ops — possibly
        # leftovers of removed tensor conversions.
        self._crtStep += 1
        observation = observation
        reward = reward
        action = self._act(observation, reward, done, is_training)
        self._postAction()
        self._memory.pushNewObservation(observation, reward, done, None, action)
        self._optimizeModel()
        return action.view(-1)
    def restart(self):
        """
        Called when game restarts
        """
        # NOTE(review): this resets _lastState on the Agent, not on
        # self._memory where pushNewObservation keeps it — confirm intent.
        self._lastState = None
        self._restart()
    def epochFinished(self):
        """
        Called after end of training epoch
        """
        self._crtEpoch += 1
        self._epochFinished()
        pass
    def report(self):
        """
        Should log internal information
        """
        self._modelClass.modelReport()
        self._report()
    def saveModel(self, epoch, meanReward):
        """
        save model information
        """
        self._modelClass.saveModel(epoch, meanReward)
        self._saveModel(epoch, meanReward)
    """
    Classes extending this class should override only methods starting "_"
    to keep base class methods
    """
    def _act(self, observation, reward, done, info):
        # Override: select an action for the current observation.
        pass
    def _reset(self):
        pass
    def _epochFinished(self):
        pass
    def _report(self):
        pass
    def _saveModel(self, epoch, meanReward):
        pass
    def _postAction(self):
        pass
    def _createLearningArchitecture(self):
        """
        Should create learning architecture
        #!!! Instantiate self._modelClass._model (sibling of nn.Module)
        #Instantiate other learning models
        """
        self._modelClass._model = None
    def _optimizeModel(self):
        """
        Is called after registering each new transition.
        """
        pass
| 7,123 | 2,037 |
import pandas as pd
from modules import bedtools
from modules import intervals
def generator(ARGUMENTS):
    """Produce a BED file of intervals according to the CLI arguments.

    Source priority: random genomic intervals when neither --input_bed nor
    --gtf_anno is given; otherwise intervals from the GTF annotation;
    otherwise the user-supplied BED file. Intervals overlapping
    ARGUMENTS.avoid_int are then removed, when given.

    Returns the path of the resulting BED file (None if no source matched).
    """
    # Defensive init: the original left RANDOM_BED unbound on the final
    # else branch (currently unreachable given the conditions, but a
    # NameError trap if the branch logic ever changes).
    RANDOM_BED = None
    if not ARGUMENTS.input_bed and not ARGUMENTS.gtf_anno:
        print(f"get random intervals from genome {ARGUMENTS.reference}")
        RANDOM_BED = bedtools.random_interval(
            ARGUMENTS.reference, ARGUMENTS.int_size, ARGUMENTS.N
        )
    elif ARGUMENTS.gtf_anno:
        print(f"get intervals from annotation file {ARGUMENTS.gtf_anno}")
        RANDOM_BED = intervals.gtf_to_bed(
            file_name=ARGUMENTS.gtf_anno,
            feature=ARGUMENTS.feature,
            int_size=ARGUMENTS.int_size,
            N=ARGUMENTS.N,
        )
    elif ARGUMENTS.input_bed:
        print(f"load input bed file {ARGUMENTS.input_bed}")
        RANDOM_BED = ARGUMENTS.input_bed
    else:
        print("nothing to do")
    if ARGUMENTS.avoid_int and RANDOM_BED:
        print("removing positive intervals")
        RANDOM_BED = bedtools.intersect(
            RANDOM_BED, ARGUMENTS.avoid_int, opt=ARGUMENTS.intersect_opt
        )
    return RANDOM_BED
def make_set(ARGUMENTS):
    """Accumulate generated intervals until at least ARGUMENTS.N rows exist,
    then downsample to exactly N rows, rewriting the last generated file.

    Returns the path of the final BED file, or None when N <= 0 (nothing
    was generated).
    """
    df_list = list()
    tmp_size = 0
    # Bug fix: RANDOM_BED was unbound (NameError at the return) when the
    # loop body never ran, i.e. for N <= 0.
    RANDOM_BED = None
    while tmp_size < ARGUMENTS.N:
        RANDOM_BED = generator(ARGUMENTS)
        tmp_df = pd.read_csv(RANDOM_BED, sep="\t", header=None)
        tmp_size += tmp_df.shape[0]
        df_list.append(tmp_df)
    if df_list:
        # Sample exactly N rows from everything collected and overwrite the
        # last generated file with the final set.
        merge_df = pd.concat(df_list, axis=0).sample(n=ARGUMENTS.N)
        merge_df.to_csv(RANDOM_BED, sep="\t", header=False, index=False)
    return RANDOM_BED
# Module is import-only; no standalone CLI behaviour.
if __name__ == "__main__":
    pass
| 1,568 | 605 |
#!/usr/bin/env python3
# Project Euler 25: index of the first Fibonacci term with 1000 digits.
LIMIT = 10 ** 999
fprev, f = 1, 1   # F(1), F(2)
n = 2             # index of the term currently held in f
while f < LIMIT:
    fprev, f = f, f + fprev
    n += 1
print(n)
# Demonstrates that list() copies ([:]) are independent of the original list.
test = list()
test.append('Werberty')
test.append(21)
galera = list()
galera.append(test[:])  # snapshot copy, unaffected by later mutation
test[0] = 'Maria'
test[1] = 22
galera.append(test[:])
print(galera)
# Indexing into a list of [name, age] pairs.
pessoal = [['joão', 19], ['Ana', 33], ['Joaquim', 13], ['Maria', 45]]
print(pessoal[1])
print(pessoal[2][1])
for p in pessoal:
    print(f'{p[0]} tem {p[1]} anos de idade.')
# Read three [name, age] pairs from the console, copying each before
# clearing the shared scratch list.
galerinha = list()
dado = list()
for c in range(0, 3):
    dado.append(str(input('Nome: ')))
    dado.append(int(input('idade: ')))
    galerinha.append(dado[:])
    dado.clear()
print(galerinha)
from summariser.ngram_vector.base import Sentence
from summariser.utils.data_helpers import *
from nltk.stem.porter import PorterStemmer
from summariser.ngram_vector.state_type import *
import random
class Vectoriser:
    """Turns documents into sentence/summary feature vectors for the
    ngram-based summariser.

    NOTE(review): word_tokenize, stopwords, untokenize, getTopNgrams and the
    sent2*tokens helpers come from the star-imported data_helpers /
    state_type modules — confirm their exact contracts there.
    """
    def __init__(self,docs,sum_len=100,no_stop_words=True,stem=True,block=1,base=200,lang='english'):
        # docs: iterable of (doc_name, [sentence, ...]) pairs.
        self.docs = docs
        self.without_stopwords = no_stop_words
        self.stem = stem
        self.block_num = block
        self.base_length = base
        self.language = lang
        self.sum_token_length = sum_len
        self.stemmer = PorterStemmer()
        self.stoplist = set(stopwords.words(self.language))
        self.sim_scores = {}
        self.stemmed_sentences_list = []
        self.load_data()
    def sampleRandomReviews(self,num,heuristic_reward=True,rouge_reward=True,models=None):
        # Sample `num` random summaries (action sequences); optionally score
        # each with the heuristic reward and/or per-model ROUGE rewards.
        heuristic_list = []
        rouge_list = []
        act_list = []
        for ii in range(num):
            state = State(self.sum_token_length, self.base_length, len(self.sentences),
                          self.block_num, self.language)
            # available_sents == [0] signals the terminal state.
            while state.available_sents != [0]:
                new_id = random.choice(state.available_sents)
                if new_id == 0:
                    continue
                # Skip sentences that alone exceed the summary length budget.
                if new_id > 0 and len(self.sentences[new_id-1].untokenized_form.split(' ')) > self.sum_token_length:
                    continue
                state.updateState(new_id-1,self.sentences)
            actions = state.historical_actions
            act_list.append(actions)
            if heuristic_reward:
                rew = state.getTerminalReward(self.sentences,self.stemmed_sentences_list,self.sent2tokens,self.sim_scores)
                heuristic_list.append(rew)
            if rouge_reward:
                assert models is not None
                r_dic = {}
                for model in models:
                    # Model name is the file name portion of the model path.
                    model_name = model[0].split('/')[-1].strip()
                    rew = state.getOptimalTerminalRougeScores(model)
                    r_dic[model_name] = rew
                rouge_list.append(r_dic)
        return act_list, heuristic_list, rouge_list
    def getSummaryVectors(self,summary_acts_list):
        # Replay each action sequence and return the resulting state vectors.
        vector_list = []
        for act_list in summary_acts_list:
            state = State(self.sum_token_length, self.base_length, len(self.sentences), self.block_num, self.language)
            for i, act in enumerate(act_list):
                state.updateState(act, self.sentences, read=True)
            vector = state.getSelfVector(self.top_ngrams_list, self.sentences)
            vector_list.append(vector)
        return vector_list
    def sent2tokens(self, sent_str):
        # Dispatch to the tokenizer variant matching the stopword/stemming
        # configuration chosen at construction time.
        if self.without_stopwords and self.stem:
            return sent2stokens_wostop(sent_str, self.stemmer, self.stoplist, self.language)
        elif self.without_stopwords == False and self.stem:
            return sent2stokens(sent_str, self.stemmer, self.language)
        elif self.without_stopwords and self.stem == False:
            return sent2tokens_wostop(sent_str, self.stoplist, self.language)
        else: # both false
            return sent2tokens(sent_str, self.language)
    def load_data(self):
        # Tokenise every sentence of every document, keep both the
        # untokenized and stemmed forms, and precompute the top n-grams.
        self.sentences = []
        for doc_id, doc in enumerate(self.docs):
            doc_name, doc_sents = doc
            doc_tokens_list = []
            for sent_id, sent_text in enumerate(doc_sents):
                token_sent = word_tokenize(sent_text, self.language)
                current_sent = Sentence(token_sent, doc_id, sent_id + 1)
                untokenized_form = untokenize(token_sent)
                current_sent.untokenized_form = untokenized_form
                current_sent.length = len(untokenized_form.split(' '))
                self.sentences.append(current_sent)
                sent_tokens = self.sent2tokens(untokenized_form)
                doc_tokens_list.extend(sent_tokens)
                stemmed_form = ' '.join(sent_tokens)
                self.stemmed_sentences_list.append(stemmed_form)
        #print('total sentence num: ' + str(len(self.sentences)))
        self.state_length_computer = StateLengthComputer(self.block_num, self.base_length, len(self.sentences))
        self.top_ngrams_num = self.state_length_computer.getStatesLength(self.block_num)
        self.vec_length = self.state_length_computer.getTotalLength()
        sent_list = []
        for sent in self.sentences:
            sent_list.append(sent.untokenized_form)
        self.top_ngrams_list = getTopNgrams(sent_list, self.stemmer, self.language,
                                            self.stoplist, 2, self.top_ngrams_num)
from setuptools import setup
# Minimal setuptools packaging metadata for the betbright_test package.
setup(name='betbright_test',
      version='0.1',
      description='Python product category classification kit',
      url='',
      author='Oleksii Nidzelskyi',
      author_email='alexey.education@gmail.com',
      license='MIT',
      packages=['betbright_test'],
      zip_safe=False)
| 320 | 101 |
from docsie_universal_importer.providers.base.urls import default_urlpatterns
from .import_provider import GoogleCloudStorageProvider
# Expose the provider's standard importer URL patterns for this backend.
urlpatterns = default_urlpatterns(GoogleCloudStorageProvider)
| 198 | 53 |
import argparse
import json
import os
import time
from elf.segmentation.multicut import get_multicut_solver, _to_objective
from elf.segmentation.utils import load_multicut_problem
def simple_performance_experiments(problem, solvers):
    """Benchmark each solver on one multicut problem.

    problem: "<sample>_<size>" identifier; the problem file is cached
    under ./problems. Returns {solver_name: (energy, runtime_seconds)}.
    """
    os.makedirs("problems", exist_ok=True)
    path = f"./problems/{problem}"
    sample, size = problem.split("_")
    graph, costs = load_multicut_problem(sample, size, path)
    objective = _to_objective(graph, costs)
    print("Measure performance for sample:", problem)
    results = {}
    for solver_name in solvers:
        # RAMA variants encode their mode in the name ("rama_<mode>").
        if solver_name.startswith("rama"):
            _, mode = solver_name.split("_")
            solver, kwargs = get_multicut_solver("rama"), {"mode": mode}
        else:
            solver, kwargs = get_multicut_solver(solver_name), {}
        start = time.time()
        node_labels = solver(graph, costs, **kwargs)
        elapsed = time.time() - start
        energy = objective.evalNodeLabels(node_labels)
        print("Solver", solver_name, "runtime:", elapsed, "s, energy:", energy)
        results[solver_name] = (energy, elapsed)
    return results
# TODO add large problems! where decomp should shine...
def main():
    """CLI driver: run the performance experiments for each requested
    problem and dump per-problem results to ./results_<problem>.json."""
    parser = argparse.ArgumentParser()
    # default_solvers = ["decomposition", "kernighan-lin", "greedy-additive", "greedy-fixation"]
    default_solvers = ["decomposition", "kernighan-lin", "greedy-additive", "greedy-fixation",
                       "rama_P", "rama_PD+"]
    # Bug fix: without nargs="+", a value passed on the command line arrives
    # as a single string and the loops below iterate over its characters.
    parser.add_argument("--solvers", "-s", default=default_solvers, nargs="+")
    default_problems = ["A_small", "B_small", "C_small",
                        "A_medium", "B_medium", "C_medium"]
    parser.add_argument("--problems", "-p", default=default_problems, nargs="+")
    # TODO save as a single csv instead
    print("Simple multicut performance experiments:")
    args = parser.parse_args()
    for problem in args.problems:
        res = simple_performance_experiments(problem, args.solvers)
        res_path = f"./results_{problem}.json"
        with open(res_path, "w") as f:
            json.dump(res, f, sort_keys=True, indent=2)
# Run the benchmark suite only when executed as a script.
if __name__ == "__main__":
    main()
| 2,182 | 695 |
# IMAP server hostnames for the supported mail providers.
IMAP_GMAIL = 'imap.gmail.com'
IMAP_OUTLOOK = 'outlook.office365.com'  # Office 365 / Outlook.com
IMAP_YAHOO = 'imap.mail.yahoo.com'
| 104 | 56 |
"""Unit test package for oadds."""
| 35 | 12 |
from schematics.types import StringType, BooleanType, MD5Type, BaseType
from schematics.exceptions import ValidationError
from schematics.types.compound import ModelType
from openprocurement.api.models import ListType
from openprocurement.tender.core.procedure.models.award import (
Award as BaseAward,
PatchAward as BasePatchAward,
PostAward as BasePostAward,
)
from openprocurement.tender.core.procedure.models.milestone import QualificationMilestoneListMixin
from openprocurement.tender.openua.procedure.models.item import Item
class Award(QualificationMilestoneListMixin, BaseAward):
    """Award model for this procedure: adds UA items and mandatory
    qualified/eligible flags for active awards."""
    # NOTE(review): BaseType loosens the parent's complaints field —
    # confirm this override is intentional.
    complaints = BaseType()
    items = ListType(ModelType(Item, required=True))
    qualified = BooleanType(default=False)
    eligible = BooleanType(default=False)
    def validate_qualified(self, data, qualified):
        # An active award must be explicitly marked qualified.
        if data["status"] == "active" and not qualified:
            raise ValidationError("This field is required.")
    def validate_eligible(self, data, eligible):
        # An active award must be explicitly marked eligible.
        if data["status"] == "active" and not eligible:
            raise ValidationError("This field is required.")
class PatchAward(BasePatchAward):
    """Patch model: same extra fields as Award, all optional (no defaults)."""
    items = ListType(ModelType(Item, required=True))
    qualified = BooleanType()
    eligible = BooleanType()
class PostAward(BasePostAward):
    # No procedure-specific additions on creation.
    pass
| 1,296 | 382 |
from __future__ import print_function
import numpy as np
from fidimag.atomistic import Sim
from fidimag.common import CuboidMesh
from fidimag.atomistic import UniformExchange
def init_m(pos):
    """Initial magnetisation: each coordinate of *pos* shifted by -0.5."""
    return tuple(coord - 0.5 for coord in pos)
def test_exch_1d():
    """
    Test the x component of the exchange field
    in a 1D mesh, with the spin ordering:
    0 1 2 3 4 5
    """
    mesh = CuboidMesh(nx=5, ny=1, nz=1)
    sim = Sim(mesh)
    exch = UniformExchange(1)
    sim.add(exch)
    sim.set_m(init_m, normalise=False)
    field = exch.compute_field()
    # With J=1 each field x-component is the sum of the neighbours' mx
    # values (mx at site i is i - 0.5 shifted by init_m's x - 0.5 rule).
    assert field[0] == 1
    assert field[1 * 3] == 2
    assert field[2 * 3] == 4
    assert field[3 * 3] == 6
    assert field[4 * 3] == 3
    # y and z components must vanish in this 1D configuration.
    assert np.max(field[2::3]) == 0
    assert np.max(field[1::3]) == 0
def test_exch_1d_pbc():
    # Same 1D chain as test_exch_1d, but periodic along x: the end sites
    # additionally see each other as neighbours.
    mesh = CuboidMesh(nx=5, ny=1, nz=1, periodicity=(True, False, False))
    sim = Sim(mesh)
    exch = UniformExchange(1)
    sim.add(exch)
    sim.set_m(init_m, normalise=False)
    field = exch.compute_field()
    assert field[0] == 1 + 4
    assert field[3] == 2
    assert field[6] == 4
    assert field[9] == 6
    assert field[12] == 3 + 0
    assert np.max(field[2::3]) == 0
    assert np.max(field[1::3]) == 0
def test_exch_2d():
    # 5x2 open lattice: each x-component is the sum of the neighbours' mx.
    mesh = CuboidMesh(nx=5, ny=2, nz=1)
    sim = Sim(mesh)
    exch = UniformExchange(1)
    sim.add(exch)
    sim.set_m(init_m, normalise=False)
    field = exch.compute_field()
    assert np.max(field[2::3]) == 0
    assert field[0] == 1
    assert field[3] == 2 + 1
    assert field[6] == 1 + 2 + 3
    assert field[9] == 2 + 3 + 4
    assert field[12] == 3 + 4
def test_exch_2d_pbc2d():
    """
    Test the exchange field components in a 2D mesh with PBCs
    The mesh sites:
        3 4 5 --> (0,1,0) (1,1,0) (2,1,0)
    y ^ 0 1 2 (0,0,0) (1,0,0) (2,0,0)
      |
      x -->
    The expected components are in increasing order along x
    """
    mesh = CuboidMesh(nx=3, ny=2, nz=1, periodicity=(True, True, False))
    print(mesh.neighbours)
    sim = Sim(mesh)
    exch = UniformExchange(1)
    sim.add(exch)
    sim.set_m(init_m, normalise=False)
    field = exch.compute_field()
    expected_x = np.array([3, 4, 5, 3, 4, 5])
    expected_y = np.array([2, 2, 2, 2, 2, 2])
    # Since the field ordering is now: fx1 fy1 fz1 fx2 ...
    # We extract the x components jumping in steps of 3
    assert np.max(abs(field[::3] - expected_x)) == 0
    # For the y component is similar, now we start at the 1th
    # entry and jump in steps of 3
    assert np.max(abs(field[1::3] - expected_y)) == 0
    # Similar fot he z component
    assert np.max(field[2::3]) == 0
def test_exch_3d():
    """
    Test the exchange field of the spins in this 3D mesh:
    bottom layer:
    8 9 10 11
    4 5 6 7 x 2
    0 1 2 3
    Assertions are according to the mx component of the spins, since J is set
    to 1
    Spin components are given according to the (i, j) index position in the
    lattice:
     i lattice site
    [[ 0. 0. 0.] --> 0 j=0
     [ 1. 0. 0.] --> 1
     [ 2. 0. 0.] --> 2
     [ 3. 0. 0.] --> 3
     [ 0. 1. 0.] --> 4 j=1
     [ 1. 1. 0.]
     ...
    Remember the field ordering: fx0, fy0, fz0, fx1, ...
    """
    mesh = CuboidMesh(nx=4, ny=3, nz=2)
    sim = Sim(mesh)
    exch = UniformExchange(1)
    sim.add(exch)
    sim.set_m(init_m, normalise=False)
    field = exch.compute_field()
    # print field
    # Exchange from 0th spin
    assert field[0] == 1
    # Exchange from 1st spin
    # spin: 2 0 5 13
    # mx: 2 0 1 1
    assert field[3] == 2 + 0 + 1 + 1
    # Exchange from 2nd spin
    # spin: 3 1 6 14
    # mx: 3 1 2 2
    assert field[6] == 3 + 1 + 2 + 2
    # ...
    assert field[9] == 2 + 3 + 3
    assert field[4 * 3] == 1
    assert field[5 * 3] == 5
    assert field[6 * 3] == 10
    assert field[7 * 3] == 11
def test_exch_energy_1d():
    # Two aligned spins with J=1.23: exchange energy is -J for the pair.
    mesh = CuboidMesh(nx=2, ny=1, nz=1)
    sim = Sim(mesh)
    exch = UniformExchange(1.23)
    sim.add(exch)
    sim.set_m((0, 0, 1))
    energy = exch.compute_energy()
    assert energy == -1.23
# Manual test driver: only the PBC 2D case is enabled by default.
if __name__ == '__main__':
    # test_exch_1d()
    # test_exch_1d_pbc()
    # test_exch_2d()
    test_exch_2d_pbc2d()
    # test_exch_3d()
    # test_exch_energy_1d()
| 4,448 | 1,894 |
from geomdl import BSpline
import numpy as np
import math
import ezdxf
import burin.types
class Spline:
    """A B-spline read from a DXF file (degree, control points, knot vector)."""

    def __init__(self, degree, control, knots):
        self.degree = degree
        self.control = np.array(control)
        self.knots = np.array(knots)

    def length_upper_bound(self):
        """Length of the control polygon — an upper bound on the curve length."""
        deltas = self.control[1:] - self.control[:-1]
        return np.sqrt((deltas * deltas).sum(axis=1)).sum()

    def render_to_tolerance(self, tolerance):
        """Convert to a burin BSpline, sampled finely enough for *tolerance*."""
        curve = BSpline.Curve()
        curve.degree = self.degree
        curve.ctrlpts = self.control.tolist()
        curve.knotvector = self.knots.tolist()
        samples = math.ceil(self.length_upper_bound() / tolerance)
        curve.sample_size = max(2, samples)
        return burin.types.BSpline(curve, tolerance)

    @staticmethod
    def from_dxf(s):
        """Build a Spline from an ezdxf SPLINE entity."""
        return Spline(s.dxf.degree, s.control_points, s.knots)
class Polyline:
    """A polyline stored as an array of vertex coordinates."""

    def __init__(self, points):
        self.points = points

    def render_to_tolerance(self, tolerance):
        # Tolerance is accepted only for parallelism with Spline; a
        # polyline needs no sampling, and only x/y are kept.
        return burin.types.Polyline(self.points[:, 0:2])

    @staticmethod
    def from_dxf(line):
        """Build a Polyline from an ezdxf POLYLINE entity, closing it when flagged."""
        verts = [v.dxf.location for v in line.vertices]
        if line.dxf.flags & 1:  # closed polyline: repeat the first vertex
            verts.append(verts[0])
        return Polyline(np.array(verts))
class Point:
    """A single point with at least x and y coordinates."""

    def __init__(self, coords):
        self.coords = coords

    def render_to_tolerance(self, _):
        # Tolerance is irrelevant for a point; only x/y are forwarded.
        x, y = self.coords[0:2]
        return burin.types.Point((x, y))

    @staticmethod
    def from_dxf(point):
        """Build a Point from an ezdxf POINT entity."""
        return Point(np.array(point.dxf.location))
class Arc:
    """A circular arc; a full circle is stored with start == end."""

    def __init__(self, start, end, center):
        self.start = start
        self.end = end
        self.center = center

    def render_to_tolerance(self, _):
        # Tolerance is irrelevant; arcs map 1:1 to burin arcs (CW flag False).
        return burin.types.Arc(self.start, self.end, self.center, False)

    @staticmethod
    def from_dxf(circle):
        """ Is it an arc? Is it a circle? Who knows? """
        center = np.array(circle.dxf.center)[0:2]
        radius = circle.dxf.radius
        if isinstance(circle, ezdxf.entities.Arc):
            start = np.array(circle.start_point)[0:2]
            end = np.array(circle.end_point)[0:2]
            return Arc(start, end, center)
        # Full circle: a degenerate arc starting and ending at the same point.
        anchor = center + np.array([radius, 0])
        return Arc(anchor, anchor, center)
| 2,475 | 833 |
# Backend endpoints and request keys for the bookbeat API client.
API_ROOT = 'http://localhost:6000' #'https://bookbeat.herokuapp.com'
TOKEN_VALIDATION_ABS_ENDPOINT = API_ROOT + '/validate-token'
# Key under which the user id is sent in token-validation requests.
REQ_USER_ID_KEY_NAME = 'id'
from output.models.nist_data.list_pkg.date.schema_instance.nistschema_sv_iv_list_date_min_length_3_xsd.nistschema_sv_iv_list_date_min_length_3 import NistschemaSvIvListDateMinLength3
# Public re-export surface of this generated schema module.
__all__ = [
    "NistschemaSvIvListDateMinLength3",
]
| 238 | 97 |
#!/usr/bin/env python
#*********************************************************************
# Software License Agreement (BSD License)
#
# Copyright (c) 2011 andrewtron3000
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the Willow Garage nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#********************************************************************/
import roslib; roslib.load_manifest('face_follow')
import rospy
from geometry_msgs.msg import Point
from geometry_msgs.msg import PointStamped
from pid_control import PID
from pid_control.msg import PIDDiagnostics
from geometry_msgs.msg import Twist
from geometry_msgs.msg import Vector3
# PID controller with symmetric limits (-0.2..0.2) for the yaw command.
pid_controller = PID.PIDController('face_follow', -0.2, 0.2, -0.2, 0.2)
# NOTE(review): Publisher without queue_size is deprecated in newer rospy.
twist_publisher = rospy.Publisher("/cmd_vel", Twist)
def listener():
    """Track face x-coordinates and publish yaw velocity commands.

    Waits up to 0.5 s for a face position; on timeout publishes a zero
    twist and pauses for a second before retrying.
    """
    rospy.init_node('listener', anonymous=True)
    # Exit cleanly on node shutdown instead of looping forever (was `while True`).
    while not rospy.is_shutdown():
        try:
            data = rospy.wait_for_message("face_coords", PointStamped, timeout=0.5)
            # Error term: horizontal offset from the image centre (352 px frame).
            control = pid_controller.update(352.0/2.0 - data.point.x, data.point.x)
            twist_publisher.publish(Twist(angular = Vector3(x = 0.0, y = 0.0, z = control)))
        except rospy.ROSInterruptException:
            break  # node is shutting down
        except rospy.ROSException:
            # Bug fix: the original bare `except:` also swallowed shutdown
            # interrupts and programming errors; only the wait timeout is
            # handled here, by commanding a stop.
            twist_publisher.publish(Twist(angular = Vector3(x = 0.0, y = 0.0, z = 0.0)))
            rospy.sleep(1.0)
# Start the tracking loop only when executed as a ROS node script.
if __name__ == '__main__':
    listener()
| 2,717 | 941 |
# coding:utf-8
import matplotlib.pyplot as plt
import matplotlib as mpl
# Def
def GetDefRate(d):
    """Damage-reduction fraction from defence d, capped at 90%."""
    rate = d / (3360.0 + d)
    return 0.9 if rate > 0.9 else rate
def GetDamageRate(d):
    """Fraction of damage still taken after defence mitigation."""
    return 1 - GetDefRate(d)
def GetDefImp(currDef, prevDef, pofang):
    """Relative damage reduction gained going from prevDef to currDef,
    with a fraction `pofang` of defence pierced by the attacker."""
    effective_curr = currDef * (1 - pofang)
    effective_prev = prevDef * (1 - pofang)
    return 1 - GetDamageRate(effective_curr) / GetDamageRate(effective_prev)
# HP
def GetHPImp(currHP, prevHP):
    """Relative effective-HP gain from prevHP to currHP."""
    previous_share = float(prevHP) / currHP
    return 1 - previous_share
# CriDef
def GetCriDefRate(d):
    """Crit-defence mitigation fraction from stat d, capped at 90%."""
    rate = d / (4410.0 + d)
    return 0.9 if rate > 0.9 else rate
def GetCriDefImp(currCriDef, prevCriDef, Cri, CriPlus):
    """Marginal value of raising crit-defence from prevCriDef to currCriDef,
    given the attacker's crit chance Cri and crit damage bonus CriPlus."""
    prevRate = GetCriDefRate(prevCriDef)
    currRate = GetCriDefRate(currCriDef)
    deltaRate = currRate - prevRate
    return deltaRate / ((Cri - prevRate) + 1 / CriPlus)
# CriDec
def GetCriDecRate(d):
    """Crit-chance-reduction fraction from stat d."""
    denominator = 1050 + d
    return d / denominator
def GetCriDecImp(currCriDec, prevCriDec, Cri, CriPlus):
    """Marginal value of raising crit reduction from prevCriDec to currCriDec,
    given the attacker's crit chance Cri and crit damage bonus CriPlus."""
    prevRate = GetCriDecRate(prevCriDec)
    currRate = GetCriDecRate(currCriDec)
    deltaRate = currRate - prevRate
    return deltaRate / ((CriPlus - prevRate) + 1 / Cri)
# Dodge
def GetDodgeRate(d):
    """Raw dodge fraction from stat d (soft cap via the 0.7*d term)."""
    return d / (4350.0 + 0.7 * d)
def GetRealDodgeRate(d, cri):
    """Actual dodge chance when competing with crit chance cri.

    Below a combined total of 1 the raw rate applies; above it, dodge and
    crit share the probability mass proportionally."""
    if d + cri < 1:
        return d
    return d / (d + cri)
def GetDodgeImp(currDodge, prevDodge, cri, criDam):
    """Marginal value of raising dodge from prevDodge to currDodge, given
    the attacker's crit chance cri and crit damage multiplier criDam."""
    currDodgeRate = GetRealDodgeRate(currDodge, cri)
    prevDodgeRate = GetRealDodgeRate(prevDodge, cri)
    gained = currDodgeRate - prevDodgeRate
    return gained * 1.7 / (1 + cri * criDam - prevDodgeRate * 0.7)
# Yuling
def GetYulingDamageRate(d):
    """Fraction of damage taken after yuling mitigation from stat d."""
    return 1 - d / (d + 3112.5)
def GetYulingImp(currYuling, prevYuling):
    """Relative damage reduction gained from prevYuling to currYuling."""
    return 1 - GetYulingDamageRate(currYuling) / GetYulingDamageRate(prevYuling)
def GetVal(xo, yo, x, const):
    """Piecewise-linear interpolation of the polyline (xo, yo) at the
    query points in x, scaled by const.

    A query point contributes one result for EVERY segment whose x-range
    contains it (shared endpoints therefore appear twice). Returns a pair
    [matched_x_values, interpolated_y_values]."""
    assert len(xo) == len(yo)
    xs_out = []
    ys_out = []
    for query in x:
        for i in range(1, len(xo)):
            left, right = xo[i - 1], xo[i]
            # Accept the segment whichever way it is oriented.
            if left <= query <= right or left >= query >= right:
                slope = (yo[i] - yo[i - 1]) / (xo[i] - xo[i - 1])
                xs_out.append(query)
                ys_out.append((slope * (query - left) + yo[i - 1]) * const)
    return [xs_out, ys_out]
# Python 2 script: plots the marginal value of each defensive stat per
# upgrade step and prints interpolated stat costs at fixed benefit levels.
if __name__ == "__main__":
    print mpl.matplotlib_fname()
    # Stat points gained per upgrade step, per attribute.
    DEF_CONST = 120
    HP_CONST = 432
    CRI_DEF_CONST = 54
    DODGE_CONST = 54
    YULING_CONST = 54
    fig = plt.figure()
    ax1 = fig.add_subplot(111)
    xa = []
    defense_0 = []
    defense_75 = []
    HP = []
    d = []
    defCri_s = []
    defCri_l = []
    dodge = []
    yuling = []
    lastp = 0
    first = True
    # Marginal improvement of step p relative to step p-1 for each stat;
    # the first iteration only primes lastp.
    for p in range(1, 170, 1):
        if first != True:
            xa.append(p)
            defense_0.append(GetDefImp(p*DEF_CONST, lastp*DEF_CONST, 0))
            defense_75.append(GetDefImp(p*DEF_CONST, lastp*DEF_CONST, 0.75))
            HP.append(GetHPImp(p*HP_CONST, lastp*HP_CONST))
            defCri_s.append(GetCriDefImp(p*CRI_DEF_CONST, lastp*CRI_DEF_CONST, 0.3, 1.0))
            defCri_l.append(GetCriDefImp(p*CRI_DEF_CONST, lastp*CRI_DEF_CONST, 0.75, 1.9))
            dodge.append(GetDodgeImp(p*DODGE_CONST, lastp*DODGE_CONST, 0.7, 1.9))
            yuling.append(GetYulingImp(p*YULING_CONST, lastp*YULING_CONST))
        else:
            first = False
        lastp = p
    # Interpolate: stat points needed for benefit levels 0.000..0.019.
    print "d0:",GetVal(defense_0, xa, [x*0.001 for x in range(0, 20, 1)], DEF_CONST)
    print "d75:",GetVal(defense_75, xa, [x*0.001 for x in range(0, 20, 1)], DEF_CONST)
    print "hp:",GetVal(HP, xa, [x*0.001 for x in range(0, 20, 1)], HP_CONST)
    print "fb1:",GetVal(defCri_s, xa, [x*0.001 for x in range(0, 20, 1)], CRI_DEF_CONST)
    print "fb2:",GetVal(defCri_l, xa, [x*0.001 for x in range(0, 20, 1)], CRI_DEF_CONST)
    print "sb:",GetVal(dodge, xa, [x*0.001 for x in range(0, 20, 1)], DODGE_CONST)
    print "yl:",GetVal(yuling, xa, [x*0.001 for x in range(0, 20, 1)], YULING_CONST)
    ax1.set_xlabel(u"收益")
    fy1, = ax1.plot(defense_0, xa, label=u"防御(0%):" + str(DEF_CONST))
    fy2, = ax1.plot(defense_75, xa, label=u"防御(75%):" + str(DEF_CONST))
    hp, = ax1.plot(HP, xa, label=u"血量:" + str(HP_CONST))
    dc1, = ax1.plot(defCri_s, xa, label=u"暴防(30%, 100%):" + str(CRI_DEF_CONST))
    dc2, = ax1.plot(defCri_l, xa, label=u"暴防(75%, 190%):" + str(CRI_DEF_CONST))
    ds, = ax1.plot(dodge, xa, label=u"闪避:" + str(DODGE_CONST))
    yl, = ax1.plot(yuling, xa, label=u"御灵:" + str(YULING_CONST))
    ax1.set_xlim([0, 0.025])
    plt.legend(handles=[fy1,fy2,hp,dc1,dc2,ds,yl])
    plt.show()
# Generated by Django 3.1.7 on 2021-03-09 16:06
from django.db import migrations, models
import djrichtextfield.models
class Migration(migrations.Migration):
    """Auto-generated migration: adds Project.date and switches the Blog and
    Project description columns to rich-text fields."""

    dependencies = [
        ('portfolio_app', '0004_delete_service'),
    ]

    operations = [
        migrations.AddField(
            model_name='project',
            name='date',
            # null=True so pre-existing rows are valid; auto_now_add stamps
            # newly created projects with the creation date
            field=models.DateField(auto_now_add=True, null=True),
        ),
        migrations.AlterField(
            model_name='blog',
            name='description',
            field=djrichtextfield.models.RichTextField(),
        ),
        migrations.AlterField(
            model_name='project',
            name='description',
            field=djrichtextfield.models.RichTextField(),
        ),
    ]
| 759 | 226 |
"""Demo for latent factor model"""
from __future__ import division
import numpy as np
import numpy.random as nr
import matplotlib.pyplot as plt
from IBPFM import IBPFM
from utils.tracePlot import trace
from utils.scaledimage import scaledimage
# Experiment configuration: number of noisy images, MCMC iterations, and the
# finite upper bound on the number of latent features.
N = 100
chain = 1000
K_finite = 6
# # read the keyboard input for the number of images
# N = raw_input("Enter the number of noisy images for learning features: ")
# try:
#     N = int(N)
# except ValueError:
#     print "Not a number"
#     sys.exit('Try again')
# # read the keyboard input for the number of MCMC chain
# chain = raw_input("Enter the number of MCMC chain: ")
# try:
#     chain = int(chain)
# except ValueError:
#     print "Not a number"
#     sys.exit('Try again')
# # read the keyboard input for the number of finite K
# K_finite = raw_input("Enter the finite number (upper bound) of features K: ")
# try:
#     K_finite = int(K_finite)
# except ValueError:
#     print "Not a number"
#     sys.exit('Try again')
# ------------------------------------------------------------------------------
# Model parameter
# Each triple is (initial value, hyper-parameter a, hyper-parameter b);
# presumably the priors consumed by IBPFM below -- confirm against its signature.
(alpha, alpha_a, alpha_b) = (1., 1., 1.)
(sigma_x, sigma_xa, sigma_xb) = (.5, 1., 1.)
(sigma_a, sigma_aa, sigma_ab) = (1., 1., 1.)
# ------------------------------------------------------------------------------
# Generate image data from the known features (four 6x6 binary patterns,
# one per image quadrant)
feature1 = np.array([[0,1,0,0,0,0],[1,1,1,0,0,0],[0,1,0,0,0,0],
                     [0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0]])
feature2 = np.array([[0,0,0,1,1,1],[0,0,0,1,0,1],[0,0,0,1,1,1],
                     [0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0]])
feature3 = np.array([[0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0],
                     [1,0,0,0,0,0],[1,1,0,0,0,0],[1,1,1,0,0,0]])
feature4 = np.array([[0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0],
                     [0,0,0,1,1,1],[0,0,0,0,1,0],[0,0,0,0,1,0]])
D = 36  # observed dimension: each image is 6x6
f1 = feature1.reshape(D)
f2 = feature2.reshape(D)
f3 = feature3.reshape(D)
f4 = feature4.reshape(D)
trueWeights = np.vstack((f1, f2, f3, f4))
# ------------------------------------------------------------------------------
# Generate noisy image data
K = 4  # true number of latent features
sig_x_true = 0.5  # true observation-noise scale
# FIX: the np.float / np.int aliases were removed in NumPy 1.24; the builtin
# float is what they aliased, so behavior is unchanged on older NumPy too.
A = np.vstack((f1, f2, f3, f4)).astype(float)
Z_orig = nr.binomial(1, 0.5, (N, K)).astype(float)
V_orig = nr.normal(0, 1, size=(N, K))
# V_orig = nr.exponential(1, size=(N, K))
Z_orig = np.multiply(Z_orig, V_orig)  # real-valued loadings: binary mask * weights
X = np.dot(Z_orig, A)
noise = nr.normal(0, sig_x_true, (N, D))
X += noise
# ------------------------------------------------------------------------------
# Return MCMC result
# Run the IBP factor-model sampler; each *_save value holds the per-iteration
# trace of that quantity over the chain.
(K_save, alpha_save, sigma_x_save, sigma_a_save, loglikelihood_save, Z_save, A_save) = \
    IBPFM(iteration=chain, data=X, upperbound_K=K_finite,
          alpha=(alpha, alpha_a, alpha_b),
          sigma_x=(sigma_x, sigma_xa, sigma_xb),
          sigma_a=(sigma_a, sigma_aa, sigma_ab), realvaluedZ=True,
          proposeNewfeature=True,
          updateAlpha=True, updateSigma_x=True, updateSigma_a=True,
          initZ=None, stdData=False)
# Save trace plots of the sampled quantities
trace(K_save, alpha_save, sigma_x_save, sigma_a_save, loglikelihood_save)
# Save true latent feature plot
(orig, sub) = plt.subplots(1, 4)
for sa in sub.flatten():
    sa.set_visible(False)  # hide axes until an image is drawn into them
orig.suptitle('True Latent Features')
for (i, true) in enumerate(trueWeights):
    ax = sub[i]
    ax.set_visible(True)
    scaledimage(true.reshape(6, 6), pixwidth=3, ax=ax)
orig.set_size_inches(13, 3)
orig.savefig('Original_Latent_Features.png')
plt.close()
# Save some of example figures from data X (first four noisy images)
examples = X[0:4, :]
(ex, sub) = plt.subplots(1, 4)
for sa in sub.flatten():
    sa.set_visible(False)
ex.suptitle('Image Examples')
for (i, true) in enumerate(examples):
    ax = sub[i]
    ax.set_visible(True)
    scaledimage(true.reshape(6, 6), pixwidth=3, ax=ax)
ex.set_size_inches(13, 3)
ex.savefig('Image_Examples.png')
plt.close()
# Show and save result
# NOTE(review): indexing the traces with `chain` assumes the *_save arrays
# hold chain+1 entries (initial state plus one per iteration) -- confirm
# against IBPFM's return shapes.
lastZ = Z_save[:, :, chain]
# FIX: np.int / np.float were removed in NumPy 1.24; builtins behave the same.
mcount = (lastZ != 0).astype(int).sum(axis=0)  # samples using each feature
index = np.where(mcount > 0)  # keep only features actually in use
lastK = K_save[chain].astype(int)
lastA = A_save[index, :, chain]
A = lastA.reshape(len(index[0]), D)
# Flip each learned factor so its dominant-magnitude weight is positive,
# removing the sign ambiguity of the factorization before display.
A_row = A.shape[0]
for i in range(A_row):
    cur_row = A[i, :].tolist()
    abs_row = [abs(j) for j in cur_row]
    max_index = abs_row.index(max(abs_row))
    if cur_row[max_index] < 0:
        A[i, :] = -np.array(cur_row)
K = max(len(trueWeights), len(A))
(fig, subaxes) = plt.subplots(2, K)
for sa in subaxes.flatten():
    sa.set_visible(False)
fig.suptitle('Ground truth (top) vs learned factors (bottom)')
for (idx, trueFactor) in enumerate(trueWeights):
    ax = subaxes[0, idx]
    ax.set_visible(True)
    scaledimage(trueFactor.reshape(6, 6),
                pixwidth=3, ax=ax)
for (idx, learnedFactor) in enumerate(A):
    ax = subaxes[1, idx]
    scaledimage(learnedFactor.reshape(6, 6),
                pixwidth=3, ax=ax)
    ax.set_visible(True)
#fig.savefig("IBP_meanA.png")
plt.show()
| 4,882 | 2,015 |
# Basic parameter setup #########################
Nin = 784  # input width: flattened MNIST image (reshaped to W*H below)
Nh_l = [100, 50]  # widths of the two hidden layers
number_of_class = 10
Nout = number_of_class  # one output unit per digit class
# Classification DNN model implementation ########################
from keras import layers, models
class DNN(models.Sequential):
    """Fully connected MNIST classifier: Nin -> Nh_l[0] -> Nh_l[1] -> Nout."""

    def __init__(self, Nin, Nh_l, Nout):
        super().__init__()
        # Two ReLU hidden layers followed by a softmax output layer.
        first_hidden = layers.Dense(Nh_l[0], activation='relu',
                                    input_shape=(Nin,), name='Hidden-1')
        second_hidden = layers.Dense(Nh_l[1], activation='relu', name='Hidden-2')
        output_layer = layers.Dense(Nout, activation='softmax')
        for layer in (first_hidden, second_hidden, output_layer):
            self.add(layer)
        self.compile(loss='categorical_crossentropy',
                     optimizer='adam',
                     metrics=['accuracy'])
# Data preparation ##############################
import numpy as np
from keras import datasets
from keras.utils import np_utils
(X_train, y_train), (X_test, y_test) = datasets.mnist.load_data()
Y_train = np_utils.to_categorical(y_train)  # one-hot encode the labels
Y_test = np_utils.to_categorical(y_test)
L, W, H = X_train.shape
X_train = X_train.reshape(-1, W * H)  # flatten each image to a vector
X_test = X_test.reshape(-1, W * H)
# scale pixel values from [0, 255] into [0, 1]
X_train = X_train / 255.0
X_test = X_test / 255.0
# Train and test the classification DNN ####################
model = DNN(Nin, Nh_l, Nout)
history = model.fit(X_train, Y_train, epochs=5, batch_size=100, validation_split=0.2)
# NOTE: 'performace_test' typo kept as-is; result is [loss, accuracy]
performace_test = model.evaluate(X_test, Y_test, batch_size=100)
print('Test Loss and Accuracy ->', performace_test)
| 1,391 | 588 |
def dynamicArray(n, queries):
    """HackerRank 'Dynamic Array': process (type, x, y) queries over n buckets.

    Type-1 queries append y to bucket (x ^ lastAnswer) % n; type-2 queries
    read index y (mod bucket length) from that bucket, print it, and record
    it in the returned list.
    """
    buckets = [[] for _ in range(n)]
    last_answer = 0
    answers = []
    for query in queries:
        qtype, x, y = query[0], query[1], query[2]
        bucket = buckets[(x ^ last_answer) % n]
        if qtype == 1:
            bucket.append(y)
        else:
            last_answer = bucket[y % len(bucket)]
            print(last_answer)
            answers.append(last_answer)
    return answers
| 492 | 165 |
"""
The exceptions for the crosslang module
"""
class CompilationError(Exception):
    """Raised when the user script fails to compile."""

    def __init__(self, file, reason):
        self.file = file      # path of the script that failed
        self.reason = reason  # compiler output / failure description

    def __str__(self):
        return f"Failed to compile '{self.file}':\n{self.reason}"
class MLClientExecutionError(Exception):
    """Raised when an error occurs while running a non-Python ML script."""

    def __init__(self, message):
        """Store the error message reported by the client."""
        self.message = message

    def __str__(self):
        return self.message
| 649 | 173 |
from sqlalchemy import Column, Sequence
from sqlalchemy import Integer, String, Text
from meirin.db.base_class import Base
class MetaPolicy(Base):
    """ORM model for the 'metapolicy' table."""
    __tablename__ = 'metapolicy'
    # helper: explicit sequence so the id is assigned server-side
    metapolicy_id_seq = Sequence('metapolicy_id_seq', metadata=Base.metadata)
    # columns
    id = Column(Integer, metapolicy_id_seq, server_default=metapolicy_id_seq.next_value(), primary_key = True)
    name = Column(String(64))
    match = Column(Text)  # match expression; format not visible here -- see callers
    mode = Column(String(64))
    # TODO: index https://docs.sqlalchemy.org/en/14/orm/extensions/indexable.html
    def __repr__(self):
        return f"MetaPolicy(id={self.id!r}, name={self.name!r})"
| 663 | 239 |
from .bn import ABN, InPlaceABN, InPlaceABNSync
from .functions import ACT_RELU, ACT_LEAKY_RELU, ACT_ELU, ACT_NONE
| 115 | 53 |
import os
import pkg_resources
from mitba import cached_function
@cached_function
def get_logged_in_username():
    """Best-effort lookup of the current user's name (cached).

    Uses the POSIX pwd database when available; falls back to the
    USERNAME/USER environment variables otherwise.
    """
    try:
        import pwd
        return pwd.getpwuid(os.getuid()).pw_name
    except (ImportError, KeyError):
        # ImportError: pwd is POSIX-only (Windows has no pwd module)
        # KeyError: the uid has no entry in the passwd database
        return os.environ.get('USERNAME') or os.environ.get('USER', 'unknown')
@cached_function
def get_hostname():
    """Return this machine's fully-qualified domain name (cached)."""
    import socket
    return socket.getfqdn()
@cached_function
def get_infinisdk_version():
    """Return the installed infinisdk distribution version, or 'N/A' if absent."""
    try:
        return pkg_resources.get_distribution('infinisdk').version  # pylint: disable=no-member
    except pkg_resources.DistributionNotFound:
        return 'N/A'
| 779 | 245 |
import sys
import lief
import json
import struct
import os
def filter_file(fname):
    """Map a binary path to its policy file name.

    Slashes become underscores, '.json' is appended, and a single leading
    dot (from relative paths like './x') is stripped.
    """
    name = "%s.json" % fname.replace("/", "_")
    if name.startswith("."):
        name = name[1:]
    return name
def main(fname):
    """Patch *fname* with the sandboxing libraries and embed its policy.

    Loads policy_<mangled-name>.json, injects libchestnut/libseccomp into
    the ELF's dynamic section, and appends the serialized policy plus a
    4-byte length footer to the patched copy.

    :returns: 1 when the policy file cannot be loaded, otherwise None.
    """
    # load filter
    ffname = "policy_%s" % filter_file(fname)
    try:
        # FIX: use a context manager so the handle is closed (the original
        # leaked it), and catch only load/parse errors instead of a bare
        # except that also swallowed KeyboardInterrupt/SystemExit.
        with open(ffname) as policy_file:
            filters = json.loads(policy_file.read())
    except (OSError, ValueError):  # ValueError covers JSONDecodeError
        print("[-] Could not load filter file %s" % ffname)
        return 1
    print("[+] Allowed syscalls: %d" % len(filters["syscalls"]))
    # inject sandboxing library
    binary = lief.parse(fname)
    binary.add_library("libchestnut.so")
    # add seccomp library as well
    binary.add_library("libseccomp.so.2")
    patched = "%s_patched" % fname
    binary.write(patched)
    # append the policy and a trailing 4-byte little-endian length footer so
    # the loader can find the blob at the end of the file
    with open(patched, "ab") as elf:
        filter_data = json.dumps(filters).encode()
        elf.write(filter_data)
        elf.write(struct.pack("I", len(filter_data)))
    os.chmod(patched, 0o755)
    print("[+] Saved patched binary as %s_patched" % fname)
if __name__ == "__main__":
    # CLI entry point: expects exactly one argument, the ELF binary to patch.
    if len(sys.argv) != 2:
        print("Usage: %s <binary>" % sys.argv[0])
    else:
        main(sys.argv[1])
| 1,176 | 433 |
"""Generic regex-based parser."""
import re
from collections import deque
from typing import Union, Any, List, Deque, Tuple, Dict, Callable
from scoff.parsers.linematch import MatcherError, LineMatcher
# Matches an empty or whitespace-only line.  FIX: use a raw bytes literal --
# b"\s*$" relies on the invalid escape '\s', which is deprecated and will
# become a SyntaxError in future Python versions; rb"..." is identical at
# runtime today.
EMPTY_LINE = re.compile(rb"\s*$")
class ParserError(Exception):
    """Parser error: unknown state or no candidate matched the input."""
class DataParser:
    """Simple data parser.

    Tokens are regular expression-based.  Subclasses implement one
    ``_state_<name>(position)`` method per parser state, each returning
    ``(size, matched_candidate, fields)``; the parser dispatches to the
    method named after the current state and fires registered hooks
    after every step.
    """

    def __init__(
        self,
        initial_state: Union[str, int, None] = None,
        consume_spaces: bool = False,
        **kwargs,
    ):
        """Initialize.

        :param initial_state: Initial state of the parser
        :param consume_spaces: Consume stray space characters
        """
        self._state_hooks = {}
        super().__init__(**kwargs)
        self._state_stack: Deque[Union[str, int, None]] = deque()
        self._state = initial_state
        self._consume = consume_spaces
        self._current_position = 1
        self._current_line = 1
        self._data = None
        self._abort = False

    @property
    def state(self):
        """Get current state."""
        return self._state

    def add_state_hook(self, state: Union[str, int], hook: Callable):
        """Add state hook (callback).

        A callback will be called when the parser reaches a specified state.

        :param state: The parser state to add a callback to
        :param hook: The callback to be added
        :raises TypeError: if hook is not callable
        :raises ParserError: if the state is unknown
        """
        if not callable(hook):
            raise TypeError("hook must be callable")
        if state not in self.states:
            raise ParserError(f"unknown state '{state}'")
        if state not in self._state_hooks:
            self._state_hooks[state] = {hook}
        else:
            self._state_hooks[state] |= {hook}

    def _handle_match(self, candidate):
        """Handle candidate match (hook point for subclasses)."""

    def _handle_options(self, **options: Any):
        """Handle candidate options (hook point for subclasses)."""

    def _try_parse(
        self, candidates: List["LineMatcher"], position: int
    ) -> Tuple[int, "LineMatcher", Dict[str, str]]:
        """Try each candidate at *position*; return (size, matcher, fields).

        LineMatcher is quoted as a lazy forward reference; behavior is
        unchanged when the module-level import is present.
        """
        if self._consume:
            m = EMPTY_LINE.match(self._data, position)
            if m is not None:
                # an empty line, consume
                return (m.span()[1], None, None)
        for candidate in candidates:
            try:
                if not isinstance(candidate, LineMatcher):
                    raise TypeError("candidate must be LineMatcher object")
                size, fields = candidate.parse_first(self._data, position)
            except MatcherError:
                continue
            options = candidate.options.copy()
            change_state = options.pop("change_state", None)
            push_state = options.pop("push_state", None)
            pop_state = options.pop("pop_state", None)
            if change_state is not None:
                self._change_state(change_state)
            elif push_state is not None:
                self._push_state(push_state)
            elif pop_state is not None:
                self._pop_state(pop_state)
            # handle other options
            self._handle_options(**options)
            # handle other custom options
            self._handle_match(candidate)
            # advance position
            self._current_position += size
            # advance line; the +1 presumably accounts for the newline that
            # terminates the match -- confirm against subclass matchers
            self._current_line += (
                self._data.count(b"\n", position, position + size) + 1
            )
            return (size, candidate, fields)
        raise ParserError("could not parse data")

    def _current_state_function(self, position: int) -> int:
        """Dispatch to the _state_<current state> handler and fire its hooks."""
        if not hasattr(self, "_state_{}".format(self._state)):
            raise RuntimeError(f"in unknown state: {self._state}")
        size, stmt, fields = getattr(self, "_state_{}".format(self._state))(
            position
        )
        # call hooks
        if self._state in self._state_hooks:
            for hook in self._state_hooks[self._state]:
                hook(self._state, stmt, fields)
        return size

    def _abort_parser(self):
        """Stop parsing."""
        self._abort = True

    @property
    def current_pos(self):
        """Get current position."""
        return self._current_position

    @property
    def current_line(self):
        """Get current line."""
        return self._current_line

    @property
    def states(self):
        """Get possible states.

        BUG FIX: the original filtered on startswith("_state"), which also
        matched the plain ``_state`` instance attribute and crashed with
        IndexError on ``"_state".split("_")[2]``.  Requiring the trailing
        underscore restricts the scan to ``_state_<name>`` attributes.
        """
        return [
            attr_name.split("_")[2]
            for attr_name in dir(self)
            if attr_name.startswith("_state_")
        ]

    def parse(self, data: str) -> int:
        """Parse data.

        :param data: Textual data to be parsed
        :return: Current position in data
        """
        self._data = data.encode()
        self._current_position = 1
        self._current_line = 1
        current_pos = 0
        while current_pos < len(data):
            if self._abort is True:
                break
            size = self._current_state_function(current_pos)
            # consume data; the +1 presumably skips the line terminator
            current_pos += size + 1
        return current_pos

    def _state_change_handler(self, old_state, new_state):
        """State change handler (hook point for subclasses)."""

    def _change_state(self, new_state):
        """Change state."""
        old_state = self._state
        self._state = new_state
        # call state change handler
        self._state_change_handler(old_state, new_state)

    def _push_state(self, new_state):
        """Push into state stack and change state."""
        self._state_stack.append(self._state)
        self._change_state(new_state)

    def _pop_state(self, count):
        """Pop from state stack and change state.

        BUG FIX: states are pushed with append(); the original popped with
        popleft() (the opposite end of the deque), so nested push/pop pairs
        restored the oldest saved state instead of the most recent one.
        """
        for _ in range(count):
            self._change_state(self._state_stack.pop())
| 5,826 | 1,605 |
#!/usr/bin/python3
'''
@author: snellenbach
Config sequence model
'''
from enum import Enum, unique
import re
from cfg.model.RegModelWrapper import RegModelWrapper
from cfg.model.Utils import MsgUtils
from cfg.output.OutBuilder import OutBuilder as ob
# ------- config model node classes
class BaseCfgNode:
    """Base class for all config-model nodes.

    A class-level stack of active hierarchical nodes lets newly created
    nodes attach themselves automatically to the node currently being built.
    """
    _nodeStack = [] # stack of active config nodes for auto-add
    _outBuilder = None # shared output builder, set via setOutBuilder()
    def __init__(self, sourceAstNode=None, comment=''):
        """Create a node and auto-attach it to the node on top of the stack.

        :param sourceAstNode: originating AST node, kept for diagnostics
        :param comment: free-form comment carried with the node
        """
        self.sourceAstNode = sourceAstNode
        self.comment = comment
        self.children = []
        self.parent = None
        self.allowedTags = set() # set of allowed versions for this level (parser allows currently allows in class, method)
        # add this node to parent (top of stack)
        if __class__._nodeStack:
            self.parent = __class__._nodeStack[-1]
            self.parent.addChild(self)
    def addChild(self, child):
        """Append a child node to this node."""
        self.children.append(child)
    def popChild(self):
        ''' pop last added child from this node '''
        if self.children:
            self.children.pop()
    def display(self, indent = 0):
        ''' display config model node info recursively '''
        print(' '*indent + 'base:')
        for child in self.children:
            child.display(indent+1)
    @staticmethod
    def finishNode(omit):
        ''' Pop current node from the active model stack. Optionally, remove this node if omit is set. '''
        # __class__ is available here because this staticmethod is defined
        # inside the class body (implicit closure cell)
        __class__.popNode()
        if omit:
            parent = __class__._nodeStack[-1]
            parent.popChild()
    @staticmethod
    def popNode():
        ''' pop cfg node from top of the stack '''
        return __class__._nodeStack.pop()
    @staticmethod
    def peekNode():
        ''' return cfg node at top of the stack '''
        return __class__._nodeStack[-1]
    def hierDisplay(self, indent, s):
        ''' display config model node info recursively, with header line s '''
        print(' '*indent + s)
        for child in self.children:
            child.display(indent+1)
    def resolvePaths(self):
        ''' resolve all paths in this config model node info recursively '''
        for child in self.children:
            child.resolvePaths()
    def setOutBuilder(self, outBuilder):
        ''' set specified output builder (shared by the entire model) '''
        #print(f'BaseCfgNode setOutBuilder: called in {type(self)}, outBuilder type={type(outBuilder)}')
        BaseCfgNode._outBuilder = outBuilder
    def generateOutput(self):
        ''' generate specified output for this config model recursively '''
        #print(f'BaseCfgNode generateOutput: called in {type(self)}')
        for child in self.children:
            child.generateOutput()
class HierCfgNode(BaseCfgNode):
    ''' hierarchical node (pushes itself onto the active node stack on create) '''
    def __init__(self, sourceAstNode = None, comment=''):
        BaseCfgNode.__init__(self, sourceAstNode, comment)
        # append this node to the stack so children auto-attach to it
        __class__._nodeStack.append(self)
        self.vars = {} # dict of vars defined in this node scope
    def whatami(self):
        ''' human-readable node kind, used in error messages '''
        return 'unspecified hierarchy'
    def findVar(self, varName, allowInputs = True):
        ''' find a variable by name traversing from current node thru ancestors

        :param varName: variable name to look up
        :param allowInputs: when False, resolving to a CfgInputVariable is a
                            fatal error (inputs can not be assigned a value)
        '''
        if varName in self.vars:  # idiomatic membership test
            retVar = self.vars[varName]
            if allowInputs or (type(retVar) is not CfgInputVariable):
                return retVar
            MsgUtils.errorExit('input variable ' + varName + ' can not be assigned a value.')
            return None
        elif self.parent is None:
            return None
        else:
            # BUG FIX: propagate allowInputs -- the original recursed with the
            # default (True), so the input-assignment check was silently
            # skipped for variables found in ancestor scopes.
            return self.parent.findVar(varName, allowInputs)
    def getInputList(self):
        ''' return the subset of this scope's vars that are input parameters '''
        return {k: v for k, v in self.vars.items() if type(v) is CfgInputVariable}
    def verifyInputParms(self, inputListStr, callingNode):
        ''' check that a list of call parameter strings matches inputs for this hier and return the list of resolved inputs '''
        if type(inputListStr) is not str:
            MsgUtils.errorExit(f'misformed input list found when in call of {self.whatami()} {self.name}')
        inputList = [] if not inputListStr else inputListStr.split(',')
        inputCount = len(inputList)
        inputParms = self.getInputList()
        inputParmCount = len(inputParms)
        if inputCount != inputParmCount:
            MsgUtils.errorExit(f'incorrect number of input parameters (found {inputCount}, expected {inputParmCount}) in call of {self.whatami()} {self.name}')
        # resolve each call value against the declared type of its parameter
        resolvedInputList = []
        for inVal, inParm in zip(inputList, inputParms.values()):
            resolvedInputList.append(CfgVariable.resolveRhsExpression(inVal, inParm.vartype, True, True))
        return resolvedInputList
class CfgClassNode(HierCfgNode):
    """Config-model class scope; registers itself in a global class registry."""
    _classes = {}  # name -> CfgClassNode registry of all created classes
    _current = None  # most recently created class node
    def __init__(self, name, sourceAstNode = None, comment=''):
        HierCfgNode.__init__(self, sourceAstNode, comment)
        self.name = name
        self.methods = {}  # name -> CfgMethodNode defined in this class
        __class__._classes[self.name] = self
        __class__._current = self
        #print('creating class node, name=', self.name)
    def whatami(self):
        return 'class'
    @staticmethod
    def getCurrent():
        ''' return last created CfgClassNode '''
        return __class__._current
    @staticmethod
    def findClass(className):
        ''' return a CfgClassNode by name, or None if not registered '''
        return None if className not in __class__._classes else __class__._classes[className]
    def findMethod(self, methodName):
        ''' return a CfgMethodNode in this class by name, or None '''
        return None if methodName not in self.methods else self.methods[methodName]
    def display(self, indent = 0):
        inParms = self.getInputList()
        self.hierDisplay(indent, f"class: {self.name}, vars=({', '.join(str(e) for e in self.vars.values())}), inputs=({', '.join(str(e) for e in inParms.values())}), allowed versions='{self.allowedTags}")
    def generateOutput(self):
        ''' generate specified output for this class node '''
        # bracket the children with builder enter/exit calls
        BaseCfgNode._outBuilder.enterClass(self)
        for child in self.children:
            child.generateOutput()
        BaseCfgNode._outBuilder.exitClass(self)
class CfgMethodNode(HierCfgNode):
    """Config-model method scope, registered on its enclosing class node."""
    def __init__(self, name, sourceAstNode = None, comment=''):
        HierCfgNode.__init__(self, sourceAstNode, comment)
        self.name = name
        self.args = []
        # add method to dict in current class scope; index -2 because this
        # method node itself was just pushed onto the top of the stack
        parent = BaseCfgNode._nodeStack[-2]
        parent.methods[self.name] = self
        #print('creating method node, name=', self.name)
    def whatami(self):
        return 'method'
    def display(self, indent = 0):
        inParms = self.getInputList()
        self.hierDisplay(indent, f"method: {self.name}, vars=({', '.join(str(e) for e in self.vars.values())}), inputs=({', '.join(str(e) for e in inParms.values())})")
    def generateOutput(self):
        ''' generate specified output for this method node '''
        BaseCfgNode._outBuilder.enterMethod(self)
        for child in self.children:
            child.generateOutput()
        BaseCfgNode._outBuilder.exitMethod(self)
@unique
class ConfigAssignType(Enum):
    """Assignment operators recognized by the config parser."""
    UNSUPPORTED = 0
    EQ = 1
    def isSupported(self):
        """Return True unless this is the UNSUPPORTED sentinel.

        BUG FIX: the original compared ``type(self)`` (the enum class) with
        the UNSUPPORTED member, which is never equal, so every member --
        including UNSUPPORTED -- reported itself as supported.
        """
        return self is not ConfigAssignType.UNSUPPORTED
    @staticmethod
    def resolve(opStr):
        ''' convert a string to ConfigAssignType (members pass through) '''
        if type(opStr) is ConfigAssignType: # if type is already correct, just return input
            return opStr
        if opStr == '=':
            return ConfigAssignType.EQ
        else:
            return ConfigAssignType.UNSUPPORTED
class CfgAssign(BaseCfgNode):
    """Assignment statement node: left <op> right."""
    def __init__(self, left=None, op=ConfigAssignType.UNSUPPORTED, right=None, sourceAstNode = None):
        BaseCfgNode.__init__(self, sourceAstNode)
        self.op = ConfigAssignType.resolve(op)
        self.left = left # TODO - resolve here and remove checks from builder or allow default var create?
        self.right = right # maybe pass target type into assign? or verify type match?
    def isValid(self):
        """True when the operator is supported and both sides are present."""
        if self.op.isSupported() and (self.left is not None) and (self.right is not None):
            return True
        return False
    def isRead(self):
        ''' return True if assign involves a reg read '''
        return (type(self.right) is CfgReadNode)
    def display(self, indent = 0):
        self.hierDisplay(indent, f'assign: {self.left} {self.op.name} {self.right}')
    def resolvePaths(self):
        # only the right-hand side can contain a register path (a read node)
        if self.isRead():
            self.right.resolvePaths()
class CfgMethodCall(BaseCfgNode):
    """Call of a config method, optionally qualified by a class path."""
    def __init__(self, className, methodName, parmList, sourceAstNode = None):
        BaseCfgNode.__init__(self, sourceAstNode)
        # if className specified in call path resolve class as a variable, else use current class
        if className:
            cfgClassVar = CfgVariable.resolveRhsExpression(className, CfgClassNode, False, True) # find the class variable
            #self.cfgClass = CfgClassNode.getCurrent() # TODO add findVar option for non-none className
            self.cfgClass = CfgClassNode.findClass(cfgClassVar.val[0].name) # TODO - saved call name structure shoul be fixed
        else:
            self.cfgClass = CfgClassNode.getCurrent()
        #if not cfgClass:
        # MsgUtils.errorExit('unable to resolve cfgClass ' + str(className) + ' in call of method ' + methodName)
        self.cfgMethod = self.cfgClass.findMethod(methodName)
        if not self.cfgMethod:
            MsgUtils.errorExit(f'unable to resolve method {methodName} in cfgClass {self.cfgClass.name}')
        # validate and resolve the actual parameters against the method's inputs
        self.parmList = self.cfgMethod.verifyInputParms(parmList, self.parent)
    def display(self, indent = 0):
        self.hierDisplay(indent, f'call: cfgClass={self.cfgClass.name}, method={self.cfgMethod.name}, parms={self.parmList}')
class CfgCaseNode(HierCfgNode):
    """case statement scope; selects on a variable from an enclosing scope."""
    def __init__(self, selectVar, sourceAstNode = None):
        HierCfgNode.__init__(self, sourceAstNode)
        self.selectVar = HierCfgNode.findVar(self, selectVar)
        #print('creating case node, select var=' + str(self.selectVar))
    def display(self, indent = 0):
        self.hierDisplay(indent, f'case: select var={self.selectVar}')
class CfgCaseBlockNode(HierCfgNode):
    """One branch of a case; captures the choice labels accumulated so far."""
    _currentChoices = set() # init current choice set
    def __init__(self, sourceAstNode = None):
        HierCfgNode.__init__(self, sourceAstNode)
        self.selectVals = set(__class__._currentChoices) # copy current set of choices
        __class__._currentChoices.clear() # clear current choices
        #print('creating case block node, choices=' + str(self.selectVals))
    def display(self, indent = 0):
        self.hierDisplay(indent, f'case block: choices={self.selectVals}')
    @staticmethod
    def addChoice(choiceName):
        """Register one choice label for the next CfgCaseBlockNode created."""
        __class__._currentChoices.add(choiceName)
class CfgNumericForNode(HierCfgNode):
    """for loop over a numeric range; declares the iterator in this scope."""
    def __init__(self, name, rangeStart, rangeEnd, sourceAstNode = None):
        HierCfgNode.__init__(self, sourceAstNode)
        self.forVar = CfgVariable(name, CfgNumDataType)
        self.rangeStart = CfgVariable.resolveRhsExpression(rangeStart, CfgNumDataType)
        self.rangeEnd = CfgVariable.resolveRhsExpression(rangeEnd, CfgNumDataType)
        #print('creating numeric for loop node, iterator var=' + str(self.forVar) + ' rangeStart=' + str(self.rangeStart) + ' rangeEnd=' + str(self.rangeEnd))
    def display(self, indent = 0):
        self.hierDisplay(indent, f'for (numeric): iterator={self.forVar} rangeStart={self.rangeStart} rangeEnd={self.rangeEnd}')
class CfgPathForNode(HierCfgNode):
    """for loop over a register-model path range."""
    def __init__(self, name, path, sourceAstNode = None):
        HierCfgNode.__init__(self, sourceAstNode)
        self.forVar = CfgVariable(name, CfgPathDataType)
        self.path = CfgVariable.resolveRhsExpression(path, CfgPathDataType) # create path range
        self.forVar.val = self.path # assign path to loop var so full path prefix can be extracted recursively using var
        #print('creating path for loop node, iterator var=' + str(self.forVar) + ' path=' + str(self.path))
    def display(self, indent = 0):
        self.hierDisplay(indent, f'for (path): iterator={self.forVar}, range={self.path}')
    def resolvePaths(self):
        ''' resolve paths in this for node '''
        print(f'resolve CfgPathForNode path: {self.path}') # TODO
        if type(self.path) is CfgPathDataType:
            self.path.resolvePath(self.allowedTags) #TODO - any checks for a var?, how is version resolve handled?
        # resolve paths in child nodes
        for child in self.children:
            child.resolvePaths()
class CfgPrintNode(BaseCfgNode):
    """print/display statement with a format and optional variables."""
    def __init__(self, form, form_vars, sourceAstNode = None):
        BaseCfgNode.__init__(self, sourceAstNode)
        self.form = form # form can also be a list of comma separated args
        self.form_vars = form_vars
        #print('creating display node, form=', self.form, 'form_vars=', self.form_vars)
    def display(self, indent = 0):
        self.hierDisplay(indent, 'print: ' + str(self.form) + ', vars=' + str(self.form_vars))
class CfgWaitNode(BaseCfgNode):
    """wait statement."""
    def __init__(self, time, sourceAstNode = None):
        BaseCfgNode.__init__(self, sourceAstNode)
        self.time = time # time in ms
        #print('creating wait node, time=', self.time)
    def display(self, indent = 0):
        self.hierDisplay(indent, 'wait: ' + str(self.time))
class CfgWriteNode(BaseCfgNode):
    """Register/field write (optionally a read-modify-write)."""
    def __init__(self, path, value, wtype, isRmw = False, sourceAstNode = None):
        BaseCfgNode.__init__(self, sourceAstNode)
        self.path = CfgVariable.resolveRhsExpression(path, CfgPathDataType)
        self.wtype = CfgPathHierType.resolve(wtype)
        self.value = CfgVariable.resolveRhsExpression(value, CfgNumDataType)
        self.isRmw = isRmw
        #print('creating write node, path=', str(self.path), 'value=', str(self.value))
    def display(self, indent = 0):
        self.hierDisplay(indent, 'write: ' + str(self.path) + ', wtype=' + str(self.wtype) + ', value=' + str(self.value) + ', rmw=' + str(self.isRmw))
        pass
    def resolvePaths(self):
        ''' resolve paths in this write node '''
        print(f'resolve CfgWriteNode path: {self.path}, wtype: {self.wtype}, rwm: {self.isRmw} --- self.path type={type(self.path)}') # TODO
        if type(self.path) is CfgPathDataType:
            self.path.resolvePath(self.allowedTags, self.wtype) #TODO - any checks for a var?, how is version resolve handled?
    def generateOutput(self):
        ''' generate specified output for this write node '''
        # dispatch on whether the target is a whole register or a field
        if self.wtype.isReg():
            BaseCfgNode._outBuilder.doRegWrite(self)
        else:
            BaseCfgNode._outBuilder.doFieldWrite(self)
class CfgWhileNode(HierCfgNode):
    """while loop; when the compare reads a register this acts as a poll."""
    def __init__(self, compare, delay = 1, timeout = None, sourceAstNode = None):
        HierCfgNode.__init__(self, sourceAstNode)
        self.compare = compare
        self.delay = delay
        self.timeout = timeout
        #print('creating poll node, compare=', self.compare, 'delay=', self.delay)
        # the wait node constructor auto-attaches it as a child of this node
        CfgWaitNode(self.delay)
    def display(self, indent = 0):
        prefix = 'poll ' if self.compare.isPoll() else ''
        self.hierDisplay(indent, prefix + 'while: ' + str(self.compare) + ' timeout=' + str(self.timeout))
    def isPoll(self):
        ''' return True if compare involves a reg read '''
        return self.compare.isPoll()
    def resolvePaths(self):
        ''' resolve paths in this while node '''
        if self.isPoll():
            self.compare.resolvePaths()
        for child in self.children:
            child.resolvePaths()
# ------- config model support classes (not BaseCfgNode children)
@unique
class CfgPathHierType(Enum):
    """Hierarchy level that a register-model path refers to."""
    UNKNOWN = 0
    REGSET = 1
    REG = 2
    FIELDSET = 3
    FIELD = 4
    @staticmethod
    def resolve(hierStr):
        ''' convert a string to CfgPathHierType (members pass through) '''
        if type(hierStr) is CfgPathHierType: # if type is already correct, just return input
            return hierStr
        # order matters: 'RegSet'/'FieldSet' contain 'Reg'/'Field' as substrings
        for token, hier_type in (('RegSet', CfgPathHierType.REGSET),
                                 ('FieldSet', CfgPathHierType.FIELDSET),
                                 ('Reg', CfgPathHierType.REG),
                                 ('Field', CfgPathHierType.FIELD)):
            if token in hierStr:
                return hier_type
        return CfgPathHierType.UNKNOWN
    def isReg(self):
        ''' True when this path type is a single register '''
        return self is CfgPathHierType.REG
    def matchesRegModelType(self, regModType):
        ''' True when this type matches a reg-model node type; UNKNOWN matches any '''
        return self is CfgPathHierType.UNKNOWN or self.name == regModType.name
class CfgReadNode():
    """Register/field read; not a BaseCfgNode child, so it is not auto-added
    to the model tree."""
    def __init__(self, path, rtype = CfgPathHierType.UNKNOWN, sourceAstNode = None):
        self.path = CfgVariable.resolveRhsExpression(path, CfgPathDataType)
        self.rtype = CfgPathHierType.resolve(rtype)
        self.sourceAstNode = sourceAstNode # TODO - change to srcInfo
        #print('creating read node, path=', self.path)
    def __str__(self):
        return f'read {self.path}, rtype={self.rtype}'
    def resolvePaths(self):
        ''' resolve paths in this read '''
        print(f'resolve CfgReadNode path: {self.path}, rtype={self.rtype}') # TODO
        if type(self.path) is CfgPathDataType:
            self.path.resolvePath(set(), self.rtype) # read node has no allowed tag override, TODO - any checks for a var?, how is version resolve handled?
# ------- config model data classes
class CfgDataType():
    """Base class for config-model data types."""
    def __init__(self):
        pass
    def isValid(self):
        """A value is valid once a non-None 'val' attribute has been set."""
        return hasattr(self, 'val') and (self.val is not None)
class CfgBoolDataType(CfgDataType):
    """Boolean config value (placeholder)."""
    def __init__(self):
        pass
class CfgNumDataType(CfgDataType):
    """Numeric config value parsed from a string literal."""
    def __init__(self, s):
        self.size = None
        self.hasSize = False
        parsed = __class__.strToInt(s)
        if parsed is not None:
            self.val = parsed
    @staticmethod
    def strToInt(s):
        ''' convert str to int (base auto-detected via int(s, 0)) if possible, else return None '''
        try:
            return int(s, 0)
        except ValueError:
            return None
    def __str__(self):
        if not self.isValid():
            return 'invalid num'
        size_part = ('(size=' + str(self.size) + ')') if self.size else ''
        return str(self.val) + size_part
    def needsSize(self):
        """True when a size is required but has not been set yet."""
        return self.hasSize and self.size is None
class CfgEnumDataType(CfgNumDataType): # FIXME use separate type
    """Enum config value (currently piggybacks on the numeric type)."""
    def __init__(self):
        pass
class CfgPathDataElement():
    """One dot-separated element of a register-model path.

    Parses forms like ``name``, ``name[3]``, ``name[2:5]``, ``name[*]`` and
    ``name[*:4]``.  ``name`` stays None when parsing fails (element invalid).
    """
    def __init__(self, pelemstr):
        self.name = None # invalid if name is None
        self.start = None
        self.end = None
        self.isIndexed = False
        self.hasRange = False # is element indexed with start not equal to end
        self.annotations = {}
        if '[' in pelemstr: # detect an array
            self.isIndexed = True
            pat = re.compile('(\\w+)\\s*\\[(.*)\\]')
            mat = pat.match(pelemstr)
            if mat:
                self.name = mat.group(1)
                arraystr = mat.group(2)
                if ':' in arraystr:
                    self.hasRange = True
                    pat = re.compile('(\\w+|\\*)\\s*:\\s*(\\w+|\\*)')
                    mat = pat.match(arraystr)
                    if mat:
                        leftstr = mat.group(1)
                        rightstr = mat.group(2)
                        # NOTE(review): a '*' on either side just leaves
                        # start/end as None; hasRange is already True here
                        if leftstr == '*':
                            self.hasRange = True
                        else:
                            self.start = leftstr
                        if rightstr == '*':
                            self.hasRange = True
                        else:
                            self.end = rightstr
                    #else:
                    # print('CfgPathDataElement array match failed for s=' + arraystr)
                elif '*' in arraystr: # detect full range wildcard
                    self.hasRange = True
                else:
                    self.start = arraystr # single index case
                    self.end = arraystr
        else:
            self.name = pelemstr # scalar, so just save the name
    def isVar(self):
        ''' return true if this path element is a path variable (baseVar is
        attached externally -- presumably by the path resolver; confirm) '''
        return hasattr(self, 'baseVar')
    def isRootVar(self):
        ''' return true if this path element is a path variable representing root of the reg model '''
        return self.isVar() and (self.name == 'root')
    def needsResolution(self):
        """True when indexed but one or both bounds are still unresolved."""
        return self.isIndexed and ((self.start is None) or (self.end is None))
    def getElementString(self, unrollBase, leftIdx, rightIdx=None):
        """Render this element; optionally expand a path-variable base."""
        if unrollBase and self.isVar() and not self.isRootVar():
            return self.baseVar.val.genFullPathStr()
        if not self.isIndexed:
            return self.name
        if not rightIdx or (rightIdx == leftIdx):
            return f'{self.name}[{leftIdx}]'
        return f'{self.name}[{leftIdx}:{rightIdx}]'
    def getFullElementString(self): # TODO
        ''' return full element string (variable bases expanded) '''
        startStr = str(self.start) if self.start else '*'
        endStr = str(self.end) if self.end else '*'
        return self.getElementString(True, startStr, endStr)
    def getRawElementString(self):
        ''' return raw element string (variable bases left as-is) '''
        startStr = str(self.start) if self.start else '*'
        endStr = str(self.end) if self.end else '*'
        return self.getElementString(False, startStr, endStr)
    def getSampleElementString(self):
        ''' return sample element string for model lookup with indices set to 0 '''
        return self.getElementString(True, 0)
    def __str__(self):
        return self.getRawElementString()
class CfgPathDataType(CfgDataType):
    """A register-model path: a dotted list of CfgPathDataElement.

    The first element must name an existing path variable (or the model
    root); a trailing call suffix is split off into ``self.call``.
    Resolution against the register model fills in the hierarchy type and
    per-tag annotations.
    """
    def __init__(self, pathstr):
        self.htype = CfgPathHierType.UNKNOWN # resolved path type is unknown by default
        self.call = None # default to no call
        basepathstr = ''
        if '(' in pathstr: # detect a call and remove from path
            pat = re.compile('(.*)\\.(\\w+)')
            mat = pat.match(pathstr)
            if mat:
                basepathstr = mat.group(1)
                self.call = mat.group(2)
                #print(f'found call match path={self.val}, call={self.call}')
        else:
            basepathstr = pathstr # TODO - store as path elem tuples? also TODO allow range wildcards
        # create a list of path elements
        self.val = []
        newlist = basepathstr.split('.')
        for elemstr in newlist:
            elem = CfgPathDataElement(elemstr)
            self.val.append(elem)
        # check for valid path var extract
        if not self.val:
            # fix: corrected message wording ('unable create' -> 'unable to create')
            MsgUtils.errorExit(f'unable to create path from string={pathstr}')
        firstPathElement = self.getBasePathElem()
        # check for valid path base variable
        baseVar = CfgVariable.resolveLhsExpression(firstPathElement.name, CfgPathDataType, False, False) # check for existing base path variable
        if not baseVar:
            MsgUtils.errorExit(f'unable to resolve root of path {pathstr}')
        firstPathElement.baseVar = baseVar # save the referenced path variable in first element
    def genFullPathStr(self):
        ''' return path with base var unrolled '''
        return '.'.join([ elem.getFullElementString() for elem in self.getPathList() ])
    def genRawPathStr(self):
        ''' return raw path (no base var unroll) '''
        return '.'.join([ elem.getRawElementString() for elem in self.getPathList() ])
    def genSamplePathStr(self):
        ''' return sample path for model lookup with all indices set to 0 '''
        return '.'.join([ elem.getSampleElementString() for elem in self.getPathList() ])
    def hasCall(self):
        ''' True when the path carried a trailing call suffix '''
        return self.call is not None
    def setRegset(self):
        self.htype = CfgPathHierType.REGSET
    def setReg(self):
        self.htype = CfgPathHierType.REG
    def setFieldset(self):
        self.htype = CfgPathHierType.FIELDSET
    def setField(self):
        self.htype = CfgPathHierType.FIELD
    def getBasePathElem(self):
        ''' return the base path element '''
        return self.getPathList()[0]
    def getBasePathVar(self):
        ''' return the base path variable '''
        return self.getBasePathElem().baseVar
    def needsResolution(self):
        ''' True if the base variable is missing or any element has an unresolved index '''
        if not self.getBasePathVar(): # or self.getBasePath().needsResolution(): # TODO - need variable needsResolution method?
            return True
        for elem in self.getPathList(): # check to see if any path elems are unresolved
            if elem.needsResolution():
                return True
        return False
    def isMultiPath(self):
        ''' True if any element spans more than a single index '''
        for elem in self.getPathList(): # check to see if any path elems have more than single element range
            if elem.hasRange:
                return True
        return False
    def resolvePath(self, allowedTags, targetType=CfgPathHierType.UNKNOWN): # TODO also pass in allowedTags
        ''' resolve path type and any path index wildcards by referencing the regmodel '''
        print(f' -> resolvePath CfgPathDataType raw path: {self} full path: {self.genFullPathStr()} sample path: {self.genSamplePathStr()}') # TODO
        regModel = RegModelWrapper.getRegModelRoot()
        if not regModel:
            if self.needsResolution():
                MsgUtils.errorExit(f'Path {self} has unresolved info, but no register model is defined.')
            return # if no model and resolved we're done
        # extract valid version tags and annotate path elements for each
        validTags = RegModelWrapper.getValidTags(allowedTags)
        print(f' -> resolvePath CfgPathDataType: allowedTags={allowedTags}, regmod tags: {RegModelWrapper.getRegModelTags()} valid tags: {validTags}') # TODO
        for tag in validTags:
            plist = regModel.get_path_instance_list(tag, self.genSamplePathStr())
            if 'error' in plist:
                MsgUtils.errorExit(f'Path {self.genRawPathStr()} was not found in register model using tag="{tag}".')
            if not targetType.matchesRegModelType(plist['type']): # check that path type returned from model matches target
                MsgUtils.errorExit(f'Expected type of path {self.genRawPathStr()} ({targetType}) does not match returned register model type ({plist["type"]}).')
            # TODO - check that MultPath elems are allowed
            self.annotatePath(tag, plist['instances'])
            #print(f' -> resolvePath CfgPathDataType model returns: {plist}')
    def annotatePath(self, tag, regModelPath):
        ''' attach per-tag register-model info to each local path element '''
        # extract the full path by expanding lead path vars
        expandedPath = self.getExpandedPathList()
        #print(f' -> CfgPathDataType annotatePath: this path len={len(self.getPathList())}, expanded path len={len(expandedPath)}, regmod path len={len(regModelPath)}, path={regModelPath}')
        if len(expandedPath) != len(regModelPath):
            MsgUtils.errorExit(f'Path {self.genRawPathStr()} does not match form of returned register model path.')
        # now loop and append regmodel info to local (non expanded) path elements
        localIndex = len(expandedPath) - len(self.getPathList())
        for pathElem, regModElem in zip(self.getPathList(), regModelPath[localIndex:]): # only annotate local path elements
            print(f' -> CfgPathDataType annotatePath: element annotation, tag={tag}, elem={pathElem.name}, mod elem type={type(regModElem)}')
            annotation = RegModelWrapper.createAnnotation(regModElem)
            pathElem.annotations[tag] = annotation # annotate pathElem by tag
    def getPathList(self):
        ''' return non-expanded path list '''
        return self.val
    def getExpandedPathList(self):
        ''' generate full path list by unrolling base path variable '''
        if self.getBasePathElem().isRootVar():
            return self.getPathList()
        else:
            if len(self.getPathList()) > 1:
                return self.getBasePathElem().baseVar.val.getExpandedPathList() + self.getPathList()[1:] # remove lead element and append remainder
            else:
                # fix: expand through the element's base variable value; the
                # element itself has no 'val' attribute, so the old code
                # raised AttributeError on single-element variable paths
                return self.getBasePathElem().baseVar.val.getExpandedPathList()
    def __str__(self):
        return f'ptype={self.htype.name}, path={self.genRawPathStr()}, needsResolution={self.needsResolution()}'
# ------- variable classes
class CfgVariable:
    """A named, typed variable registered in the current config scope.

    Registration happens at construction: the variable is added to the
    vars dict of the node currently on top of BaseCfgNode._nodeStack.
    """
    def __init__(self, name, vartype = CfgNumDataType):
        self.name = name
        self.vartype = vartype
        self.val = None
        # register in the innermost open scope
        scope = BaseCfgNode._nodeStack[-1]
        if scope.findVar(self.name):
            MsgUtils.errorExit('variable ' + self.name + ' is already defined.')
        if not name.isalnum():
            # NOTE(review): isalnum() also rejects underscores — confirm that
            # is intended, since path elements elsewhere allow \w+ names
            MsgUtils.errorExit('variable name ' + self.name + ' is not valid.')
        scope.vars[self.name] = self
        #print (f'--- cfg_model CfgVariable: adding var {self.name}, parent type is {type(scope)}')
    def __str__(self):
        return self.vartype.__name__ + ' ' + self.name
    @staticmethod
    def resolveRhsExpression(inVal, targetVarType, allowInstCreate = True, exitOnFail = True): # targetVarType is valid CfgDataType
        ''' given an unknown rhs expression, return an existing variable or instance (new from str or existing) of specified target data type '''
        # already the target data type, or a variable of it — nothing to do
        if type(inVal) is targetVarType:
            return inVal
        if (type(inVal) is CfgVariable) and (inVal.vartype is targetVarType):
            return inVal
        if type(inVal) is str:
            # prefer an existing variable of the right type
            candidate = HierCfgNode.peekNode().findVar(inVal)
            if (candidate is not None) and (candidate.vartype is targetVarType):
                return candidate
            # otherwise optionally build a fresh instance from the string
            if allowInstCreate:
                created = targetVarType(inVal)
                if created.isValid():
                    return created
        if exitOnFail:
            MsgUtils.errorExit('unable to resolve rhs expression ' + str(inVal) + ' to a value or variable.')
    @staticmethod
    def resolveLhsExpression(inVar, targetVarType, allowVarCreate = True, exitOnFail = True): # targetVarType is valid CfgDataType
        ''' given an unknown lhs expression, return an existing variable or create a new variable of specified target data type from str '''
        # already a variable of the target type — nothing to do
        if (type(inVar) is CfgVariable) and (inVar.vartype is targetVarType):
            return inVar
        if type(inVar) is str:
            # input variables are not allowed on the lhs, hence findVar(..., False)
            candidate = HierCfgNode.peekNode().findVar(inVar, False)
            if (candidate is not None) and (candidate.vartype is targetVarType):
                return candidate
            # otherwise optionally create a new variable of the target type
            if allowVarCreate:
                return CfgVariable(inVar, targetVarType)
        if exitOnFail:
            MsgUtils.errorExit('unable to resolve lhs expression ' + str(inVar) + ' to a variable.')
class CfgInputVariable(CfgVariable):
    """A CfgVariable marked as an input; only the display string differs."""
    def __str__(self):
        return 'input ' + self.vartype.__name__ + ' ' + self.name
# ------- config model compare class
@unique
class ConfigCompareType(Enum):
    """Comparison operators supported in config compare expressions."""
    UNSUPPORTED = 0
    EQ = 1
    NE = 2
    GT = 3
    LT = 4
    GE = 5
    LE = 6
    def isSupported(self):
        ''' return True unless this is the UNSUPPORTED sentinel '''
        # fix: compare the member itself, not its type — the old check
        # (type(self) is not ConfigCompareType.UNSUPPORTED) was always True
        # because type(self) is the enum class
        return self is not ConfigCompareType.UNSUPPORTED
    @staticmethod
    def resolve(opStr):
        ''' convert a string (or existing member) to ConfigCompareType '''
        if type(opStr) is ConfigCompareType: # if type is already correct, just return input
            return opStr
        # table lookup replaces the if/elif chain; unknown strings map to
        # the UNSUPPORTED sentinel
        return {
            '==': ConfigCompareType.EQ,
            '!=': ConfigCompareType.NE,
            '>': ConfigCompareType.GT,
            '<': ConfigCompareType.LT,
            '>=': ConfigCompareType.GE,
            '<=': ConfigCompareType.LE,
        }.get(opStr, ConfigCompareType.UNSUPPORTED)
class CfgCompare():
    """A binary comparison between two operands, either of which may be a
    register read (CfgReadNode) that must be polled at run time."""
    def __init__(self, left=None, op=ConfigCompareType.UNSUPPORTED, right=None):
        # normalize the operator: accept either a member or its string form
        self.op = op if type(op) is ConfigCompareType else ConfigCompareType.resolve(op)
        # fix: the original conditionals assigned left/right on both branches
        # (a no-op ternary); store the operands directly
        self.left = left   # TODO - extract into val or variable
        self.right = right # TODO - extract into val or variable
    def isValid(self):
        ''' True when the operator is supported and both operands are present '''
        return self.op.isSupported() and (self.left is not None) and (self.right is not None)
    def leftIsPoll(self):
        ''' True when the left operand is a register read '''
        return type(self.left) is CfgReadNode
    def rightIsPoll(self):
        ''' True when the right operand is a register read '''
        return type(self.right) is CfgReadNode
    def isPoll(self):
        ''' return True if compare involves a reg read '''
        return self.leftIsPoll() or self.rightIsPoll()
    def __str__(self):
        return f'l=({self.left}) op={self.op.name} r=({self.right})'
    def resolvePaths(self):
        ''' resolve paths in this compare node '''
        if self.leftIsPoll():
            self.left.resolvePaths()
        if self.rightIsPoll():
            self.right.resolvePaths()
# ------ config model visitor TODO
| 34,279 | 9,863 |
# Python Exercise 097: Write a program with a function called escreva()
# ("write") that receives any text as a parameter and shows the message
# inside a frame whose width adapts to the text.
# Ex:
# escreva('Olá, Mundo!')   Output:
# ~~~~~~~~~~~
# Olá, Mundo!
# ~~~~~~~~~~~
def write(text):
    """Print *text* framed above and below by '=' bars sized to the message."""
    bar = "=" * (len(text) + 4)
    print(bar)
    print(f"  {text}")
    print(bar)


write("HELLO WORLD")
write("I AM PROGRAMMER")
#!/usr/bin/env python
# vim: ai:sw=4:ts=4:sta:et:fo=croql
# coding=utf-8
import pytest
# Uncomment to run test in debug mode
# import pudb; pudb.set_trace()
from reinforcement_learning.dict_qtable import DictQTable
from test_qaction import QActionTest
from test_qstate import QStateTest
"""
DictQTable
"""
@pytest.mark.incremental
class TestDictQTable(object):
    """Unit tests for DictQTable: value storage and per-state lookup."""
    action_a = QActionTest(3)
    action_b = QActionTest(4)
    action_c = QActionTest(5)
    state_a = QStateTest([action_a, action_b])
    state_b = QStateTest([action_c])
    value_a = 123.1
    value_b = 234.5
    def test_set_value(self):
        """A stored (state, action, value) triple can be read back."""
        # given
        obj = DictQTable()
        obj.set_value(self.state_a, self.action_a, self.value_a)
        # when
        stored_states = obj.get_all_stored_states()
        # then
        assert stored_states is not None, 'Table: {}'.format(obj)
        # fix: compare numbers with '==' instead of 'is' — identity of ints
        # and floats is an implementation detail (SyntaxWarning on 3.8+)
        assert len(stored_states) == 1, 'Table: {}'.format(obj)
        # identity check is intentional here: the table must hold the object
        assert stored_states[0] is self.state_a, 'Table: {}'.format(obj)
        value = obj.get_value(self.state_a, self.action_a)
        assert value is not None, 'Table: {}'.format(obj)
        assert value == self.value_a, 'Table: {}'.format(obj)
    def test_get_stored_action_values(self):
        """All action values stored for a state come back keyed by action."""
        # given
        obj = DictQTable()
        obj.set_value(self.state_a, self.action_a, self.value_a)
        obj.set_value(self.state_a, self.action_b, self.value_b)
        # when
        stored_action_values = obj.get_stored_action_values(self.state_a)
        # then
        assert stored_action_values is not None, 'Table: {}'.format(obj)
        assert len(stored_action_values) == 2, 'Table: {}'.format(obj)
        assert self.action_a in stored_action_values.keys(), \
            'Table: {}'.format(obj)
        assert stored_action_values[self.action_a] == self.value_a, \
            'Table: {}'.format(obj)
        assert self.action_b in stored_action_values.keys(), \
            'Table: {}'.format(obj)
        assert stored_action_values[self.action_b] == self.value_b, \
            'Table: {}'.format(obj)
| 2,061 | 720 |
import tornado.web
import tornado.gen
import json
import io
import logging
import motor
from bson.objectid import ObjectId
import mickey.userfetcher
from mickey.basehandler import BaseHandler
class AcceptMemberHandler(BaseHandler):
    """Group-owner endpoint that queues new members for a group.

    POST body (JSON): ``groupid``, optional ``invite_id`` (defaults to the
    authenticated user) and a ``members`` list of dicts with at least an
    ``id`` key.  Members not already in the group are added to the group's
    'appendings' set and each one is sent an invite notification.
    Status codes: 403 bad request / not owner, 404 unknown group,
    500 database update failed, 200 success.
    """
    @tornado.web.asynchronous
    @tornado.gen.coroutine
    def post(self):
        coll = self.application.db.groups
        publish = self.application.publish
        token = self.request.headers.get("Authorization", "")
        data = json.loads(self.request.body.decode("utf-8"))
        groupid = data.get("groupid", "")
        # inviter defaults to the caller (p_userid presumably set by
        # BaseHandler during auth — confirm)
        inviteid = data.get("invite_id", self.p_userid)
        members = data.get("members", [])
        logging.info("begin to add members to group %s" % groupid)
        if not groupid or not members:
            logging.error("invalid request")
            self.set_status(403)
            self.finish()
            return
        result = yield coll.find_one({"_id":ObjectId(groupid)})
        if not result:
            logging.error("group %s does not exist" % groupid)
            self.set_status(404)
            self.finish()
            return
        # only the group owner may accept members
        if result.get("owner", "") != self.p_userid:
            logging.error("%s are not the owner" % self.p_userid)
            self.set_status(403)
            self.finish()
            return;
        #get exist members
        exist_ids = [x.get("id", "") for x in result.get("members", [])]
        # get members and the receivers
        add_members = list(filter(lambda x: x not in exist_ids, [x.get("id", "") for x in members]))
        # notification payload delivered to each newly-added member
        notify = {}
        notify["name"] = "mx.group.authgroup_invited"
        notify["pub_type"] = "any"
        notify["nty_type"] = "device"
        notify["msg_type"] = "other"
        notify["groupid"] = groupid
        notify["groupname"] = result.get("name", "")
        notify["userid"] = inviteid
        opter_info = yield mickey.userfetcher.getcontact(inviteid, token)
        if opter_info:
            notify["username"] = opter_info.get("name", "")
        else:
            # best effort: the notification still goes out without a username
            logging.error("get user info failed %s" % inviteid)
        # keep only the member dicts whose ids were not already in the group
        adddb_members = list(filter(lambda x: x.get("id", "") in add_members, members))
        append_result = yield coll.find_and_modify({"_id":ObjectId(groupid)},
            {
                "$addToSet":{"appendings":{"$each": adddb_members}},
                "$unset": {"garbage": 1}
            })
        if append_result:
            self.set_status(200)
            publish.publish_multi(add_members, notify)
        else:
            self.set_status(500)
            logging.error("add user failed %s" % groupid)
            return
        self.finish()
| 2,851 | 819 |
from typing import Optional
from dataclasses import dataclass
from .InvoiceAppearance import InvoiceAppearance
from .InvoiceCategory import InvoiceCategory
from .PaymentMethod import PaymentMethod
from .Source import Source
@dataclass
class AdditionalQueryParams:
    """Additional params of the invoice query

    All criteria are optional; fields left at their default of None are
    simply not applied, so callers only need to supply the filters they use
    (previously every field had to be passed explicitly).

    :param tax_number: Tax number of the supplier or the customer of the invoice (the search criteria depends on the value of the invoiceDirection tag)
    :param group_member_tax_number: Tax number of group member of the supplier or the customer of the invoice (the search criteria depends on the value of the invoiceDirection tag)
    :param name: Query param of the supplier or the customer of the invoice for leading match pattern (the search criteria depends on the value of the invoiceDirection tag)
    :param invoice_category: Type of invoice
    :param payment_method: Method of payment
    :param invoice_appearance: Form of appearance of the invoice
    :param source: Data exchange source
    :param currency: Currency of the invoice
    """
    tax_number: Optional[str] = None
    group_member_tax_number: Optional[str] = None
    name: Optional[str] = None
    invoice_category: Optional[InvoiceCategory] = None
    payment_method: Optional[PaymentMethod] = None
    invoice_appearance: Optional[InvoiceAppearance] = None
    source: Optional[Source] = None
    currency: Optional[str] = None
| 1,366 | 347 |
#
# GDELT data will be published to Kafka
# Topics will be created - one topic per country using the Alpha-2 country codes
#
from kafka import KafkaProducer
from datetime import datetime
import pickle
#
# Initialize the producer
#
# Kafka cluster endpoints (comma-separated host:port list).
brokerlist='ec2-54-186-208-110.us-west-2.compute.amazonaws.com:9092,ec2-52-11-172-126.us-west-2.compute.amazonaws.com:9092,ec2-52-88-204-111.us-west-2.compute.amazonaws.com:9092,ec2-52-35-101-204.us-west-2.compute.amazonaws.com:9092'
producer = KafkaProducer(bootstrap_servers=brokerlist)
cnt = 0  # NOTE(review): unused in the rest of the script — candidate for removal
# Test payloads of 10, 50 and 100 characters.
payload = [ '0123456789', '01234567890123456789012345678901234567890123456789', '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789' ]
# Message counts to send for each payload size.
msgcnt = [ 1000, 10000, 1000000 ]
def testit(pload, n):
    """Pickle and enqueue *pload* to the 'foobar' topic *n* times.

    Returns the elapsed wall-clock time as a timedelta.  Note: measures
    serialization + enqueue only; delivery is flushed later by the caller.
    """
    started = datetime.now()
    for _ in range(n):
        producer.send('foobar', pickle.dumps(pload))
    return datetime.now() - started
# Run every payload size against every message count and report timings.
for testload in payload:
    for n in msgcnt:
        timetaken = testit(testload,n)
        lenpayload = len(testload)
        # NOTE(review): per-run time covers enqueue only; flush happens once
        # at the end, so broker delivery time is not included in these rows
        print('Payload:\t%d\tMsgCnt:\t%d\tTime:\t%d secs %d microsecs' % (lenpayload, n, timetaken.seconds, timetaken.microseconds))
# Block until all buffered messages are actually delivered.
producer.flush()
| 1,213 | 604 |
from src.infra.config import *
from src.infra.db_entities import *
from src.main.utils import hash_password
# Create the schema, then seed a default admin account.
db_conn = DBConnectionHandler()
engine = db_conn.get_engine()
# Create all tables declared on the shared declarative Base.
Base.metadata.create_all(engine)
with DBConnectionHandler() as db:
    try:
        new_user = Users(name='admin', role=1, email='admin@suzy.com', password=hash_password('admin'))
        db.session.add(new_user)
        db.session.commit()
    except Exception as ex:
        # best effort: e.g. the admin row may already exist — log and undo
        print(ex)
        db.session.rollback()
    finally:
        # NOTE(review): the context manager may also close the session on
        # exit — confirm this explicit close is not redundant
        db.session.close()
| 529 | 168 |
import logging.config
import random
import re
from collections import defaultdict
from datetime import datetime
from enum import Enum
import requests
from example.util import log_call
from icq.bot import ICQBot, FileNotFoundException
from icq.constant import TypingStatus
from icq.filter import MessageFilter
from icq.handler import MessageHandler
try:
from urllib import parse
except ImportError:
import urlparse as parse
# Logging is configured from the adjacent logging.ini file.
logging.config.fileConfig("logging.ini")
log = logging.getLogger(__name__)

# Bot identity reported to the ICQ API.
NAME = "Hermes Bot"
VERSION = "0.0.2"
# NOTE(review): placeholder credentials — replace with a real bot token.
TOKEN = "000.0000000000.0000000000:000000000"
# Exclamation phrases used to flavor the bot's replies.
PHRASES = (
    "Sweet lion of Zion!", "Sweet manatee of Galilee!", "Sweet llamas of the Bahamas!",
    "Sweet something... of... someplace...", "Great cow of Moscow!", "Sweet giant anteater of Santa Anita!",
    "Sweet ghost of Babylon!", "Sacred boa of West and Eastern Samoa!", "Sacred hog of Prague!",
    "Cursed bacteria of Liberia!", "Sweet guinea pig of Winnipeg!", "Great bonda of Uganda!",
    "Sweet three-toed sloth of the ice planet Hoth!", "Sweet honey bee of infinity!",
    "Sweet yeti of the Serengeti!", "Sweet bongo of the Congo!", "Sweet squid of Madrid!",
    "Sweet kookaburra of Edinburgh!", "Sweet topology of cosmology!", "Sweet coincidence of Port-au-Prince!",
    "Sweet orca of Mallorca!", "Sweet candelabra of Le Havre, LaBarbara!"
)
def logging_iterator(name, iterable):
    """Pass items through unchanged, debug-logging each one tagged with *name*."""
    for element in iterable:
        log.debug("Processing line ({name}): '{item}'.".format(name=name, item=element))
        yield element
class HTTPMethod(Enum):
    """HTTP request methods recognized in the client log."""
    GET = "GET"
    POST = "POST"
    HEAD = "HEAD"
    OPTIONS = "OPTIONS"
    PUT = "PUT"
    DELETE = "DELETE"
    TRACE = "TRACE"
    CONNECT = "CONNECT"
    PATCH = "PATCH"
class HTTPRequest(object):
    """One HTTP request extracted from a curl-style client log."""
    # matches curl's "Connected to <host> (<ip>) port <n> (#k)" line; a match
    # marks the start of a request in the log
    pattern = re.compile(r"^Connected to (?P<host>\S+) \((?P<ip>[^)]+)\) port (?P<port>\d+) \(#\d+\)$", re.IGNORECASE)
    # request line: METHOD /uri HTTP/x.y (method alternatives built from HTTPMethod)
    _pattern_request_line = re.compile(
        r"^(?P<method>" + "|".join(m.value for m in HTTPMethod) + r")\s(?P<uri>/\S*)\sHTTP/(?P<version>\d\.\d)$",
        flags=re.IGNORECASE
    )
    # whitelist of request headers worth collecting
    _pattern_http_header = re.compile(
        r"^\s*(?P<name>X-[^:]*?|Host|User-Agent|Accept|Accept-Encoding|Connection|Content-Length|Content-Type|Expect|If"
        r"-None-Match)\s*:\s*(?P<value>.*?)\s*$", flags=re.IGNORECASE
    )
    @log_call
    def __init__(self, ip, method, url, version, headers, data):
        """Store already-parsed request parts.

        :param ip: server IP from the 'Connected to' line
        :param method: HTTPMethod member
        :param url: parse.urlparse result for scheme://host/uri
        :param version: HTTP version string, e.g. '1.1'
        :param headers: mapping of header name -> list of values
        :param data: request body line, or None
        """
        super(HTTPRequest, self).__init__()
        self.ip = ip
        self.method = method
        self.url = url
        self.version = version
        self.headers = headers
        self.data = data
    @staticmethod
    @log_call
    def parse(match, lines):
        """Build an HTTPRequest from a 'Connected to' match plus following lines.

        :param match: a match of HTTPRequest.pattern
        :param lines: iterator over the remaining log lines (shared; consumed)
        :raises ParseException: no request line, unexpected Expect handling,
            or an unsupported HTTP method
        """
        # skip ahead to the request line
        for line in lines:
            request_line_match = HTTPRequest._pattern_request_line.search(line)
            if request_line_match:
                log.debug("Line matched with 'HTTPRequest._pattern_request_line' pattern.")
                break
        else:
            raise ParseException("Can't find request line!")
        # collect consecutive recognized headers; stop at the first non-header
        headers = defaultdict(list)
        for line in lines:
            header_match = re.search(HTTPRequest._pattern_http_header, line)
            if header_match:
                headers[header_match.group("name")].append(header_match.group("value"))
            else:
                break
        method = HTTPMethod(request_line_match.group("method"))
        # Crutch for handling "Expect" request.
        if "Expect" in headers:
            # NOTE(review): this uses 'and', so it only rejects when the header
            # is BOTH multi-valued and not 100-continue — confirm 'or' was not
            # intended (a single bogus Expect value currently passes)
            if len(headers["Expect"]) != 1 and headers["Expect"][0] != "100-continue":
                raise ParseException("Unknown 'Expect' request header value ('{}')!".format(headers["Expect"]))
            line = next(lines)
            if line != "HTTP/1.1 100 Continue":
                raise ParseException("Unknown status line ('{}') for 'Expect' response!".format(line))
            line = next(lines)
            if line == "We are completely uploaded and fine":
                # No data, seems like client logging bug.
                data = None
            else:
                data = line
        else:
            if method is HTTPMethod.GET:
                data = None
            elif method is HTTPMethod.POST:
                data = next(lines)
            else:
                raise ParseException("Unsupported HTTP method ('{}')!".format(method))
        return HTTPRequest(
            ip=match.group("ip"),
            method=method,
            url=parse.urlparse("{scheme}://{host}{uri}".format(
                # scheme inferred from the well-known port; other ports raise KeyError
                scheme={80: "HTTP", 443: "HTTPS"}[int(match.group("port"))],
                host=match.group("host"),
                uri=request_line_match.group("uri")
            )),
            version=request_line_match.group("version"),
            headers=headers,
            data=data
        )
    def __repr__(self):
        return (
            "HTTPRequest(method='{self.method}', url='{self.url}', version='{self.version}', headers='{self.headers}', "
            "data='{self.data}')".format(self=self)
        )
class HTTPResponse(object):
    """One HTTP response (status line, headers, body, elapsed time) from the log."""
    # status line: HTTP/x.y NNN reason
    pattern = re.compile(r"^HTTP/(?P<version>\d\.\d)\s(?P<status_code>\d{3})\s(?P<reason_phrase>.+)$", re.IGNORECASE)
    # NOTE(review): this whitelist pattern is not referenced by parse() below,
    # which splits header lines on ':' directly — confirm whether it is dead
    _pattern_http_header = re.compile(
        r"^\s*(?P<name>X-[^:]*?|Server|Date|Content-Type|Content-Length|Content-Encoding|Connection|Keep-Alive|Access-C"
        r"ontrol-Allow-Origin|Transfer-Encoding|Pragma|Cache-Control|ETag|Strict-Transport-Security|Set-Cookie)\s*:\s*("
        r"?P<value>.*?)\s*$", re.IGNORECASE
    )
    # trailer line written by the client after each completed exchange
    _pattern_elapsed = re.compile(r"^Completed in (?P<elapsed>\d+) ms$", re.IGNORECASE)
    @log_call
    def __init__(self, version, status_code, reason_phrase, headers, data, elapsed):
        """Store already-parsed response parts; elapsed is milliseconds (str)."""
        super(HTTPResponse, self).__init__()
        self.version = version
        self.status_code = status_code
        self.reason_phrase = reason_phrase
        self.headers = headers
        self.data = data
        self.elapsed = elapsed
    @staticmethod
    @log_call
    def parse(match, lines):
        """Build an HTTPResponse from a status-line match plus following lines.

        NOTE(review): the header loop has no break, so it consumes every line
        as a 'key: value' pair and a line without ':' would raise ValueError;
        the following next(lines) then relies on the shared iterator still
        having lines left — verify this against real log samples.

        :raises ParseException: when no 'Completed in N ms' trailer is found
        """
        headers = defaultdict(list)
        for line in lines:
            (key, value) = map(lambda s: s.strip(), line.split(":", 1))
            headers[key].append(value)
        data = next(lines)
        # scan the remainder for the elapsed-time trailer
        for line in lines:
            elapsed_match = re.search(HTTPResponse._pattern_elapsed, line)
            if elapsed_match:
                log.debug("Line matched with 'HTTPResponse._pattern_elapsed' pattern.")
                elapsed = elapsed_match.group("elapsed")
                break
        else:
            raise ParseException("Can't find elapsed time!")
        return HTTPResponse(
            version=match.group("version"),
            status_code=match.group("status_code"),
            reason_phrase=match.group("reason_phrase"),
            headers=headers,
            data=data,
            elapsed=elapsed
        )
    def __repr__(self):
        return (
            "HTTPResponse(version='{self.version}', status_code='{self.status_code}', reason_phrase='{self.reason_phras"
            "e}', headers='{self.headers}', data='{self.data}', elapsed='{self.elapsed}')".format(self=self)
        )
class LogRecord(object):
    """One timestamped log record, optionally holding a request/response pair."""
    # record header: "[Wed Jan  2 03:04:05 2019.123].[0xDEADBEEF]"
    pattern = re.compile(
        r"^\[(?P<week_day>Sun|Mon|Tue|Wed|Thu|Fri|Sat)\s(?P<month>Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s{1,"
        r"2}(?P<day>\d{1,2})\s(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})\s(?P<year>\d+)\.(?P<microsecond>\d{1,"
        r"3})\]\.\[(?:0x)?[0-9a-fA-F]+\]\s*$", re.IGNORECASE
    )
    @log_call
    def __init__(self, date_time, request=None, response=None):
        """Store the record timestamp and its parsed request/response (if any)."""
        super(LogRecord, self).__init__()
        self.date_time = date_time
        self.request = request
        self.response = response
    @staticmethod
    @log_call
    def parse(match, lines):
        """Build a LogRecord from a header match plus the record's body lines.

        Buffers lines until a response status line appears, then hands the
        buffered prefix to HTTPRequest.parse and the remainder to
        HTTPResponse.parse.  Returns a bare LogRecord (timestamp only) when
        no request/response pair is found.
        """
        date_time = datetime(
            year=int(match.group("year")),
            month=int(datetime.strptime(match.group("month"), "%b").month),
            day=int(match.group("day")),
            hour=int(match.group("hour")),
            minute=int(match.group("minute")),
            second=int(match.group("second")),
            # the log's fractional field is milliseconds; scale to microseconds
            microsecond=int(match.group("microsecond")) * 1000,
        )
        for line in lines:
            request_match = HTTPRequest.pattern.search(line)
            if request_match:
                log.debug("Line matched with 'HTTPRequest.pattern' pattern.")
                buffer = []
                # noinspection PyAssignmentToLoopOrWithParameter
                for line in lines:
                    response_match = re.search(HTTPResponse.pattern, line)
                    if response_match:
                        log.debug("Line matched with 'HTTPResponse.pattern' pattern.")
                        return LogRecord(
                            date_time=date_time,
                            request=HTTPRequest.parse(request_match, logging_iterator(HTTPRequest.__name__, buffer)),
                            response=HTTPResponse.parse(
                                response_match, logging_iterator(HTTPResponse.__name__, list(lines))
                            )
                        )
                    else:
                        buffer.append(line)
        return LogRecord(date_time=date_time)
def fix_log(lines):
    """Repair known quirks of the client log before record parsing.

    Fixes applied:
    - a body glued to a status line right after 'HTTP/1.1 100 Continue'
      is split back into two lines;
    - 'Connection #N to host ... left intact' lines are dropped;
    - 'upload completely sent off: ...' lines are dropped.
    Yields the (possibly rewritten) lines as a generator.
    """
    status_line_regexp = re.compile(r"^(?P<body>.*)(?P<status_line>HTTP/\d\.\d\s\d{3}\s.+)$", re.IGNORECASE)
    connection_left_regexp = re.compile(r"^.*Connection #\d+ to host \S+ left intact$", re.IGNORECASE)
    upload_sent_regexp = re.compile(r"^.*upload completely sent off: \d+ out of \d+ bytes$", re.IGNORECASE)
    prev_line = None
    for line in lines:
        log.debug("Processing line: '{}'.".format(line))
        if prev_line == "HTTP/1.1 100 Continue":
            match = re.search(status_line_regexp, line)
            if match:
                log.debug("Fixing '100-continue' problem line.")
                yield match.group("body")
                yield match.group("status_line")
            # NOTE(review): when the regex does NOT match here the line is
            # silently dropped (no else branch) — confirm that is intended
        elif re.search(connection_left_regexp, line):
            log.debug("Fixing 'Connection blah-blah left intact' problem line.")
            # yield re.split(connection_left_split_regexp, line)[0]
        elif re.search(upload_sent_regexp, line):
            log.debug("Fixing 'Upload completely sent blah-blah' problem line.")
            # result = re.split(upload_sent_split_regexp, line)[0]
        else:
            yield line
        prev_line = line
def iterate_log(lines):
    """Group raw log lines into LogRecord objects.

    A line matching LogRecord.pattern starts a new record; the lines that
    follow (until the next header match) form its body.  Yields parsed
    LogRecord objects.
    """
    buffer = []
    match = None
    for line in lines:
        m = re.search(LogRecord.pattern, line)
        if m:
            log.debug("Line matched with 'LogRecord.pattern' pattern.")
            if buffer and match:
                yield LogRecord.parse(match, logging_iterator(LogRecord.__name__, buffer))
            buffer = []
            match = m
        else:
            buffer.append(line)
    # fix: flush the final record — previously the lines after the last
    # header marker were silently dropped
    if buffer and match:
        yield LogRecord.parse(match, logging_iterator(LogRecord.__name__, buffer))
def file_callback(bot, event):
    """Handle an uploaded log file: download it, parse it, reply with stats.

    The message text is expected to be a file-share URL whose last path
    segment is the file id.  The reply contains total request counts,
    per-status-code response totals and the number of requests that never
    got a response.  Errors are reported back to the sender; parse errors
    and unexpected failures are re-raised after replying.
    """
    source_uin = event.data["source"]["aimId"]
    message = event.data["message"]
    try:
        bot.set_typing(target=source_uin, typing_status=TypingStatus.TYPING)
        # Getting info for file in message.
        path = parse.urlsplit(message.strip()).path
        file_id = path.rsplit("/", 1).pop()
        file_info_response = bot.get_file_info(file_id=file_id)
        if file_info_response.status_code == requests.codes.not_found:
            raise FileNotFoundException
        url = file_info_response.json()["file_list"].pop()["dlink"]
        # Starting file download.
        file_response = bot.http_session.get(url, stream=True)
        if file_response.encoding is None:
            file_response.encoding = "utf-8"
        # Downloading file and calculating stats.
        stats = defaultdict(int)
        status_codes = defaultdict(int)
        # Stream the file line by line, repair known quirks (fix_log), then
        # group lines into request/response records (iterate_log).
        for log_record in iterate_log(fix_log(
            line for line in file_response.iter_lines(chunk_size=1024, decode_unicode=True) if line
        )):
            if log_record.request:
                stats["requests_count"] += 1
                if log_record.request.url.path == "/aim/startSession":
                    stats["start_session_count"] += 1
                if log_record.request.url.path == "/genToken":
                    stats["gen_token_count"] += 1
            if log_record.response:
                key = log_record.response.status_code + " " + log_record.response.reason_phrase
                status_codes[key] += 1
            else:
                stats["no_response_count"] += 1
        bot.send_im(
            target=source_uin,
            message=(
                "Total requests: {requests_count}\n /aim/startSession: {start_session_count}\n /genToken: {gen_to"
                "ken_count}\n\nResponse count by status code:\n{status_codes}\n\nFound problems:\n{problems}\n\n{phrase"
                "}"
            ).format(
                requests_count=stats["requests_count"],
                start_session_count=stats["start_session_count"],
                gen_token_count=stats["gen_token_count"],
                status_codes="\n".join([
                    " {code}: {count}".format(
                        code=code, count=count
                    ) for (code, count) in sorted(status_codes.items())
                ]),
                problems=" Requests without response: {no_response_count}".format(
                    no_response_count=stats["no_response_count"]
                ),
                phrase=random.choice(PHRASES)
            )
        )
    except FileNotFoundException:
        bot.send_im(target=source_uin, message=random.choice(PHRASES) + " Give me your log right now!")
    except ParseException as e:
        bot.send_im(
            target=source_uin,
            message="{phrase} Log format is not supported! Error: '{error}'.".format(
                phrase=random.choice(PHRASES), error=e
            )
        )
        raise
    except Exception:
        bot.send_im(target=source_uin, message=random.choice(PHRASES) + " Something has gone wrong!")
        raise
    finally:
        # Always clear the typing indicator, whatever happened above.
        bot.set_typing(target=source_uin, typing_status=TypingStatus.NONE)
class ParseException(Exception):
    """Raised when a client log cannot be parsed into records."""
def main():
    """Create the bot, register the log-file handler, block until stopped."""
    hermes = ICQBot(token=TOKEN, name=NAME, version=VERSION)
    # Only plain file attachments are handled (images/video/audio excluded).
    hermes.dispatcher.add_handler(MessageHandler(
        callback=file_callback,
        filters=MessageFilter.file & ~(MessageFilter.image | MessageFilter.video | MessageFilter.audio)
    ))
    # Non-blocking: spawns the polling thread watching for new events.
    hermes.start_polling()
    # Block the main thread until SIGINT, SIGTERM or SIGABRT is received.
    hermes.idle()


if __name__ == "__main__":
    main()
| 14,767 | 4,527 |
#! /usr/bin/python3
import curses
import sys
import subprocess
def main_menu(stdscr):
    """Curses UI loop: repeatedly run qstat and render its output.

    Keys: '.' toggles between all users and the current user; 'q' quits.
    :param stdscr: curses standard screen (supplied by curses.wrapper)
    """
    k = 0
    # fix: removed unused locals cursor_x / cursor_y
    # Start colors in curses
    curses.start_color()
    curses.init_pair(1, curses.COLOR_CYAN, curses.COLOR_BLACK)
    curses.init_pair(2, curses.COLOR_RED, curses.COLOR_BLACK)
    curses.init_pair(3, curses.COLOR_BLACK, curses.COLOR_WHITE)
    # Set mode: 0 = all users, 1 = current user only
    switch = 0
    # Loop where k is the last character pressed
    while True:
        if k == ord('q'):
            sys.exit()
        # Respond if the switch was pressed
        if k == ord('.'):
            switch = 1 if switch == 0 else 0
            k = -1
        # Initialization
        curses.curs_set(False)
        stdscr.nodelay(True)
        stdscr.clear()
        height, width = stdscr.getmaxyx()
        # Call qstat
        if switch == 0:
            process = subprocess.Popen("qstat -u '*'", stdout=subprocess.PIPE, shell=True)
        else:
            process = subprocess.Popen('qstat', stdout=subprocess.PIPE)
        stdout, _ = process.communicate()  # stderr is not captured
        # NOTE(review): str(bytes)[2:-1] plus splitting on the literal '\\n'
        # round-trips the repr of the output; works for plain ASCII qstat
        # output but decode() would be cleaner — confirm before changing
        qstat = str(stdout)[2:-1].split('\\n')[:-1]
        # Strings
        statusbarstr = " github.com/miferg | '.' to toggle all or user | 'q' to exit "
        if switch == 0:
            title = " qstat all users, {} jobs".format(len(qstat)-2)
            title_empty = " qstat all users, no jobs"
        if switch == 1:
            title = " qstat current user, {} jobs".format(len(qstat)-2)
            title_empty = " qstat current user, no jobs"
        # fix: removed unused start_x_title centering calculation
        # Render status bar
        stdscr.attron(curses.color_pair(3))
        stdscr.addstr(height-1, 0, statusbarstr)
        stdscr.addstr(height-1, len(statusbarstr), " " * (width - len(statusbarstr) - 1))
        stdscr.attroff(curses.color_pair(3))
        # Rendering title
        stdscr.attron(curses.color_pair(3))
        stdscr.attron(curses.A_BOLD)
        if not qstat:  # fix: clearer than the old 'len(qstat)-2 == -2'
            stdscr.addstr(0, 0, title_empty)
            stdscr.addstr(0, len(title_empty), " " * (width - len(title) - 1))
        else:
            stdscr.addstr(0, 0, title)
            stdscr.addstr(0, len(title), " " * (width - len(title) - 1))
        stdscr.attroff(curses.color_pair(3))
        # Turning off attributes for title
        # NOTE(review): pair 2 was never turned on here — confirm this attroff
        # is intentional
        stdscr.attroff(curses.color_pair(2))
        stdscr.attroff(curses.A_BOLD)
        # Print the qstat report, line by line until the screen is filled
        for i in range(0, min(len(qstat), height-3)):
            stdscr.addstr(i+1, 0, qstat[i])
        # Refresh the screen
        stdscr.refresh()
        curses.napms(100)
        # Wait for next input
        k = stdscr.getch()
def main():
    # Entry point: curses.wrapper initialises the terminal, passes the
    # screen to main_menu, and restores the terminal state on exit or
    # on an uncaught exception.
    curses.wrapper(main_menu)
if __name__ == "__main__":
    # Run the interactive qstat monitor only when executed as a script.
    main()
| 2,920 | 1,024 |
import plotly.graph_objects as go
import numpy as np
def multi_trace(x, y, color, label=None, fig=None, insert_nans=False):
    """Plot several line segments that share a single legend label.

    Parameters
    ----------
    x: array-like
    y: array-like
    color: str
    label: str, optional
    fig: go.FigureWidget, optional
        Created on demand when not supplied.
    insert_nans: bool, optional
        When True, all segments are concatenated into one trace with
        NaN separators instead of one trace per segment.

    Returns
    -------
    go.FigureWidget
    """
    if fig is None:
        fig = go.FigureWidget()
    if insert_nans:
        # Join the segments into a single long trace; the trailing NaN
        # after each segment breaks the line between segments.
        xs_with_gaps = []
        ys_with_gaps = []
        for seg_x, seg_y in zip(x, y):
            xs_with_gaps.append(np.append(seg_x, np.nan))
            ys_with_gaps.append(np.append(seg_y, np.nan))
        fig.add_scattergl(
            x=np.concatenate(xs_with_gaps, axis=0),
            y=np.concatenate(ys_with_gaps, axis=0),
            name=label,
            line={"color": color},
        )
        return fig
    for idx, trace_y in enumerate(y):
        # Only the first trace of a labelled group appears in the legend;
        # unlabelled traces keep showlegend=True as before.
        show_entry = label is None or idx == 0
        fig.add_scattergl(
            x=x,
            y=trace_y,
            legendgroup=label,
            name=label,
            showlegend=show_entry,
            line={"color": color},
        )
    return fig
def event_group(
    times_list,
    offset=0,
    color="Black",
    label=None,
    fig=None,
    marker=None,
    line_width=None,
):
    """Draw an event raster whose rows share one legend label.

    Parameters
    ----------
    times_list: list of array-like
        One array of event times per row.
    offset: float, optional
        Added to the row index to position the raster vertically.
    color: str, optional (default: Black)
    label: str, optional
    fig: go.FigureWidget, optional
    marker: str, optional
    line_width: str, optional

    Returns
    -------
    go.FigureWidget
    """
    if fig is None:
        fig = go.FigureWidget()
    # Only the first non-empty row gets a legend entry, and only when a
    # label was supplied at all.
    legend_pending = label is not None
    for row, event_times in enumerate(times_list):
        if not len(event_times):
            continue
        fig.add_scattergl(
            x=event_times,
            y=np.ones_like(event_times) * (row + offset),
            marker=dict(
                color=color, line_width=line_width, symbol=marker, line_color=color
            ),
            legendgroup=str(label),
            name=label,
            showlegend=legend_pending,
            mode="markers",
        )
        legend_pending = False
    return fig
| 2,458 | 742 |
from django.conf import settings
class SAMLServiceProviderSettings(object):
    """Assembles the OneLogin/python3-saml settings dictionary for this SP.

    The contact and organization templates below are filled from Django
    settings at import time; the constructor builds the complete settings
    dict (``self.settings``) from the SP and IdP endpoint arguments.
    """
    contact_info = {
        # Contact information template, it is recommended to supply
        # technical and support contacts.
        "technical": {
            "givenName": settings.SAML['CONTACT_INFO']['TECHNICAL']['GIVEN_NAME'],
            "emailAddress": settings.SAML['CONTACT_INFO']['TECHNICAL']['EMAIL'],
        },
        "support": {
            "givenName": settings.SAML['CONTACT_INFO']['SUPPORT']['GIVEN_NAME'],
            "emailAddress": settings.SAML['CONTACT_INFO']['SUPPORT']['EMAIL'],
        }
    }
    organization_info = {
        # Organization information template, the info in en_US lang is
        # recommended, add more if required.
        "en-US": {
            "name": settings.SAML['ORGANIZATION_INFO']['EN_US']['NAME'],
            "displayname": settings.SAML['ORGANIZATION_INFO']['EN_US']['DISPLAY_NAME'],
            "url": settings.SAML['ORGANIZATION_INFO']['EN_US']['URL'],
        }
    }

    def __init__(self,
                 debug=False,
                 strict=True,
                 sp_metadata_url=None, sp_login_url=None, sp_logout_url=None, sp_x509cert=None, sp_private_key=None,  # Service provider settings (e.g. us)
                 idp_metadata_url=None, idp_sso_url=None, idp_slo_url=None, idp_x509cert=None, idp_x509_fingerprint=None,  # Identity provider settings (e.g. onelogin)
                 ):
        """Build ``self.settings`` from the given SP/IdP endpoints.

        Either ``idp_x509cert`` or ``idp_x509_fingerprint`` may be given;
        whichever is falsy is removed from the resulting dict.
        """
        super(SAMLServiceProviderSettings, self).__init__()
        self.settings = default_settings = {
            # If strict is True, then the Python Toolkit will reject unsigned
            # or unencrypted messages if it expects them to be signed or encrypted.
            # Also it will reject the messages if the SAML standard is not strictly
            # followed. Destination, NameId, Conditions ... are validated too.
            "strict": strict,
            # Enable debug mode (outputs errors).
            "debug": debug,
            # Service Provider Data that we are deploying.
            "sp": {
                # Identifier of the SP entity (must be a URI)
                "entityId": sp_metadata_url,
                # Specifies info about where and how the <AuthnResponse> message MUST be
                # returned to the requester, in this case our SP.
                "assertionConsumerService": {
                    # URL Location where the <Response> from the IdP will be returned
                    "url": sp_login_url,
                    # SAML protocol binding to be used when returning the <Response>
                    # message. OneLogin Toolkit supports this endpoint for the
                    # HTTP-POST binding only.
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
                },
                # Specifies info about where and how the <Logout Response> message MUST be
                # returned to the requester, in this case our SP.
                "singleLogoutService": {
                    # URL Location where the <Response> from the IdP will be returned
                    "url": sp_logout_url,
                    # SAML protocol binding to be used when returning the <Response>
                    # message. OneLogin Toolkit supports the HTTP-Redirect binding
                    # only for this endpoint.
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
                },
                # Specifies the constraints on the name identifier to be used to
                # represent the requested subject.
                # Take a look on src/onelogin/saml2/constants.py to see the NameIdFormat that are supported.
                "NameIDFormat": "urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified",
                # Usually x509cert and privateKey of the SP are provided by files placed at
                # the certs folder. But we can also provide them with the following parameters
                'x509cert': sp_x509cert,
                'privateKey': sp_private_key
            },
            # Identity Provider Data that we want connected with our SP.
            "idp": {
                # Identifier of the IdP entity (must be a URI)
                "entityId": idp_metadata_url,
                # SSO endpoint info of the IdP. (Authentication Request protocol)
                "singleSignOnService": {
                    # URL Target of the IdP where the Authentication Request Message
                    # will be sent.
                    "url": idp_sso_url,
                    # SAML protocol binding to be used when returning the <Response>
                    # message. OneLogin Toolkit supports the HTTP-Redirect binding
                    # only for this endpoint.
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
                },
                # SLO endpoint info of the IdP.
                "singleLogoutService": {
                    # URL Location of the IdP where SLO Request will be sent.
                    "url": idp_slo_url,
                    # SAML protocol binding to be used when returning the <Response>
                    # message. OneLogin Toolkit supports the HTTP-Redirect binding
                    # only for this endpoint.
                    "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
                },
                # Public x509 certificate of the IdP
                "x509cert": idp_x509cert,
                # Instead of use the whole x509cert you can use a fingerprint
                # (openssl x509 -noout -fingerprint -in "idp.crt" to generate it)
                "certFingerprint": idp_x509_fingerprint
            },
            "organization": self.organization_info,
            'contactPerson': self.contact_info,
        }
        # Drop whichever IdP credential form was not supplied.
        if not idp_x509cert:
            del self.settings['idp']['x509cert']
        if not idp_x509_fingerprint:
            del self.settings['idp']['certFingerprint']
| 5,997 | 1,597 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import re
from oslo.config import cfg
from heat.engine import environment
from heat.common import exception
from heat.engine import dependencies
from heat.common import identifier
from heat.engine import resource
from heat.engine import resources
from heat.engine import scheduler
from heat.engine import template
from heat.engine import timestamp
from heat.engine import update
from heat.engine.parameters import Parameters
from heat.engine.template import Template
from heat.engine.clients import Clients
from heat.db import api as db_api
from heat.openstack.common import log as logging
from heat.openstack.common.gettextutils import _
from heat.common.exception import StackValidationFailed
# Module-level logger for the stack parser.
logger = logging.getLogger(__name__)
# Names of the AWS pseudo-parameters provided to every template.
(PARAM_STACK_NAME, PARAM_REGION) = ('AWS::StackName', 'AWS::Region')
class Stack(object):
    """A stack of resources instantiated from a parsed template.

    Tracks the stack lifecycle as (action, status) pairs, persists itself
    through heat.db.api, and drives resource operations in dependency
    order via the scheduler module.
    """
    # Lifecycle actions; together with a status below they form the state.
    ACTIONS = (CREATE, DELETE, UPDATE, ROLLBACK, SUSPEND, RESUME
               ) = ('CREATE', 'DELETE', 'UPDATE', 'ROLLBACK', 'SUSPEND',
                    'RESUME')
    STATUSES = (IN_PROGRESS, FAILED, COMPLETE
                ) = ('IN_PROGRESS', 'FAILED', 'COMPLETE')
    # Timestamps read lazily from the DB row; show_deleted=True so they
    # remain readable for soft-deleted stacks.
    created_time = timestamp.Timestamp(functools.partial(db_api.stack_get,
                                                         show_deleted=True),
                                       'created_at')
    updated_time = timestamp.Timestamp(functools.partial(db_api.stack_get,
                                                         show_deleted=True),
                                       'updated_at')
    # Cache for get_availability_zones().
    _zones = None

    def __init__(self, context, stack_name, tmpl, env=None,
                 stack_id=None, action=None, status=None,
                 status_reason='', timeout_mins=60, resolve_data=True,
                 disable_rollback=True, parent_resource=None, owner_id=None):
        '''
        Initialise from a context, name, Template object and (optionally)
        Environment object. The database ID may also be initialised, if the
        stack is already in the database.
        '''
        # Only top-level stacks get their name validated; owned (nested or
        # backup) stacks may carry generated names such as "name*".
        if owner_id is None:
            if re.match("[a-zA-Z][a-zA-Z0-9_.-]*$", stack_name) is None:
                raise ValueError(_('Invalid stack name %s'
                                   ' must contain only alphanumeric or '
                                   '\"_-.\" characters, must start with alpha'
                                   ) % stack_name)
        self.id = stack_id
        self.owner_id = owner_id
        self.context = context
        self.clients = Clients(context)
        self.t = tmpl
        self.name = stack_name
        self.action = action
        self.status = status
        self.status_reason = status_reason
        self.timeout_mins = timeout_mins
        self.disable_rollback = disable_rollback
        self.parent_resource = parent_resource
        # Lazily-built caches (see the resources/dependencies properties).
        self._resources = None
        self._dependencies = None
        resources.initialise()
        self.env = env or environment.Environment({})
        self.parameters = Parameters(self.name, self.t,
                                     user_params=self.env.params)
        self._set_param_stackid()
        if resolve_data:
            self.outputs = self.resolve_static_data(self.t[template.OUTPUTS])
        else:
            self.outputs = {}

    @property
    def resources(self):
        # Lazily instantiate one Resource per template resource entry,
        # keyed by resource name.
        if self._resources is None:
            template_resources = self.t[template.RESOURCES]
            self._resources = dict((name, resource.Resource(name, data, self))
                                   for (name, data) in
                                   template_resources.items())
        return self._resources

    @property
    def dependencies(self):
        # Lazily-computed dependency graph over all resources.
        if self._dependencies is None:
            self._dependencies = self._get_dependencies(
                self.resources.itervalues())
        return self._dependencies

    def reset_dependencies(self):
        # Invalidate the cached graph, e.g. after an update changes it.
        self._dependencies = None

    @property
    def root_stack(self):
        '''
        Return the root stack if this is nested (otherwise return self).
        '''
        if (self.parent_resource and self.parent_resource.stack):
            return self.parent_resource.stack.root_stack
        return self

    def total_resources(self):
        '''
        Total number of resources in a stack, including nested stacks below.
        '''
        total = 0
        for res in iter(self.resources.values()):
            # Resources that wrap a nested stack contribute their own
            # resource count as well as themselves.
            if hasattr(res, 'nested') and res.nested():
                total += res.nested().total_resources()
            total += 1
        return total

    def _set_param_stackid(self):
        '''
        Update self.parameters with the current ARN which is then provided
        via the Parameters class as the AWS::StackId pseudo parameter
        '''
        # This can fail if constructor called without a valid context,
        # as it is in many tests
        try:
            stack_arn = self.identifier().arn()
        except (AttributeError, ValueError, TypeError):
            logger.warning("Unable to set parameters StackId identifier")
        else:
            self.parameters.set_stack_id(stack_arn)

    @staticmethod
    def _get_dependencies(resources):
        '''Return the dependency graph for a list of resources.'''
        deps = dependencies.Dependencies()
        for resource in resources:
            resource.add_dependencies(deps)
        return deps

    @classmethod
    def load(cls, context, stack_id=None, stack=None, resolve_data=True,
             parent_resource=None, show_deleted=True):
        '''Retrieve a Stack from the database.'''
        if stack is None:
            stack = db_api.stack_get(context, stack_id,
                                     show_deleted=show_deleted)
        if stack is None:
            message = 'No stack exists with id "%s"' % str(stack_id)
            raise exception.NotFound(message)
        template = Template.load(context, stack.raw_template_id)
        env = environment.Environment(stack.parameters)
        stack = cls(context, stack.name, template, env,
                    stack.id, stack.action, stack.status, stack.status_reason,
                    stack.timeout, resolve_data, stack.disable_rollback,
                    parent_resource, owner_id=stack.owner_id)
        return stack

    def store(self, backup=False):
        '''
        Store the stack in the database and return its ID
        If self.id is set, we update the existing stack
        '''
        s = {
            'name': self._backup_name() if backup else self.name,
            'raw_template_id': self.t.store(self.context),
            'parameters': self.env.user_env_as_dict(),
            'owner_id': self.owner_id,
            'username': self.context.username,
            'tenant': self.context.tenant_id,
            'action': self.action,
            'status': self.status,
            'status_reason': self.status_reason,
            'timeout': self.timeout_mins,
            'disable_rollback': self.disable_rollback,
        }
        if self.id:
            db_api.stack_update(self.context, self.id, s)
        else:
            # Create a context containing a trust_id and trustor_user_id
            # if trusts are enabled
            if cfg.CONF.deferred_auth_method == 'trusts':
                trust_context = self.clients.keystone().create_trust_context()
                new_creds = db_api.user_creds_create(trust_context)
            else:
                new_creds = db_api.user_creds_create(self.context)
            s['user_creds_id'] = new_creds.id
            new_s = db_api.stack_create(self.context, s)
            self.id = new_s.id
            # The ARN only becomes available once the DB ID exists.
            self._set_param_stackid()
        return self.id

    def _backup_name(self):
        # '*' is rejected by the stack-name regex in __init__, so backup
        # names can never collide with user-created stacks.
        return '%s*' % self.name

    def identifier(self):
        '''
        Return an identifier for this stack.
        '''
        return identifier.HeatIdentifier(self.context.tenant_id,
                                         self.name, self.id)

    def __iter__(self):
        '''
        Return an iterator over this template's resources in the order that
        they should be started.
        '''
        return iter(self.dependencies)

    def __reversed__(self):
        '''
        Return an iterator over this template's resources in the order that
        they should be stopped.
        '''
        return reversed(self.dependencies)

    def __len__(self):
        '''Return the number of resources.'''
        return len(self.resources)

    def __getitem__(self, key):
        '''Get the resource with the specified name.'''
        return self.resources[key]

    def __setitem__(self, key, value):
        '''Set the resource with the specified name to a specific value.'''
        self.resources[key] = value

    def __contains__(self, key):
        '''Determine whether the stack contains the specified resource.'''
        return key in self.resources

    def keys(self):
        '''Return a list of resource keys for the stack.'''
        return self.resources.keys()

    def __str__(self):
        '''Return a human-readable string representation of the stack.'''
        return 'Stack "%s"' % self.name

    def resource_by_refid(self, refid):
        '''
        Return the resource in this stack with the specified
        refid, or None if not found
        '''
        # Only resources in a live (created/resumed/updated) state are
        # considered; falls through to an implicit None otherwise.
        for r in self.resources.values():
            if r.state in (
                    (r.CREATE, r.IN_PROGRESS),
                    (r.CREATE, r.COMPLETE),
                    (r.RESUME, r.IN_PROGRESS),
                    (r.RESUME, r.COMPLETE),
                    (r.UPDATE, r.IN_PROGRESS),
                    (r.UPDATE, r.COMPLETE)) and r.FnGetRefId() == refid:
                return r

    def validate(self):
        '''
        http://docs.amazonwebservices.com/AWSCloudFormation/latest/\
        APIReference/API_ValidateTemplate.html
        '''
        # TODO(sdake) Should return line number of invalid reference
        # Check duplicate names between parameters and resources
        dup_names = set(self.parameters.keys()) & set(self.resources.keys())
        if dup_names:
            logger.debug("Duplicate names %s" % dup_names)
            raise StackValidationFailed(message="Duplicate names %s" %
                                        dup_names)
        for res in self:
            try:
                result = res.validate()
            except exception.Error as ex:
                # Known engine errors propagate unchanged.
                logger.exception(ex)
                raise ex
            except Exception as ex:
                # Anything else is reported as a validation failure.
                logger.exception(ex)
                raise StackValidationFailed(message=str(ex))
            if result:
                raise StackValidationFailed(message=result)

    def requires_deferred_auth(self):
        '''
        Returns whether this stack may need to perform API requests
        during its lifecycle using the configured deferred authentication
        method.
        '''
        return any(res.requires_deferred_auth for res in self)

    def state_set(self, action, status, reason):
        '''Update the stack state in the database.'''
        if action not in self.ACTIONS:
            raise ValueError("Invalid action %s" % action)
        if status not in self.STATUSES:
            raise ValueError("Invalid status %s" % status)
        self.action = action
        self.status = status
        self.status_reason = reason
        # A stack that was never stored has no DB row to update.
        if self.id is None:
            return
        stack = db_api.stack_get(self.context, self.id)
        stack.update_and_save({'action': action,
                               'status': status,
                               'status_reason': reason})

    @property
    def state(self):
        '''Returns state, tuple of action, status.'''
        return (self.action, self.status)

    def timeout_secs(self):
        '''
        Return the stack creation timeout in seconds, or None if no timeout
        should be used.
        '''
        if self.timeout_mins is None:
            return None
        return self.timeout_mins * 60

    def create(self):
        '''
        Create the stack and all of the resources.
        '''
        def rollback():
            # Automatic rollback: delete everything if the create failed
            # and rollback has not been disabled.
            if not self.disable_rollback and self.state == (self.CREATE,
                                                            self.FAILED):
                self.delete(action=self.ROLLBACK)
        creator = scheduler.TaskRunner(self.stack_task,
                                       action=self.CREATE,
                                       reverse=False,
                                       post_func=rollback)
        creator(timeout=self.timeout_secs())

    @scheduler.wrappertask
    def stack_task(self, action, reverse=False, post_func=None):
        '''
        A task to perform an action on the stack and all of the resources
        in forward or reverse dependency order as specified by reverse
        '''
        self.state_set(action, self.IN_PROGRESS,
                       'Stack %s started' % action)
        stack_status = self.COMPLETE
        reason = 'Stack %s completed successfully' % action.lower()
        res = None

        def resource_action(r):
            # Find e.g resource.create and call it
            action_l = action.lower()
            handle = getattr(r, '%s' % action_l)
            return handle()
        action_task = scheduler.DependencyTaskGroup(self.dependencies,
                                                    resource_action,
                                                    reverse)
        try:
            yield action_task()
        except exception.ResourceFailure as ex:
            stack_status = self.FAILED
            reason = 'Resource %s failed: %s' % (action.lower(), str(ex))
        except scheduler.Timeout:
            stack_status = self.FAILED
            reason = '%s timed out' % action.title()
        self.state_set(action, stack_status, reason)
        if callable(post_func):
            post_func()

    def _backup_stack(self, create_if_missing=True):
        '''
        Get a Stack containing any in-progress resources from the previous
        stack state prior to an update.
        '''
        s = db_api.stack_get_by_name(self.context, self._backup_name(),
                                     owner_id=self.id)
        if s is not None:
            logger.debug('Loaded existing backup stack')
            return self.load(self.context, stack=s)
        elif create_if_missing:
            prev = type(self)(self.context, self.name, self.t, self.env,
                              owner_id=self.id)
            prev.store(backup=True)
            logger.debug('Created new backup stack')
            return prev
        else:
            return None

    def update(self, newstack):
        '''
        Compare the current stack with newstack,
        and where necessary create/update/delete the resources until
        this stack aligns with newstack.
        Note update of existing stack resources depends on update
        being implemented in the underlying resource types
        Update will fail if it exceeds the specified timeout. The default is
        60 minutes, set in the constructor
        '''
        updater = scheduler.TaskRunner(self.update_task, newstack)
        updater()

    @scheduler.wrappertask
    def update_task(self, newstack, action=UPDATE):
        # Generator task driving an update (or a rollback, which is an
        # update back to the pre-update template).
        if action not in (self.UPDATE, self.ROLLBACK):
            logger.error("Unexpected action %s passed to update!" % action)
            self.state_set(self.UPDATE, self.FAILED,
                           "Invalid action %s" % action)
            return
        if self.status != self.COMPLETE:
            # A rollback of an in-progress update is the one permitted
            # exception to requiring a COMPLETE starting state.
            if (action == self.ROLLBACK and
                    self.state == (self.UPDATE, self.IN_PROGRESS)):
                logger.debug("Starting update rollback for %s" % self.name)
            else:
                self.state_set(action, self.FAILED,
                               'State invalid for %s' % action)
                return
        self.state_set(self.UPDATE, self.IN_PROGRESS,
                       'Stack %s started' % action)
        # Snapshot of the current definition, used as the rollback target.
        oldstack = Stack(self.context, self.name, self.t, self.env)
        backup_stack = self._backup_stack()
        try:
            update_task = update.StackUpdate(self, newstack, backup_stack,
                                             rollback=action == self.ROLLBACK)
            updater = scheduler.TaskRunner(update_task)
            self.env = newstack.env
            self.parameters = newstack.parameters
            try:
                updater.start(timeout=self.timeout_secs())
                yield
                while not updater.step():
                    yield
            finally:
                self.reset_dependencies()
            if action == self.UPDATE:
                reason = 'Stack successfully updated'
            else:
                reason = 'Stack rollback completed'
            stack_status = self.COMPLETE
        except scheduler.Timeout:
            stack_status = self.FAILED
            reason = 'Timed out'
        except exception.ResourceFailure as e:
            reason = str(e)
            stack_status = self.FAILED
            if action == self.UPDATE:
                # If rollback is enabled, we do another update, with the
                # existing template, so we roll back to the original state
                if not self.disable_rollback:
                    yield self.update_task(oldstack, action=self.ROLLBACK)
                    return
        else:
            # Success: the backup of the previous state is no longer needed.
            logger.debug('Deleting backup stack')
            backup_stack.delete()
        self.state_set(action, stack_status, reason)
        # flip the template to the newstack values
        # Note we do this on success and failure, so the current
        # stack resources are stored, even if one is in a failed
        # state (otherwise we won't remove them on delete)
        self.t = newstack.t
        template_outputs = self.t[template.OUTPUTS]
        self.outputs = self.resolve_static_data(template_outputs)
        self.store()

    def delete(self, action=DELETE):
        '''
        Delete all of the resources, and then the stack itself.
        The action parameter is used to differentiate between a user
        initiated delete and an automatic stack rollback after a failed
        create, which amount to the same thing, but the states are recorded
        differently.
        '''
        if action not in (self.DELETE, self.ROLLBACK):
            logger.error("Unexpected action %s passed to delete!" % action)
            self.state_set(self.DELETE, self.FAILED,
                           "Invalid action %s" % action)
            return
        stack_status = self.COMPLETE
        reason = 'Stack %s completed successfully' % action.lower()
        self.state_set(action, self.IN_PROGRESS, 'Stack %s started' % action)
        # Any leftover backup stack from an update must go first.
        backup_stack = self._backup_stack(False)
        if backup_stack is not None:
            backup_stack.delete()
            if backup_stack.status != backup_stack.COMPLETE:
                errs = backup_stack.status_reason
                failure = 'Error deleting backup resources: %s' % errs
                self.state_set(action, self.FAILED,
                               'Failed to %s : %s' % (action, failure))
                return
        action_task = scheduler.DependencyTaskGroup(self.dependencies,
                                                    resource.Resource.destroy,
                                                    reverse=True)
        try:
            scheduler.TaskRunner(action_task)(timeout=self.timeout_secs())
        except exception.ResourceFailure as ex:
            stack_status = self.FAILED
            reason = 'Resource %s failed: %s' % (action.lower(), str(ex))
        except scheduler.Timeout:
            stack_status = self.FAILED
            reason = '%s timed out' % action.title()
        self.state_set(action, stack_status, reason)
        if stack_status != self.FAILED:
            # If we created a trust, delete it
            stack = db_api.stack_get(self.context, self.id)
            user_creds = db_api.user_creds_get(stack.user_creds_id)
            trust_id = user_creds.get('trust_id')
            if trust_id:
                self.clients.keystone().delete_trust(trust_id)
            # delete the stack
            db_api.stack_delete(self.context, self.id)
            self.id = None

    def suspend(self):
        '''
        Suspend the stack, which invokes handle_suspend for all stack resources
        waits for all resources to become SUSPEND_COMPLETE then declares the
        stack SUSPEND_COMPLETE.
        Note the default implementation for all resources is to do nothing
        other than move to SUSPEND_COMPLETE, so the resources must implement
        handle_suspend for this to have any effect.
        '''
        # Suspend is performed in reverse dependency order.
        sus_task = scheduler.TaskRunner(self.stack_task,
                                        action=self.SUSPEND,
                                        reverse=True)
        sus_task(timeout=self.timeout_secs())

    def resume(self):
        '''
        Resume the stack, which invokes handle_resume for all stack resources
        waits for all resources to become RESUME_COMPLETE then declares the
        stack RESUME_COMPLETE.
        Note the default implementation for all resources is to do nothing
        other than move to RESUME_COMPLETE, so the resources must implement
        handle_resume for this to have any effect.
        '''
        sus_task = scheduler.TaskRunner(self.stack_task,
                                        action=self.RESUME,
                                        reverse=False)
        sus_task(timeout=self.timeout_secs())

    def output(self, key):
        '''
        Get the value of the specified stack output.
        '''
        value = self.outputs[key].get('Value', '')
        return self.resolve_runtime_data(value)

    def restart_resource(self, resource_name):
        '''
        stop resource_name and all that depend on it
        start resource_name and all that depend on it
        '''
        deps = self.dependencies[self[resource_name]]
        failed = False
        # Destroy in reverse dependency order first...
        for res in reversed(deps):
            try:
                scheduler.TaskRunner(res.destroy)()
            except exception.ResourceFailure as ex:
                failed = True
                logger.error('delete: %s' % str(ex))
        # ...then recreate forward; once one create fails, the rest are
        # marked aborted instead of being attempted.
        for res in deps:
            if not failed:
                try:
                    res.state_reset()
                    scheduler.TaskRunner(res.create)()
                except exception.ResourceFailure as ex:
                    logger.exception('create')
                    failed = True
            else:
                res.state_set(res.CREATE, res.FAILED,
                              'Resource restart aborted')
        # TODO(asalkeld) if any of this fails we Should
        # restart the whole stack

    def get_availability_zones(self):
        # Cached on first use; one nova API call per process.
        if self._zones is None:
            self._zones = [
                zone.zoneName for zone in
                self.clients.nova().availability_zones.list(detailed=False)]
        return self._zones

    def resolve_static_data(self, snippet):
        # Delegate to the module-level helper with this stack's context.
        return resolve_static_data(self.t, self, self.parameters, snippet)

    def resolve_runtime_data(self, snippet):
        return resolve_runtime_data(self.t, self.resources, snippet)
def resolve_static_data(template, stack, parameters, snippet):
    '''
    Resolve static parameters, map lookups, etc. in a template.
    Example:
    >>> from heat.common import template_format
    >>> template_str = '# JSON or YAML encoded template'
    >>> template = Template(template_format.parse(template_str))
    >>> parameters = Parameters('stack', template, {'KeyName': 'my_key'})
    >>> resolve_static_data(template, None, parameters, {'Ref': 'KeyName'})
    'my_key'
    '''
    # Static passes need only the parameters and the stack context,
    # never live resources.
    static_passes = [
        functools.partial(template.resolve_param_refs,
                          parameters=parameters),
        functools.partial(template.resolve_availability_zones,
                          stack=stack),
        functools.partial(template.resolve_resource_facade,
                          stack=stack),
        template.resolve_find_in_map,
        template.reduce_joins,
    ]
    return transform(snippet, static_passes)
def resolve_runtime_data(template, resources, snippet):
    '''Resolve intrinsic functions that require live resources.'''
    # These passes may dereference running resources (Ref / GetAtt) and
    # apply the remaining intrinsic functions.
    runtime_passes = [
        functools.partial(template.resolve_resource_refs,
                          resources=resources),
        functools.partial(template.resolve_attributes,
                          resources=resources),
        template.resolve_split,
        template.resolve_member_list_to_map,
        template.resolve_select,
        template.resolve_joins,
        template.resolve_replace,
        template.resolve_base64,
    ]
    return transform(snippet, runtime_passes)
def transform(data, transformations):
    '''
    Apply each of the transformation functions in the supplied list to the data
    in turn.
    '''
    # Left fold: each pass receives the output of the previous one.
    return functools.reduce(lambda acc, fn: fn(acc), transformations, data)
| 25,972 | 6,754 |
from typing import List
from please_conform import PleaseConform
from structures import Interval
class PleaseConformSquared(PleaseConform):
    def please_conform(self, caps: List[str]) -> List[Interval]:
        """Return the intervals of the minority cap type.

        Consecutive equal caps are collapsed into runs; the runs whose
        cap type occurs in fewer runs are the ones to flip ('B' is
        chosen on a tie).
        """
        if not caps:
            return []
        # Sentinel guarantees the final run is emitted by the loop below.
        padded: List[str] = list(caps) + ['end']
        runs: List[Interval] = []
        run_start: int = 0
        for pos in range(1, len(padded)):
            if padded[pos] != padded[run_start]:
                runs.append(Interval(start=run_start,
                                     end=pos - 1,
                                     cap_type=padded[run_start]))
                run_start = pos
        forward_runs = sum(1 for run in runs if run.cap_type == 'F')
        backward_runs = len(runs) - forward_runs
        cap_to_flip: str = 'F' if forward_runs < backward_runs else 'B'
        return [run for run in runs if run.cap_type == cap_to_flip]
| 1,547 | 421 |
from django import forms
from .models import *
class FormCadastraModelo(forms.ModelForm):
    """ModelForm for registering a Modelo, with Bootstrap-styled widgets."""
    class Meta:
        model = Modelo
        fields = ['nome', 'descricao', ]

    def __init__(self, *args, **kwargs):
        super(FormCadastraModelo, self).__init__(*args, **kwargs)
        # Apply the Bootstrap form-control class to every widget.
        for field in self.fields.values():
            field.widget.attrs.update({'class': 'form-control'})
class FormCadastraSecao(forms.ModelForm):
    """ModelForm for registering a Secao, with Bootstrap-styled widgets."""
    class Meta:
        model = Secao
        fields = ['nome', 'descricao', ]

    def __init__(self, *args, **kwargs):
        super(FormCadastraSecao, self).__init__(*args, **kwargs)
        # Apply the Bootstrap form-control class to every widget.
        for field in self.fields.values():
            field.widget.attrs.update({'class': 'form-control'})
class FormEntradaOnt1(forms.Form):
    """ONT intake, step 1: choose model and destination section."""
    modelo = forms.ChoiceField()
    secao = forms.ChoiceField()

    def __init__(self, *args, **kwargs):
        super(FormEntradaOnt1, self).__init__(*args, **kwargs)
        # Choices are rebuilt on every instantiation so rows added to the
        # database show up without a restart.
        self.fields['modelo'] = forms.ChoiceField(
            choices=[(m.id, m.nome.upper())
                     for m in Modelo.objects.all().order_by('nome')],
            label='Modelo',
            help_text='Modelo das ONT\'s a serem inseridas',
        )
        self.fields['secao'] = forms.ChoiceField(
            choices=[(s.id, s.nome.upper())
                     for s in Secao.objects.all().order_by('nome')],
            label='Seção',
            help_text='Atividade de destino das ONT\'s a serem inseridas',
        )
        for field in self.fields.values():
            field.widget.attrs.update({'class': 'form-control'})
class NonstickyCharfield(forms.TextInput):
    """Custom text input widget that's "non-sticky"
    (i.e. does not remember submitted values).
    """
    def get_context(self, name, value, attrs):
        # Discard whatever was submitted so the field always renders empty.
        return super().get_context(name, None, attrs)
class FormEntradaOnt2(forms.Form):
    """ONT intake, step 2: read one serial number (typically scanned)."""
    serial = forms.CharField(required=True, widget=NonstickyCharfield())

    def __init__(self, *args, **kwargs):
        super(FormEntradaOnt2, self).__init__(*args, **kwargs)
        self.fields['serial'].widget.attrs.update(
            {'autofocus': 'autofocus', 'required': 'required'}
        )
        for key in self.fields.keys():
            self.fields[key].widget.attrs.update({'class': 'form-control'})

    def clean(self):
        """Validate the serial by vendor prefix and length.

        Huawei serials start with '4857544' and are 16 chars; Zhone
        serials start with 'ZNTS' and are 12 chars.
        """
        form_data = super().clean()
        # Field-level validation may have removed 'serial' from
        # cleaned_data; the previous direct indexing raised KeyError then.
        serial = (form_data.get('serial') or '').upper()
        if not serial:
            return form_data
        # startswith replaces the opaque find(prefix, 0, n) slices, which
        # for 'ZNTS' even accepted a match starting at index 1.
        if serial.startswith('4857544'):
            if len(serial) != 16:
                self.errors['serial'] = ['Serial de Ont Huawei inválido']
        elif serial.startswith('ZNTS'):
            if len(serial) != 12:
                self.errors['serial'] = ['Serial de Ont Zhone inválido']
        else:
            self.errors['serial'] = ['Serial de Ont inválido']
        return form_data
class FormOntFechamento(forms.Form):
    """Reads a serial and resolves it to the registered Ont instance."""
    serial = forms.CharField(required=True, widget=NonstickyCharfield())

    def __init__(self, *args, **kwargs):
        super(FormOntFechamento, self).__init__(*args, **kwargs)
        self.fields['serial'].widget.attrs.update(
            {'autofocus': 'autofocus', 'required': 'required'}
        )
        for key in self.fields.keys():
            self.fields[key].widget.attrs.update({'class': 'form-control'})

    def clean(self):
        """Replace the raw serial with the matching Ont, or add an error."""
        form_data = super().clean()
        # Field-level validation may have removed 'serial' from
        # cleaned_data; the previous direct indexing raised KeyError then.
        serial = (form_data.get('serial') or '').upper()
        if not serial:
            return form_data
        try:
            # Single lookup instead of exists() followed by get().
            form_data['serial'] = Ont.objects.get(codigo=serial)
        except Ont.DoesNotExist:
            self.errors['serial'] = ['Ont não cadastrada no sistema, cadastre-a para registrá-la como com defeito']
        return form_data
class FormOntManutencao1(forms.Form):
    """ONT maintenance, step 1: pick the model to work on."""
    modelo = forms.ChoiceField()

    def __init__(self, *args, **kwargs):
        super(FormOntManutencao1, self).__init__(*args, **kwargs)
        # Rebuild choices per instantiation so newly added models appear.
        self.fields['modelo'] = forms.ChoiceField(
            choices=[(m.id, m.nome.upper())
                     for m in Modelo.objects.all().order_by('nome')],
            label='Modelo',
        )
        for field in self.fields.values():
            field.widget.attrs.update({'class': 'form-control'})
class FormPswLogin(forms.Form):
    """PSW user login form (Copel key + password)."""
    username = forms.CharField(max_length=150, label='Chave da Copel')
    password = forms.CharField(widget=forms.PasswordInput)
    # NOTE(review): plain forms.Form does not consume a class-level
    # ``widgets`` dict; kept for backward compatibility with any code
    # that reads this attribute.
    widgets = {
        'password': forms.PasswordInput(),
    }

    def __init__(self, *args, **kwargs):
        super(FormPswLogin, self).__init__(*args, **kwargs)
        for field in self.fields.values():
            field.widget.attrs.update({'class': 'form-control'})
class FormPswContrato(forms.Form):
    """PSW contract lookup form (comma-separated contract numbers)."""
    contratos = forms.CharField(
        label='Contratos',
        widget=forms.TextInput(
            attrs={'placeholder': 'Ex: 1234567,1234568, 1234569'}
        )
    )

    def __init__(self, *args, **kwargs):
        super(FormPswContrato, self).__init__(*args, **kwargs)
        for field in self.fields.values():
            field.widget.attrs.update({'class': 'form-control'})
class FormSerial(forms.Form):
    """Serial number search form."""
    serial = forms.CharField(label='Serial', required=False)

    def __init__(self, *args, **kwargs):
        super(FormSerial, self).__init__(*args, **kwargs)
        for field in self.fields.values():
            field.widget.attrs.update({'class': 'form-control'})
| 5,784 | 1,879 |
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 2 16:43:45 2020
@author: micha
"""
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
from MD_Analysis import Angle_Calc
from pydmd import DMD
# Trajectory PDB: wild type, 295 K, 200 ns sampled every 50 ps.
pdb="pdbs/WT_295K_200ns_50ps_0_run.pdb"
#Extract phi, psi angles
AC=Angle_Calc(pdb)
# DataFrame of backbone dihedral angles, one row per frame
# (presumably in degrees, given the /180*pi conversion below).
Angle_DF=AC.get_phi_psi()
def cossin(data):
    """Expand each angle column (in degrees) into cos/sin columns.

    Parameters
    ----------
    data : pandas.DataFrame
        Columns of angles in degrees.

    Returns
    -------
    pandas.DataFrame
        For each input column ``c``, columns ``c_cos`` and ``c_sin``
        (interleaved, in the original column order) with a fresh
        range index.
    """
    radians = np.deg2rad(data.to_numpy())
    columns = {}
    for i, name in enumerate(data.columns):
        columns[name + "_cos"] = np.cos(radians[:, i])
        columns[name + "_sin"] = np.sin(radians[:, i])
    # Build the frame in one shot instead of inserting one column at a
    # time, which fragments the frame and is quadratic in column count.
    return pd.DataFrame(columns)
def halftime(data):
    """Drop every second row (label positions 1, 3, 5, ...), halving the
    time resolution.  Assumes a default RangeIndex whose labels equal the
    row positions -- TODO confirm at the call site."""
    half = int(data.shape[0] / 2)
    odd_labels = list(range(1, 2 * half + 1, 2))
    return data.drop(odd_labels)
#half = halftime(Angle_DF)
# Encode every dihedral as (cos, sin) so angles vary smoothly across the wrap.
angle_cossin = cossin(Angle_DF)
angle_cossin_full = angle_cossin.copy()
# Drop the final frame before fitting.
angle_cossin_full.drop(angle_cossin_full.tail(1).index,inplace=True)
# f: snapshots x features; DMD expects features x time, hence f.T below.
f=angle_cossin_full.to_numpy()
dt=50*(10**-12)  # 50 ps per frame, in seconds
# NOTE(review): xi / t / Xgrid / T are built but never used afterwards;
# kept for parity with the original script.
xi=np.linspace(np.min(f),np.max(f),f.shape[0])
t=np.linspace(0,f.shape[0],f.shape[1])*dt #+200*10**-9
Xgrid,T=np.meshgrid(xi,t)
# DMD truncated to the 40 leading singular values.
dmd = DMD(svd_rank=40)
dmd.fit(f.T)
# Plot axes: time (s) on x, the 40 retained rows on y.
xl=np.linspace(0,4000*dt,2000)
yl=range(40)
xlabel,ylabel=np.meshgrid(xl,yl)
#Actual
fig = plt.figure(figsize=(17,6))
plt.pcolor(xl, yl, f.real.T)
plt.yticks([])
plt.title('Actual Data')
plt.colorbar()
plt.show()
fig.savefig("PyDMD Actual Data.png")
#Reconstructed
fig2 = plt.figure(figsize=(17,6))
plt.pcolor(xl, yl, dmd.reconstructed_data.real)
plt.yticks([])
plt.title('Reconstructed Data')
plt.colorbar()
plt.show()
fig2.savefig("PyDMD Reconstructed Data.png")
#Error
fig3 = plt.figure(figsize=(17,6))
# BUGFIX: the original plotted (np.sqrt(diff)**2).real -- sqrt followed by
# squaring cancels for complex arrays (reducing to diff.real) and yields NaN
# for negative real entries, so no error magnitude was ever shown.  Plot the
# element-wise absolute error instead.
plt.pcolor(xl, yl, np.abs(f.T - dmd.reconstructed_data))
plt.yticks([])
plt.title('RMSE Error')
plt.colorbar()
plt.show()
fig3.savefig("PyDMD Error.png")
#Eigenvalues
dmd.plot_eigs(show_axes=True, show_unit_circle=True)
| 1,980 | 966 |
# Generated by Django 4.0.1 on 2022-02-18 10:26
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Initial migration for the accounts app: creates Review, Ticket and
    # UserFollow plus the foreign keys between them and the user model.
    # NOTE: Django-generated; avoid editing operations already applied to a DB.
    initial = True
    dependencies = [
        # Depends on the (swappable) user model used by the FK fields below.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Review',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('headline', models.CharField(max_length=128)),
                ('body', models.CharField(blank=True, max_length=8192)),
                # Rating constrained to 0..5 at validation time.
                ('rating', models.PositiveSmallIntegerField(validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(5)])),
                ('time_created', models.DateTimeField(auto_now_add=True)),
            ],
        ),
        migrations.CreateModel(
            name='Ticket',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=128)),
                ('description', models.CharField(max_length=8192)),
                ('time_created', models.DateTimeField(auto_now_add=True)),
                # Optional back-link to the answering review; nulled (not
                # cascaded) when the review is deleted.
                ('review_id', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.review')),
                ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Added after both models exist to resolve the Review<->Ticket cycle.
        migrations.AddField(
            model_name='review',
            name='ticket_id',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.ticket'),
        ),
        migrations.AddField(
            model_name='review',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.CreateModel(
            name='UserFollow',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('followed_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='followed_user', to=settings.AUTH_USER_MODEL)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                # A user may follow another user at most once.
                'unique_together': {('user', 'followed_user')},
            },
        ),
    ]
| 2,629 | 782 |
from telegram.ext import Updater, CommandHandler
import re
import os
import logging
from model import Anime
from gateway import *
def _make_handler(request_name, fetch):
    """Build a telegram handler that fetches an anime and replies with it.

    ``fetch`` is a zero-argument callable returning an object with a
    ``to_prettified_dict()`` method; ``request_name`` only labels the debug
    logs.  The returned handler sends the anime's image with a caption made
    of its title followed by the remaining key/value pairs.
    """
    def handler(bot, update):
        logging.debug('Received %s request', request_name)
        anime = fetch().to_prettified_dict()
        # BUGFIX: the original logged 'Answer: '.format(anime) -- the format
        # string has no placeholder, so the answer never reached the log.
        logging.debug('Answer: %s', anime)
        chat_id = update.message.chat_id
        image = anime.pop('Image')
        caption = '{}\n'.format(anime.pop('Title'))
        for key in anime:
            caption += '{}: {}\n'.format(key, anime[key])
        bot.send_photo(chat_id=chat_id, photo=image, caption=caption)
    return handler


def _genre_handler(request_name, genre):
    """Handler factory specialised for genre-based recommendations."""
    return _make_handler(request_name, lambda: get_anime_by_genre(genre))


# Top-level recommendations (same public names and (bot, update) signatures
# as the original ~46 copy-pasted function bodies).
recommend_anime = _make_handler('recommendation', get_best_anime)
recommend_airing_anime = _make_handler(
    'airing', lambda: get_best_anime(subtype=Subtype.AIRING))
recommend_movie = _make_handler(
    'movie', lambda: get_best_anime(subtype=Subtype.MOVIE))

# One handler per genre; request names match the original debug messages.
recommend_action_anime = _genre_handler('action', Genre.ACTION)
recommend_adventure_anime = _genre_handler('adventure', Genre.ADVENTURE)
recommend_cars_anime = _genre_handler('cars', Genre.CARS)
recommend_comedy_anime = _genre_handler('comedy', Genre.COMEDY)
recommend_dementia_anime = _genre_handler('dementia', Genre.DEMENTIA)
recommend_demons_anime = _genre_handler('demons', Genre.DEMONS)
recommend_mystery_anime = _genre_handler('mystery', Genre.MYSTERY)
recommend_drama_anime = _genre_handler('drama', Genre.DRAMA)
recommend_ecchi_anime = _genre_handler('ecchi', Genre.ECCHI)
recommend_fantasy_anime = _genre_handler('fantasy', Genre.FANTASY)
recommend_game_anime = _genre_handler('game', Genre.GAME)
recommend_hentai_anime = _genre_handler('hentai', Genre.HENTAI)
recommend_historical_anime = _genre_handler('historical', Genre.HISTORICAL)
recommend_horror_anime = _genre_handler('horror', Genre.HORROR)
recommend_magic_anime = _genre_handler('magic', Genre.MAGIC)
recommend_martial_arts_anime = _genre_handler('martial arts', Genre.MARTIAL_ARTS)
recommend_mecha_anime = _genre_handler('mecha', Genre.MECHA)
recommend_music_anime = _genre_handler('music', Genre.MUSIC)
recommend_parody_anime = _genre_handler('parody', Genre.PARODY)
recommend_samurai_anime = _genre_handler('samurai', Genre.SAMURAI)
recommend_romance_anime = _genre_handler('romance', Genre.ROMANCE)
recommend_school_anime = _genre_handler('school', Genre.SCHOOL)
recommend_sci_fi_anime = _genre_handler('sci-fi', Genre.SCI_FI)
recommend_shojo_anime = _genre_handler('shojo', Genre.SHOJO)
recommend_shojo_ai_anime = _genre_handler('shojo-ai', Genre.SHOJO_AI)
recommend_shonen_anime = _genre_handler('shonen', Genre.SHONEN)
recommend_shonen_ai_anime = _genre_handler('shonen-ai', Genre.SHONEN_AI)
recommend_space_anime = _genre_handler('space', Genre.SPACE)
recommend_sports_anime = _genre_handler('sports', Genre.SPORTS)
recommend_super_power_anime = _genre_handler('super power', Genre.SUPER_POWER)
recommend_vampire_anime = _genre_handler('vampire', Genre.VAMPIRE)
recommend_yaoi_anime = _genre_handler('yaoi', Genre.YAOI)
recommend_yuri_anime = _genre_handler('yuri', Genre.YURI)
recommend_harem_anime = _genre_handler('harem', Genre.HAREM)
recommend_slice_of_life_anime = _genre_handler('slice of life', Genre.SLICE_OF_LIFE)
recommend_supernatural_anime = _genre_handler('supernatural', Genre.SUPERNATURAL)
recommend_military_anime = _genre_handler('military', Genre.MILITARY)
recommend_police_anime = _genre_handler('police', Genre.POLICE)
recommend_psychological_anime = _genre_handler('psychological', Genre.PSYCHOLOGICAL)
recommend_thriller_anime = _genre_handler('thriller', Genre.THRILLER)
recommend_seinen_anime = _genre_handler('seinen', Genre.SEINEN)
recommend_josei_anime = _genre_handler('josei', Genre.JOSEI)
def main():
    """Configure logging, register every bot command and start polling."""
    updater = Updater(os.environ.get("API_KEY"))
    # Verbose logging only when DEBUG_LOG is present in the environment.
    debug_log = os.environ.get('DEBUG_LOG', None)
    logging.basicConfig(
        level=logging.DEBUG if debug_log is not None else logging.INFO)
    # (command, handler) table; note the romanisation aliases
    # (shojo/shoujo, shonen/shounen, ...) share a handler.
    command_table = (
        ('recommend', recommend_anime),
        ('airing', recommend_airing_anime),
        ('movie', recommend_movie),
        ('action', recommend_action_anime),
        ('adventure', recommend_adventure_anime),
        ('cars', recommend_cars_anime),
        ('comedy', recommend_comedy_anime),
        ('dementia', recommend_dementia_anime),
        ('demons', recommend_demons_anime),
        ('mystery', recommend_mystery_anime),
        ('drama', recommend_drama_anime),
        ('ecchi', recommend_ecchi_anime),
        ('fantasy', recommend_fantasy_anime),
        ('game', recommend_game_anime),
        ('hentai', recommend_hentai_anime),
        ('historical', recommend_historical_anime),
        ('horror', recommend_horror_anime),
        ('magic', recommend_magic_anime),
        ('martialarts', recommend_martial_arts_anime),
        ('mecha', recommend_mecha_anime),
        ('music', recommend_music_anime),
        ('parody', recommend_parody_anime),
        ('samurai', recommend_samurai_anime),
        ('romance', recommend_romance_anime),
        ('school', recommend_school_anime),
        ('scifi', recommend_sci_fi_anime),
        ('shojo', recommend_shojo_anime),
        ('shoujo', recommend_shojo_anime),
        ('shojoai', recommend_shojo_ai_anime),
        ('shoujoai', recommend_shojo_ai_anime),
        ('shonen', recommend_shonen_anime),
        ('shounen', recommend_shonen_anime),
        ('shonenai', recommend_shonen_ai_anime),
        ('shounenai', recommend_shonen_ai_anime),
        ('space', recommend_space_anime),
        ('sports', recommend_sports_anime),
        ('superpower', recommend_super_power_anime),
        ('vampire', recommend_vampire_anime),
        ('yaoi', recommend_yaoi_anime),
        ('yuri', recommend_yuri_anime),
        ('harem', recommend_harem_anime),
        ('sliceoflife', recommend_slice_of_life_anime),
        ('supernatural', recommend_supernatural_anime),
        ('military', recommend_military_anime),
        ('police', recommend_police_anime),
        ('psychological', recommend_psychological_anime),
        ('thriller', recommend_thriller_anime),
        ('seinen', recommend_seinen_anime),
        ('josei', recommend_josei_anime),
    )
    for command, handler in command_table:
        updater.dispatcher.add_handler(CommandHandler(command, handler))
    updater.start_polling()
    updater.idle()
if __name__ == '__main__':
    main()
| 24,914 | 8,783 |
from dataclasses import dataclass, field
from typing import Optional
from xml.etree.ElementTree import QName
from .t_event_definition import TEventDefinition
__NAMESPACE__ = "http://www.omg.org/spec/BPMN/20100524/MODEL"
@dataclass
class TCompensateEventDefinition(TEventDefinition):
    """BPMN 2.0 ``tCompensateEventDefinition`` complex type.

    Auto-generated (xsdata-style) binding; the field metadata maps each
    Python attribute onto the corresponding XML attribute.
    """
    class Meta:
        # XML type name within the BPMN 2.0 MODEL namespace.
        name = "tCompensateEventDefinition"
    # Maps the optional ``waitForCompletion`` XML attribute.
    wait_for_completion: Optional[bool] = field(
        default=None,
        metadata={
            "name": "waitForCompletion",
            "type": "Attribute",
        }
    )
    # QName of the activity to compensate (``activityRef`` XML attribute).
    activity_ref: Optional[QName] = field(
        default=None,
        metadata={
            "name": "activityRef",
            "type": "Attribute",
        }
    )
| 695 | 209 |
import html
from googletrans import Translator
from slackbot.bot import default_reply, respond_to, listen_to
translator = Translator()
def translate(message):
    """Translate a Slack message between English and Japanese and reply.

    English input is translated to Japanese; anything else to English.
    Thread starters get a channel reply, thread replies get a thread send.
    """
    text_in = html.unescape(message.body["text"])
    if not text_in:
        return
    target = "ja" if translator.detect(text_in).lang == "en" else "en"
    translated = translator.translate(text_in, dest = target).text
    reply = "```{}```".format(translated)
    if message.thread_ts == message.body["event_ts"]:
        message.send(reply)
    else:
        message.reply(reply)
@default_reply
def my_default_handler(message):
    # Fallback: translate anything no more specific handler matched.
    translate(message)
@respond_to(".*")
def all_replies(message):
    # Messages addressed to the bot (mentions / DMs): translate everything.
    translate(message)
@listen_to(".*")
def all_messages(message):
    # Plain channel messages: translate everything.
    translate(message)
| 822 | 266 |
from . import database | 22 | 5 |
import collections
import os
from pathlib import Path
from typing import List
import streamlit as st
from sentence_transformers import SentenceTransformer
from search.engine import Engine, Result
from search.model import load_minilm_model
from search.utils import get_memory_usage
os.environ["TOKENIZERS_PARALLELISM"] = "false"
_DATA_DIR = os.environ.get("DATA_DIR", "data/people_pm_minilm")
st.set_page_config(page_title="Search Engine", layout="wide")
st.markdown(
"""
<style>
.big-font {
font-size:20px;
}
</style>
""",
unsafe_allow_html=True,
)
@st.cache(allow_output_mutation=True)
def load_engine() -> Engine:
    """Load the search engine once per session (cached by Streamlit).

    NOTE(review): st.cache is deprecated in newer Streamlit releases in
    favour of st.cache_resource -- confirm the pinned Streamlit version.
    """
    engine = Engine(
        data_dir=Path(_DATA_DIR),
    )
    return engine
@st.cache(allow_output_mutation=True)
def load_model() -> SentenceTransformer:
    """Load the MiniLM sentence-embedding model once (cached by Streamlit)."""
    return load_minilm_model()
engine = load_engine()
model = load_model()
# Exercise scaffolding: each st.error below marks a step the reader must
# replace with a real implementation (query input, result-count slider,
# query embedding, engine.search call, result rendering).
st.error("Create a text input for the query.")
st.error("Create a slider with the number of results to retrieve.")
with st.spinner("Querying index ..."):
    st.error("Get query embedding.")
    st.error("Search results (engine.search).")
# Show the results.
# You can use st.markdown to render markdown.
# e.g. st.markdown("**text**") will add text in bold font.
st.error("Render results")
st.markdown(f"**Mem Usage**: {get_memory_usage()}MB")
| 1,324 | 445 |
from tkinter import *
class EditBoxWindow:
    """A simple Tk text editor window with 'Find' highlighting."""

    def __init__(self, parent=None):
        # Create a root window when the caller did not supply one.
        # (was: ``parent == None`` -- identity check is the correct idiom)
        if parent is None:
            parent = Tk()
        self.myParent = parent
        self.top_frame = Frame(parent)
        # Scrollbar wired both ways to the text widget's vertical view.
        scrollbar = Scrollbar(self.top_frame)
        self.editbox = Text(self.top_frame, yscrollcommand=scrollbar.set)
        scrollbar.pack(side=RIGHT, fill=Y)
        scrollbar.config(command=self.editbox.yview)
        # Main text area.
        self.editbox.pack(anchor=CENTER, fill=BOTH)
        self.top_frame.pack(side=TOP)
        # Search text entry.
        self.bottom_left_frame = Frame(parent)
        self.textfield = Entry(self.bottom_left_frame)
        self.textfield.pack(side=LEFT, fill=X, expand=1)
        # 'Find' button triggers self.find.
        buttonSearch = Button(self.bottom_left_frame, text='Find', command=self.find)
        buttonSearch.pack(side=RIGHT)
        self.bottom_left_frame.pack(side=LEFT, expand=1)
        self.bottom_right_frame = Frame(parent)

    def find(self):
        """Highlight every occurrence of the search text in red ('found' tag)."""
        # Clear highlights left over from any previous search.
        self.editbox.tag_remove('found', '1.0', END)
        needle = self.textfield.get()
        if not needle:
            return
        idx = '1.0'
        while True:
            # Case-insensitive forward search; empty result means done.
            idx = self.editbox.search(needle, idx, nocase=1, stopindex=END)
            if not idx:
                break
            lastidx = '%s+%dc' % (idx, len(needle))
            self.editbox.tag_add('found', idx, lastidx)
            idx = lastidx
        self.editbox.tag_config('found', foreground='red')
if __name__=="__main__":
    root = Tk()
    # NOTE(review): no root.mainloop() is visible after this point in this
    # section -- confirm the Tk event loop is started elsewhere, otherwise
    # the window will close immediately when run as a script.
    myapp = EditBoxWindow(root)
import psycopg
from flask import Flask, render_template
from flask_compress import Compress
app = Flask(__name__)
# NOTE(review): empty connection string -- must be populated (e.g. from a
# DATABASE_URL environment variable) before index() can connect.
DATABASE_URL = ""
# Enable gzip/deflate compression of responses.
Compress(app)
@app.route("/")
def index():
    """Home page: render every row of the ``topics`` table."""
    # One 'with' manages both the connection and its cursor.
    with psycopg.connect(DATABASE_URL, sslmode="require") as conn, conn.cursor() as cur:
        cur.execute("SELECT * FROM topics;")
        rows = cur.fetchall()
        return render_template("index.html", items=rows)
| 426 | 131 |
import re
class Bag:
    """A luggage-rule node: a bag colour and the bags it must contain."""

    def __init__(self, _name, _contents):
        self.name = _name
        # List of (count, bag_name) pairs this bag must directly contain.
        self.contents = _contents
        # Memo for children_count().
        self.c_cache = None
        # Memo for hasBagType(), keyed by the queried bag name.
        # BUGFIX: the original never wrote to this dict, so the cache lookup
        # always raised and every call recomputed from scratch.
        self.has_cache = {}

    def hasBagType(self, _name, bags):
        """Return True if this bag IS ``_name`` or transitively contains it.

        Note the original's quirk is preserved: a bag "has" its own type,
        which is why the part-1 tally below starts at -1.
        """
        cached = self.has_cache.get(_name)
        if cached is not None:
            return cached
        if _name == self.name:
            result = True
        else:
            result = any(bags[child].hasBagType(_name, bags)
                         for _, child in self.contents)
        self.has_cache[_name] = result
        return result

    def children_count(self, bags_map=None):
        """Total number of bags carried inside this one, recursively.

        ``bags_map`` defaults to the module-level ``bags`` registry, so the
        original zero-argument call sites keep working; passing it explicitly
        removes the hidden global dependency.
        """
        if bags_map is None:
            bags_map = bags
        if self.c_cache is not None:
            return self.c_cache
        total = 0
        for count, child in self.contents:
            # Each child counts itself plus everything inside it.
            total += count * (1 + bags_map[child].children_count(bags_map))
        self.c_cache = total
        return total
# Parse the puzzle input: one containment rule per line.
input_lines = []
with open('input.txt') as f:
    input_lines = f.readlines()
input_lines = list(filter(None, input_lines))

bags = {}
for line in input_lines:
    # First two words are the bag colour; the rest is the contents clause.
    bag, contents = re.search(r'^((?:[\w]+ ){2})bags contain ([\S\s]+)', line).groups()
    if contents.strip() == "no other bags.":
        bag = bag.strip()
        bags[bag] = Bag(bag, [])
        continue
    contents = contents.split(', ')
    # BUGFIX: the original pattern r'(\d)+ ...' captured only the LAST digit
    # of a multi-digit count (e.g. '12' -> '2'); r'(\d+)' captures them all.
    contents = list(map(lambda x: re.search(r'(\d+) ((?:[\w]+ ){2})', x).groups(), contents))
    # cleaning up: (count, colour) tuples with trailing spaces stripped.
    contents = list(map(lambda x: (int(x[0]), x[1].strip()), contents))
    bag = bag.strip()
    bags[bag] = Bag(bag, contents)

# Part 1: bags that can (transitively) hold a shiny gold bag.  Start at -1
# because hasBagType('shiny gold', ...) is True for the shiny gold bag itself.
part1 = -1
for item in bags.values():
    if item.hasBagType("shiny gold", bags):
        part1 += 1
print(f'part 1: {part1}')
print(f'part 2: {bags["shiny gold"].children_count()}')
from datetime import datetime, timedelta
from io import BytesIO
from pathlib import Path
from time import altzone, daylight, localtime, timezone
from pydrive.auth import GoogleAuth, AuthenticationRejected
from pydrive.drive import GoogleDrive as Drive, GoogleDriveFile as File
from requests import patch
from .auth import gauth
from ._this import ENDL, res_
CHAT_LOG: File = None  # shared chat_log.txt GoogleDriveFile; set by login_and_init()
FILE_TYPE = 'application/vnd.google-apps.file'
FOLDER_TYPE = 'application/vnd.google-apps.folder'
LAST_READ: datetime = None  # log modification time at the last read (read_if_modified)
# Local offset from UTC in seconds, DST-aware; used by when_modified() to
# convert Drive's UTC "modifiedDate" into local time.
UTC_OFFSET_SECS = -(altzone if daylight and localtime().tm_isdst > 0 else timezone)
drive: Drive = None  # authenticated GoogleDrive client; set by login_and_init()
def setup_gauth():
    """Point PyDrive's GoogleAuth at the bundled client_secrets.json.

    Raises FileNotFoundError when the secrets file is missing.
    """
    secrets_path = res_('client_secrets.json')
    if Path(secrets_path).is_file():
        GoogleAuth.DEFAULT_SETTINGS['client_config_file'] = secrets_path
    else:
        raise FileNotFoundError
def empty_contents_of_(file):
    """Overwrite *file* on Drive with zero-length content via the v3 upload API."""
    url = (
        'https://www.googleapis.com/upload/drive/v3/files/'
        f"{file['id']}?uploadType=multipart"
    )
    token = gauth.credentials.token_response['access_token']
    patch(
        url,
        headers={'Authorization': f'Bearer {token}'},
        files={
            'data': ('metadata', '{}', 'application/json'),
            'file': BytesIO(),
        },
    )
def ensure_item(title: str, mime_type=None, parents=None, trashed=False):
    """Return the first Drive item matching the criteria, creating it if absent.

    ``parents`` may be a single file dict or a list of them; ``trashed=None``
    skips the trashed filter entirely.
    """
    query = f"title='{title}'"
    if mime_type:
        query += f" and mimeType='{mime_type}'"
    if parents:
        # isinstance() instead of `type(...) is list` so list subclasses work
        if isinstance(parents, list):
            query += ' and ' + ' and '.join(
                f"'{item['id']}' in parents" for item in parents)
        else:
            query += f" and '{parents['id']}' in parents"
    if trashed is not None:
        query += f' and trashed={str(trashed).lower()}'
    try:
        return drive.ListFile({'q': query}).GetList()[0]
    except IndexError:
        # No match found: create the item with the same metadata.
        metadata = {'title': title}
        if mime_type:
            metadata['mimeType'] = mime_type
        if parents:
            if isinstance(parents, list):
                metadata['parents'] = [{'id': item['id']} for item in parents]
            else:
                metadata['parents'] = [{'id': parents['id']}]
        file = drive.CreateFile(metadata)
        file.Upload()
        return file
def log_into_drive():
    """Authenticate with Google Drive, reusing cached credentials if present.

    Returns a Drive client, or None when interactive authentication fails.
    """
    creds_path = res_('creds.json')
    if Path(creds_path).is_file():
        gauth.LoadCredentialsFile(creds_path)
    else:
        try:
            gauth.LocalWebserverAuth()
            gauth.SaveCredentialsFile(creds_path)
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt
            # while the user sat in the interactive browser flow.
            return None
    return Drive(gauth)
def login_and_init():
    """Log into Drive and locate (or create) the shared chat log file.

    Sets the module globals ``drive`` and ``CHAT_LOG``.  Returns True on
    success, False when authentication failed.
    """
    global CHAT_LOG, drive
    drive = log_into_drive()
    if drive is None:
        return False
    parent = ensure_item('AppData', FOLDER_TYPE)
    folder = ensure_item('pydrive-chat', FOLDER_TYPE, parent)
    CHAT_LOG = ensure_item('chat_log.txt', parents=folder)
    return True
def append_to_log(text):
    """Append *text* plus a line terminator to the remote chat log."""
    existing = CHAT_LOG.GetContentString()
    CHAT_LOG.SetContentString(f'{existing}{text}{ENDL}')
    CHAT_LOG.Upload()
def overwrite_log(text=None):
    # Replace the remote log's contents.  With no/empty text the file is
    # cleared: empty_contents_of_() zeroes it server-side, Upload() pushes
    # the file object, then the local content string is reset so later
    # GetContentString() calls see ''.  NOTE(review): the Upload() before
    # SetContentString('') looks redundant — confirm the ordering matters
    # before reworking this branch.
    if not text:
        empty_contents_of_(CHAT_LOG)
        CHAT_LOG.Upload()
        CHAT_LOG.SetContentString('')
    else:
        CHAT_LOG.SetContentString(text)
        CHAT_LOG.Upload()
def read_log():
    """Return the full contents of the remote chat log as a string."""
    return CHAT_LOG.GetContentString()
def read_if_modified():
    """Return the log's contents if it changed since the last read, else None.

    The first call always reads (and records the modification time); later
    calls re-download only when the Drive modification time has advanced.
    """
    global LAST_READ
    modified_at = when_modified()
    # The original short-circuited on `LAST_READ is None` and never set it,
    # so every call re-downloaded the log; record the timestamp on the first
    # read too.  (Also dropped the phantom LINES_READ from the global list.)
    if LAST_READ is None or LAST_READ < modified_at:
        LAST_READ = modified_at
        return CHAT_LOG.GetContentString()
    return None
def when_modified():
    """Return the chat log's last-modified time, shifted into local time."""
    stamp = CHAT_LOG['modifiedDate']
    utc_time = datetime.strptime(stamp, '%Y-%m-%dT%H:%M:%S.%fZ')
    return utc_time + timedelta(seconds=UTC_OFFSET_SECS)
| 3,209 | 1,223 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import pkg_resources
import pytest
import string
import yaml
# All dataset definition files shipped with the package
# (*.yml under dials_data/definitions).
definition_yamls = {
    fn
    for fn in pkg_resources.resource_listdir("dials_data", "definitions")
    if fn.endswith(".yml")
}
# All hash/verification files (*.yml under dials_data/hashinfo); each one is
# expected to pair with a definition file of the same name.
hashinfo_yamls = {
    fn
    for fn in pkg_resources.resource_listdir("dials_data", "hashinfo")
    if fn.endswith(".yml")
}
def is_valid_name(filename):
    """Return True for names of the form ``<stem>.yml`` where ``<stem>`` is a
    non-empty string of ASCII letters, digits and underscores."""
    if filename.endswith(".yml"):
        stem = filename[: -len(".yml")]
        valid = frozenset(string.ascii_letters + string.digits + "_")
        return bool(stem) and set(stem) <= valid
    return False
@pytest.mark.parametrize("yaml_file", definition_yamls)
def test_yaml_file_is_valid_definition(yaml_file):
    """Every definition file must be well-named and contain exactly the
    expected fields."""
    assert is_valid_name(yaml_file)
    stream = pkg_resources.resource_stream("dials_data", "definitions/" + yaml_file)
    definition = yaml.safe_load(stream.read())
    required = {"name", "data", "description"}
    optional = {"license", "url", "author"}
    fields = set(definition)
    missing = required - fields
    assert not missing, "Required fields missing: " + str(sorted(missing))
    unknown = fields - required - optional
    assert not unknown, "Unknown fields present: " + str(sorted(unknown))
@pytest.mark.parametrize("yaml_file", hashinfo_yamls)
def test_yaml_file_is_valid_hashinfo(yaml_file):
    """Every hashinfo file must be well-named, paired with a definition file
    of the same name, and contain exactly the required fields."""
    assert is_valid_name(yaml_file)
    assert (
        yaml_file in definition_yamls
    ), "hashinfo file present without corresponding definition file"
    stream = pkg_resources.resource_stream("dials_data", "hashinfo/" + yaml_file)
    hashinfo = yaml.safe_load(stream.read())
    required = {"definition", "formatversion", "verify"}
    fields = set(hashinfo)
    missing = required - fields
    assert not missing, "Required fields missing: " + str(sorted(missing))
    extra = fields - required
    assert not extra, "Unknown fields present: " + str(sorted(extra))
| 1,967 | 628 |