hexsha
stringlengths
40
40
size
int64
5
2.06M
ext
stringclasses
10 values
lang
stringclasses
1 value
max_stars_repo_path
stringlengths
3
248
max_stars_repo_name
stringlengths
5
125
max_stars_repo_head_hexsha
stringlengths
40
78
max_stars_repo_licenses
listlengths
1
10
max_stars_count
int64
1
191k
max_stars_repo_stars_event_min_datetime
stringlengths
24
24
max_stars_repo_stars_event_max_datetime
stringlengths
24
24
max_issues_repo_path
stringlengths
3
248
max_issues_repo_name
stringlengths
5
125
max_issues_repo_head_hexsha
stringlengths
40
78
max_issues_repo_licenses
listlengths
1
10
max_issues_count
int64
1
67k
max_issues_repo_issues_event_min_datetime
stringdate
2015-01-01 00:00:47
2022-03-31 23:42:18
max_issues_repo_issues_event_max_datetime
stringdate
2015-01-01 17:43:30
2022-03-31 23:59:58
max_forks_repo_path
stringlengths
3
248
max_forks_repo_name
stringlengths
5
125
max_forks_repo_head_hexsha
stringlengths
40
78
max_forks_repo_licenses
listlengths
1
10
max_forks_count
int64
1
105k
max_forks_repo_forks_event_min_datetime
stringlengths
24
24
max_forks_repo_forks_event_max_datetime
stringlengths
24
24
content
stringlengths
5
2.06M
avg_line_length
float64
1
1.02M
max_line_length
int64
3
1.03M
alphanum_fraction
float64
0
1
ccf122ab24b6c118407351673ac8790f51122e47
505
py
Python
31-100/31-40/34.py
higee/project_euler
2ecdefb6e4a588f50cea47321c88ee7c7ac28110
[ "MIT" ]
null
null
null
31-100/31-40/34.py
higee/project_euler
2ecdefb6e4a588f50cea47321c88ee7c7ac28110
[ "MIT" ]
null
null
null
31-100/31-40/34.py
higee/project_euler
2ecdefb6e4a588f50cea47321c88ee7c7ac28110
[ "MIT" ]
null
null
null
def fac(n):
    """Return n! computed recursively (n must be a non-negative integer)."""
    if n in (0, 1):
        return 1
    return n * fac(n - 1)


# Digit -> factorial lookup table.  Built at module level (the original only
# built it inside the __main__ guard, so importing this module and calling
# sum_of_the_factorial_of_their_digits() raised NameError).
fac_dic = {n: fac(n) for n in range(10)}


def sum_of_the_factorial_of_their_digits(n):
    """Return the sum of the factorials of the decimal digits of n."""
    return sum(fac_dic[int(digit)] for digit in str(n))


def main():
    """Yield every number equal to the sum of its digit factorials (Euler 34).

    2540160 = 7 * 9! is an upper bound: a number with more digits cannot
    equal the sum of its digit factorials, so the search stops there.
    """
    for n in range(10, 2540161):
        if n == sum_of_the_factorial_of_their_digits(n):
            yield n


if __name__ == "__main__":
    answer = list(main())
    print(answer)  # expected: [145, 40585]
21.041667
57
0.584158
ccf2d7d2d0ffc342a84d86056b65cf383d097b4c
7,638
py
Python
audio_processing.py
poria-cat/Transformer-TTS-Pytorch
1e9e2dccc16c17372bf86ca73001f76645f53338
[ "MIT" ]
null
null
null
audio_processing.py
poria-cat/Transformer-TTS-Pytorch
1e9e2dccc16c17372bf86ca73001f76645f53338
[ "MIT" ]
null
null
null
audio_processing.py
poria-cat/Transformer-TTS-Pytorch
1e9e2dccc16c17372bf86ca73001f76645f53338
[ "MIT" ]
null
null
null
import torch
import torch.nn.functional as F
import torchaudio
import numpy as np
from scipy.signal import get_window
from librosa.util import pad_center, tiny
from librosa.filters import window_sumsquare
from librosa.filters import mel as librosa_mel_fn


def get_mel_basis(sampling_rate=22050, filter_length=1024, n_mel_channels=80,
                  mel_fmin=0.0, mel_fmax=8000.0):
    """Return the mel filterbank as a float tensor.

    :return: tensor of shape (n_mels, 1 + n_fft/2)
    """
    mel_basis = librosa_mel_fn(
        sampling_rate, filter_length, n_mel_channels, mel_fmin, mel_fmax)  # shape=(n_mels, 1 + n_fft/2)
    mel_basis = torch.from_numpy(mel_basis).float()
    return mel_basis


def dynamic_range_compression(x, C=1, clip_val=1e-5):
    """Compress x into log dynamic range.

    PARAMS
    ------
    C: compression factor
    clip_val: floor applied before the log to avoid log(0)
    """
    return torch.log(torch.clamp(x, min=clip_val) * C)


def dynamic_range_decompression(x, C=1):
    """Invert dynamic_range_compression.

    PARAMS
    ------
    C: compression factor used to compress
    """
    return torch.exp(x) / C


class Inverse(torch.nn.Module):
    """Inverse STFT implemented as a transposed 1-D convolution."""

    def __init__(self, filter_length=800, hop_length=200, win_length=800,
                 window='hann'):
        super(Inverse, self).__init__()
        self.filter_length = filter_length
        self.hop_length = hop_length
        self.win_length = win_length
        self.window = window
        scale = filter_length / hop_length
        fourier_basis = np.fft.fft(np.eye(filter_length))

        cutoff = int((filter_length / 2 + 1))
        # Stack real and imaginary parts so one conv handles both channels.
        fourier_basis = np.vstack([np.real(fourier_basis[:cutoff, :]),
                                   np.imag(fourier_basis[:cutoff, :])])
        forward_basis = torch.FloatTensor(fourier_basis[:, None, :])
        inverse_basis = torch.FloatTensor(
            np.linalg.pinv(scale * fourier_basis).T[:, None, :])

        if window is not None:  # was `window != None`
            assert (filter_length >= win_length)
            # get window and zero center pad it to filter_length
            fft_window = get_window(window, win_length, fftbins=True)
            fft_window = pad_center(fft_window, filter_length)
            fft_window = torch.from_numpy(fft_window).float()

            # window the bases
            forward_basis *= fft_window
            inverse_basis *= fft_window

        self.register_buffer('forward_basis', forward_basis.float())
        self.register_buffer('inverse_basis', inverse_basis.float())

    def forward(self, magnitude, phase):
        """Reconstruct a waveform from magnitude and phase spectrograms."""
        recombine_magnitude_phase = torch.cat(
            [magnitude * torch.cos(phase), magnitude * torch.sin(phase)], dim=1)

        # NOTE: torch.autograd.Variable is a deprecated no-op wrapper on
        # modern PyTorch; kept for behavioral parity with the original.
        inverse_transform = F.conv_transpose1d(
            recombine_magnitude_phase,
            torch.autograd.Variable(self.inverse_basis, requires_grad=False),
            stride=self.hop_length,
            padding=0)

        if self.window is not None:  # was `!= None`
            window_sum = window_sumsquare(
                self.window, magnitude.size(-1), hop_length=self.hop_length,
                win_length=self.win_length, n_fft=self.filter_length,
                dtype=np.float32)
            # remove modulation effects
            approx_nonzero_indices = torch.from_numpy(
                np.where(window_sum > tiny(window_sum))[0])
            window_sum = torch.autograd.Variable(
                torch.from_numpy(window_sum), requires_grad=False)
            window_sum = window_sum.cuda() if magnitude.is_cuda else window_sum
            inverse_transform[:, :, approx_nonzero_indices] /= window_sum[approx_nonzero_indices]

            # scale by hop ratio
            inverse_transform *= float(self.filter_length) / self.hop_length

        # Trim the filter_length/2 padding from both ends.
        inverse_transform = inverse_transform[:, :, int(self.filter_length / 2):]
        inverse_transform = inverse_transform[:, :, :-int(self.filter_length / 2)]

        return inverse_transform


def griffin_lim(magnitudes, inverse, n_iters=30, filter_length=1024,
                hop_length=256, win_length=1024):
    """Estimate a waveform from magnitudes by Griffin-Lim phase iteration.

    PARAMS
    ------
    magnitudes: spectrogram magnitudes
    inverse: Inverse module used as the ISTFT
    """
    # Start from random phase and iteratively re-estimate it.
    angles = np.angle(np.exp(2j * np.pi * np.random.rand(*magnitudes.size())))
    angles = angles.astype(np.float32)
    angles = torch.autograd.Variable(torch.from_numpy(angles))
    signal = inverse(magnitudes, angles).squeeze(1)

    for i in range(n_iters):
        stft = torch.stft(signal, n_fft=filter_length, hop_length=hop_length,
                          win_length=win_length,
                          window=torch.hann_window(win_length))
        real = stft[:, :, :, 0]
        imag = stft[:, :, :, 1]
        angles = torch.autograd.Variable(
            torch.atan2(imag.data, real.data))
        signal = inverse(magnitudes, angles).squeeze(1)
    return signal


def mel2wav(mel_outputs, n_iters=30, filter_length=1024, hop_length=256,
            win_length=1024, n_mel_channels=80, sampling_rate=22050,
            mel_fmin=0.0, mel_fmax=8000.0):
    """Convert a (compressed) mel spectrogram back into an audio waveform.

    Inverts the dynamic-range compression, projects mel bins back to linear
    frequency bins, then runs Griffin-Lim to recover phase.
    """
    mel_decompress = dynamic_range_decompression(mel_outputs)
    mel_decompress = mel_decompress.transpose(1, 2).data.cpu()
    # Reuse the shared helper instead of duplicating the filterbank setup.
    mel_basis = get_mel_basis(
        sampling_rate, filter_length, n_mel_channels, mel_fmin, mel_fmax)
    spec_from_mel_scaling = 1000
    spec_from_mel = torch.mm(mel_decompress[0], mel_basis)
    spec_from_mel = spec_from_mel.transpose(0, 1).unsqueeze(0)
    spec_from_mel = spec_from_mel * spec_from_mel_scaling

    inverse = Inverse(filter_length=filter_length, hop_length=hop_length,
                      win_length=win_length)
    audio = griffin_lim(torch.autograd.Variable(
        spec_from_mel[:, :, :-1]), inverse, n_iters,
        filter_length=filter_length, hop_length=hop_length,
        win_length=win_length)

    audio = audio.squeeze()
    audio = audio.cpu().numpy()
    return audio


class STFT(torch.nn.Module):
    """Waveform -> mel-spectrogram front end."""

    def __init__(self, filter_length=1024, hop_length=256, win_length=1024,
                 n_mel_channels=80, sampling_rate=22050, mel_fmin=0.0,
                 mel_fmax=8000.0):
        super(STFT, self).__init__()
        self.n_mel_channels = n_mel_channels
        self.sampling_rate = sampling_rate
        self.filter_length = filter_length
        self.hop_length = hop_length
        self.win_length = win_length
        mel_basis = get_mel_basis(
            sampling_rate, filter_length, n_mel_channels, mel_fmin,
            mel_fmax)  # shape=(n_mels, 1 + n_fft/2)
        self.register_buffer('mel_basis', mel_basis)

    def spectral_normalize(self, magnitudes):
        output = dynamic_range_compression(magnitudes)
        return output

    def spectral_de_normalize(self, magnitudes):
        output = dynamic_range_decompression(magnitudes)
        return output

    def mel_spectrogram(self, y):
        """Compute the log-compressed mel spectrogram of waveform batch y.

        y is expected to be normalized to [-1, 1].
        """
        assert (torch.min(y.data) >= -1)
        assert (torch.max(y.data) <= 1)
        stft = torch.stft(y, n_fft=self.filter_length,
                          hop_length=self.hop_length,
                          win_length=self.win_length,
                          window=torch.hann_window(self.win_length))
        real = stft[:, :, :, 0]
        imag = stft[:, :, :, 1]
        magnitudes = torch.sqrt(torch.pow(real, 2) + torch.pow(imag, 2))
        magnitudes = magnitudes.data
        mel_output = torch.matmul(self.mel_basis, magnitudes)
        mel_output = self.spectral_normalize(mel_output)
        return mel_output


def load_wav(full_path, resample_rate=True, resample_rate_value=22500):
    """Load a waveform and optionally resample it.

    :param full_path: path of the audio file
    :param resample_rate: when True, resample to resample_rate_value if needed
    :param resample_rate_value: target sample rate.
        NOTE(review): 22500 looks like a typo for 22050 (used everywhere else
        in this module) — kept for backward compatibility; confirm with callers.
    :return: (first channel of the waveform, its actual sample rate)
    """
    data, sampling_rate = torchaudio.load(full_path)
    if resample_rate and resample_rate_value != sampling_rate:
        resample = torchaudio.transforms.Resample(sampling_rate,
                                                  resample_rate_value)
        data = resample(data)
        return data[0], resample_rate_value
    # Bug fix: this path previously also reported resample_rate_value even
    # though the audio was NOT resampled; report the true sample rate instead.
    return data[0], sampling_rate
40.2
161
0.660775
ccf38eaa3eb535456d8a6a2a6262774cda8e86a7
13,771
bzl
Python
toolchain/ndk_cc_toolchain_config.bzl
jbeich/skcms
9c30a95f0f167ee1513e5a1ea6846b15a010385c
[ "BSD-3-Clause" ]
null
null
null
toolchain/ndk_cc_toolchain_config.bzl
jbeich/skcms
9c30a95f0f167ee1513e5a1ea6846b15a010385c
[ "BSD-3-Clause" ]
null
null
null
toolchain/ndk_cc_toolchain_config.bzl
jbeich/skcms
9c30a95f0f167ee1513e5a1ea6846b15a010385c
[ "BSD-3-Clause" ]
null
null
null
"""This module defines the ndk_cc_toolchain_config rule. This file is based on the `external/androidndk/cc_toolchain_config.bzl` file produced by the built-in `android_ndk_repository` Bazel rule[1], which was used to build the SkCMS repository up until this revision[2]. The paths in this file point to locations inside the expanded Android NDK ZIP file (found at external/android_ndk), and must be updated every time we upgrade to a new Android NDK version. [1] https://github.com/bazelbuild/bazel/blob/4710ef82ce34572878e07c52e83a0144d707f140/src/main/java/com/google/devtools/build/lib/bazel/rules/android/AndroidNdkRepositoryFunction.java#L422 [2] https://skia.googlesource.com/skcms/+/30c8e303800c256febb03a09fdcda7f75d119b1b/WORKSPACE#22 """ load("@bazel_tools//tools/build_defs/cc:action_names.bzl", "ACTION_NAMES") load( "@bazel_tools//tools/cpp:cc_toolchain_config_lib.bzl", "feature", "flag_group", "flag_set", "tool_path", "with_feature_set", ) load("download_toolchains.bzl", "NDK_PATH") # Supported CPUs. 
# Target CPU names accepted by this toolchain config.
_ARMEABI_V7A = "armeabi-v7a"
_ARM64_V8A = "arm64-v8a"

# Every compile-like action the flag sets below apply to.
_all_compile_actions = [
    ACTION_NAMES.c_compile,
    ACTION_NAMES.cpp_compile,
    ACTION_NAMES.linkstamp_compile,
    ACTION_NAMES.assemble,
    ACTION_NAMES.preprocess_assemble,
    ACTION_NAMES.cpp_header_parsing,
    ACTION_NAMES.cpp_module_compile,
    ACTION_NAMES.cpp_module_codegen,
    ACTION_NAMES.clif_match,
    ACTION_NAMES.lto_backend,
]

# Every link-like action the flag sets below apply to.
_all_link_actions = [
    ACTION_NAMES.cpp_link_executable,
    ACTION_NAMES.cpp_link_dynamic_library,
    ACTION_NAMES.cpp_link_nodeps_dynamic_library,
]

def _get_default_compile_flags(cpu):
    """Returns the per-CPU baseline compiler flags."""
    if cpu == _ARMEABI_V7A:
        return [
            "-D__ANDROID_API__=29",
            "-isystem",
            NDK_PATH + "/sysroot/usr/include/arm-linux-androideabi",
            "-target",
            "armv7-none-linux-androideabi",
            "-march=armv7-a",
            "-mfloat-abi=softfp",
            "-mfpu=vfpv3-d16",
            "-gcc-toolchain",
            NDK_PATH + "/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64",
            "-fpic",
            "-no-canonical-prefixes",
            "-Wno-invalid-command-line-argument",
            "-Wno-unused-command-line-argument",
            "-funwind-tables",
            "-fstack-protector-strong",
            "-fno-addrsig",
            "-Werror=return-type",
            "-Werror=int-to-pointer-cast",
            "-Werror=pointer-to-int-cast",
            "-Werror=implicit-function-declaration",
        ]
    if cpu == _ARM64_V8A:
        return [
            "-gcc-toolchain",
            NDK_PATH + "/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64",
            "-target",
            "aarch64-none-linux-android",
            "-fpic",
            "-isystem",
            NDK_PATH + "/sysroot/usr/include/aarch64-linux-android",
            "-D__ANDROID_API__=29",
            "-no-canonical-prefixes",
            "-Wno-invalid-command-line-argument",
            "-Wno-unused-command-line-argument",
            "-funwind-tables",
            "-fstack-protector-strong",
            "-fno-addrsig",
            "-Werror=return-type",
            "-Werror=int-to-pointer-cast",
            "-Werror=pointer-to-int-cast",
            "-Werror=implicit-function-declaration",
        ]
    fail("Unknown CPU: " + cpu)

def _get_default_link_flags(cpu):
    """Returns the per-CPU baseline linker flags."""
    if cpu == _ARMEABI_V7A:
        return [
            "-target",
            "armv7-none-linux-androideabi",
            "-gcc-toolchain",
            NDK_PATH + "/toolchains/arm-linux-androideabi-4.9/prebuilt/linux-x86_64",
            "-L",
            NDK_PATH + "/sources/cxx-stl/llvm-libc++/libs/armeabi-v7a",
            "-no-canonical-prefixes",
            "-Wl,-z,relro",
            "-Wl,--gc-sections",
        ]
    if cpu == _ARM64_V8A:
        return [
            "-gcc-toolchain",
            NDK_PATH + "/toolchains/aarch64-linux-android-4.9/prebuilt/linux-x86_64",
            "-target",
            "aarch64-none-linux-android",
            "-L",
            NDK_PATH + "/sources/cxx-stl/llvm-libc++/libs/arm64-v8a",
            "-no-canonical-prefixes",
            "-Wl,-z,relro",
            "-Wl,--gc-sections",
        ]
    fail("Unknown CPU: " + cpu)

def _get_default_dbg_flags(cpu):
    """Returns the per-CPU flags for --compilation_mode=dbg."""
    if cpu == _ARMEABI_V7A:
        return ["-g", "-fno-strict-aliasing", "-O0", "-UNDEBUG"]
    if cpu == _ARM64_V8A:
        return ["-O0", "-g", "-UNDEBUG"]
    fail("Unknown CPU: " + cpu)

def _get_default_opt_flags(cpu):
    """Returns the per-CPU flags for --compilation_mode=opt."""
    if cpu == _ARMEABI_V7A:
        return ["-mthumb", "-Os", "-g", "-DNDEBUG"]
    if cpu == _ARM64_V8A:
        return ["-O2", "-g", "-DNDEBUG"]
    fail("Unknown CPU: " + cpu)

def _get_toolchain_identifier(cpu):
    """Returns the unique toolchain identifier for the given CPU."""
    if cpu == _ARMEABI_V7A:
        return "ndk-armeabi-v7a-toolchain"
    if cpu == _ARM64_V8A:
        return "ndk-arm64-v8a-toolchain"
    fail("Unknown CPU: " + cpu)

def _get_target_system_name(cpu):
    """Returns the GNU target triple for the given CPU."""
    if cpu == _ARMEABI_V7A:
        return "arm-linux-androideabi"
    if cpu == _ARM64_V8A:
        return "aarch64-linux-android"
    fail("Unknown CPU: " + cpu)

def _get_builtin_sysroot(cpu):
    """Returns the NDK platform sysroot directory for the given CPU."""
    if cpu == _ARMEABI_V7A:
        return NDK_PATH + "/platforms/android-29/arch-arm"
    if cpu == _ARM64_V8A:
        return NDK_PATH + "/platforms/android-29/arch-arm64"
    fail("Unknown CPU: " + cpu)

def _get_tool_paths(cpu):
    """Returns tool_path entries for the given CPU.

    The cc_common.create_cc_toolchain_config_info function expects tool paths to point to files
    under the directory in which it is invoked. This means we cannot directly reference tools
    under external/android_ndk. The solution is to use "trampoline" scripts that pass through
    any command-line arguments to the NDK binaries under external/android_sdk.
    """
    if cpu == _ARMEABI_V7A:
        return [
            tool_path(
                name = "ar",
                path = "trampolines/arm-linux-androideabi-ar.sh",
            ),
            tool_path(
                name = "cpp",
                path = "trampolines/clang.sh",
            ),
            tool_path(
                name = "dwp",
                path = "trampolines/arm-linux-androideabi-dwp.sh",
            ),
            tool_path(
                name = "gcc",
                path = "trampolines/clang.sh",
            ),
            tool_path(
                name = "gcov",
                path = "/bin/false",
            ),
            tool_path(
                name = "ld",
                path = "trampolines/arm-linux-androideabi-ld.sh",
            ),
            tool_path(
                name = "nm",
                path = "trampolines/arm-linux-androideabi-nm.sh",
            ),
            tool_path(
                name = "objcopy",
                path = "trampolines/arm-linux-androideabi-objcopy.sh",
            ),
            tool_path(
                name = "objdump",
                path = "trampolines/arm-linux-androideabi-objdump.sh",
            ),
            tool_path(
                name = "strip",
                path = "trampolines/arm-linux-androideabi-strip.sh",
            ),
        ]
    if cpu == _ARM64_V8A:
        return [
            tool_path(
                name = "ar",
                path = "trampolines/aarch64-linux-android-ar.sh",
            ),
            tool_path(
                name = "cpp",
                path = "trampolines/clang.sh",
            ),
            tool_path(
                name = "dwp",
                path = "trampolines/aarch64-linux-android-dwp.sh",
            ),
            tool_path(
                name = "gcc",
                path = "trampolines/clang.sh",
            ),
            tool_path(
                name = "gcov",
                path = "/bin/false",
            ),
            tool_path(
                name = "ld",
                path = "trampolines/aarch64-linux-android-ld.sh",
            ),
            tool_path(
                name = "nm",
                path = "trampolines/aarch64-linux-android-nm.sh",
            ),
            tool_path(
                name = "objcopy",
                path = "trampolines/aarch64-linux-android-objcopy.sh",
            ),
            tool_path(
                name = "objdump",
                path = "trampolines/aarch64-linux-android-objdump.sh",
            ),
            tool_path(
                name = "strip",
                path = "trampolines/aarch64-linux-android-strip.sh",
            ),
        ]
    fail("Unknown CPU: " + cpu)

def _ndk_cc_toolchain_config_impl(ctx):
    """Implementation of the ndk_cc_toolchain_config rule."""
    default_compile_flags = _get_default_compile_flags(ctx.attr.cpu)
    unfiltered_compile_flags = [
        "-isystem",
        NDK_PATH + "/sources/cxx-stl/llvm-libc++/include",
        "-isystem",
        NDK_PATH + "/sources/cxx-stl/llvm-libc++abi/include",
        "-isystem",
        NDK_PATH + "/sources/android/support/include",
        "-isystem",
        NDK_PATH + "/sysroot/usr/include",
    ]
    default_link_flags = _get_default_link_flags(ctx.attr.cpu)
    default_fastbuild_flags = [""]
    default_dbg_flags = _get_default_dbg_flags(ctx.attr.cpu)
    default_opt_flags = _get_default_opt_flags(ctx.attr.cpu)

    # Compilation-mode marker features and always-on capability features.
    opt_feature = feature(name = "opt")
    fastbuild_feature = feature(name = "fastbuild")
    dbg_feature = feature(name = "dbg")
    supports_dynamic_linker_feature = feature(name = "supports_dynamic_linker", enabled = True)
    supports_pic_feature = feature(name = "supports_pic", enabled = True)
    static_link_cpp_runtimes_feature = feature(name = "static_link_cpp_runtimes", enabled = True)

    default_compile_flags_feature = feature(
        name = "default_compile_flags",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = default_compile_flags)],
            ),
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = default_fastbuild_flags)],
                with_features = [with_feature_set(features = ["fastbuild"])],
            ),
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = default_dbg_flags)],
                with_features = [with_feature_set(features = ["dbg"])],
            ),
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = default_opt_flags)],
                with_features = [with_feature_set(features = ["opt"])],
            ),
        ],
    )

    default_link_flags_feature = feature(
        name = "default_link_flags",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_link_actions,
                flag_groups = [flag_group(flags = default_link_flags)],
            ),
        ],
    )

    # Passes through --copt / per-target copts.
    user_compile_flags_feature = feature(
        name = "user_compile_flags",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [
                    flag_group(
                        flags = ["%{user_compile_flags}"],
                        iterate_over = "user_compile_flags",
                        expand_if_available = "user_compile_flags",
                    ),
                ],
            ),
        ],
    )

    sysroot_feature = feature(
        name = "sysroot",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_compile_actions + _all_link_actions,
                flag_groups = [
                    flag_group(
                        flags = ["--sysroot=%{sysroot}"],
                        expand_if_available = "sysroot",
                    ),
                ],
            ),
        ],
    )

    unfiltered_compile_flags_feature = feature(
        name = "unfiltered_compile_flags",
        enabled = True,
        flag_sets = [
            flag_set(
                actions = _all_compile_actions,
                flag_groups = [flag_group(flags = unfiltered_compile_flags)],
            ),
        ],
    )

    features = [
        default_compile_flags_feature,
        default_link_flags_feature,
        supports_dynamic_linker_feature,
        supports_pic_feature,
        static_link_cpp_runtimes_feature,
        fastbuild_feature,
        dbg_feature,
        opt_feature,
        user_compile_flags_feature,
        sysroot_feature,
        unfiltered_compile_flags_feature,
    ]

    cxx_builtin_include_directories = [
        NDK_PATH + "/toolchains/llvm/prebuilt/linux-x86_64/lib64/clang/9.0.9/include",
        "%sysroot%/usr/include",
        NDK_PATH + "/sysroot/usr/include",
    ]

    # https://bazel.build/rules/lib/cc_common#create_cc_toolchain_config_info
    return cc_common.create_cc_toolchain_config_info(
        ctx = ctx,
        toolchain_identifier = _get_toolchain_identifier(ctx.attr.cpu),
        host_system_name = "local",
        target_system_name = _get_target_system_name(ctx.attr.cpu),
        target_cpu = ctx.attr.cpu,
        target_libc = "local",
        compiler = "clang9.0.9",
        abi_version = ctx.attr.cpu,
        abi_libc_version = "local",
        features = features,
        tool_paths = _get_tool_paths(ctx.attr.cpu),
        cxx_builtin_include_directories = cxx_builtin_include_directories,
        builtin_sysroot = _get_builtin_sysroot(ctx.attr.cpu),
    )

ndk_cc_toolchain_config = rule(
    implementation = _ndk_cc_toolchain_config_impl,
    attrs = {
        "cpu": attr.string(
            mandatory = True,
            values = [_ARMEABI_V7A, _ARM64_V8A],
            doc = "Target CPU.",
        )
    },
    provides = [CcToolchainConfigInfo],
)
34.002469
188
0.559872
ccf57840058ae1e39f456a4f292c8353027f974d
7,149
py
Python
VirtualJudgeSpider/OJs/HDUClass.py
mr-kkid/OnlineJudgeSpider
c83c01d8e989ae87834bdabdb3fae0984eae2eaa
[ "MIT" ]
null
null
null
VirtualJudgeSpider/OJs/HDUClass.py
mr-kkid/OnlineJudgeSpider
c83c01d8e989ae87834bdabdb3fae0984eae2eaa
[ "MIT" ]
null
null
null
VirtualJudgeSpider/OJs/HDUClass.py
mr-kkid/OnlineJudgeSpider
c83c01d8e989ae87834bdabdb3fae0984eae2eaa
[ "MIT" ]
null
null
null
import re
from http import cookiejar
from urllib import request, parse

from bs4 import BeautifulSoup

from VirtualJudgeSpider import Config
from VirtualJudgeSpider.Config import Problem, Spider, Result
from VirtualJudgeSpider.OJs.BaseClass import Base


class HDU(Base):
    """Spider for the HDU online judge (acm.hdu.edu.cn)."""

    def __init__(self):
        # HDU serves GB18030-encoded pages.
        self.code_type = 'gb18030'
        self.cj = cookiejar.CookieJar()
        self.opener = request.build_opener(request.HTTPCookieProcessor(self.cj))

    @staticmethod
    def home_page_url(self):
        # NOTE(review): declared @staticmethod but still takes `self`;
        # callers must pass a dummy argument — confirm intent with callers.
        return 'http://acm.hdu.edu.cn/'

    def check_login_status(self):
        """Return True when the session cookie is logged in.

        Falls through (returns None) when the page loads but shows no
        logout link; returns False on a network error.
        """
        url = 'http://acm.hdu.edu.cn/'
        try:
            with self.opener.open(url) as resp:
                page = resp.read().decode(self.code_type)
                # The logout link only appears for authenticated sessions.
                if re.search(r'userloginex\.php\?action=logout', page) is not None:
                    return True
        except:
            return False

    def login_webside(self, *args, **kwargs):
        """Log into HDU with the account in kwargs; return True on success."""
        if self.check_login_status():
            return True
        login_page_url = 'http://acm.hdu.edu.cn/'
        login_link_url = 'http://acm.hdu.edu.cn/userloginex.php?action=login&cid=0&notice=0'
        post_data = parse.urlencode(
            {'username': kwargs['account'].get_username(),
             'userpass': kwargs['account'].get_password()})
        try:
            # Hit the home page first so the session cookie is set.
            self.opener.open(login_page_url)
            req = request.Request(url=login_link_url,
                                  data=post_data.encode(self.code_type),
                                  headers=Config.custom_headers)
            self.opener.open(req)
            return True if self.check_login_status() else False
        except:
            return False

    def get_problem(self, *args, **kwargs):
        """Scrape problem `pid` into a Problem object.

        Returns Problem.PROBLEM_NOT_FOUND when any part of the page
        fails to parse.
        """
        url = 'http://acm.hdu.edu.cn/showproblem.php?pid=' + str(kwargs['pid'])
        problem = Problem()
        try:
            website_data = Spider.get_data(url, self.code_type)
            problem.remote_id = kwargs['pid']
            problem.remote_url = url
            problem.remote_oj = 'HDU'
            problem.title = re.search(r'color:#1A5CC8\'>([\s\S]*?)</h1>', website_data).group(1)
            problem.time_limit = re.search(r'(\d* MS)', website_data).group(1)
            problem.memory_limit = re.search(r'/(\d* K)', website_data).group(1)
            problem.special_judge = re.search(r'color=red>Special Judge</font>', website_data) is not None
            problem.description = re.search(
                r'>Problem Description</div>[\s\S]*?panel_content>([\s\S]*?)</div>', website_data).group(1)
            problem.input = re.search(
                r'>Input</div>[\s\S]*?panel_content>([\s\S]*?)</div>', website_data).group(1)
            problem.output = re.search(
                r'>Output</div>[\s\S]*?panel_content>([\s\S]*?)</div>', website_data).group(1)

            # Sample input: strip the optional <pre><div ...> wrapper.
            match_group = re.search(
                r'>Sample Input</div>[\s\S]*?panel_content>([\s\S]*?)</div', website_data)
            input_data = ''
            if match_group:
                input_data = re.search(
                    r'(<pre><div[\s\S]*?>)?([\s\S]*)', match_group.group(1)).group(2)

            # Sample output: same wrapper handling plus trailing <div cutoff.
            output_data = ''
            match_group = re.search(
                r'>Sample Output</div>[\s\S]*?panel_content>([\s\S]*?)</div', website_data)
            if match_group:
                output_data = re.search(
                    r'(<pre><div[\s\S]*?>)?([\s\S]*)', match_group.group(1)).group(2)
                if re.search('<div', output_data):
                    output_data = re.search(r'([\s\S]*?)<div', output_data).group(1)

            problem.sample = [
                {'input': input_data,
                 'output': output_data}]

            match_group = re.search(
                r'>Author</div>[\s\S]*?panel_content>([\s\S]*?)</div>', website_data)
            if match_group:
                problem.author = match_group.group(1)
            match_group = re.search(
                r'<i>Hint</i>[\s\S]*?/div>[\s]*([\s\S]+?)</div>', website_data)
            if match_group:
                problem.hint = match_group.group(1)
        except:
            return Problem.PROBLEM_NOT_FOUND
        return problem

    def submit_code(self, *args, **kwargs):
        """Submit source code for a problem; return True when the POST succeeds."""
        if self.login_webside(*args, **kwargs) is False:
            return False
        try:
            code = kwargs['code']
            language = kwargs['language']
            pid = kwargs['pid']
            url = 'http://acm.hdu.edu.cn/submit.php?action=submit'
            post_data = parse.urlencode({'check': '0', 'language': language,
                                         'problemid': pid, 'usercode': code})
            req = request.Request(url=url,
                                  data=post_data.encode(self.code_type),
                                  headers=Config.custom_headers)
            response = self.opener.open(req)
            response.read().decode(self.code_type)
            return True
        except:
            return False

    def find_language(self, *args, **kwargs):
        """Return {value: label} of languages offered by the submit form.

        NOTE(review): the bare try/finally swallows any scraping error and
        returns whatever was collected so far (possibly {}).
        """
        if self.login_webside(*args, **kwargs) is False:
            return None
        url = 'http://acm.hdu.edu.cn/submit.php'
        languages = {}
        try:
            with self.opener.open(url) as resp:
                page = resp.read().decode(self.code_type)
                soup = BeautifulSoup(page, 'lxml')
                options = soup.find('select', attrs={'name': 'language'}).find_all('option')
                for option in options:
                    languages[option.get('value')] = option.string
        finally:
            return languages

    def get_result(self, *args, **kwargs):
        """Fetch the latest judge result for (account, pid)."""
        account = kwargs.get('account')
        pid = kwargs.get('pid')
        url = 'http://acm.hdu.edu.cn/status.php?first=&pid=' + pid + \
              '&user=' + account.username + '&lang=0&status=0'
        return self.get_result_by_url(url=url)

    def get_result_by_rid(self, rid):
        """Fetch the judge result for a specific run id."""
        url = 'http://acm.hdu.edu.cn/status.php?first=' + rid + '&pid=&user=&lang=0&status=0'
        return self.get_result_by_url(url=url)

    def get_result_by_url(self, url):
        """Parse the first row of the status table at `url` into a Result."""
        result = Result()
        try:
            with request.urlopen(url) as resp:
                page = resp.read().decode(self.code_type)
                soup = BeautifulSoup(page, 'lxml')
                line = soup.find('table', attrs={'class': 'table_text'}).find(
                    'tr', attrs={'align': 'center'}).find_all('td')
                if line is not None:
                    result.origin_run_id = line[0].string
                    result.verdict = line[2].string
                    result.execute_time = line[4].string
                    result.execute_memory = line[5].string
                    return result
        except:
            pass
        return result

    def get_class_name(self):
        """Name used to identify this judge."""
        return 'HDU'

    def is_waiting_for_judge(self, verdict):
        """True while the verdict is still non-final."""
        return verdict in ['Queuing', 'Compiling', 'Running']

    def check_status(self):
        """Return True when the HDU home page is reachable and recognizable."""
        url = 'http://acm.hdu.edu.cn/'
        try:
            with request.urlopen(url, timeout=5) as resp:
                page = resp.read().decode(self.code_type)
                if re.search(r'<H1>Welcome to HDU Online Judge System</H1>', page):
                    return True
        except:
            return False
41.807018
120
0.55854
ccf57bf381b881ac46ecdef94c8bf2a01ef756ae
705
py
Python
onlineJudge/baekjoon/DFS/Q2667.py
dahyeong-yun/prtc_coding-test-py
f082e42cc47d7da912bd229b355a813f2d38fabb
[ "MIT" ]
null
null
null
onlineJudge/baekjoon/DFS/Q2667.py
dahyeong-yun/prtc_coding-test-py
f082e42cc47d7da912bd229b355a813f2d38fabb
[ "MIT" ]
null
null
null
onlineJudge/baekjoon/DFS/Q2667.py
dahyeong-yun/prtc_coding-test-py
f082e42cc47d7da912bd229b355a813f2d38fabb
[ "MIT" ]
null
null
null
"""BOJ 2667: count the 'house complexes' (4-connected groups of 1s) in an
n x n binary map and print the complex count followed by sorted sizes."""


def _complex_size(square_map, n, x, y):
    """Flood-fill from (x, y) and return the number of houses reached.

    Visited cells are marked in place with 2, so the input map is mutated.
    Returns 0 when (x, y) is out of bounds or not an unvisited house.
    """
    if x <= -1 or x >= n or y <= -1 or y >= n:
        return 0
    if square_map[x][y] != 1:
        return 0
    square_map[x][y] = 2  # mark visited
    size = 1
    # Recurse over the four orthogonal neighbours (depth <= n*n <= 625,
    # well under Python's default recursion limit for n <= 25).
    for dx, dy in ((0, -1), (0, 1), (1, 0), (-1, 0)):
        size += _complex_size(square_map, n, x + dx, y + dy)
    return size


def find_complexes(square_map):
    """Return a list with the size of every complex of 1s in square_map.

    The map is mutated (1s become 2s). The result is in discovery order;
    sort it if ordered output is required.
    """
    n = len(square_map)
    sizes = []
    for i in range(n):
        for j in range(n):
            size = _complex_size(square_map, n, i, j)
            if size:
                sizes.append(size)
    return sizes


if __name__ == "__main__":
    # Input reading moved under the __main__ guard so the module is
    # importable (the original read stdin at import time).
    n = int(input())  # map side length
    square_map = [list(map(int, input())) for _ in range(n)]
    house = find_complexes(square_map)
    print(len(house))
    for size in sorted(house):
        print(size)
16.022727
46
0.486525
ccf5cf766e01ecc826d0e351e27a11fbaff7d8c0
3,329
py
Python
openhivenpy/types/attachment.py
FrostbyteBot/hiven.py
1a2831cf4e0512cc8dd2b8f8f5d04b582158a21e
[ "MIT" ]
9
2020-11-13T19:07:54.000Z
2021-01-30T23:12:57.000Z
openhivenpy/types/attachment.py
FrostbyteBot/hiven.py
1a2831cf4e0512cc8dd2b8f8f5d04b582158a21e
[ "MIT" ]
46
2020-11-05T20:32:41.000Z
2021-04-03T22:48:18.000Z
openhivenpy/types/attachment.py
FrostbyteBot/openhiven.py
1a2831cf4e0512cc8dd2b8f8f5d04b582158a21e
[ "MIT" ]
2
2020-12-19T14:27:07.000Z
2021-01-29T10:52:33.000Z
""" Attachment File which implements the Hiven Attachment type and its methods (endpoints) --- Under MIT License Copyright © 2020 - 2021 Luna Klatzer Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ # Used for type hinting and not having to use annotations for the objects from __future__ import annotations import logging # Only importing the Objects for the purpose of type hinting and not actual use from typing import TYPE_CHECKING from .hiven_type_schemas import AttachmentSchema, get_compiled_validator from ..base_types import DataClassObject from ..utils import log_type_exception if TYPE_CHECKING: from .. 
import HivenClient logger = logging.getLogger(__name__) __all__ = ['Attachment'] class Attachment(DataClassObject): """ Represents a Hiven Message Attachment containing a file """ _json_schema: dict = AttachmentSchema json_validator = get_compiled_validator(_json_schema) @log_type_exception('Attachment') def __init__(self, data: dict, client: HivenClient): """ Represents a Hiven Message Attachment containing a file :param data: Data that should be used to create the object :param client: The HivenClient """ super().__init__() self._filename = data.get('filename') self._media_url = data.get('media_url') self._raw = data.get('raw') self._client = client @classmethod def format_obj_data(cls, data: dict) -> dict: """ Validates the data and appends data if it iis missing that would be required for the creation of an instance. :param data: Data that should be validated and used to form the object :return: The modified dictionary, which can then be used to create a new class instance """ data['raw'] = {**data.pop('raw', {}), **data} return cls.validate(data) @property def filename(self) -> str: """ Name of the file """ return getattr(self, '_filename', None) @property def media_url(self) -> str: """ Media-url to access the file """ return getattr(self, '_media_url', None) @property def raw(self) -> dict: """ The raw data dictionary received over the Swarm """ # Different files have different attribs return getattr(self, '_raw', None)
34.677083
79
0.714329
ccf684f8b04fadc89a621f3af0e959e3165d4fdf
1,530
py
Python
queue_task/views.py
emoryBlame/queue_server
946345111359d5001244eb0cc8fd1b8acc50dd3f
[ "MIT" ]
null
null
null
queue_task/views.py
emoryBlame/queue_server
946345111359d5001244eb0cc8fd1b8acc50dd3f
[ "MIT" ]
7
2020-02-11T23:41:11.000Z
2022-01-13T01:04:03.000Z
queue_task/views.py
emoryBlame/queue_server
946345111359d5001244eb0cc8fd1b8acc50dd3f
[ "MIT" ]
null
null
null
from django.shortcuts import render from .models import Task from rest_framework import serializers from rest_framework.response import Response from rest_framework.decorators import api_view # Create your views here. class TaskSerializer(serializers.ModelSerializer): """ Task serializer class """ class Meta: """ Task serializer meta class """ model = Task fields = ('id', 'url', 'status', 'response_content',\ 'response_http_status', 'response_body') class TaskSerializerResult(serializers.ModelSerializer): """ Task id serializer """ class Meta: model = Task fields = ('id',) @api_view(("POST",)) def send(request): if request.method == "POST": task = Task.objects.create(url=request.data.get("url")) return Response(TaskSerializerResult(task).data) else: return Response({"error": "Bad request."}) @api_view(("GET", )) def result(request): if request.method == "GET": task_id = request.GET.get("id", False) if task_id: task = Task.objects.filter(id = task_id).first() print(task) if task: return Response(TaskSerializer(task).data) else: task = Task.objects.all().order_by('-id')[:10] print(task) return Response(TaskSerializer(task, many = True).data) else: return Response({"status": "Bad id"}) else: return Response({"status": "Bad request"}) @api_view(("GET",)) def start_tasks(request): Task.objects.all().update(status=0) return Response({"status": "all task gets status New, and will updating every 2 min in case it's still new"})
23.90625
110
0.698693
ccf6ab1d7fc1142582573ac98bff25dde4b1d9c1
471
py
Python
test_project/server/settings/environments/development.py
wemake-services/wemake-django-rest
f1ab3b4b5bf3bf04866e7695667049fefe7b98f1
[ "MIT" ]
13
2018-10-06T13:04:54.000Z
2020-08-14T13:03:01.000Z
test_project/server/settings/environments/development.py
wemake-services/wemake-django-rest
f1ab3b4b5bf3bf04866e7695667049fefe7b98f1
[ "MIT" ]
6
2018-10-09T15:45:43.000Z
2021-06-23T12:31:51.000Z
test_project/server/settings/environments/development.py
wemake-services/wemake-django-rest
f1ab3b4b5bf3bf04866e7695667049fefe7b98f1
[ "MIT" ]
2
2018-11-16T02:58:41.000Z
2020-07-18T18:19:15.000Z
# -*- coding: utf-8 -*- """ This file contains all the settings that defines the development server. SECURITY WARNING: don't run with debug turned on in production! """ import logging from typing import List from server.settings.components.common import INSTALLED_APPS, MIDDLEWARE # Setting the development status: DEBUG = True # Static files: # https://docs.djangoproject.com/en/1.11/ref/settings/#std:setting-STATICFILES_DIRS STATICFILES_DIRS: List[str] = []
19.625
83
0.755839
ccf715d5c23093aaf3f22692cfa66ef6452f1f55
605
py
Python
test/sample_robot_api.py
things-factory/vision-client-python
b76e1fb3e1f4537de7a5fa07330c137f37a9cc5d
[ "MIT" ]
null
null
null
test/sample_robot_api.py
things-factory/vision-client-python
b76e1fb3e1f4537de7a5fa07330c137f37a9cc5d
[ "MIT" ]
null
null
null
test/sample_robot_api.py
things-factory/vision-client-python
b76e1fb3e1f4537de7a5fa07330c137f37a9cc5d
[ "MIT" ]
null
null
null
import sys import json from random import choice, random import time from pyaidoop_graphql_client.api import Client def main(argv): workspaceName = argv[1] client = Client("http://localhost:3000", "system") client.signin("admin@hatiolab.com", "admin") # client.robot_go_home(name='robot01') # client.robot_task_moveby( # name='robot01', pose={'x': 0.0, 'y': 0.01, 'z': 0.0, 'u': 0.0, 'v': 0.0, 'w': 0.0}) print(client.get_robot_status(name="robot01")) print(client.get_robot_status(name="robot01")["moveFinished"]) if __name__ == "__main__": main(sys.argv)
24.2
93
0.661157
ccf716c91df18440671ec6222d7ac8edd7636308
6,275
py
Python
dispotrains.webapp/src/analysis/all_stations.py
emembrives/dispotrains
6ef69d4a62d60a470ed6fd96d04e47d29a0ae44f
[ "Apache-2.0" ]
1
2016-11-12T01:16:32.000Z
2016-11-12T01:16:32.000Z
dispotrains.webapp/src/analysis/all_stations.py
emembrives/dispotrains
6ef69d4a62d60a470ed6fd96d04e47d29a0ae44f
[ "Apache-2.0" ]
null
null
null
dispotrains.webapp/src/analysis/all_stations.py
emembrives/dispotrains
6ef69d4a62d60a470ed6fd96d04e47d29a0ae44f
[ "Apache-2.0" ]
2
2016-05-20T21:04:15.000Z
2020-02-02T15:25:40.000Z
#!/bin/env python3 """ Extracts all metro and RER stations from an OSM dump. """ import xml.etree.cElementTree as ET import argparse import csv from math import radians, cos, sin, asin, sqrt class Station(object): """A train station""" def __init__(self, name, osm_id, lat, lon, accessible=False): self._name = name self._osm_ids = set([int(osm_id)]) self._lat = lat self._lon = lon self._accessible = accessible @property def name(self): """Name of the station.""" return self._name @property def osm_ids(self): """OpenStreetMap ID""" return self._osm_ids @property def lat(self): """Latitude of the station.""" return self._lat @property def lon(self): """Longitude of the station.""" return self._lon @property def accessible(self): """True if the station is accessible.""" return self._accessible def distance(self, other): """ Calculate the great circle distance between two points on the earth (specified in decimal degrees) """ # convert decimal degrees to radians lon1, lat1, lon2, lat2 = [radians(x) for x in \ [self.lon, self.lat, other.lon, other.lat]] # haversine formula dlon = lon2 - lon1 dlat = lat2 - lat1 a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2 c = 2 * asin(sqrt(a)) r = 6371.0 # Radius of earth in kilometers. 
Use 3956 for miles return c * r def merge(self, other): self._osm_ids.update(other.osm_ids) @staticmethod def from_node(node): """Creates a Station from an XML node in OSM format.""" name_tags = node.findall("./tag[@k='name']") name = None if len(name_tags) != 0 : name = name_tags[0].get("v") osm_id = node.get("id") lat = float(node.get("lat")) lon = float(node.get("lon")) return Station(name, osm_id, lat, lon) def __repr__(self): return "Station(%s)" % (self.name) def __eq__(self, other): if isinstance(other, Station): return self.name == other.name else: return False def __ne__(self, other): return not self.__eq__(other) def __hash__(self): return hash(self.__repr__()) def extract_stations_from_dump(dump_path): """Extract a list of |Station|s from an XML dump.""" tree = ET.parse(dump_path) root = tree.getroot() allstation_nodes = root.findall('./node') allstations = {} for station_node in allstation_nodes: station = Station.from_node(station_node) if station.name in allstations: allstations[station.name].merge(station) else: allstations[station.name] = station return merge_osm_stations(allstations.values()) MERGE_STATIONS = { 26824135: [27371889, 1309031698, 1308998006], # Gare de Lyon 1731763794: [241928557], # Nation 3533789791: [3542631493], # Saint Lazare 243496033: [1731763792], # Etoile 3574677130: [1785132453], # Pont du Garigliano 3586000197: [137533248], # La Défense 269296749: [241926523], # Marne la Vallée Chessy 225119209: [3530909557, 1882558198], # CDG 2 3531066587: [1883637808], # La Fraternelle - Rungis 327613695: [3090733718], # Gare du Nord 255687197: [2367372622], # Issy Val de Seine 264778142: [2799009872], # Porte de la Villette } def merge_osm_stations(stations): stations = list(stations) def get_station(osm_id): for station_index in range(len(stations)): if osm_id in stations[station_index].osm_ids: return station_index, stations[station_index] return -1, None for osm_id, ids_to_merge in MERGE_STATIONS.items(): _, receiver = 
get_station(osm_id) for id_to_merge in ids_to_merge: index_to_merge, station_to_merge = get_station(id_to_merge) receiver.merge(station_to_merge) del stations[index_to_merge] return stations def extract_accessible_stations(csv_filepath): """Extracts stations from a csv file listing accessible stations.""" stations = [] with open(csv_filepath) as reader: csvreader = csv.reader(reader) for row in csvreader: stations.append(Station(row[0], row[4], float(row[2]), float(row[3]), True)) return stations def merge_stations(all_stations, accessible_stations): """Merge two lists of stations.""" merged_stations = [] merged_count = 0 for station1 in all_stations: found = False for station2 in accessible_stations: if len(station1.osm_ids.intersection(station2.osm_ids)): merged_stations.append(station2) found = True merged_count += 1 if not found and station1.name: merged_stations.append(station1) print(merged_count) return merged_stations def print_to_csv(stations): """Print a list of stations to CSV.""" with open("full-list.csv", "w") as writer: csvwriter = csv.writer(writer) csvwriter.writerow( ["name", "osm_id", "latitude", "longitude", "accessible"]) for station in stations: csvwriter.writerow( [station.name, station.osm_ids, station.lat, station.lon, station.accessible]) def _parse_args(): """Define and parse command-line arguments.""" parser = argparse.ArgumentParser(description='Extract station information.') parser.add_argument('--osm_dump', type=str, help='Path of the OSM dump containing train stations') parser.add_argument('--accessible_csv', type=str, help='Path to the list of accessible stations (CSV)') return parser.parse_args() def _main(): """Script entry-point.""" args = _parse_args() all_stations = extract_stations_from_dump(args.osm_dump) accessible_stations = extract_accessible_stations(args.accessible_csv) merged_stations = merge_stations(all_stations, accessible_stations) print_to_csv(merged_stations) if __name__ == '__main__': _main()
32.179487
94
0.629163
ccf73deff0cd7b3da75f4fe279624fa28407626b
493
py
Python
scripts/insert_plots.py
hamzaMahdi/sphero_formation
71dd4a8097c578f9237ed1f65e3debdcc3a8cc5b
[ "MIT" ]
null
null
null
scripts/insert_plots.py
hamzaMahdi/sphero_formation
71dd4a8097c578f9237ed1f65e3debdcc3a8cc5b
[ "MIT" ]
null
null
null
scripts/insert_plots.py
hamzaMahdi/sphero_formation
71dd4a8097c578f9237ed1f65e3debdcc3a8cc5b
[ "MIT" ]
1
2019-11-06T21:27:51.000Z
2019-11-06T21:27:51.000Z
# note : this does not create the link between the map and the world. It only spawns the robots. # Please make sure to go back and manually add the path to the bitmap file file_name = 'plots.txt' f = open("../new_results/" + file_name, "w+") counter = 1 for i in range(1, 10): for j in range(1, 6): f.write('\subfloat{\includegraphics[width=0.5\linewidth]{figures/test_%d_%d.png}}\n' % (i, j)) if counter % 2 == 0: f.write(r'\\ ') counter+=1 f.close()
35.214286
102
0.622718
ccf80cabc3a7e5b0b42749bb4a83f5a36f41004c
5,615
py
Python
lib/aquilon/worker/formats/entitlement.py
ned21/aquilon
6562ea0f224cda33b72a6f7664f48d65f96bd41a
[ "Apache-2.0" ]
7
2015-07-31T05:57:30.000Z
2021-09-07T15:18:56.000Z
lib/aquilon/worker/formats/entitlement.py
ned21/aquilon
6562ea0f224cda33b72a6f7664f48d65f96bd41a
[ "Apache-2.0" ]
115
2015-03-03T13:11:46.000Z
2021-09-20T12:42:24.000Z
lib/aquilon/worker/formats/entitlement.py
ned21/aquilon
6562ea0f224cda33b72a6f7664f48d65f96bd41a
[ "Apache-2.0" ]
13
2015-03-03T11:17:59.000Z
2021-09-09T09:16:41.000Z
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*- # ex: set expandtab softtabstop=4 shiftwidth=4: # # Copyright (C) 2018-2019 Contributor # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Entitlement formatter.""" from aquilon.aqdb.model import ( EntitlementArchetypeGrnMap, EntitlementArchetypeUserMap, EntitlementClusterGrnMap, EntitlementClusterUserMap, EntitlementGrnGrnMap, EntitlementGrnUserMap, EntitlementHostGrnMap, EntitlementHostUserMap, EntitlementOnArchetype, EntitlementOnCluster, EntitlementOnGrn, EntitlementOnHost, EntitlementOnHostEnvironment, EntitlementOnLocation, EntitlementOnPersonality, EntitlementPersonalityGrnMap, EntitlementPersonalityUserMap, EntitlementToGrn, EntitlementToUser, EntitlementType, ) from aquilon.worker.formats.formatters import ObjectFormatter class EntitlementTypeFormatter(ObjectFormatter): def format_raw(self, entit_type, indent="", embedded=True, indirect_attrs=True): details = [] details.append('{}Entitlement type: {}'.format( indent, entit_type.name)) details.append('{} To GRN: {}'.format( indent, 'enabled' if entit_type.to_grn else 'disabled')) if entit_type.to_user_types: user_types = set(m.user_type.name for m in entit_type.to_user_types) details.append('{} To User Types: {}'.format( indent, ', '.join(sorted(user_types)))) if entit_type.comments: details.append('{} Comments: {}'.format( indent, entit_type.comments)) return '\n'.join(details) ObjectFormatter.handlers[EntitlementType] = EntitlementTypeFormatter() class 
EntitlementFormatter(ObjectFormatter): def format_raw(self, entit, indent="", embedded=True, indirect_attrs=True): details = [] def add(txt): details.append('{}{}'.format(indent, txt)) add('Entitlement: {}'.format(entit.type.name)) if isinstance(entit, EntitlementToGrn): add(' To {0:c}: {0.grn}'.format(entit.grn)) elif isinstance(entit, EntitlementToUser): add(' To {type} {0:c}: {0.name}'.format( entit.user, type=entit.user.type.name.title())) if isinstance(entit, EntitlementOnHost): add(' On {0:c}: {0.hardware_entity.primary_name.fqdn.fqdn}' .format(entit.host)) elif isinstance(entit, EntitlementOnCluster): add(' On {0:c}: {0.name}'.format(entit.cluster)) elif isinstance(entit, EntitlementOnPersonality): add(' On {0:c}: {0.name}'.format(entit.personality)) elif isinstance(entit, EntitlementOnArchetype): add(' On {0:c}: {0.name}'.format(entit.archetype)) elif isinstance(entit, EntitlementOnGrn): add(' On {0:c}: {0.grn}'.format(entit.target_grn)) if isinstance(entit, EntitlementOnHostEnvironment): add(' On {0:c}: {0.name}'.format(entit.host_environment)) if isinstance(entit, EntitlementOnLocation): add(' On {0:c}: {0.name}'.format(entit.location)) return '\n'.join(details) def fill_proto(self, entit, skeleton, embedded=True, indirect_attrs=True): skeleton.type = entit.type.name if isinstance(entit, EntitlementToGrn): skeleton.eonid = entit.grn.eon_id elif isinstance(entit, EntitlementToUser): self.redirect_proto(entit.user, skeleton.user, indirect_attrs=False) if isinstance(entit, EntitlementOnHost): self.redirect_proto(entit.host, skeleton.host, indirect_attrs=False) elif isinstance(entit, EntitlementOnCluster): self.redirect_proto(entit.cluster, skeleton.cluster, indirect_attrs=False) elif isinstance(entit, EntitlementOnPersonality): self.redirect_proto(entit.personality, skeleton.personality, indirect_attrs=False) elif isinstance(entit, EntitlementOnArchetype): self.redirect_proto(entit.archetype, skeleton.archetype, indirect_attrs=False) elif isinstance(entit, 
EntitlementOnGrn): skeleton.target_eonid = entit.target_grn.eon_id if isinstance(entit, EntitlementOnHostEnvironment): skeleton.host_environment = entit.host_environment.name if isinstance(entit, EntitlementOnLocation): self.redirect_proto(entit.location, skeleton.location, indirect_attrs=False) for cls in [ EntitlementArchetypeGrnMap, EntitlementArchetypeUserMap, EntitlementClusterGrnMap, EntitlementClusterUserMap, EntitlementGrnGrnMap, EntitlementGrnUserMap, EntitlementHostGrnMap, EntitlementHostUserMap, EntitlementPersonalityGrnMap, EntitlementPersonalityUserMap, ]: ObjectFormatter.handlers[cls] = EntitlementFormatter()
37.18543
79
0.655565
ccf83380f75272da17e827a8354142f3491d9b15
1,618
py
Python
tests/test_fixture.py
macneiln/py4web
ed50294d650fb466a9a06c26b8f311091b2d0035
[ "BSD-3-Clause" ]
133
2019-07-24T11:32:34.000Z
2022-03-25T02:43:55.000Z
tests/test_fixture.py
macneiln/py4web
ed50294d650fb466a9a06c26b8f311091b2d0035
[ "BSD-3-Clause" ]
396
2019-07-24T06:30:19.000Z
2022-03-24T07:59:07.000Z
tests/test_fixture.py
macneiln/py4web
ed50294d650fb466a9a06c26b8f311091b2d0035
[ "BSD-3-Clause" ]
159
2019-07-24T11:32:37.000Z
2022-03-28T15:17:05.000Z
from types import SimpleNamespace import pytest import threading from py4web.core import Fixture result = {'seq': []} def run_thread(func, *a): t = threading.Thread(target=func, args=a) return t class Foo(Fixture): def on_request(self): self._safe_local = SimpleNamespace() @property def bar(self): return self._safe_local.a @bar.setter def bar(self, a): self._safe_local.a = a foo = Foo() def before_request(): Fixture.__init_request_ctx__() @pytest.fixture def init_foo(): def init(key, a, evnt_done=None, evnt_play=None): result['seq'].append(key) before_request() foo.on_request() foo.bar = a evnt_done and evnt_done.set() evnt_play and evnt_play.wait() result[key] = foo.bar return foo return init def test_fixtute_local_storage(init_foo): assert init_foo('t1', 'a1') is foo evnt_done = threading.Event() evnt_play = threading.Event() t2 = run_thread(init_foo, 't2', 'a2', evnt_done, evnt_play) t3 = run_thread(init_foo, 't3', 'a3', None, None) t2.start() evnt_done.wait() t3.start() t3.join() evnt_play.set() t2.join() assert foo.bar == 'a1' assert result['t2'] == 'a2' assert result['t3'] == 'a3' assert ','.join(result['seq']) == 't1,t2,t3' def test_fixtute_error(): before_request() # attempt to access _safe_local prop without on_request-call with pytest.raises(RuntimeError) as err: foo.bar assert 'py4web hint' in err.value.args[0] assert 'Foo object' in err.value.args[0]
22.164384
64
0.631644
ccf92b8e5eba6aedbf6d4f91a3902a09d0c24f3f
13,049
py
Python
Scripts/simulation/objects/components/object_inventory_component.py
velocist/TS4CheatsInfo
b59ea7e5f4bd01d3b3bd7603843d525a9c179867
[ "Apache-2.0" ]
null
null
null
Scripts/simulation/objects/components/object_inventory_component.py
velocist/TS4CheatsInfo
b59ea7e5f4bd01d3b3bd7603843d525a9c179867
[ "Apache-2.0" ]
null
null
null
Scripts/simulation/objects/components/object_inventory_component.py
velocist/TS4CheatsInfo
b59ea7e5f4bd01d3b3bd7603843d525a9c179867
[ "Apache-2.0" ]
null
null
null
# uncompyle6 version 3.7.4 # Python bytecode 3.7 (3394) # Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)] # Embedded file name: T:\InGame\Gameplay\Scripts\Server\objects\components\object_inventory_component.py # Compiled at: 2020-10-06 03:00:48 # Size of source mod 2**32: 16791 bytes from animation.posture_manifest import AnimationParticipant from event_testing.resolver import DoubleObjectResolver from objects.components import componentmethod, types from objects.components.get_put_component_mixin import GetPutComponentMixin from objects.components.inventory import InventoryComponent from objects.components.inventory_enums import InventoryType from objects.components.inventory_item_trigger import ItemStateTrigger from objects.components.inventory_owner_tuning import InventoryTuning from objects.components.state import ObjectStateValue from objects.object_enums import ItemLocation, ResetReason from objects.system import create_object from postures.posture_specs import PostureSpecVariable from sims4.tuning.tunable import TunableList, TunableReference, TunableEnumEntry, Tunable, OptionalTunable, TunableTuple from statistics.statistic import Statistic import services, sims4.resources logger = sims4.log.Logger('Inventory', default_owner='tingyul') class ObjectInventoryComponent(GetPutComponentMixin, InventoryComponent, component_name=types.INVENTORY_COMPONENT): DEFAULT_OBJECT_INVENTORY_AFFORDANCES = TunableList(TunableReference(description='\n Affordances for all object inventories.\n ', manager=(services.get_instance_manager(sims4.resources.Types.INTERACTION)))) FACTORY_TUNABLES = {'description':'\n Generate an object inventory for this object\n ', 'inventory_type':TunableEnumEntry(description='\n Inventory Type must be set for the object type you add this for.\n ', tunable_type=InventoryType, default=InventoryType.UNDEFINED, invalid_enums=( InventoryType.UNDEFINED, InventoryType.SIM)), 
'visible':Tunable(description='\n If this inventory is visible to player.', tunable_type=bool, default=True), 'starting_objects':TunableList(description='\n Objects in this list automatically populate the inventory when its\n owner is created. Currently, to keep the game object count down, an\n object will not be added if the object inventory already has\n another object of the same type.', tunable=TunableReference(manager=(services.definition_manager()), description='Objects to populate inventory with.', pack_safe=True)), 'purchasable_objects':OptionalTunable(description='\n If this list is enabled, an interaction to buy the purchasable\n objects through a dialog picker will show on the inventory object.\n \n Example usage: a list of books for the bookshelf inventory.\n ', tunable=TunableTuple(show_description=Tunable(description='\n Toggles whether the object description should show in the \n purchase picker.\n ', tunable_type=bool, default=False), objects=TunableList(description='\n A list of object definitions that can be purchased.\n ', tunable=TunableReference(manager=(services.definition_manager()), description='')))), 'purge_inventory_state_triggers':TunableList(description='\n Trigger the destruction of all inventory items if the inventory owner hits\n any of the tuned state values.\n \n Only considers state-values present at and after zone-load finalize (ignores\n default values that change during load based on state triggers, for example). 
\n ', tunable=ObjectStateValue.TunableReference(description='\n The state value of the owner that triggers inventory item destruction.\n ')), 'score_contained_objects_for_autonomy':Tunable(description='\n Whether or not to score for autonomy any objects contained in this object.', tunable_type=bool, default=True), 'item_state_triggers':TunableList(description="\n The state triggers to modify inventory owner's state value based on\n inventory items states.\n ", tunable=ItemStateTrigger.TunableFactory()), 'allow_putdown_in_inventory':Tunable(description="\n This inventory allows Sims to put objects away into it, such as books\n or other carryables. Ex: mailbox has an inventory but we don't want\n Sims putting away items in the inventory.", tunable_type=bool, default=True), 'test_set':OptionalTunable(description='\n If enabled, the ability to pick up items from and put items in this\n object is gated by this test.\n ', tunable=TunableReference(manager=(services.get_instance_manager(sims4.resources.Types.SNIPPET)), class_restrictions=('TestSetInstance', ))), 'count_statistic':OptionalTunable(description='\n A statistic whose value will be the number of objects in this\n inventory. It will automatically be added to the object owning this\n type of component.\n ', tunable=Statistic.TunableReference()), 'return_owned_objects':Tunable(description="\n If enabled, inventory objects will return to their household\n owner's inventory when this object is destroyed off lot. This is\n because build buy can undo actions on lot and cause object id\n collisions.\n \n We first consider the closest instanced Sims, and finally move to\n the household inventory if we can't move to a Sim's inventory.\n ", tunable_type=bool, default=False), '_use_top_item_tooltip':Tunable(description="\n If checked, this inventory would use the top item's tooltip as its\n own tooltip. 
\n ", tunable_type=bool, default=False)} def __init__(self, owner, inventory_type, visible, starting_objects, purchasable_objects, purge_inventory_state_triggers, score_contained_objects_for_autonomy, item_state_triggers, allow_putdown_in_inventory, test_set, count_statistic, return_owned_objects, _use_top_item_tooltip, **kwargs): (super().__init__)(owner, **kwargs) self._inventory_type = inventory_type self.visible = visible self.starting_objects = starting_objects self.purchasable_objects = purchasable_objects self.purge_inventory_state_triggers = purge_inventory_state_triggers self.score_contained_objects_for_autonomy = score_contained_objects_for_autonomy self.item_state_triggers = item_state_triggers self.allow_putdown_in_inventory = allow_putdown_in_inventory self.test_set = test_set self.count_statistic = count_statistic self.return_owned_objects = return_owned_objects self._use_top_item_tooltip = _use_top_item_tooltip @property def inventory_type(self): return self._inventory_type @property def default_item_location(self): return ItemLocation.OBJECT_INVENTORY @componentmethod def get_inventory_access_constraint(self, sim, is_put, carry_target, use_owner_as_target_for_resolver=False): if use_owner_as_target_for_resolver: def constraint_resolver(animation_participant, default=None): if animation_participant in (AnimationParticipant.SURFACE, PostureSpecVariable.SURFACE_TARGET, AnimationParticipant.TARGET, PostureSpecVariable.INTERACTION_TARGET): return self.owner return default else: constraint_resolver = None return self._get_access_constraint(sim, is_put, carry_target, resolver=constraint_resolver) @componentmethod def get_inventory_access_animation(self, *args, **kwargs): return (self._get_access_animation)(*args, **kwargs) @property def should_score_contained_objects_for_autonomy(self): return self.score_contained_objects_for_autonomy @property def use_top_item_tooltip(self): return self._use_top_item_tooltip def _get_inventory_count_statistic(self): 
return self.count_statistic def on_add(self): for trigger in self.item_state_triggers: self.add_state_trigger(trigger(self)) super().on_add() def on_reset_component_get_interdependent_reset_records(self, reset_reason, reset_records): if reset_reason == ResetReason.BEING_DESTROYED: if not services.current_zone().is_zone_shutting_down: if not self.is_shared_inventory: if self.return_owned_objects: if not self.owner.is_on_active_lot(): household_manager = services.household_manager() objects_to_transfer = list(iter(self)) for obj in objects_to_transfer: household_id = obj.get_household_owner_id() if household_id is not None: household = household_manager.get(household_id) if household is not None: household.move_object_to_sim_or_household_inventory(obj) super().on_reset_component_get_interdependent_reset_records(reset_reason, reset_records) def on_post_bb_fixup(self): self._add_starting_objects() def _add_starting_objects(self): for definition in self.starting_objects: if self.has_item_with_definition(definition): continue new_object = create_object(definition, loc_type=(ItemLocation.OBJECT_INVENTORY)) if new_object is None: logger.error('Failed to create object {}', definition) continue new_object.set_household_owner_id(self.owner.get_household_owner_id()) if not self.player_try_add_object(new_object): logger.error('Failed to add object {} to inventory {}', new_object, self) new_object.destroy(source=(self.owner), cause='Failed to add starting object to inventory.') continue def component_interactable_gen(self): yield self def component_super_affordances_gen(self, **kwargs): if self.visible: for affordance in self.DEFAULT_OBJECT_INVENTORY_AFFORDANCES: yield affordance def _can_access(self, sim): if self.test_set is not None: resolver = DoubleObjectResolver(sim, self.owner) result = self.test_set(resolver) if not result: return False return True @componentmethod def can_access_for_pickup(self, sim): if not self._can_access(sim): return False if 
any((self.owner.state_value_active(value) for value in InventoryTuning.INVALID_ACCESS_STATES)): return False return True @componentmethod def can_access_for_putdown(self, sim): if not self.allow_putdown_in_inventory: return False else: return self._can_access(sim) or False return True def _check_state_value_for_purge(self, state_value): return state_value in self.purge_inventory_state_triggers def _purge_inventory_from_state_change(self, new_value): if not self._check_state_value_for_purge(new_value): return else: current_zone = services.current_zone() if current_zone is None: return return current_zone.zone_spin_up_service.is_finished or None self.purge_inventory() def on_state_changed(self, state, old_value, new_value, from_init): if self.purge_inventory_state_triggers: if not from_init: self._purge_inventory_from_state_change(new_value) def _purge_inventory_from_load_finalize(self): owner_state_component = self.owner.state_component if owner_state_component is None: logger.error('Attempting to purge an inventory based on state-triggers but the owner ({}) has no state component. Purge fails.', self.owner) return for active_state_value in owner_state_component.values(): if self._check_state_value_for_purge(active_state_value): self.purge_inventory() return def on_finalize_load(self): if self.purge_inventory_state_triggers: self._purge_inventory_from_load_finalize()
58.515695
486
0.683884
ccf98db2c183a542430a289ff4949ad327d07cde
786
py
Python
sqs_consumer/management/commands/process_queue.py
guilhermebferreira/sqs-consumer
30e2a636219b7784e43d851570255193e258678d
[ "MIT" ]
null
null
null
sqs_consumer/management/commands/process_queue.py
guilhermebferreira/sqs-consumer
30e2a636219b7784e43d851570255193e258678d
[ "MIT" ]
null
null
null
sqs_consumer/management/commands/process_queue.py
guilhermebferreira/sqs-consumer
30e2a636219b7784e43d851570255193e258678d
[ "MIT" ]
null
null
null
from __future__ import absolute_import, unicode_literals from django.core.management import BaseCommand, CommandError from sqs_consumer.worker.service import WorkerService class Command(BaseCommand): help = 'Command to process tasks from one or more SQS queues' def add_arguments(self, parser): parser.add_argument('--queues', '-q', dest='queue_names', help='Name of queues to process, separated by commas') def handle(self, *args, **options): if not options['queue_names']: raise CommandError('Queue names (--queues) not specified') queue_names = [queue_name.rstrip() for queue_name in options['queue_names'].split(',')] WorkerService().process_queues(queue_names)
34.173913
95
0.667939
ccfb0220e26dd4f7df5cf240f68d1efa01669ca5
212
py
Python
helper_functions.py
lindsayad/python
4b63a8b02de6a7c0caa7bb770f3f22366e066a7f
[ "MIT" ]
null
null
null
helper_functions.py
lindsayad/python
4b63a8b02de6a7c0caa7bb770f3f22366e066a7f
[ "MIT" ]
null
null
null
helper_functions.py
lindsayad/python
4b63a8b02de6a7c0caa7bb770f3f22366e066a7f
[ "MIT" ]
null
null
null
import numpy as np def indep_array(start, finish, num_steps): x = np.zeros(num_steps) for i in range(0, num_steps): x[i] = start * ((finish / start) ** (1. / (num_steps - 1.))) ** i return x
26.5
73
0.584906
ccfb1aaf66833ea1e34ee0d93d00ff4476230fb8
4,890
py
Python
tests/test_filesystem_paths.py
NinjasCL-archive/masonite-fs
333c11d19927f9cf371d12bb87af88b0ca3dd698
[ "MIT" ]
2
2018-08-18T01:15:33.000Z
2018-08-18T17:28:08.000Z
tests/test_filesystem_paths.py
NinjasCL-archive/masonite-fs
333c11d19927f9cf371d12bb87af88b0ca3dd698
[ "MIT" ]
null
null
null
tests/test_filesystem_paths.py
NinjasCL-archive/masonite-fs
333c11d19927f9cf371d12bb87af88b0ca3dd698
[ "MIT" ]
null
null
null
# coding: utf-8 from the import expect from filesystem import paths from filesystem.paths import root class TestFilesystemPaths: def setup_method(self): pass def test_that_root_exists(self): expect(paths.ROOT).to.be.NOT.empty def test_that_app_is_correct(self): expect(paths.APP).to.be.eq('/app') def test_that_app_http_is_correct(self): expect(paths.APP_HTTP).to.be.eq('/app/http') def test_that_app_http_controllers_is_correct(self): expect(paths.APP_HTTP_CONTROLLERS).to.be.eq('/app/http/controllers') def test_that_app_http_middleware_is_correct(self): expect(paths.APP_HTTP_MIDDLEWARE).to.be.eq('/app/http/middleware') def test_that_app_providers_is_correct(self): expect(paths.APP_PROVIDERS).to.be.eq('/app/providers') def test_that_bootstrap_is_correct(self): expect(paths.BOOTSTRAP).to.be.eq('/bootstrap') def test_that_bootstrap_cache_is_correct(self): expect(paths.BOOTSTRAP_CACHE).to.be.eq('/bootstrap/cache') def test_that_config_is_correct(self): expect(paths.CONFIG).to.be.eq('/config') def test_that_databases_is_correct(self): expect(paths.DATABASES).to.be.eq('/databases') def test_that_databases_migrations_is_correct(self): expect(paths.DATABASES_MIGRATIONS).to.be.eq('/databases/migrations') def test_that_resources_is_correct(self): expect(paths.RESOURCES).to.be.eq('/resources') def test_that_resources_templates_is_correct(self): expect(paths.RESOURCES_TEMPLATES).to.be.eq('/resources/templates') def test_that_resources_snippets_is_correct(self): expect(paths.RESOURCES_SNIPPETS).to.be.eq('/resources/snippets') def test_that_routes_is_correct(self): expect(paths.ROUTES).to.be.eq('/routes') def test_that_storage_is_correct(self): expect(paths.STORAGE).to.be.eq('/storage') def test_that_storage_compiled_is_correct(self): expect(paths.STORAGE_COMPILED).to.be.eq('/storage/compiled') def test_that_storage_static_is_correct(self): expect(paths.STORAGE_STATIC).to.be.eq('/storage/static') def test_that_storage_uploads_is_correct(self): 
expect(paths.STORAGE_UPLOADS).to.be.eq('/storage/uploads') def test_that_tests_is_correct(self): expect(paths.TESTS).to.be.eq('/tests') # root paths def test_that_root_app_is_correct(self): expect(root.APP).to.be.eq(paths.ROOT + paths.APP) def test_that_root_app_http_is_correct(self): expect(root.APP_HTTP).to.be.eq(paths.ROOT + paths.APP_HTTP) def test_that_root_app_http_controllers_is_correct(self): expect(root.APP_HTTP_CONTROLLERS).to.be.eq(paths.ROOT + paths.APP_HTTP_CONTROLLERS) def test_that_root_app_http_middleware_is_correct(self): expect(root.APP_HTTP_MIDDLEWARE).to.be.eq(paths.ROOT + paths.APP_HTTP_MIDDLEWARE) def test_that_root_app_providers_is_correct(self): expect(root.APP_PROVIDERS).to.be.eq(paths.ROOT + paths.APP_PROVIDERS) def test_that_root_bootstrap_is_correct(self): expect(root.BOOTSTRAP).to.be.eq(paths.ROOT + paths.BOOTSTRAP) def test_that_root_bootstrap_cache_is_correct(self): expect(root.BOOTSTRAP_CACHE).to.be.eq(paths.ROOT + paths.BOOTSTRAP_CACHE) def test_that_root_config_is_correct(self): expect(root.CONFIG).to.be.eq(paths.ROOT + paths.CONFIG) def test_that_root_databases_is_correct(self): expect(root.DATABASES).to.be.eq(paths.ROOT + paths.DATABASES) def test_that_root_databases_migrations_is_correct(self): expect(root.DATABASES_MIGRATIONS).to.be.eq(paths.ROOT + paths.DATABASES_MIGRATIONS) def test_that_root_resources_is_correct(self): expect(root.RESOURCES).to.be.eq(paths.ROOT + paths.RESOURCES) def test_that_root_resources_templates_is_correct(self): expect(root.RESOURCES_TEMPLATES).to.be.eq(paths.ROOT + paths.RESOURCES_TEMPLATES) def test_that_root_resources_snippets_is_correct(self): expect(root.RESOURCES_SNIPPETS).to.be.eq(paths.ROOT + paths.RESOURCES_SNIPPETS) def test_that_root_routes_is_correct(self): expect(root.ROUTES).to.be.eq(paths.ROOT + paths.ROUTES) def test_that_root_storage_is_correct(self): expect(root.STORAGE).to.be.eq(paths.ROOT + paths.STORAGE) def test_that_root_storage_compiled_is_correct(self): 
expect(root.STORAGE_COMPILED).to.be.eq(paths.ROOT + paths.STORAGE_COMPILED) def test_that_root_storage_static_is_correct(self): expect(root.STORAGE_STATIC).to.be.eq(paths.ROOT + paths.STORAGE_STATIC) def test_that_root_storage_uploads_is_correct(self): expect(root.STORAGE_UPLOADS).to.be.eq(paths.ROOT + paths.STORAGE_UPLOADS) def test_that_root_tests_is_correct(self): expect(root.TESTS).to.be.eq(paths.ROOT + paths.TESTS)
37.045455
91
0.740082
ccfb60a5f0e99b7473379c8b27f4c338be01c980
2,222
py
Python
openpharmacophore/tests/test_zinc.py
dprada/OpenPharmacophore
bfcf4bdafd586b27a48fd5d1f13614707b5e55a8
[ "MIT" ]
2
2021-07-10T05:56:04.000Z
2021-08-04T14:56:47.000Z
openpharmacophore/tests/test_zinc.py
dprada/OpenPharmacophore
bfcf4bdafd586b27a48fd5d1f13614707b5e55a8
[ "MIT" ]
21
2021-04-27T06:05:05.000Z
2021-11-01T23:19:36.000Z
openpharmacophore/tests/test_zinc.py
dprada/OpenPharmacophore
bfcf4bdafd586b27a48fd5d1f13614707b5e55a8
[ "MIT" ]
3
2021-06-21T19:09:47.000Z
2021-07-16T01:16:27.000Z
from openpharmacophore.databases.zinc import get_zinc_urls, discretize_values import pytest @pytest.mark.parametrize("subset,mol_weight,logp,format", [ ("Drug-Like", None, None, "smi"), (None, (250, 350), (-1, 1), "smi"), (None, (365, 415), (1.5, 2.25), "smi"), ("Drug-Like", None, None, "sdf"), (None, (200, 300), (-1, 2), "sdf"), ]) def test_download_ZINC2D_smiles(subset, mol_weight, logp, format): url_list = get_zinc_urls( subset=subset, mw_range=mol_weight, logp_range=logp, file_format=format, ) if format == "smi": base_url = "http://files.docking.org/2D/" if subset == "Drug-like": assert len(url_list) == 90 * 4 * 2 assert url_list[0] == base_url + "BA/BAAA.smi" assert url_list[-1] == base_url + "JJ/JJEB.smi" elif mol_weight == (250, 350): assert len(url_list) == 12 * 4 * 2 assert url_list[0] == base_url + "BA/BAAA.smi" assert url_list[-1] == base_url + "EC/ECEB.smi" elif mol_weight == (365, 415): assert len(url_list) == 12 * 4 * 2 assert url_list[0] == base_url + "EC/ECAA.smi" assert url_list[-1] == base_url + "HE/HEEB.smi" else: base_url = "http://files.docking.org/3D/" if subset == "Drug-like": assert len(url_list) == 19420 assert url_list[0] == base_url + "JJ/EDRP/JJEDRP.xaa.sdf.gz" assert url_list[-1] == base_url + "AB/AAMM/ABAAMM.xaa.sdf.gz" elif mol_weight == (200, 300): assert len(url_list) == 3720 assert url_list[0] == base_url + "AA/AAML/AAAAML.xaa.sdf.gz" assert url_list[-1] == base_url + "DC/EDRP/DCEDRP.xaa.sdf.gz" @pytest.mark.parametrize("value,lower", [ (230, True), (484, False), (600, True) ]) def test_discretize_values(value, lower): bins = [200, 250, 300, 325, 350, 375, 400, 425, 450, 500, 550] new_value = discretize_values(value=value, bins=bins, name="Test", lower=lower) if value == 230: assert new_value == 200 elif value == 484: assert new_value == 500 else: assert new_value == 550
35.83871
83
0.564806
ccfbb57a4333021e3f1feae704010ec46562da66
3,608
py
Python
2_Regression/Energy_balance_MIMO/Python_minimize/mimo_fit.py
APMonitor/arduino
f36e65a70dd7122d1829883899e40e56bf6c4279
[ "Apache-2.0" ]
71
2017-06-09T06:15:09.000Z
2022-01-19T12:02:22.000Z
2_Regression/Energy_balance_MIMO/Python_minimize/mimo_fit.py
rezabbasi/arduino
4d9ea70688427e610228036c44560a11246930e1
[ "Apache-2.0" ]
5
2017-09-14T15:03:39.000Z
2020-04-24T01:05:03.000Z
2_Regression/Energy_balance_MIMO/Python_minimize/mimo_fit.py
rezabbasi/arduino
4d9ea70688427e610228036c44560a11246930e1
[ "Apache-2.0" ]
49
2017-05-11T04:02:25.000Z
2021-11-24T17:56:14.000Z
import numpy as np import matplotlib.pyplot as plt from scipy.integrate import odeint from scipy.optimize import minimize import pandas as pd # generate data file from TCLab or get sample data file from: # http://apmonitor.com/pdc/index.php/Main/ArduinoEstimation2 # Import data file # Column 1 = time (t) # Column 2 = input (u) # Column 3 = output (yp) data = np.loadtxt('data.txt',delimiter=',',skiprows=1) # extract data columns t = data[:,0].T Q1 = data[:,1].T Q2 = data[:,2].T T1meas = data[:,3].T T2meas = data[:,4].T # number of time points ns = len(t) # define energy balance model def heat(x,t,Q1,Q2,p): # Optimized parameters U,alpha1,alpha2 = p # Parameters Ta = 23 + 273.15 # K m = 4.0/1000.0 # kg Cp = 0.5 * 1000.0 # J/kg-K A = 10.0 / 100.0**2 # Area in m^2 As = 2.0 / 100.0**2 # Area in m^2 eps = 0.9 # Emissivity sigma = 5.67e-8 # Stefan-Boltzman # Temperature States T1 = x[0] + 273.15 T2 = x[1] + 273.15 # Heat Transfer Exchange Between 1 and 2 conv12 = U*As*(T2-T1) rad12 = eps*sigma*As * (T2**4 - T1**4) # Nonlinear Energy Balances dT1dt = (1.0/(m*Cp))*(U*A*(Ta-T1) \ + eps * sigma * A * (Ta**4 - T1**4) \ + conv12 + rad12 \ + alpha1*Q1) dT2dt = (1.0/(m*Cp))*(U*A*(Ta-T2) \ + eps * sigma * A * (Ta**4 - T2**4) \ - conv12 - rad12 \ + alpha2*Q2) return [dT1dt,dT2dt] def simulate(p): T = np.zeros((len(t),2)) T[0,0] = T1meas[0] T[0,1] = T2meas[0] T0 = T[0] for i in range(len(t)-1): ts = [t[i],t[i+1]] y = odeint(heat,T0,ts,args=(Q1[i],Q2[i],p)) T0 = y[-1] T[i+1] = T0 return T # define objective def objective(p): # simulate model Tp = simulate(p) # calculate objective obj = 0.0 for i in range(len(t)): obj += ((Tp[i,0]-T1meas[i])/T1meas[i])**2 \ +((Tp[i,1]-T2meas[i])/T2meas[i])**2 # return result return obj # Parameter initial guess U = 10.0 # Heat transfer coefficient (W/m^2-K) alpha1 = 0.0100 # Heat gain 1 (W/%) alpha2 = 0.0075 # Heat gain 2 (W/%) p0 = [U,alpha1,alpha2] # show initial objective print('Initial SSE Objective: ' + str(objective(p0))) # optimize parameters # 
bounds on variables bnds = ((2.0, 20.0),(0.005,0.02),(0.002,0.015)) solution = minimize(objective,p0,method='SLSQP',bounds=bnds) p = solution.x # show final objective print('Final SSE Objective: ' + str(objective(p))) # optimized parameter values U = p[0] alpha1 = p[1] alpha2 = p[2] print('U: ' + str(U)) print('alpha1: ' + str(alpha1)) print('alpha2: ' + str(alpha2)) # calculate model with updated parameters Ti = simulate(p0) Tp = simulate(p) # Plot results plt.figure(1) plt.subplot(3,1,1) plt.plot(t/60.0,Ti[:,0],'y:',label=r'$T_1$ initial') plt.plot(t/60.0,T1meas,'b-',label=r'$T_1$ measured') plt.plot(t/60.0,Tp[:,0],'r--',label=r'$T_1$ optimized') plt.ylabel('Temperature (degC)') plt.legend(loc='best') plt.subplot(3,1,2) plt.plot(t/60.0,Ti[:,1],'y:',label=r'$T_2$ initial') plt.plot(t/60.0,T2meas,'b-',label=r'$T_2$ measured') plt.plot(t/60.0,Tp[:,1],'r--',label=r'$T_2$ optimized') plt.ylabel('Temperature (degC)') plt.legend(loc='best') plt.subplot(3,1,3) plt.plot(t/60.0,Q1,'g-',label=r'$Q_1$') plt.plot(t/60.0,Q2,'k--',label=r'$Q_2$') plt.ylabel('Heater Output') plt.legend(loc='best') plt.xlabel('Time (min)') plt.show()
26.335766
62
0.560421
ccfbcfc80bfd4176875b1439857777b6f9e25659
4,052
py
Python
Day12/Part2.py
PeterDowdy/AdventOfCode2019
93078b5fc2ef78cdb1b860a3535839dc718c9f5f
[ "MIT" ]
null
null
null
Day12/Part2.py
PeterDowdy/AdventOfCode2019
93078b5fc2ef78cdb1b860a3535839dc718c9f5f
[ "MIT" ]
null
null
null
Day12/Part2.py
PeterDowdy/AdventOfCode2019
93078b5fc2ef78cdb1b860a3535839dc718c9f5f
[ "MIT" ]
null
null
null
from math import gcd moons = [(-16, -1, -12), (0, -4, -17), (-11, 11, 0), (2, 2, -6)] velocities = [(0,0,0),(0,0,0),(0,0,0),(0,0,0)] x_positions = set() y_positions = set() z_positions = set() x_positions.add((moons[0][0],moons[1][0],moons[2][0],moons[3][0],velocities[0][0],velocities[1][0],velocities[2][0],velocities[3][0])) y_positions.add((moons[0][1],moons[1][1],moons[2][1],moons[3][1],velocities[0][1],velocities[1][2],velocities[2][1],velocities[3][1])) z_positions.add((moons[0][2],moons[1][2],moons[2][2],moons[3][2],velocities[0][2],velocities[1][0],velocities[2][2],velocities[3][2])) x_sequences = {(moons[0][0],moons[1][0],moons[2][0],moons[3][0],velocities[0][0],velocities[1][0],velocities[2][0],velocities[3][0]): 0} y_sequences = {(moons[0][1],moons[1][1],moons[2][1],moons[3][1],velocities[0][1],velocities[1][1],velocities[2][1],velocities[3][1]): 0} z_sequences = {(moons[0][2],moons[1][2],moons[2][2],moons[3][2],velocities[0][2],velocities[1][2],velocities[2][2],velocities[3][2]): 0} ctr = 0 def step(): for i in range(0,4): moon = moons[i] gravity_delta = (sum([-1 if other_moon[0] < moon[0] else 1 if other_moon[0] > moon[0] else 0 for other_moon in moons]), sum([-1 if other_moon[1] < moon[1] else 1 if other_moon[1] > moon[1] else 0 for other_moon in moons]), sum([-1 if other_moon[2] < moon[2] else 1 if other_moon[2] > moon[2] else 0 for other_moon in moons]) ) velocity = velocities[i] velocities[i] = (velocity[0]+gravity_delta[0],velocity[1]+gravity_delta[1],velocity[2]+gravity_delta[2]) for i in range(0,4): moon = moons[i] velocity = velocities[i] moons[i] = (moon[0]+velocity[0],moon[1]+velocity[1],moon[2]+velocity[2]) x_cycle_length = 0 y_cycle_length = 0 z_cycle_length = 0 while True: ctr += 1 step() if (moons[0][0],moons[1][0],moons[2][0],moons[3][0],velocities[0][0],velocities[1][0],velocities[2][0],velocities[3][0]) in x_positions: x_cycle_length = 
ctr-x_sequences[(moons[0][0],moons[1][0],moons[2][0],moons[3][0],velocities[0][0],velocities[1][0],velocities[2][0],velocities[3][0])] pass if (moons[0][1],moons[1][1],moons[2][1],moons[3][1],velocities[0][1],velocities[1][2],velocities[2][1],velocities[3][1]) in y_positions: y_cycle_length = ctr-y_sequences[(moons[0][1],moons[1][1],moons[2][1],moons[3][1],velocities[0][1],velocities[1][2],velocities[2][1],velocities[3][1])] pass if (moons[0][2],moons[1][2],moons[2][2],moons[3][2],velocities[0][2],velocities[1][0],velocities[2][2],velocities[3][2]) in z_positions: z_cycle_length = ctr-z_sequences[(moons[0][2],moons[1][2],moons[2][2],moons[3][2],velocities[0][2],velocities[1][0],velocities[2][2],velocities[3][2])] pass if x_cycle_length != 0 and y_cycle_length != 0 and z_cycle_length != 0: break x_positions.add((moons[0][0],moons[1][0],moons[2][0],moons[3][0],velocities[0][0],velocities[1][0],velocities[2][0],velocities[3][0])) y_positions.add((moons[0][1],moons[1][1],moons[2][1],moons[3][1],velocities[0][1],velocities[1][2],velocities[2][1],velocities[3][1])) z_positions.add((moons[0][2],moons[1][2],moons[2][2],moons[3][2],velocities[0][2],velocities[1][0],velocities[2][2],velocities[3][2])) x_sequences[(moons[0][0],moons[1][0],moons[2][0],moons[3][0],velocities[0][0],velocities[1][0],velocities[2][0],velocities[3][0])] = ctr y_sequences[(moons[0][1],moons[1][1],moons[2][1],moons[3][1],velocities[0][1],velocities[1][2],velocities[2][1],velocities[3][1])] = ctr z_sequences[(moons[0][2],moons[1][2],moons[2][2],moons[3][2],velocities[0][2],velocities[1][0],velocities[2][2],velocities[3][2])] = ctr print('Cycles found:') print(f'x lasts {x_cycle_length}') print(f'y lasts {y_cycle_length}') print(f'z lasts {z_cycle_length}') print((x_cycle_length,y_cycle_length,z_cycle_length)) def compute_lcm(x, y): return (x*y)/gcd(x,y) print(int(compute_lcm(x_cycle_length, int(compute_lcm(y_cycle_length, z_cycle_length)))))
57.070423
159
0.637957
ccfc779a1ced7c9e46cfbe2591e7ace76abaf9a2
643
py
Python
tests/test.py
y95847frank/AutomatedTicketBot
66754758430c7a1240b69259e32fcb452639c134
[ "MIT" ]
1
2021-03-26T05:07:20.000Z
2021-03-26T05:07:20.000Z
tests/test.py
y95847frank/AutomatedTicketBot
66754758430c7a1240b69259e32fcb452639c134
[ "MIT" ]
null
null
null
tests/test.py
y95847frank/AutomatedTicketBot
66754758430c7a1240b69259e32fcb452639c134
[ "MIT" ]
null
null
null
import AutoTicketsBot as tBot configDestination = 'var/config.yml' args = tBot.addArgs() config = tBot.configRead(configDestination) if tBot.configWrite(configDestination, args, config) is True: print("Successfully store new config to {}".format(configDestination)) ticketsBot = tBot.AutoTicketsBot(config) #scheduleBot(ticketsBot, config['Config']['startTime']) try: tBot.websiteSignIn(ticketsBot, retryCounter=3) tBot.buyTickets(ticketsBot) tBot.notifyUser('AutoTicketsBot Notification', 'Got tickets!!!!!') tBot.terminateBot(ticketsBot, waitTime=900) except RuntimeError as e: tBot.terminateBot(ticketsBot, waitTime=0) print(e)
29.227273
71
0.785381
ccfc9563a897ecbadd4690eb294f53f692567173
1,834
py
Python
03/main.py
cjcbusatto/advent-of-code-2020
7868a6dfe9233809e47c27dd2afd2a287fbf4906
[ "MIT" ]
null
null
null
03/main.py
cjcbusatto/advent-of-code-2020
7868a6dfe9233809e47c27dd2afd2a287fbf4906
[ "MIT" ]
null
null
null
03/main.py
cjcbusatto/advent-of-code-2020
7868a6dfe9233809e47c27dd2afd2a287fbf4906
[ "MIT" ]
null
null
null
def get_map_from_input(input_location): f = open(input_location, 'r') input_map = f.read().split('\n') f.close() lines = len(input_map) columns = len(input_map[0]) print(f"Original map = {lines} x {columns}") extended_map = [] for line in input_map: extended_map.append(line * 200) print( f"Extended map = {str(len(extended_map))} x {str(len(extended_map[0]))}") return extended_map def traverse_map_counting_trees(extended_map, right, down): squares = [] i = 0 j = 0 while i < len(extended_map): if i == 0: squares.append(extended_map[i][j]) else: try: squares.append(extended_map[i][(j * right)]) except: print("Error") break i += down j+= 1 tree_counter = 0 for char in squares: if char == '#': tree_counter += 1 return tree_counter extended_map = get_map_from_input('input') number_of_threes = traverse_map_counting_trees(extended_map, 1, 1) print(f"1x1 => {number_of_threes}") number_of_threes = traverse_map_counting_trees(extended_map, 3, 1) print(f"3x1 => {number_of_threes}") number_of_threes = traverse_map_counting_trees(extended_map, 5, 1) print(f"5x1 => {number_of_threes}") number_of_threes = traverse_map_counting_trees(extended_map, 7, 1) print(f"7x1 => {number_of_threes}") number_of_threes = traverse_map_counting_trees(extended_map, 1, 2) print(f"1x2 => {number_of_threes}") total = traverse_map_counting_trees(extended_map, 1, 1) * traverse_map_counting_trees(extended_map, 3, 1) * traverse_map_counting_trees( extended_map, 5, 1) * traverse_map_counting_trees(extended_map, 7, 1) * traverse_map_counting_trees(extended_map, 1, 2) print(f"Numbers multiplied = {total}")
26.970588
136
0.657579
ccfca93576d38c7241f634e36cb1fb7366307974
343
py
Python
{{cookiecutter.project_slug}}/sources/conftest.py
AsheKR/cookiecutter-django
d0402aefcc2eeaffa747faa7ef50ad97286bfcca
[ "BSD-3-Clause" ]
null
null
null
{{cookiecutter.project_slug}}/sources/conftest.py
AsheKR/cookiecutter-django
d0402aefcc2eeaffa747faa7ef50ad97286bfcca
[ "BSD-3-Clause" ]
null
null
null
{{cookiecutter.project_slug}}/sources/conftest.py
AsheKR/cookiecutter-django
d0402aefcc2eeaffa747faa7ef50ad97286bfcca
[ "BSD-3-Clause" ]
null
null
null
import os import sys import pytest from django.contrib.auth import get_user_model from users.tests.factories import UserFactory sys.path.append(os.path.join(os.path.dirname(__file__), 'app')) User = get_user_model() @pytest.fixture(autouse=True) def enable_db(db): pass @pytest.fixture def user() -> User: return UserFactory()
15.590909
63
0.752187
ccfd2ea93b839a028809fe07b95b0245dfeb7f66
191
py
Python
src/numba_vectormath.py
arkavo/Maxwell-ecosystem
d956ea892ebe581b8b2c0325df59fa58a6d5db9a
[ "Apache-2.0" ]
null
null
null
src/numba_vectormath.py
arkavo/Maxwell-ecosystem
d956ea892ebe581b8b2c0325df59fa58a6d5db9a
[ "Apache-2.0" ]
null
null
null
src/numba_vectormath.py
arkavo/Maxwell-ecosystem
d956ea892ebe581b8b2c0325df59fa58a6d5db9a
[ "Apache-2.0" ]
null
null
null
import numpy as np import numba from numba import jit @jit(nopython=True) def distance(a,b): d = 0 for i in range(max(len(a),len(b))): d += (a[i] - b[i])**2 return d**0.5
19.1
39
0.581152
ccfe49b139702ec62120531b875985143b174591
751
py
Python
kattis/k_ones.py
ivanlyon/exercises
0792976ae2acb85187b26a52812f9ebdd119b5e8
[ "MIT" ]
null
null
null
kattis/k_ones.py
ivanlyon/exercises
0792976ae2acb85187b26a52812f9ebdd119b5e8
[ "MIT" ]
null
null
null
kattis/k_ones.py
ivanlyon/exercises
0792976ae2acb85187b26a52812f9ebdd119b5e8
[ "MIT" ]
null
null
null
''' Smallest factor to reach a number composed of digit '1' Status: Accepted ''' ############################################################################### def main(): """Read input and print output""" while True: try: number = int(input()) except EOFError: break if number == 1: print('1') else: assert number % 2 != 0 assert number % 5 != 0 digits, remainder = 1, 1 while remainder: remainder = (remainder * 10 + 1) % number digits += 1 print(digits) ############################################################################### if __name__ == '__main__': main()
22.757576
79
0.370173
ccfe72e943c07b30fc915317d0d3a67d9c72f9cc
2,190
py
Python
back/api/message.py
LyonParapente/EventOrganizer
b263c2ce61b6ad1d6c414eb388ca5ee9492a9b73
[ "MIT" ]
4
2018-07-29T10:48:53.000Z
2018-08-23T13:02:15.000Z
back/api/message.py
LyonParapente/EventOrganizer
b263c2ce61b6ad1d6c414eb388ca5ee9492a9b73
[ "MIT" ]
7
2018-11-15T15:17:45.000Z
2021-05-11T19:58:55.000Z
back/api/message.py
LyonParapente/EventOrganizer
b263c2ce61b6ad1d6c414eb388ca5ee9492a9b73
[ "MIT" ]
null
null
null
from flask import request, abort from flask_restful_swagger_3 import Resource, swagger from flask_jwt_extended import jwt_required, get_jwt_identity, get_jwt from models.message import Message, MessageCreate from database.manager import db from emails import send_new_message class MessageAPICreate(Resource): @jwt_required() @swagger.doc({ 'tags': ['message'], 'security': [ {'BearerAuth': []} ], 'requestBody': { 'required': True, 'content': { 'application/json': { 'schema': Message } } }, 'responses': { '201': { 'description': 'Created message', 'content': { 'application/json': { 'schema': Message } } }, '401': { 'description': 'Not authenticated' }, '403': { 'description': 'Update forbidden' } } }) def post(self): """Create a message""" args = request.json author_id = get_jwt_identity() args['author_id'] = author_id try: # Validate request body with schema model message = MessageCreate(**args) except ValueError as e: abort(400, e.args[0]) props = None editLatest = message['editLatest'] del message['editLatest'] if editLatest: last_msg = db.get_last_message(message['event_id']) if last_msg and last_msg['author_id'] == author_id: nb = db.edit_message(last_msg['id'], message['comment'], last_msg['author_id'], last_msg['event_id']) if nb == 1: last_msg['comment'] = message['comment'] props = last_msg else: abort(500, 'Error updating comment') else: abort(403, 'Can only update the latest comment if it is yours') else: try: props = db.insert_message(**message) except Exception as e: abort(500, e.args[0]) # Email if not editLatest: claims = get_jwt() author_name = claims['firstname'] + ' ' + claims['lastname'] send_new_message(author_name, author_id, props['event_id'], props['comment']) return Message(**props), 201, {'Location': request.path + '/' + str(props['id'])}
27.721519
109
0.590411
ccff8fb01ea3f497743ce74c1e9b8975a96ada59
5,544
py
Python
PYTHON/dgemm_omp.py
dbaaha/Kernels
232fc44fc9427dd7b56862cec2d46296c467b4e8
[ "BSD-3-Clause" ]
346
2015-06-07T19:55:15.000Z
2022-03-18T07:55:10.000Z
PYTHON/dgemm_omp.py
dbaaha/Kernels
232fc44fc9427dd7b56862cec2d46296c467b4e8
[ "BSD-3-Clause" ]
202
2015-06-16T15:28:05.000Z
2022-01-06T18:26:13.000Z
PYTHON/dgemm_omp.py
dbaaha/Kernels
232fc44fc9427dd7b56862cec2d46296c467b4e8
[ "BSD-3-Clause" ]
101
2015-06-15T22:06:46.000Z
2022-01-13T02:56:02.000Z
#!/usr/bin/env python3 # # Copyright (c) 2015, Intel Corporation # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of Intel Corporation nor the names of its # contributors may be used to endorse or promote products # derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. #******************************************************************* # # NAME: dgemm # # PURPOSE: This program tests the efficiency with which a dense matrix # dense multiplication is carried out # # USAGE: The program takes as input the matrix order, # the number of times the matrix-matrix multiplication # is carried out. 
# # <progname> <# iterations> <matrix order> # # The output consists of diagnostics to make sure the # algorithm worked, and of timing statistics. # # HISTORY: Written by Rob Van der Wijngaart, February 2009. # Converted to Python by Jeff Hammond, February 2016. # PyOMP support, ave+std_dev by Tim Mattson, May 2021 # ******************************************************************* import sys from numba import njit from numba.openmp import openmp_context as openmp from numba.openmp import omp_set_num_threads, omp_get_thread_num, omp_get_num_threads, omp_get_wtime import numpy as np #from time import process_time as timer #@njit(enable_ssa=False, cache=True) What does "enable_ssa" mean? @njit(fastmath=True) def dgemm(iters,order): # ******************************************************************** # ** Allocate space for the input and transpose matrix # ******************************************************************** print('inside dgemm') A = np.zeros((order,order)) B = np.zeros((order,order)) C = np.zeros((order,order)) for i in range(order): A[:,i] = float(i) B[:,i] = float(i) # print(omp_get_num_threads()) for kiter in range(0,iters+1): if kiter==1: t0 = omp_get_wtime() tSum=0.0 tsqSum=0.0 with openmp("parallel for schedule(static) private(j,k)"): for i in range(order): for k in range(order): for j in range(order): C[i][j] += A[i][k] * B[k][j] if kiter>0: tkiter = omp_get_wtime() t = tkiter - t0 tSum = tSum + t tsqSum = tsqSum+t*t t0 = tkiter dgemmAve = tSum/iters dgemmStdDev = ((tsqSum-iters*dgemmAve*dgemmAve)/(iters-1))**0.5 print('finished with computations') # ******************************************************************** # ** Analyze and output results. 
# ******************************************************************** checksum = 0.0; for i in range(order): for j in range(order): checksum += C[i][j]; ref_checksum = order*order*order ref_checksum *= 0.25*(order-1.0)*(order-1.0) ref_checksum *= (iters+1) epsilon=1.e-8 if abs((checksum - ref_checksum)/ref_checksum) < epsilon: print('Solution validates') nflops = 2.0*order*order*order recipDiff = (1.0/(dgemmAve-dgemmStdDev) - 1.0/(dgemmAve+dgemmStdDev)) GfStdDev = 1.e-6*nflops*recipDiff/2.0 print('nflops: ',nflops) print('Rate: ',1.e-6*nflops/dgemmAve,' +/- (MF/s): ',GfStdDev) else: print('ERROR: Checksum = ', checksum,', Reference checksum = ', ref_checksum,'\n') # sys.exit("ERROR: solution did not validate") # ******************************************************************** # read and test input parameters # ******************************************************************** print('Parallel Research Kernels version ') #, PRKVERSION print('Python Dense matrix-matrix multiplication: C = A x B') if len(sys.argv) != 3: print('argument count = ', len(sys.argv)) sys.exit("Usage: ./dgemm <# iterations> <matrix order>") itersIn = int(sys.argv[1]) if itersIn < 1: sys.exit("ERROR: iterations must be >= 1") orderIn = int(sys.argv[2]) if orderIn < 1: sys.exit("ERROR: order must be >= 1") print('Number of iterations = ', itersIn) print('Matrix order = ', orderIn) dgemm(itersIn, orderIn)
37.459459
100
0.590729
ccff9565d795674cf4b93f869a86dae2c49d6c9e
1,859
py
Python
DS_Algo/quick_sort.py
YorkFish/git_study
6e023244daaa22e12b24e632e76a13e5066f2947
[ "MIT" ]
null
null
null
DS_Algo/quick_sort.py
YorkFish/git_study
6e023244daaa22e12b24e632e76a13e5066f2947
[ "MIT" ]
null
null
null
DS_Algo/quick_sort.py
YorkFish/git_study
6e023244daaa22e12b24e632e76a13e5066f2947
[ "MIT" ]
null
null
null
# coding:utf-8 # example 17: quick_sort.py import random # def quick_sort(array): # if len(array) <= 1: # return array # pivot_idx = 0 # pivot = array[pivot_idx] # less_part = [num for num in array[pivot_idx + 1:] if num <= pivot] # great_part = [num for num in array[pivot_idx + 1:] if num > pivot] # return quick_sort(less_part) + [pivot] + quick_sort(great_part) # def test_quick_sort(): # import random # array = [random.randint(1, 100) for _ in range(10)] # sorted_array = sorted(array) # my_sorted_array = quick_sort(array) # assert my_sorted_array == sorted_array def partition(array, start, stop): # [start, stop) pivot_idx = start pivot = array[pivot_idx] left = pivot_idx + 1 right = stop - 1 while left <= right: while left <= right and array[left] < pivot: left += 1 while left <= right and pivot <= array[right]: right -= 1 if left < right: array[left], array[right] = array[right], array[left] array[pivot_idx], array[right] = array[right], array[pivot_idx] return right def test_partition(): lst = [3, 1, 4, 2] assert partition(lst, 0, len(lst)) == 2 lst = [1, 2, 3, 4] assert partition(lst, 0, len(lst)) == 0 lst = [4, 3, 2, 1] assert partition(lst, 0, len(lst)) == 3 lst = [3, 5, 4, 3, 6, 7, 2, 3] assert partition(lst, 0, len(lst)) == 1 def quick_sort_inplace(array, start, stop): # [start, stop) if start < stop: pivot = partition(array, start, stop) quick_sort_inplace(array, start, pivot) quick_sort_inplace(array, pivot + 1, stop) def test_quick_sort_inplace(): seq = [random.randint(-100, 100) for _ in range(10)] sorted_seq = sorted(seq) quick_sort_inplace(seq, 0, len(seq)) assert seq == sorted_seq
27.338235
72
0.603012
ccffcaa195c026f029865dcd87b3fe8d69e0f7e3
3,129
py
Python
xquotient/test/historic/test_undefertask1to2.py
twisted/quotient
95f2515219da99a77905852bc01deeb27e93466e
[ "MIT" ]
6
2016-02-15T07:33:39.000Z
2018-12-03T17:20:58.000Z
xquotient/test/historic/test_undefertask1to2.py
DalavanCloud/quotient
95f2515219da99a77905852bc01deeb27e93466e
[ "MIT" ]
1
2021-02-18T20:01:02.000Z
2021-02-18T20:01:02.000Z
xquotient/test/historic/test_undefertask1to2.py
DalavanCloud/quotient
95f2515219da99a77905852bc01deeb27e93466e
[ "MIT" ]
4
2015-11-15T17:28:20.000Z
2018-12-03T17:20:48.000Z
from zope.interface import implements from epsilon.extime import Time from axiom.iaxiom import IScheduler from axiom.item import Item from axiom.attributes import text from axiom.test.historic.stubloader import StubbedTest from xquotient.exmess import _UndeferTask, Message, INBOX_STATUS, CLEAN_STATUS from xquotient.test.historic.stub_undefertask1to2 import FakeScheduler from xquotient.test.historic import stub_undefertask1to2 from xquotient.test.util import DummyMessageImplementationMixin class DummyMessageImplementation(Item, DummyMessageImplementationMixin): """ Satisfy the requirement imposed by this database to have an item with this type name. This is an extremely terrible hack necessitated by the use of "dummy" items in the test package which aren't actually stable. This should be avoided as much as possible, since it can easily result in tests which have mutually exclusive requirements in order to pass, and at the very least impose an excessive maintenance burden as the codebase is updated. Do not copy this hack. Do not define new schemas which might eventually require it. """ typeName = 'xquotient_test_test_workflow_dummymessageimplementation' senderInfo = text( doc=""" The sender as passed by the factory which created this implementation; used to provide a sensible implementation of relatedAddresses. """, default=None, allowNone=True) def walk(self): """ Necessary for the tests for upgrading Message to version 6. """ return () class UndeferTaskTest(StubbedTest): def setUp(self): stub_undefertask1to2.SCHEDULE_LOG = [] return StubbedTest.setUp(self) def getStatuses(self): """ @return: A C{set} of statuses for the deferred message. """ return set(self.store.findFirst(Message).iterStatuses()) def test_correctScheduling(self): """ Check that the old task has been unscheduled and the new task has been scheduled. 
""" task = self.store.findFirst(_UndeferTask) self.assertEqual(list(zip(*stub_undefertask1to2.SCHEDULE_LOG)[0]), ['unschedule', 'schedule']) self.assertEqual(stub_undefertask1to2.SCHEDULE_LOG[-1][1], task) self.assertNotEqual(stub_undefertask1to2.SCHEDULE_LOG[0][1], task) def test_notInInbox(self): """ Test that the deferred message is not in the inbox. """ stats = self.getStatuses() self.failIfIn(INBOX_STATUS, stats) def test_inAll(self): """ Test that the deferred message does appear in the "all" view. """ stats = self.getStatuses() self.failUnlessIn(CLEAN_STATUS, stats) def test_notFrozen(self): """ Test that the deferred message is not 'frozen' with L{Message.freezeStatus}. """ # NOTE: This is added as documentation, not TDD -- it passes already. for status in self.getStatuses(): self.failIf(status.startswith('.'))
33.645161
78
0.683925
69003cd799998d8973cda2d04b1c36351df5836d
2,304
py
Python
tests/test_zestimation.py
jibanCat/gpy_dla_detection
4d987adec75a417313fdc6601ee41a0ea60a0a2e
[ "MIT" ]
1
2020-07-31T01:31:52.000Z
2020-07-31T01:31:52.000Z
tests/test_zestimation.py
jibanCat/gpy_dla_detection
4d987adec75a417313fdc6601ee41a0ea60a0a2e
[ "MIT" ]
12
2020-07-20T18:55:15.000Z
2021-09-23T05:08:26.000Z
tests/test_zestimation.py
jibanCat/gpy_dla_detection
4d987adec75a417313fdc6601ee41a0ea60a0a2e
[ "MIT" ]
null
null
null
""" A test file for testing zestimation The learned file could be downloaded at [learned_zqso_only_model_outdata_full_dr9q_minus_concordance_norm_1176-1256.mat] (https://drive.google.com/file/d/1SqAU_BXwKUx8Zr38KTaA_nvuvbw-WPQM/view?usp=sharing) """ import os import re import time import numpy as np from .test_selection import filenames, z_qsos from gpy_dla_detection.read_spec import read_spec, retrieve_raw_spec from gpy_dla_detection.zqso_set_parameters import ZParameters from gpy_dla_detection.zqso_samples import ZSamples from gpy_dla_detection.zqso_gp import ZGPMAT def test_zestimation(nspec: int): filename = filenames[nspec] if not os.path.exists(filename): plate, mjd, fiber_id = re.findall( r"spec-([0-9]+)-([0-9]+)-([0-9]+).fits", filename, )[0] retrieve_raw_spec(int(plate), int(mjd), int(fiber_id)) params = ZParameters() z_qso_samples = ZSamples(params) wavelengths, flux, noise_variance, pixel_mask = read_spec(filename) z_qso_gp = ZGPMAT( params, z_qso_samples, learned_file="data/dr12q/processed/learned_zqso_only_model_outdata_full_dr9q_minus_concordance_norm_1176-1256.mat", ) tic = time.time() z_qso_gp.inference_z_qso(wavelengths, flux, noise_variance, pixel_mask) print("Z True : {:.3g}".format(z_qsos[nspec])) toc = time.time() print("spent {} mins; {} seconds".format((toc - tic) // 60, (toc - tic) % 60)) return z_qso_gp.z_map, z_qsos[nspec] def test_batch(num_quasars: int = 100): all_z_diffs = np.zeros((num_quasars,)) for nspec in range(num_quasars): z_map, z_true = test_zestimation(nspec) z_diff = z_map - z_true print("[Info] z_diff = z_map - z_true = {:.8g}".format(z_diff)) all_z_diffs[nspec] = z_diff print("[Info] abs(z_diff) < 0.5 = {:.4g}".format(accuracy(all_z_diffs, 0.5))) print("[Info] abs(z_diff) < 0.05 = {:.4g}".format(accuracy(all_z_diffs, 0.05))) # we got ~99% accuracy in https://arxiv.org/abs/2006.07343 # so at least we need to ensure ~98% here assert accuracy(all_z_diffs, 0.5) > 0.98 def accuracy(z_diff: np.ndarray, z_thresh: float): 
num_quasars = z_diff.shape[0] corrects = (np.abs(z_diff) < z_thresh).sum() return corrects / num_quasars
29.538462
123
0.69401
6901b7acffb34f8f8d84f5944353762bec3fbb2a
2,981
py
Python
tests/test_xml_parser_quart.py
criteo/quart
7d018837d7b2f372da3fa83b1b78e4067d89557a
[ "Apache-2.0" ]
1
2017-12-07T11:37:51.000Z
2017-12-07T11:37:51.000Z
tests/test_xml_parser_quart.py
muneebirfan/quart
7d018837d7b2f372da3fa83b1b78e4067d89557a
[ "Apache-2.0" ]
null
null
null
tests/test_xml_parser_quart.py
muneebirfan/quart
7d018837d7b2f372da3fa83b1b78e4067d89557a
[ "Apache-2.0" ]
2
2018-06-26T15:28:10.000Z
2022-02-21T11:29:35.000Z
import pytest from quart.xml_parser_quart import fusion_vulnerability_dictionaries EXPECTED_1 = \ {u'1': {'category': u'Category 1', 'consequence': u'Consequence 1', 'diagnosis': u'Diagnosis 1', 'hosts': [{'ip': u'1.1.1.1', 'name': 'host1'}, {'ip': u'2.2.2.2', 'name': 'host2'}], 'qid': u'1', 'severity': 5, 'solution': u'Solution 1', 'title': u'Vulnerability Title 1'}, u'2': {'category': u'Category 2', 'consequence': u'Consequence 2', 'diagnosis': u'Diagnosis 2', 'hosts': [{'ip': u'2.2.2.2', 'name': 'host2'}], 'qid': u'2', 'severity': 4, 'solution': u'Solution 2', 'title': u'Vulnerability Title 2'}} EXPECTED_2 = \ {u'2': {'category': u'Category 2', 'consequence': u'Consequence 2', 'diagnosis': u'Diagnosis 2', 'hosts': [{'ip': u'3.3.3.3', 'name': 'host3'}], 'qid': u'2', 'severity': 4, 'solution': u'Solution 2', 'title': u'Vulnerability Title 2'}, u'3': {'category': u'Category 3', 'consequence': u'Consequence 3', 'diagnosis': u'Diagnosis 3', 'hosts': [{'ip': u'4.4.4.4', 'name': 'host4'}], 'qid': u'3', 'severity': 3, 'solution': u'Solution 3', 'title': u'Vulnerability Title 3'}} EXPECTED_1_2= \ {u'1': {'category': u'Category 1', 'consequence': u'Consequence 1', 'diagnosis': u'Diagnosis 1', 'hosts': [{'ip': u'1.1.1.1', 'name': 'host1'}, {'ip': u'2.2.2.2', 'name': 'host2'}], 'qid': u'1', 'severity': 5, 'solution': u'Solution 1', 'title': u'Vulnerability Title 1'}, u'2': {'category': u'Category 2', 'consequence': u'Consequence 2', 'diagnosis': u'Diagnosis 2', 'hosts': [{'ip': u'2.2.2.2', 'name': 'host2'}, {'ip': u'3.3.3.3', 'name': 'host3'}], 'qid': u'2', 'severity': 4, 'solution': u'Solution 2', 'title': u'Vulnerability Title 2'}, u'3': {'category': u'Category 3', 'consequence': u'Consequence 3', 'diagnosis': u'Diagnosis 3', 'hosts': [{'ip': u'4.4.4.4', 'name': 'host4'}], 'qid': u'3', 'severity': 3, 'solution': u'Solution 3', 'title': u'Vulnerability Title 3'}} @pytest.mark.parametrize('dictionary_1, dictionary_2, fusion_dictionary', ( ({}, {}, {}), ({}, EXPECTED_1, 
EXPECTED_1), (EXPECTED_2, {}, EXPECTED_2), (EXPECTED_1, EXPECTED_1, EXPECTED_1), (EXPECTED_1, EXPECTED_2, EXPECTED_1_2), )) def test_fusion_vulnerability_dictionaries(dictionary_1, dictionary_2, fusion_dictionary): assert fusion_vulnerability_dictionaries(dictionary_1, dictionary_2) ==\ fusion_dictionary
36.353659
77
0.495136
6902524469a111f980df924fbfd0b52b4036a41b
1,651
py
Python
poker/poker_base/test.py
chitty/poker_base
913c13b89a0c4e4c0ec7e3b01495a96309a02f26
[ "MIT" ]
null
null
null
poker/poker_base/test.py
chitty/poker_base
913c13b89a0c4e4c0ec7e3b01495a96309a02f26
[ "MIT" ]
null
null
null
poker/poker_base/test.py
chitty/poker_base
913c13b89a0c4e4c0ec7e3b01495a96309a02f26
[ "MIT" ]
null
null
null
from poker import poker, kind, two_pair, hand_rank, card_ranks, best_hand def test(): "Test cases for the functions in poker program" sf = "6C 7C 8C 9C TC".split() # Straight Flush fk = "9D 9H 9S 9C 7D".split() # Four of a Kind fh = "TD TC TH 7C 7D".split() # Full House tp = "5S 5D AC AS KS".split() # Two Pair s1 = "AC 2S 3C 4D 5D".split() # A-5 Straight s2 = "2S 3C 4D 5D 6S".split() # 2-6 Straight ah = "AC 2S 9C 4D 6D".split() # A High sh = "7C 2S 6C 3D 5D".split() # 7 High assert poker([s1, s2, ah, sh]) == [s2] assert poker([s1, ah, sh]) == [s1] fkranks = card_ranks(fk) tpranks = card_ranks(tp) assert kind(4, fkranks) == 9 assert kind(3, fkranks) is None assert kind(2, fkranks) is None assert kind(1, fkranks) == 7 assert two_pair(fkranks) is None assert two_pair(tpranks) == (14, 5) assert poker([sf, fk, fh]) == [sf] assert poker([fk, fh]) == [fk] assert poker([fh, fh]) == [fh, fh] assert poker([sf]) == [sf] assert poker([sf] + 99*[fh]) == [sf] assert hand_rank(sf) == (8, 10) assert hand_rank(fk) == (7, 9, 7) assert hand_rank(fh) == (6, 10, 7) return 'tests pass' print test() def test_best_hand(): test_1 = sorted(best_hand("6C 7C 8C 9C TC 5C JS".split())) assert (test_1 == ['6C', '7C', '8C', '9C', 'TC']) test_2 = sorted(best_hand("TD TC TH 7C 7D 8C 8S".split())) assert (test_2 == ['8C', '8S', 'TC', 'TD', 'TH']) test_3 = sorted(best_hand("JD TC TH 7C 7D 7S 7H".split())) assert (test_3 == ['7C', '7D', '7H', '7S', 'JD']) return 'test_best_hand passes' print test_best_hand()
35.12766
73
0.574197
690437062ad9bc153f8073ff84de085091fb62c1
5,249
py
Python
megaman/embedding/base.py
jakevdp/Mmani
681b6cdbd358b207e8b6c4a482262c84bea15bd7
[ "BSD-2-Clause" ]
303
2016-03-03T00:44:37.000Z
2022-03-14T03:43:38.000Z
megaman/embedding/base.py
jakevdp/Mmani
681b6cdbd358b207e8b6c4a482262c84bea15bd7
[ "BSD-2-Clause" ]
52
2016-02-26T21:41:31.000Z
2021-06-27T08:33:51.000Z
megaman/embedding/base.py
jakevdp/Mmani
681b6cdbd358b207e8b6c4a482262c84bea15bd7
[ "BSD-2-Clause" ]
67
2016-03-03T22:38:35.000Z
2022-01-12T08:03:47.000Z
""" base estimator class for megaman """ # Author: James McQueen -- <jmcq@u.washington.edu> # LICENSE: Simplified BSD https://github.com/mmp2/megaman/blob/master/LICENSE import numpy as np from scipy.sparse import isspmatrix from sklearn.base import BaseEstimator, TransformerMixin from sklearn.utils.validation import check_array from ..geometry.geometry import Geometry # from sklearn.utils.validation import FLOAT_DTYPES FLOAT_DTYPES = (np.float64, np.float32, np.float16) class BaseEmbedding(BaseEstimator, TransformerMixin): """ Base Class for all megaman embeddings. Inherits BaseEstimator and TransformerMixin from sklearn. BaseEmbedding creates the common interface to the geometry class for all embeddings as well as providing a common .fit_transform(). Parameters ---------- n_components : integer number of coordinates for the manifold. radius : float (optional) radius for adjacency and affinity calculations. Will be overridden if either is set in `geom` geom : dict or megaman.geometry.Geometry object specification of geometry parameters: keys are ["adjacency_method", "adjacency_kwds", "affinity_method", "affinity_kwds", "laplacian_method", "laplacian_kwds"] Attributes ---------- geom_ : a fitted megaman.geometry.Geometry object. """ def __init__(self, n_components=2, radius=None, geom=None): self.n_components = n_components self.radius = radius self.geom = geom def _validate_input(self, X, input_type): if input_type == 'data': sparse_formats = None elif input_type in ['adjacency', 'affinity']: sparse_formats = ['csr', 'coo', 'lil', 'bsr', 'dok', 'dia'] else: raise ValueError("unrecognized input_type: {0}".format(input_type)) return check_array(X, dtype=FLOAT_DTYPES, accept_sparse=sparse_formats) # # The world is not ready for this... 
# def estimate_radius(self, X, input_type='data', intrinsic_dim=None): # """Estimate a radius based on the data and intrinsic dimensionality # # Parameters # ---------- # X : array_like, [n_samples, n_features] # dataset for which radius is estimated # intrinsic_dim : int (optional) # estimated intrinsic dimensionality of the manifold. If not # specified, then intrinsic_dim = self.n_components # # Returns # ------- # radius : float # The estimated radius for the fit # """ # if input_type == 'affinity': # return None # elif input_type == 'adjacency': # return X.max() # elif input_type == 'data': # if intrinsic_dim is None: # intrinsic_dim = self.n_components # mean_std = np.std(X, axis=0).mean() # n_features = X.shape[1] # return 0.5 * mean_std / n_features ** (1. / (intrinsic_dim + 6)) # else: # raise ValueError("Unrecognized input_type: {0}".format(input_type)) def fit_geometry(self, X=None, input_type='data'): """Inputs self.geom, and produces the fitted geometry self.geom_""" if self.geom is None: self.geom_ = Geometry() elif isinstance(self.geom, Geometry): self.geom_ = self.geom else: try: kwds = dict(**self.geom) except TypeError: raise ValueError("geom must be a Geometry instance or " "a mappable/dictionary") self.geom_ = Geometry(**kwds) if self.radius is not None: self.geom_.set_radius(self.radius, override=False) # if self.radius == 'auto': # if X is not None and input_type != 'affinity': # self.geom_.set_radius(self.estimate_radius(X, input_type), # override=False) # else: # self.geom_.set_radius(self.radius, # override=False) if X is not None: self.geom_.set_matrix(X, input_type) return self def fit_transform(self, X, y=None, input_type='data'): """Fit the model from data in X and transform X. Parameters ---------- input_type : string, one of: 'data', 'distance' or 'affinity'. The values of input data X. 
(default = 'data') X: array-like, shape (n_samples, n_features) Training vector, where n_samples in the number of samples and n_features is the number of features. If self.input_type is 'distance': X : array-like, shape (n_samples, n_samples), Interpret X as precomputed distance or adjacency graph computed from samples. Returns ------- X_new: array-like, shape (n_samples, n_components) """ self.fit(X, y=y, input_type=input_type) return self.embedding_ def transform(self, X, y=None, input_type='data'): raise NotImplementedError("transform() not implemented. " "Try fit_transform()")
36.451389
81
0.599543
6905dd86176dcedc7d4f61bca47e2cde2c5fae84
8,048
py
Python
pymorsegui.py
mmgrant73/pymorsecode
594c341817598e86a7be766ca591a99cd8db0e94
[ "MIT" ]
1
2021-04-15T03:26:32.000Z
2021-04-15T03:26:32.000Z
pymorsegui.py
mmgrant73/pymorsecode
594c341817598e86a7be766ca591a99cd8db0e94
[ "MIT" ]
null
null
null
pymorsegui.py
mmgrant73/pymorsecode
594c341817598e86a7be766ca591a99cd8db0e94
[ "MIT" ]
null
null
null
#!/usr/bin/env python # -*- coding: UTF-8 -*- # # generated by wxGlade 0.9.4 on Sat Feb 1 19:14:54 2020 # import wx from pymorsecode import MorseCode # begin wxGlade: dependencies # end wxGlade # begin wxGlade: extracode # end wxGlade class MyFrame(wx.Frame): def __init__(self, *args, **kwds): # begin wxGlade: MyFrame.__init__ kwds["style"] = kwds.get("style", 0) | wx.DEFAULT_FRAME_STYLE wx.Frame.__init__(self, *args, **kwds) self.SetSize((602, 444)) self.text_ctrl_txt = wx.TextCtrl(self, wx.ID_ANY, "Enter String", style=wx.TE_MULTILINE | wx.TE_NO_VSCROLL) self.spin_ctrl_wpm = wx.SpinCtrl(self, wx.ID_ANY, "15", min=5, max=25) self.spin_ctrl_hz = wx.SpinCtrl(self, wx.ID_ANY, "800", min=500, max=1000) self.text_ctrl_morsecode = wx.TextCtrl(self, wx.ID_ANY, "Generated Morse Code", style=wx.TE_MULTILINE | wx.TE_NO_VSCROLL | wx.TE_READONLY) self.button_clear = wx.Button(self, wx.ID_ANY, "Clear") self.button_open = wx.Button(self, wx.ID_ANY, "Open") self.button_save = wx.Button(self, wx.ID_ANY, "Save") self.button_play = wx.Button(self, wx.ID_ANY, "Play") self.button_generate = wx.Button(self, wx.ID_ANY, "Generate") self.button_exit = wx.Button(self, wx.ID_ANY, "Exit") self.morse = MorseCode("Kayleb Walter") print("Starting program") # Menu Bar self.frame_menubar = wx.MenuBar() wxglade_tmp_menu = wx.Menu() wxglade_tmp_menu.Append(1, "Open", "") self.Bind(wx.EVT_MENU, self.onOpen, id=1) wxglade_tmp_menu.Append(2, "Save", "") self.Bind(wx.EVT_MENU, self.onSave, id=2) wxglade_tmp_menu.AppendSeparator() wxglade_tmp_menu.Append(3, "Generate", "") self.Bind(wx.EVT_MENU, self.onGenerate, id=3) wxglade_tmp_menu.Append(4, "Play", "") self.Bind(wx.EVT_MENU, self.onPlay, id=4) wxglade_tmp_menu.AppendSeparator() wxglade_tmp_menu.Append(5, "Exit", "") self.Bind(wx.EVT_MENU, self.onExit, id=5) self.frame_menubar.Append(wxglade_tmp_menu, "File") self.SetMenuBar(self.frame_menubar) # Menu Bar end self.__set_properties() self.__do_layout() self.Bind(wx.EVT_BUTTON, self.onClear, 
self.button_clear) self.Bind(wx.EVT_BUTTON, self.onOpen, self.button_open) self.Bind(wx.EVT_BUTTON, self.onSave, self.button_save) self.Bind(wx.EVT_BUTTON, self.onPlay, self.button_play) self.Bind(wx.EVT_BUTTON, self.onGenerate, self.button_generate) self.Bind(wx.EVT_BUTTON, self.onExit, self.button_exit) self.Bind(wx.EVT_SPINCTRL, self.onWpm, self.spin_ctrl_wpm) self.Bind(wx.EVT_SPINCTRL, self.onHz, self.spin_ctrl_hz) # end wxGlade def __set_properties(self): # begin wxGlade: MyFrame.__set_properties self.SetTitle("Morse Code Program") self.text_ctrl_txt.SetToolTip("Enter the text you want to convert to morse code in this text box") self.text_ctrl_txt.SetFocus() self.spin_ctrl_wpm.SetToolTip("Morse Code speed in words per minute") self.spin_ctrl_hz.SetToolTip("Audio Frequency for the tone") self.text_ctrl_morsecode.SetToolTip("Morse Code will be generated in this text box") self.button_clear.SetToolTip("Clears the textboxes") self.button_open.SetToolTip("Opens a wave file") self.button_save.SetToolTip("Save the morse code audio to a wave file") self.button_play.SetToolTip("Play an audio version of the morse code") self.button_generate.SetToolTip("Takes a text string and convert it to morse code") self.button_exit.SetToolTip("Exit the program") # end wxGlade def __do_layout(self): # begin wxGlade: MyFrame.__do_layout sizer_1 = wx.BoxSizer(wx.VERTICAL) sizer_2 = wx.BoxSizer(wx.VERTICAL) sizer_5 = wx.BoxSizer(wx.HORIZONTAL) sizer_6 = wx.StaticBoxSizer(wx.StaticBox(self, wx.ID_ANY, "Actions"), wx.VERTICAL) sizer_3 = wx.BoxSizer(wx.HORIZONTAL) sizer_4 = wx.StaticBoxSizer(wx.StaticBox(self, wx.ID_ANY, "Options"), wx.VERTICAL) label_1 = wx.StaticText(self, wx.ID_ANY, "Morse Code Generator", style=wx.ALIGN_CENTER) label_1.SetFont(wx.Font(12, wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, 0, "")) sizer_2.Add(label_1, 0, wx.EXPAND, 0) sizer_3.Add(self.text_ctrl_txt, 3, wx.ALL | wx.EXPAND, 5) label_2 = wx.StaticText(self, wx.ID_ANY, "Speed 
(words/minute):") sizer_4.Add(label_2, 0, 0, 0) sizer_4.Add(self.spin_ctrl_wpm, 0, 0, 0) label_3 = wx.StaticText(self, wx.ID_ANY, "Audio Frequency (Hz):") sizer_4.Add(label_3, 0, 0, 0) sizer_4.Add(self.spin_ctrl_hz, 0, 0, 0) sizer_3.Add(sizer_4, 1, wx.ALL | wx.EXPAND, 3) sizer_2.Add(sizer_3, 1, wx.EXPAND, 0) sizer_5.Add(self.text_ctrl_morsecode, 3, wx.ALL | wx.EXPAND, 5) sizer_6.Add(self.button_clear, 0, wx.ALL | wx.EXPAND, 1) sizer_6.Add(self.button_open, 0, wx.ALL | wx.EXPAND, 1) sizer_6.Add(self.button_save, 0, wx.ALL | wx.EXPAND, 1) sizer_6.Add(self.button_play, 0, wx.ALL | wx.EXPAND, 1) sizer_6.Add(self.button_generate, 0, wx.ALL | wx.EXPAND, 1) sizer_6.Add(self.button_exit, 0, wx.ALL | wx.EXPAND, 1) sizer_5.Add(sizer_6, 1, wx.ALL | wx.EXPAND, 3) sizer_2.Add(sizer_5, 1, wx.EXPAND, 0) sizer_1.Add(sizer_2, 1, wx.EXPAND, 0) self.SetSizer(sizer_1) self.Layout() # end wxGlade def onClear(self, event): # wxGlade: MyFrame.<event_handler> self.text_ctrl_txt.SetValue("") self.text_ctrl_morsecode.SetValue("") def onOpen(self, event): # wxGlade: MyFrame.<event_handler> openfile = wx.LoadFileSelector('Open a Wave file', '.wav', 'testmorse.wav', None) if(openfile != ""): self.SetTitle("Morse Code Wave File - " + openfile) self.text_ctrl_txt.SetValue("Please Wait Generating....") self.text_ctrl_morsecode.SetValue("Please Wait Generating....") self.morse.sound_info(openfile) print("Generating morse code from audio") self.morse.sound_to_morse(openfile) self.text_ctrl_txt.SetValue(self.morse.morse_text) self.text_ctrl_morsecode.SetValue(self.morse.morse_code) print("Finished with translating audio to morse code") def onSave(self, event): # wxGlade: MyFrame.<event_handler> savefile = wx.SaveFileSelector('Open a Wave file', '.wav', 'testmorse.wav', None) if(savefile != ""): self.SetTitle("Morse Code Wave File - " + savefile) self.morse.save_wav(savefile) def onGenerate(self, event): # wxGlade: MyFrame.<event_handler> print("Generating morse code") morse_text = 
self.text_ctrl_txt.GetValue() morse_code = self.morse.to_morse(morse_text) self.text_ctrl_morsecode.SetValue(self.morse.morse_code) def onExit(self, event): # wxGlade: MyFrame.<event_handler> print("Closing program") self.Destroy() def onPlay(self, event): # wxGlade: MyFrame.<event_handler> print("Playing morse code") morse_code = self.text_ctrl_morsecode.GetValue() self.morse.play_morse(morse_code) def onWpm(self, event): wpm = self.spin_ctrl_wpm.GetValue() print("Change the WPM value") self.morse.time_period = self.morse.set_time_period(wpm) def onHz(self, event): hz = self.spin_ctrl_hz.GetValue() print("Change the frequency value") self.morse.tone = self.morse.set_tone(hz) # end of class MyFrame class MyApp(wx.App): def OnInit(self): self.frame = MyFrame(None, wx.ID_ANY, "") self.SetTopWindow(self.frame) self.frame.Show() return True # end of class MyApp if __name__ == "__main__": app = MyApp(0) app.MainLoop()
44.21978
146
0.650596
690613c67cd63310af621f929b186a79abbe5cd8
1,032
py
Python
ppr-api/migrations/versions/d3f96fb8b8e5_update_user_profile_definition.py
cameron-freshworks/ppr
01d6f5d300c791aebad5e58bb4601e9be2ccfc46
[ "Apache-2.0" ]
4
2020-01-21T21:46:42.000Z
2021-02-24T18:30:24.000Z
ppr-api/migrations/versions/d3f96fb8b8e5_update_user_profile_definition.py
cameron-freshworks/ppr
01d6f5d300c791aebad5e58bb4601e9be2ccfc46
[ "Apache-2.0" ]
1,313
2019-10-18T22:48:16.000Z
2022-03-30T17:42:47.000Z
ppr-api/migrations/versions/d3f96fb8b8e5_update_user_profile_definition.py
cameron-freshworks/ppr
01d6f5d300c791aebad5e58bb4601e9be2ccfc46
[ "Apache-2.0" ]
201
2019-10-18T21:34:41.000Z
2022-03-31T20:07:42.000Z
"""update user profile definition Revision ID: d3f96fb8b8e5 Revises: 2b13f89aa1b3 Create Date: 2021-10-18 15:45:33.906745 """ from alembic import op import sqlalchemy as sa from alembic_utils.pg_function import PGFunction from sqlalchemy import text as sql_text # revision identifiers, used by Alembic. revision = 'd3f96fb8b8e5' down_revision = '2b13f89aa1b3' branch_labels = None depends_on = None # Update user profile to add registrations table and miscellaneous (future) preferences. def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.add_column('user_profiles', sa.Column('registrations_table', sa.JSON(), nullable=True)) op.add_column('user_profiles', sa.Column('misc_preferences', sa.JSON(), nullable=True)) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! ### op.drop_column('user_profiles', 'misc_preferences') op.drop_column('user_profiles', 'registrations_table') # ### end Alembic commands ###
30.352941
94
0.738372
6906a12f2953d09ffd721ec5d1611ca70e378fb9
2,948
py
Python
questions/available-captures-for-rook/Solution.py
marcus-aurelianus/leetcode-solutions
8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6
[ "MIT" ]
141
2017-12-12T21:45:53.000Z
2022-03-25T07:03:39.000Z
questions/available-captures-for-rook/Solution.py
marcus-aurelianus/leetcode-solutions
8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6
[ "MIT" ]
32
2015-10-05T14:09:52.000Z
2021-05-30T10:28:41.000Z
questions/available-captures-for-rook/Solution.py
marcus-aurelianus/leetcode-solutions
8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6
[ "MIT" ]
56
2015-09-30T05:23:28.000Z
2022-03-08T07:57:11.000Z
""" On an 8 x 8 chessboard, there is one white rook.  There also may be empty squares, white bishops, and black pawns.  These are given as characters 'R', '.', 'B', and 'p' respectively. Uppercase characters represent white pieces, and lowercase characters represent black pieces. The rook moves as in the rules of Chess: it chooses one of four cardinal directions (north, east, west, and south), then moves in that direction until it chooses to stop, reaches the edge of the board, or captures an opposite colored pawn by moving to the same square it occupies.  Also, rooks cannot move into the same square as other friendly bishops. Return the number of pawns the rook can capture in one move.   Example 1: Input: [[".",".",".",".",".",".",".","."],[".",".",".","p",".",".",".","."],[".",".",".","R",".",".",".","p"],[".",".",".",".",".",".",".","."],[".",".",".",".",".",".",".","."],[".",".",".","p",".",".",".","."],[".",".",".",".",".",".",".","."],[".",".",".",".",".",".",".","."]] Output: 3 Explanation: In this example the rook is able to capture all the pawns. Example 2: Input: [[".",".",".",".",".",".",".","."],[".","p","p","p","p","p",".","."],[".","p","p","B","p","p",".","."],[".","p","B","R","B","p",".","."],[".","p","p","B","p","p",".","."],[".","p","p","p","p","p",".","."],[".",".",".",".",".",".",".","."],[".",".",".",".",".",".",".","."]] Output: 0 Explanation: Bishops are blocking the rook to capture any pawn. Example 3: Input: [[".",".",".",".",".",".",".","."],[".",".",".","p",".",".",".","."],[".",".",".","p",".",".",".","."],["p","p",".","R",".","p","B","."],[".",".",".",".",".",".",".","."],[".",".",".","B",".",".",".","."],[".",".",".","p",".",".",".","."],[".",".",".",".",".",".",".","."]] Output: 3 Explanation: The rook can capture the pawns at positions b5, d6 and f5.   
Note: board.length == board[i].length == 8 board[i][j] is either 'R', '.', 'B', or 'p' There is exactly one cell with board[i][j] == 'R' """ class Solution(object): def numRookCaptures(self, board): """ :type board: List[List[str]] :rtype: int """ ri, rj = 0, 0 found = False for i in xrange(len(board)): for j in xrange(len(board[0])): c = board[i][j] if c == 'R': ri, rj = i, j found = True break if found: break num = 0 dirs = [[1, 0], [-1, 0], [0, 1], [0, -1]] for di, dj in dirs: i, j = ri + di, rj + dj while i >= 0 and i < len(board) and j >= 0 and j < len(board[0]): c = board[i][j] if c == '.': pass elif c == 'p': num += 1 break else: break i += di j += dj return num
39.837838
353
0.402307
6908c59f82b4dce18b0359af8fb11f6688af03cf
3,200
py
Python
test/test_npu/test_network_ops/test_sin.py
Ascend/pytorch
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
[ "BSD-3-Clause" ]
1
2021-12-02T03:07:35.000Z
2021-12-02T03:07:35.000Z
test/test_npu/test_network_ops/test_sin.py
Ascend/pytorch
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
[ "BSD-3-Clause" ]
1
2021-11-12T07:23:03.000Z
2021-11-12T08:28:13.000Z
test/test_npu/test_network_ops/test_sin.py
Ascend/pytorch
39849cf72dafe8d2fb68bd1679d8fd54ad60fcfc
[ "BSD-3-Clause" ]
null
null
null
# Copyright (c) 2020, Huawei Technologies.All rights reserved. # # Licensed under the BSD 3-Clause License (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://opensource.org/licenses/BSD-3-Clause # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import torch import numpy as np import sys import copy from common_utils import TestCase, run_tests from common_device_type import dtypes, instantiate_device_type_tests from util_test import create_common_tensor class TestSin(TestCase): def cpu_op_exec(self, input1): output = torch.sin(input1) output = output.numpy() return output def npu_op_exec(self, input1): output = torch.sin(input1) output = output.to("cpu") output = output.numpy() return output def npu_op_exec_out(self, input1, input2): torch.sin(input1, out=input2) output = input2.to("cpu") output = output.numpy() return output def test_sin_common_shape_format(self, device): shape_format = [ [[np.float32, 0, (5,3)]], ] for item in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], -10, 10) cpu_output = self.cpu_op_exec(cpu_input1) npu_output = self.npu_op_exec(npu_input1) self.assertRtolEqual(cpu_output, npu_output) def test_sin_out_common_shape_format(self, device): shape_format = [ [[np.float16, -1, (4, 3, 128, 128)], [np.float16, -1, (4, 3, 128, 128)]], [[np.float16, 0, (4, 3, 128, 128)], [np.float16, 0, (10, 3, 64, 128)]], [[np.float16, 0, (4, 3, 128, 128)], [np.float16, 0, (2, 3, 256, 128)]], [[np.float32, 0, (4, 3, 128, 128)], [np.float32, 0, (4, 3, 128, 128)]], [[np.float32, 0, (4, 3, 128, 128)], [np.float32, 0, (8, 3, 64, 128)]], [[np.float32, -1, (4, 3, 128, 128)], [np.float32, -1, (4, 3, 256, 
64)]], ] for item in shape_format: cpu_input1, npu_input1 = create_common_tensor(item[0], -10, 10) cpu_input2, npu_input2 = create_common_tensor(item[0], -10, 10) cpu_input3, npu_input3 = create_common_tensor(item[1], -10, 10) if cpu_input1.dtype == torch.float16: cpu_input1 = cpu_input1.to(torch.float32) cpu_output = self.cpu_op_exec(cpu_input1) npu_output_out1 = self.npu_op_exec_out(npu_input1, npu_input2) npu_output_out2 = self.npu_op_exec_out(npu_input1, npu_input3) cpu_output = cpu_output.astype(npu_output_out1.dtype) self.assertRtolEqual(cpu_output, npu_output_out1) self.assertRtolEqual(cpu_output, npu_output_out2) instantiate_device_type_tests(TestSin, globals(), except_for='cpu') if __name__ == "__main__": run_tests()
42.105263
89
0.637813
690958b4739c7c62384b219e19362e11eacddb43
968
py
Python
src/simmate/website/core_components/filters/dynamics.py
laurenmm/simmate-1
c06b94c46919b01cda50f78221ad14f75c100a14
[ "BSD-3-Clause" ]
9
2021-12-21T02:58:21.000Z
2022-01-25T14:00:06.000Z
src/simmate/website/core_components/filters/dynamics.py
laurenmm/simmate-1
c06b94c46919b01cda50f78221ad14f75c100a14
[ "BSD-3-Clause" ]
51
2022-01-01T15:59:58.000Z
2022-03-26T21:25:42.000Z
src/simmate/website/core_components/filters/dynamics.py
laurenmm/simmate-1
c06b94c46919b01cda50f78221ad14f75c100a14
[ "BSD-3-Clause" ]
7
2022-01-01T03:44:32.000Z
2022-03-29T19:59:27.000Z
# -*- coding: utf-8 -*- from simmate.website.core_components.filters import ( Structure, Forces, Thermodynamics, Calculation, ) from simmate.database.base_data_types.dynamics import ( DynamicsRun as DynamicsRunTable, DynamicsIonicStep as DynamicsIonicStepTable, ) class DynamicsRun(Structure, Calculation): class Meta: model = DynamicsRunTable fields = dict( temperature_start=["range"], temperature_end=["range"], time_step=["range"], nsteps=["range"], **Structure.get_fields(), **Calculation.get_fields(), ) class DynamicsIonicStep(Structure, Forces, Thermodynamics): class Meta: model = DynamicsIonicStepTable fields = dict( number=["range"], temperature=["range"], **Structure.get_fields(), **Thermodynamics.get_fields(), **Forces.get_fields(), )
25.473684
59
0.599174
690b4a69f377b976688d836e3b5ab8c8dc9b6884
3,821
py
Python
webproject/taller1/algoritmoCoseno.py
jairocollante/sr
f395c0f9aef804ec0100edcfe1a1c6ccab2494a1
[ "MIT" ]
null
null
null
webproject/taller1/algoritmoCoseno.py
jairocollante/sr
f395c0f9aef804ec0100edcfe1a1c6ccab2494a1
[ "MIT" ]
null
null
null
webproject/taller1/algoritmoCoseno.py
jairocollante/sr
f395c0f9aef804ec0100edcfe1a1c6ccab2494a1
[ "MIT" ]
null
null
null
import numpy as np import pandas as pd from taller1.models import Userid_Timestamp_Count class Coseno(): def recomendacionUsuario(self,usuario_activo): print("Modelo Coseno Usuario") cant = 10 df_mapreduce = Coseno.cargarDatos(self) print("df_mapreduce.shape",df_mapreduce.shape) df_pivot = df_mapreduce.pivot('userid','artist','count').fillna(0) print("Pivot.shape=", df_pivot.shape) lista_coseno_usuario = Coseno.iterarUsuario(self,df_pivot,usuario_activo) print("Termina calculo coseno=",len(lista_coseno_usuario)) lista_coseno_usuario.sort(key=lambda k:k['coseno'], reverse = True) print("Termina ordenar lista coseno") usuario_mas_similar = lista_coseno_usuario[0]['usuario_similar'] print("Usuario mas similar=",usuario_mas_similar) lista_recomendacion = Coseno.artistaMasEscuchadoPorUsuario(self,usuario_mas_similar,cant,df_pivot) resp = {"lista_coseno_usuario":lista_coseno_usuario[:cant], "lista_recomendacion":lista_recomendacion} return resp def cargarDatos(self): #df_mapreduce = pd.read_csv('part-r-00000',sep='\t',names=['userid','artist','count']) df_mapreduce = pd.DataFrame(list(Userid_Timestamp_Count.objects.all().values('userid','artist','count'))) return df_mapreduce.dropna() def iterarUsuario(self,df_pivot,usuario_activo): v_usuario_activo = df_pivot.loc[usuario_activo].values lista_coseno=[] for user_evaluado in df_pivot.index.tolist(): if usuario_activo != user_evaluado: object = {} object['usuario_similar']=user_evaluado v_usuario_evaluado = df_pivot.loc[user_evaluado].values object['coseno']=Coseno.cos_sim(self,v_usuario_activo, v_usuario_evaluado) lista_coseno.append(object) return lista_coseno def valorCoseno(self): return val['coseno'] def artistaMasEscuchadoPorUsuario(self,usuario_evaluado,cant,df_pivot): artistas_escuchados = df_pivot.loc[usuario_evaluado] df_r = pd.DataFrame(artistas_escuchados) df_r = df_r.sort_values(by=[usuario_evaluado], ascending=False).index.tolist() return df_r[:cant] def cos_sim(self,a, b): #Takes 2 vectors a, b and returns 
the cosine similarity according #to the definition of the dot product dot_product = np.dot(a, b) norm_a = np.linalg.norm(a) norm_b = np.linalg.norm(b) return dot_product / (norm_a * norm_b) def recomendacionItem(self,usuario_activo): print("Modelo Coseno Item") df_mapreduce = Coseno.cargarDatos(self) print("df_mapreduce.shape",df_mapreduce.shape) df_pivotA = df_mapreduce.pivot('userid','artist','count').fillna(0) print("Usuario Pivot.shape=", df_pivotA.shape) artista_activo = Coseno.artistaMasEscuchadoPorUsuario(self,usuario_activo,10,df_pivotA) cant = 10 df_pivot = df_mapreduce.pivot('artist','userid','count').fillna(0) print("Artista Pivot.shape=", df_pivot.shape) lista_coseno_artista = Coseno.iterarArtistas(self,df_pivot,artista_activo[:1]) print("Termina calculo coseno=",len(lista_coseno_artista)) lista_coseno_artista.sort(key=lambda k:k['coseno'], reverse = True) print("Termina ordenar lista coseno") resp = {"lista_coseno_artista":lista_coseno_artista[:cant], "artista_activo":artista_activo} return resp def iterarArtistas(self,df_pivot_artista,artista_activo): v_artista_activo = df_pivot_artista.loc[artista_activo].values lista_coseno=[] for artista_evaluado in df_pivot_artista.index.tolist(): if artista_activo != artista_evaluado: object = {} object['artista_similar']=artista_evaluado v_artista_evaluado = df_pivot_artista.loc[artista_evaluado].values object['coseno']=Coseno.cos_sim(self,v_artista_activo, v_artista_evaluado) lista_coseno.append(object) return lista_coseno
43.420455
108
0.741952
690bf7914b0e1d6a8fa65445caa5e076e78c0f57
5,646
py
Python
python/dataconversion/shapefile_to_plt.py
Mehrdadj93/handyscripts
5df9a69e17345ca5a3e42dda2424da2da0ab6f12
[ "MIT" ]
66
2018-09-21T22:55:34.000Z
2022-03-22T14:29:57.000Z
python/dataconversion/shapefile_to_plt.py
Mehrdadj93/handyscripts
5df9a69e17345ca5a3e42dda2424da2da0ab6f12
[ "MIT" ]
4
2018-10-04T22:09:01.000Z
2022-03-31T16:18:38.000Z
python/dataconversion/shapefile_to_plt.py
Mehrdadj93/handyscripts
5df9a69e17345ca5a3e42dda2424da2da0ab6f12
[ "MIT" ]
50
2018-09-23T15:50:55.000Z
2022-03-06T06:59:33.000Z
"""Convert Shapefiles to Tecplot plt format usage: > python shapefile_to_plt.py shapefile.shp outfile.plt Necessary modules ----------------- pyshp The Python Shapefile Library (pyshp) reads and writes ESRI Shapefiles in pure Python. https://pypi.python.org/pypi/pyshp https://www.esri.com/library/whitepapers/pdfs/shapefile.pdf Description ----------- This script is used to convert Shapefiles (.shp) to Tecplot plt format. Users will need to answer a few questions about their shapefile to accurately import into Tecplot format. First select a conversion type: Convert to a single zone or one zone per shape. Next select variable names to use: x/y or lon/lat Finally, if using one zone per shape, select the column to name the zones After running the script, append the new plt file to the active frame and match the variable names. """ import sys import os import time import shapefile as sf import tecplot as tp from tecplot.constant import * def create_connectivity_list(shape, element_offset=0): """Use the element indices for each shape to create the connectivity list""" num_points = len(shape.points) num_parts = len(shape.parts) elements = [] for i in range(num_parts): # parts[] returns the point index at the start of each part # These values will define the connectivity list of the line segments p1 = shape.parts[i] # Check to see if we're at the last part so we don't over index the list if i < num_parts - 1: p2 = shape.parts[i + 1] - 1 else: p2 = num_points - 1 p1 += element_offset p2 += element_offset # Create the connectivity list for this part. 
Each point is connected to the next for i in range(p1, p2): elements.append((i, i + 1)) return elements def convert_to_single_zone(s, zone_name, dataset): """Loop over all the shapes, collecting their point values and generating the FE-Line Segment connectivity list.""" x = [] y = [] elements = [] num_points = 0 for shapeRec in s.shapeRecords(): elements.extend(create_connectivity_list(shapeRec.shape, num_points)) x.extend([n[0] for n in shapeRec.shape.points]) y.extend([n[1] for n in shapeRec.shape.points]) num_points += len(shapeRec.shape.points) # Now that we have the points and connectivity list we add a zone to the dataset zone = dataset.add_fe_zone(ZoneType.FELineSeg, zone_name, num_points, len(elements)) zone.values(0)[:] = x zone.values(1)[:] = y zone.nodemap[:] = elements def convert_to_one_zone_per_shape(s, name_index, dataset): """Create a Tecplot zone for each shape""" for i, shapeRec in enumerate(s.shapeRecords()): # Extract the zone name from the appropriate location in the shape record zone_name = shapeRec.record[name_index] if len(zone_name) == 0: zone_name = 'NONE' num_points = len(shapeRec.shape.points) elements = create_connectivity_list(shapeRec.shape) x = [n[0] for n in shapeRec.shape.points] y = [n[1] for n in shapeRec.shape.points] # Create the Tecplot zone and add the point data as well as the connectivity list zone = dataset.add_fe_zone(ZoneType.FELineSeg, zone_name, num_points, len(elements)) zone.values(0)[:] = x zone.values(1)[:] = y zone.nodemap[:] = elements # Print dots to give the user an indication that something is happening sys.stdout.write('.') sys.stdout.flush() def get_var_names(): """Choose the variable names to use""" print("1 - Use 'x' and 'y'") print("2 - Use 'lon' and 'lat'") var_name_choice = int(input("Enter your choice for variable names: ")) - 1 return var_name_choice def get_name_index(shape_reader): """Displays Shapefile column used to name zones""" first_record = shape_reader.shapeRecords()[0].record # Record is the 
"column" information for the shape index = 1 for f, r in zip(shape_reader.fields[1:], first_record): print(index, "- ", f[0], ": ", r) index += 1 name_index = int(input("Enter the index to use for zone names: ")) - 1 return name_index def get_conversion_option(shape_records): """Prompts user for conversion options""" print("1 - Convert to a single zone") print("2 - Convert to one zone per shape (%d zones) (this can take a while)" % (len(shape_records))) import_option = int(input("Enter your conversion selection: ")) return import_option def main(shapefilename, outfilename): # define index from record for zone name s = sf.Reader(shapefilename) shape_records = s.shapeRecords() conversion_option = get_conversion_option(shape_records) if get_var_names() == 0: x_var_name = 'x' y_var_name = 'y' else: x_var_name = 'lon' y_var_name = 'lat' dataset = tp.active_frame().create_dataset("Shapefile", [x_var_name, y_var_name]) if conversion_option == 1: # Single Zone start = time.time() convert_to_single_zone(s, os.path.basename(shapefilename), dataset) else: # One Zone per Shape name_index = get_name_index(s) start = time.time() convert_to_one_zone_per_shape(s, name_index, dataset) tp.data.save_tecplot_plt(outfilename) print("Elapsed time: ", time.time() - start) if len(sys.argv) != 3: print("Usage:\nshapefile_to_plt.py shapefile.shp outfile.plt") else: shapefilename = sys.argv[1] outfilename = sys.argv[2] main(shapefilename, outfilename)
33.607143
104
0.669146
690cf46e3a65cc79fc30a8178dbe551901ad1473
2,746
py
Python
mastering_oop/special_methods/card_factory.py
brittainhard/py
aede05530ad05a8319fef7e76b49e4bf3cebebac
[ "MIT" ]
null
null
null
mastering_oop/special_methods/card_factory.py
brittainhard/py
aede05530ad05a8319fef7e76b49e4bf3cebebac
[ "MIT" ]
null
null
null
mastering_oop/special_methods/card_factory.py
brittainhard/py
aede05530ad05a8319fef7e76b49e4bf3cebebac
[ "MIT" ]
null
null
null
"""When creating factory functions, plain functions are good unless you need to inherit from a higher level class. If you don't need to inherit, dont use a class.""" from functools import partial from .suits import * from .cards import * def card(rank, suit): if rank == 1: return AceCard('A', suit) elif 2 <= rank < 11: return NumberCard(str(rank), suit) elif 11 <= rank < 14: name = {11: "J", 12: "Q", 13: "K"}[rank] return FaceCard(name, suit) else: """The else clause is there to make explicit what inputs this function will handle""" raise Exception("Rank out of range.") def card_better_elif(rank, suit): if rank == 1: return AceCard('A', suit) elif 2 <= rank < 11: return NumberCard(str(rank), suit) elif rank == 11: return FaceCard("J", suit) elif rank == 12: return FaceCard("Q", suit) elif rank == 13: return FaceCard("K", suit) else: """The else clause is there to make explicit what inputs this function will handle""" raise Exception("Rank out of range.") def card_mapping(rank, suit): """Get the desired rank. If the rank isnt there by default, return a nubmer card""" class_ = {1: AceCard, 11: FaceCard, 12: FaceCard, 13: FaceCard}.get(rank, NumberCard) return class_(rank, suit) def card_functools_mapping(rank, suit): part_class = { 1: partial(AceCard, 'A'), 11: partial(FaceCard, 'J'), 12: partial(FaceCard, 'Q'), 13: partial(FaceCard, 'K') }.get(rank, partial(NumberCard, str(rank))) return part_class(suit) class CardFactory: """This class is designed to contain a 'fluent api'. That means that one function call happens after the next. In the example, its x.a().b(). This class is returning itself, which the next function uses to generate the card. We are containing this in one object for the sake of simplicity. It seems like the minute we decide to do a different API... I don't know how this woulf be useful exactly. 
A lot of these are just examples of stuff you can do with collections.""" def rank(self, rank): self.class_, self.rank_str = { 1: (AceCard, 'A'), 11: (FaceCard, 'J'), 12: (FaceCard, 'Q'), 13: (FaceCard, 'K') }.get(rank, (NumberCard, str(rank))) return self def suit(self, suit): return self.class_(self.rank_str, suit) def get_deck(self): return [self.rank(r + 1).suit(s) for r in range(13) for s in (Club, Diamond, Heart, Spade)] factory_functions = [card, card_better_elif, card_mapping, card_functools_mapping]
31.563218
89
0.617626
690dd745221fea2279bf9ae3ddea1cf478e5ead0
2,645
py
Python
tests/test_spider_qidian.py
congjinruo/JulyNovel
feff0adfecab1c21728fc177c94621b9b8707bbd
[ "Apache-2.0" ]
5
2018-03-05T02:32:53.000Z
2020-10-27T13:13:59.000Z
tests/test_spider_qidian.py
congjinruo/JulyNovel
feff0adfecab1c21728fc177c94621b9b8707bbd
[ "Apache-2.0" ]
null
null
null
tests/test_spider_qidian.py
congjinruo/JulyNovel
feff0adfecab1c21728fc177c94621b9b8707bbd
[ "Apache-2.0" ]
1
2019-02-20T03:04:22.000Z
2019-02-20T03:04:22.000Z
# -*- coding: utf-8 -*- import unittest from app.services.spider_qidian import QidianSpider class TestSpider(unittest.TestCase): def test_init(self): spider = QidianSpider() self.assertTrue(isinstance(spider, QidianSpider)) def test_query_book_list(self): spider = QidianSpider() count = spider.queryBookList("https://www.qidian.com/all?orderId=&style=2&pageSize=50&siteid=1&pubflag=0&hiddenField=0&page=1") self.assertEqual(count, 50) def test_chapter_api(self): spider = QidianSpider() chapters = spider.queryChapterApi("107580") self.assertEqual(len(chapters), 2600) self.assertEqual(chapters[2583]["chapterName"], "第十一卷 真仙降临 第两千四百四十六章 飞升仙界(大结局)") self.assertEqual(chapters[2583]["xchapterId"], "48169888") self.assertEqual(chapters[2583]["wordNumbers"], "3288") self.assertEqual(chapters[2583]["updatetime"], "2013-09-23 22:10:37") self.assertEqual(chapters[2583]["free"], 0) self.assertEqual(chapters[2583]["xbookId"], "107580") def test_query_book_info(self): spider = QidianSpider() book = spider.queryBookInfo("https://book.qidian.com/info/107580") self.assertEqual(book["bookName"] , "凡人修仙传") self.assertEqual(book["cover"] , "https://qidian.qpic.cn/qdbimg/349573/107580/180") self.assertEqual(book["wordNumbers"], "744.75") self.assertEqual(book["author"], "忘语") self.assertEqual(book["tags"], "") self.assertEqual(book["xbookId"], "107580") self.assertEqual(book["status"], 0) self.assertEqual(book["lastupdate"], "2016-01-05 17:02:39") book_b = spider.queryBookInfo("https://book.qidian.com/info/1010626574") self.assertEqual(book_b["bookName"] , "无限刷钱系统") self.assertEqual(book_b["cover"] , "https://qidian.qpic.cn/qdbimg/349573/1010626574/180") self.assertEqual(book_b["wordNumbers"], "89.21") self.assertEqual(book_b["author"], "二发凉了") self.assertEqual(book_b["tags"], "明星|爆笑|系统流|赚钱") self.assertEqual(book_b["xbookId"], "1010626574") self.assertEqual(book_b["status"], 1) self.assertEqual(book_b["lastupdate"], "2018-03-16 17:17:32") def test_query_content(self): spider = 
QidianSpider() content = spider.queryContent("https://read.qidian.com/chapter/_khZq99sDTj7X4qr8VpWrA2/yocLiS1ZCjPM5j8_3RRvhw2") print(content) self.assertEqual(content["wordNumbers"], '2707') self.assertEqual(content["xchapterId"], 'yocLiS1ZCjPM5j8_3RRvhw2') self.assertEqual(content["xbookId"], '1010626574')
45.603448
135
0.662004
690eeeccad42026d1f4e91d779a9b52f4b5eb52e
847
py
Python
Python3/1311-Get-Watched-Videos-by-Your-Friends/soln.py
wyaadarsh/LeetCode-Solutions
3719f5cb059eefd66b83eb8ae990652f4b7fd124
[ "MIT" ]
5
2020-07-24T17:48:59.000Z
2020-12-21T05:56:00.000Z
Python3/1311-Get-Watched-Videos-by-Your-Friends/soln.py
zhangyaqi1989/LeetCode-Solutions
2655a1ffc8678ad1de6c24295071308a18c5dc6e
[ "MIT" ]
null
null
null
Python3/1311-Get-Watched-Videos-by-Your-Friends/soln.py
zhangyaqi1989/LeetCode-Solutions
2655a1ffc8678ad1de6c24295071308a18c5dc6e
[ "MIT" ]
2
2020-07-24T17:49:01.000Z
2020-08-31T19:57:35.000Z
class Solution: def watchedVideosByFriends(self, watchedVideos: List[List[str]], friends: List[List[int]], ID: int, level: int) -> List[str]: n = len(friends) # BFS frontier = [ID] levels = {ID : 0} nsteps = 0 while frontier: if level == 0: break level -= 1 next_level = [] for u in frontier: for v in friends[u]: if v not in levels: levels[v] = nsteps + 1 next_level.append(v) frontier = next_level nsteps += 1 counter = collections.Counter() for ID in frontier: for video in watchedVideos[ID]: counter[video] += 1 return sorted(counter, key=lambda x : (counter[x], x))
33.88
129
0.472255
690f42d3fe72fbc49129a7da627897da2c12d466
2,614
py
Python
tests/fixtures/client.py
radiac/mara
413f1f9f4c7117839a8c03d72733d6f75494ddd3
[ "BSD-3-Clause" ]
16
2015-11-22T13:12:46.000Z
2020-09-04T06:42:55.000Z
tests/fixtures/client.py
radiac/mara
413f1f9f4c7117839a8c03d72733d6f75494ddd3
[ "BSD-3-Clause" ]
8
2016-01-09T23:32:46.000Z
2019-09-30T23:30:49.000Z
tests/fixtures/client.py
radiac/mara
413f1f9f4c7117839a8c03d72733d6f75494ddd3
[ "BSD-3-Clause" ]
7
2016-07-19T04:39:31.000Z
2020-09-04T06:43:06.000Z
from __future__ import annotations import logging import socket import pytest from .constants import TEST_HOST, TEST_PORT logger = logging.getLogger("tests.fixtures.client") class BaseClient: """ Blocking test client to connect to an app server """ name: str def __init__(self, name: str): self.name = name def __str__(self): return self.name class SocketClient(BaseClient): socket: socket.socket | None buffer: bytes def __init__(self, name: str): super().__init__(name) self.buffer = b"" def connect(self, host: str, port: int): logger.debug(f"Socket client {self} connecting") self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) self.socket.connect((host, port)) logger.debug(f"Socket client {self} connected") def write(self, raw: bytes): if not self.socket: raise ValueError("Socket not open") logger.debug(f"Socket client {self} writing {raw!r}") self.socket.sendall(raw) def read(self, len: int = 1024) -> bytes: if not self.socket: raise ValueError("Socket not open") raw: bytes = self.socket.recv(len) logger.debug(f"Socket client {self} received {raw!r}") return raw def read_line(self, len: int = 1024) -> bytes: if b"\r\n" not in self.buffer: self.buffer += self.read(len) if b"\r\n" not in self.buffer: raise ValueError("Line not found") line, self.buffer = self.buffer.split(b"\r\n", 1) return line def close(self): if not self.socket: raise ValueError("Socket not open") logger.debug(f"Socket client {self} closing") self.socket.close() logger.debug(f"Socket client {self} closed") @pytest.fixture def socket_client_factory(request: pytest.FixtureRequest): """ Socket client factory fixture Usage:: def test_client(app_harness, socket_client_factory): app_harness(myapp) client = socket_client_factory() client.write(b'hello') assert client.read() == b'hello' """ clients = [] def connect(name: str | None = None, host: str = TEST_HOST, port: int = TEST_PORT): client_name = request.node.name if name is not None: client_name = f"{client_name}:{name}" client = 
SocketClient(client_name) client.connect(host, port) clients.append(client) return client yield connect for client in clients: client.close()
25.627451
87
0.613236
690f93b1369267fc52a8a48543ba3893e460a7b1
7,768
py
Python
pi.py
przemyslawgarlinski/pi_iowrap
3eef421ebe718dd3bfe1723d1ede6b6a7bc76599
[ "MIT" ]
1
2017-11-26T22:12:16.000Z
2017-11-26T22:12:16.000Z
pi.py
przemyslawgarlinski/pi_iowrap
3eef421ebe718dd3bfe1723d1ede6b6a7bc76599
[ "MIT" ]
null
null
null
pi.py
przemyslawgarlinski/pi_iowrap
3eef421ebe718dd3bfe1723d1ede6b6a7bc76599
[ "MIT" ]
null
null
null
"""Standard raspberry GPIO access layer. It defines abstract layer that extends InOutInterface to access all standard ports on rapsberry pi. It uses RPi.GPIO under the hood. Thanks to that you have a standardized way of accessing these ports, as well as any others implementing InOutInterface. """ import logging from base import InOutInterface from base import get_gpio from base import Settings from base import PortListener from exceptions import InvalidPortNumberError from port import Port class PiInterface(InOutInterface): """Standard GPIO interface abstraction layer. Some examples of raw calls to ports using RPi.GPIO GPIO.setmode(GPIO.BOARD) // set usual port numbering GPIO.setup(7, GPIO.OUT) GPIO.output(7, GPIO.HIGH) GPIO.output(7, GPIO.LOW) GPIO.cleanup() """ _GROUND = (6, 9, 14, 20, 25, 30, 34, 39) _POWER_5V = (2, 4) _POWER_3V3 = (1, 17) _I2C = (3, 5, 27, 28) _FORBIDDEN = _GROUND + _POWER_5V + _POWER_3V3 + _I2C PULL_UP = 'pull_up' PULL_DOWN = 'pull_down' def __init__(self): super(PiInterface, self).__init__(40) for number in range(1, 41): if number not in self._FORBIDDEN: self._ports[number] = Port(self, number) # Defines the pull up or pull down rezistor for inputs. # Possible values are: # 1. self.PULL_UP # 2. self.PULL_DOWN # 3. 
None (input fluctuating by default) self.pull_up_down_rezistor = self.PULL_UP self._port_listeners = {} self._initialize_ports() def __str__(self): return 'Raspberry PI GPIO' def _validate_port_number(self, port_number): super(PiInterface, self)._validate_port_number(port_number) if port_number in self._GROUND: raise InvalidPortNumberError( 'This port number(%d) is reserved for GROUND.', port_number) if port_number in self._POWER_3V3: raise InvalidPortNumberError( 'This port number(%d) is reserved for 3.3V POWER.', port_number) if port_number in self._POWER_5V: raise InvalidPortNumberError( 'This port number(%d) is reserved for 5V POWER.', port_number) if port_number in self._I2C: raise InvalidPortNumberError( 'This port number(%d) is reserved for I2c.', port_number) if port_number in self._FORBIDDEN: raise InvalidPortNumberError( 'This port number(%d) is forbidden to take.', port_number) def _gpio_setup(self, port_number, gpio_attr_name): self._validate_port_number(port_number) if Settings.IS_NO_HARDWARE_MODE: logging.warning('No hardware mode, no value written') else: gpio = get_gpio() if gpio_attr_name == 'IN': # Special case for settings port as input. # Pullup or pulldown rezistor should be set here. 
kwargs = {} if self.pull_up_down_rezistor == self.PULL_UP: kwargs['pull_up_down'] = gpio.PUD_UP elif self.pull_up_down_rezistor == self.PULL_DOWN: kwargs['pull_up_down'] = gpio.PUD_DOWN gpio.setup( port_number, getattr(gpio, gpio_attr_name), **kwargs) else: gpio.setup(port_number, getattr(gpio, gpio_attr_name)) def _gpio_output(self, port_number, value): self._validate_port_number(port_number) if Settings.IS_NO_HARDWARE_MODE: logging.warning('No hardware mode, no value written') else: gpio = get_gpio() gpio.output( port_number, gpio.HIGH if value == self.HIGH else gpio.LOW ) def get_value(self, port_number): self._validate_port_number(port_number) value = self._check_no_hardware_port_value(port_number) if value is not None: return value else: gpio = get_gpio() value = gpio.input(port_number) # logging.debug( # 'Read gpio port value (%s): %s', # self.get_port(port_number), # value) return self.HIGH if value == gpio.HIGH else self.LOW def set_as_input(self, port_number): self._gpio_setup(port_number, 'IN') self._in_out_registry[port_number] = self._INPUT return self def set_as_output(self, port_number): self._gpio_setup(port_number, 'OUT') self._in_out_registry[port_number] = self._OUTPUT return self def set_high(self, port_number): self._validate_port_number(port_number) self._validate_write_port_number(port_number) self._gpio_output(port_number, self.HIGH) return self def set_low(self, port_number): self._validate_port_number(port_number) self._validate_write_port_number(port_number) self._gpio_output(port_number, self.LOW) return self def add_event( self, port_number, on_rising_callback=None, on_falling_callback=None): """Adds listening event on given port. In this case 2nd argument passed to a callback is a value read during callback invocation, which in theory might not be the one that actually cause triggering the event. 
""" if Settings.IS_NO_HARDWARE_MODE: logging.warning('No hardware mode, adding read event failed.') else: port_listener = self._port_listeners.get(port_number) if not port_listener: port_listener = _PiPortListener(self.get_port(port_number)) gpio = get_gpio() gpio.add_event_detect( port_number, gpio.BOTH, callback=port_listener.trigger_callbacks, bouncetime=Settings.READ_SWITCH_DEBOUNCE) self._port_listeners[port_number] = port_listener if on_rising_callback: logging.debug( 'Adding rising callback for interface (%s) on port %d', self, port_number) port_listener.add_rising_callback(on_rising_callback) if on_falling_callback: logging.debug( 'Adding falling callback for interface (%s) on port %d', self, port_number) port_listener.add_falling_callback(on_falling_callback) def clear_read_events(self, port_number): if not Settings.IS_NO_HARDWARE_MODE: get_gpio().remove_event_detect(port_number) if port_number in self._port_listeners: del self._port_listeners[port_number] class _PiPortListener(PortListener): def get_callbacks_to_trigger(self): if not self._rising_callbacks and not self._falling_callbacks: return [] to_trigger = [] port_value = self.port.value if (port_value == InOutInterface.HIGH): to_trigger.extend(self._rising_callbacks) logging.debug( 'Event detected on interface (%s) on port (%d). ' 'Type: RISING.', self.port.interface, self.port.number) elif (port_value == InOutInterface.LOW): to_trigger.extend(self._falling_callbacks) logging.debug( 'Event detected on interface (%s) on port (%d). ' 'Type: FALLING.', self.port.interface, self.port.number) return to_trigger
36.469484
80
0.608651
6910edd2d74b03e0e705138a62b8524d93b325d6
4,190
py
Python
cogs/ments.py
NastyHub/yewon
62e7666a6be8c970871d15af4dfbbcd3ff0a97fd
[ "MIT" ]
null
null
null
cogs/ments.py
NastyHub/yewon
62e7666a6be8c970871d15af4dfbbcd3ff0a97fd
[ "MIT" ]
null
null
null
cogs/ments.py
NastyHub/yewon
62e7666a6be8c970871d15af4dfbbcd3ff0a97fd
[ "MIT" ]
null
null
null
import discord import os import json from discord.ext import commands, tasks import time import asyncio import random from discord.utils import MAX_ASYNCIO_SECONDS ########################################################################## #generalrole = discord.utils.get(ctx.guild.roles, id=661454256251076613) #logchannel = discord.utils.get(client.get_all_channels(), id = 753619980548833401) #SERVER INFO ownerid = 631441731350691850 chanwoo = 631441731350691850 yewon = 819734468465786891 saji = 785135229894524959 donggu = 543680309661663233 hanjae = 406822771524501516 mintchocolate = 434328592739074048 csticker = 864745666580316170 dohyun = 652531481767444498 ########################################################################## #USEFUL FUNCTIONS ########################################################################## def checkidentity(supposeid): if int(supposeid) == chanwoo: return "chanwoo" elif int(supposeid) == yewon: return "yewon" elif int(supposeid) == saji: return "saji" elif int(supposeid) == donggu: return "donggu" elif int(supposeid) == hanjae: return "hanjae" elif int(supposeid) == mintchocolate: return "mint" elif int(supposeid) == csticker: return "csticker" elif int(supposeid) == dohyun: return "dohyun" else: return None def sendrandom(providedlist, min, max): howmuchtosend = random.randint(min, max) sizeoflist = len(providedlist) i = 1 returnlist = [] while i <= howmuchtosend: i += 1 thingtoadd = providedlist[random.randrange(0, sizeoflist)] returnlist.append(thingtoadd) return returnlist def getlist(sendid): sendid = str(sendid) path = "ments/ments.json" with open(path) as f: jsondata = json.load(f) f.close() try: mylist = jsondata[sendid] except: mylist = None return mylist class ments(commands.Cog): def __init__(self, client): self.client = client @commands.command(aliases=["테스트"]) async def test(self, ctx): checkme = checkidentity(ctx.author.id) #await ctx.message.delete() if ctx.author.id == 434328592739074048: await ctx.send('...나는 
모구모구') await ctx.send(file=discord.File('image/mogumogu.jpg')) else: grablist = getlist(ctx.author.id) if grablist == None: await ctx.send("아직 너는 잘 모르겠는데..") else: herelist = sendrandom(grablist, 1, 1) for i in herelist: await ctx.send(i) @commands.command() async def joinvc(self, ctx): if ctx.author.id == ownerid: await ctx.message.delete() channel = ctx.author.voice.channel await channel.connect() @commands.command() async def leavevc(self, ctx): if ctx.author.id == ownerid: await ctx.message.delete() await ctx.voice_client.disconnect() @commands.command() async def sendjson(self, ctx): if ctx.author.id == ownerid: await ctx.author.send(file=discord.File('ments/ments.json')) @commands.command(aliases=["전송"]) async def dm(self, ctx, target: discord.Member, *, message): try: await ctx.message.delete() except: await ctx.send("이 명령어는 서버에서 사용해 주세요") embed = discord.Embed( title = f"📨 메세지가 도착했습니다!", description = f"```{message}```\n\n답장해도 보내지지 않으니 직접 그 사람에게 말하세용\n명령어: `?전송 @유저 메세지 내용`", color = discord.Color.from_rgb(255,105,180) ) embed.set_footer(text=f"{ctx.author.name}님이 보낸 메세지") try: await target.send(embed=embed) except: await ctx.send(f"{target.mention}, 도착한 메세지가 있었지만 디엠 수신 기능이 꺼져있어 보내지 못하였습니다.") #find a channel with an id 879895499338039301 from all the servers the bot is in channel = discord.utils.get(self.client.get_all_channels(), id = 879895499338039301) await channel.send(embed=embed) def setup(client): client.add_cog(ments(client))
28.310811
100
0.587351
691177c77ccb4ef9f4c89444502885d55b50a94c
5,007
py
Python
data/process_data.py
MitraG/Disaster-Response-Project
179d875f9d16aba08cca14d9517531fb29b28041
[ "OLDAP-2.4" ]
null
null
null
data/process_data.py
MitraG/Disaster-Response-Project
179d875f9d16aba08cca14d9517531fb29b28041
[ "OLDAP-2.4" ]
null
null
null
data/process_data.py
MitraG/Disaster-Response-Project
179d875f9d16aba08cca14d9517531fb29b28041
[ "OLDAP-2.4" ]
null
null
null
#First, we import the relevant libraries import sys import pandas as pd from sqlalchemy import create_engine def load_data(messages_filepath, categories_filepath): '''This function will load the messages and categories datasets. Then, this function will merge the datasets by left join using the common id and then return a pandas dataframe. If the input is invalid or the data does not exist, this function will raise an error. INPUT: messages_filepath --> location of messages data file from the project root categories_filepath --> location of the categories data file from the project root OUTPUT: df --> a DataFrame containing the merged dataset ''' #load the messages dataset messages = pd.read_csv(messages_filepath) #load the categories dataset categories = pd.read_csv(categories_filepath) #merge the two datasets df = pd.merge(messages, categories, on='id', how = 'left') return df def clean_data(df): ''' This function will clean and prepare the merged data to make it more efficient to work with. The steps this function will take to clean and prepare the data are: - Split the categories into separate category columns - Rename every column to its corresponding category - Convert category values to a boolean format (0 and 1) - Replace the original categories column in the merged dataframe with the new category columns - Drop any dulplicates in the newly merged dataset If the input is invalid or the data does not exist, this function will raise an error. INPUT: df --> a Pandas DataFrame with the merged data OUTPUT: df --> a new Pandas Dataframe with each category as a column and its entries as 0/1 indicators. This is to flag if a message is classified under each category column. 
''' #Split the categories into 36 individual category columns and create a dataframe cat_cols = df["categories"].str.split(";", expand=True) #Rename every column to its corresponding category ##First, calling the first row of cat_cols to extract a new list of new column names ##Using a lambda function that takes everything ##up to the second to last character of each string with slicing row = cat_cols.iloc[0] string_slicer = lambda x: x[:-2] cat_colnames = [string_slicer(i) for i in list(row)] cat_cols.columns = cat_colnames #Convert category values to a boolean format (0 and 1) #Iterating through the category columns in df to keep only the last character of each string (the 1 or 0) ##Then convert the string into a numeric value ##Using the slicing method once again int_slicer = lambda x: int(x[-1]) for column in cat_cols: cat_cols[column] = [int_slicer(i) for i in list(cat_cols[column])] #Replace the original categories column in the merged dataframe with the new category columns df = df.drop(['categories'], axis=1) df = pd.merge(df, cat_cols, left_index=True, right_index=True) df['related'] = df['related'].astype('str').str.replace('2', '1') df['related'] = df['related'].astype('int') #Drop any dulplicates in the newly merged dataset df = df.drop_duplicates() return df def save_data(df, database_filename): ''' This function will load the prepared data into a SQLite database file. If the input is invalid or the data does not exist, this function will raise an error. INPUT: df --> a Pandas DataFrame containing the prepared data DisasterResponse.db --> database to store data for model ingestion ''' engine = create_engine('sqlite:///DisasterResponse.db') df.to_sql('categorised_messages', engine, index=False, if_exists='replace') def main(): ''' This is the mail ETL function that extracts, transforms and loads the data. 
''' if len(sys.argv) == 4: messages_filepath, categories_filepath, database_filepath = sys.argv[1:] print('Loading data...\n MESSAGES: {}\n CATEGORIES: {}' .format(messages_filepath, categories_filepath)) df = load_data(messages_filepath, categories_filepath) print('Cleaning data...') df = clean_data(df) print('Saving data...\n DATABASE: {}'.format(database_filepath)) save_data(df, database_filepath) print('Cleaned data saved to database!') else: print('Please provide the filepaths of the messages and categories '\ 'datasets as the first and second argument respectively, as '\ 'well as the filepath of the database to save the cleaned data '\ 'to as the third argument. \n\nExample: python process_data.py '\ 'disaster_messages.csv disaster_categories.csv '\ 'DisasterResponse.db') if __name__ == '__main__': main()
37.931818
117
0.675654
6911b43b591ee3e9e1fa26ab8ffa4f05986921b5
7,123
py
Python
ai_challenge/agents/_ignore_Agent.py
village-people/flying-pig
c86b589aadb02dbfd42a917a388c2b8488ecd338
[ "MIT" ]
72
2017-05-22T18:22:58.000Z
2019-03-11T22:49:00.000Z
ai_challenge/agents/_ignore_Agent.py
village-people/flying-pig
c86b589aadb02dbfd42a917a388c2b8488ecd338
[ "MIT" ]
1
2017-06-09T05:26:43.000Z
2017-08-22T11:41:41.000Z
ai_challenge/agents/_ignore_Agent.py
village-people/flying-pig
c86b589aadb02dbfd42a917a388c2b8488ecd338
[ "MIT" ]
3
2017-05-25T08:49:27.000Z
2019-09-09T18:41:54.000Z
import torch from argparse import ArgumentParser, Namespace import logging import os from collections import namedtuple import random import torch Transition = namedtuple('Transition', ('state', 'action', 'next_state', 'reward', 'done')) class AgentMemory(object): def __init__(self, capacity): self.capacity = capacity self.reset() def push(self, *args): """Saves a transition.""" if len(self.memory) < self.capacity: self.memory.append(None) self.memory[self.position] = Transition(*args) self.position = (self.position + 1) % self.capacity def pushNewObservation(self, observation, reward, done, info, action): if self._lastState is None: self._lastState = observation else: next_state = observation self.push(self._lastState, action, next_state, reward, done) def sample(self, batch_size): return random.sample(self.memory, batch_size) def last(self): return [] if len(self.memory) <= 0 else [self.memory[-1]] def all(self): return self.memory[:] def reset(self): self.memory = [] self.position = 0 self._lastState = None def __len__(self): return len(self.memory) class AgentModel: _model = None _optimizer = None _criterion = None _loaded = False def __init__(self, saveFolder, modelName, logger=None, modelSaveSuffix=""): self._model = None self._optimizer = None self.modelName = modelName self.modelPath = os.path.join(saveFolder, modelName + modelSaveSuffix + ".tar") self.bestModelPath = os.path.join(saveFolder, modelName + modelSaveSuffix + "_best.tar") self._maxMeanReward = -1 if logger is None: self.logger = logging.getLogger(modelName + "_" + modelSaveSuffix) else: self.logger = logger def loadModel(self, model, optimizer, criterion): self._model = model self._optimizer = optimizer self._criterion = criterion self._loaded = True def loaded(self): return self._loaded def saveModel(self, epoch, meanReward): if self._model is None: self.logger.info("No model to save") return if meanReward > self._maxMeanReward: self._maxMeanReward= meanReward torch.save({ 'epoch': epoch + 1, 
'arch': self.modelName, 'state_dict': self._model.state_dict(), 'meanReward': meanReward, 'bestMeanReward': self._maxMeanReward }, self.bestModelPath) torch.save({ 'epoch': epoch + 1, 'arch': self.modelName, 'state_dict': self._model.state_dict(), 'mean_reward': meanReward, 'best_mean_reward': self._maxMeanReward }, self.modelPath) def loadModelFromFile(self, path): if os.path.isfile(path): self.logger.info("=> loading checkpoint '{}'".format(path)) checkpoint = torch.load(path) self.start_epoch = checkpoint['epoch'] self._maxMeanReward = checkpoint['best_mean_reward'] self._model.load_state_dict(checkpoint['state_dict']) self.logger.info("=> loaded checkpoint '{}' (epoch {})" .format(path, checkpoint['epoch'])) else: self.logger.debug("=> no checkpoint found at '{}'".format(path)) def modelReport(self): pass class Agent: def __init__(self, name, cfg): self.modelDataType = cfg.general.use_cuda self.saveFolder = cfg.general.save_folder self.logger = logging.getLogger(name + "_" + str(self.agentID)) self.sharedModel = cfg.model.shared self.transitionMemory = cfg.model.transition_memory self._modelClass = None self._memory = AgentMemory(self.transitionMemory) self._crtStep = 0 self._crtEpoch = 0 self._useCUDA = False if self.modelDataType: self.dtype = torch.cuda.FloatTensor self._useCUDA = True else: self.dtype = torch.FloatTensor #Instantiate Agents model if self.sharedModel: if hasattr(self.env, "_agentsModel"): self._modelClass = self.env._agentsModel else: self._modelClass = AgentModel(self.saveFolder, self.name, logger=self.logger) self.env._agentsModel = self._modelClass else: self._modelClass = AgentModel(self.saveFolder, self.name, logger=self.logger, modelSaveSuffix=str(self.agentID)) """ Baseline methods. 
Should not be overridden when extending """ def __post_init__(self): if not (self.sharedModel and self._modelClass.loaded()): self._createLearningArchitecture() self._modelClass._model.type(self.dtype) self.logger.info("Loaded architecture") def act(self, observation, reward, done, is_training): self._crtStep += 1 observation = observation reward = reward action = self._act(observation, reward, done, is_training) self._postAction() self._memory.pushNewObservation(observation, reward, done, None, action) self._optimizeModel() return action.view(-1) def restart(self): """ Called when game restarts """ self._lastState = None self._restart() def epochFinished(self): """ Called after end of training epoch """ self._crtEpoch += 1 self._epochFinished() pass def report(self): """ Should log internal information """ self._modelClass.modelReport() self._report() def saveModel(self, epoch, meanReward): """ save model information """ self._modelClass.saveModel(epoch, meanReward) self._saveModel(epoch, meanReward) """ Classes extending this class should override only methods starting "_" to keep base class methods """ def _act(self, observation, reward, done, info): pass def _reset(self): pass def _epochFinished(self): pass def _report(self): pass def _saveModel(self, epoch, meanReward): pass def _postAction(self): pass def _createLearningArchitecture(self): """ Should create learning architecture #!!! Instantiate self._modelClass._model (sibling of nn.Module) #Instantiate other learning models """ self._modelClass._model = None def _optimizeModel(self): """ Is called after registering each new transition. """ pass
28.378486
80
0.579952
691235d4163755651c608d7db64917c20c45cfde
1,568
py
Python
randomseq/modules/make_random.py
andreagrioni/special-couscous
17b8dcd0bcafab2f6952ddf3b38cd1292f62cee7
[ "MIT" ]
null
null
null
randomseq/modules/make_random.py
andreagrioni/special-couscous
17b8dcd0bcafab2f6952ddf3b38cd1292f62cee7
[ "MIT" ]
1
2021-08-17T12:17:29.000Z
2021-08-17T12:17:29.000Z
randomseq/modules/make_random.py
andreagrioni/special-couscous
17b8dcd0bcafab2f6952ddf3b38cd1292f62cee7
[ "MIT" ]
null
null
null
import pandas as pd from modules import bedtools from modules import intervals def generator(ARGUMENTS): if not ARGUMENTS.input_bed and not ARGUMENTS.gtf_anno: print(f"get random intervals from genome {ARGUMENTS.reference}") RANDOM_BED = bedtools.random_interval( ARGUMENTS.reference, ARGUMENTS.int_size, ARGUMENTS.N ) elif ARGUMENTS.gtf_anno: print(f"get intervals from annotation file {ARGUMENTS.gtf_anno}") RANDOM_BED = intervals.gtf_to_bed( file_name=ARGUMENTS.gtf_anno, feature=ARGUMENTS.feature, int_size=ARGUMENTS.int_size, N=ARGUMENTS.N, ) elif ARGUMENTS.input_bed: print(f"load input bed file {ARGUMENTS.input_bed}") RANDOM_BED = ARGUMENTS.input_bed else: print("nothing to do") if ARGUMENTS.avoid_int and RANDOM_BED: print("removing positive intervals") RANDOM_BED = bedtools.intersect( RANDOM_BED, ARGUMENTS.avoid_int, opt=ARGUMENTS.intersect_opt ) return RANDOM_BED def make_set(ARGUMENTS): df_list = list() tmp_size = 0 while tmp_size < ARGUMENTS.N: RANDOM_BED = generator(ARGUMENTS) tmp_df = pd.read_csv(RANDOM_BED, sep="\t", header=None) tmp_size += tmp_df.shape[0] df_list.append(tmp_df) if df_list: merge_df = pd.concat(df_list, axis=0).sample(n=ARGUMENTS.N) merge_df.to_csv(RANDOM_BED, sep="\t", header=False, index=False) return RANDOM_BED if __name__ == "__main__": pass
28
73
0.658163
6913dde4acd202b7bb00fc51376d6dcff16b4ba7
100
py
Python
q025.py
sjf/project_euler
8514710e2018136ba8a087ae58cba35370700f6f
[ "MIT" ]
null
null
null
q025.py
sjf/project_euler
8514710e2018136ba8a087ae58cba35370700f6f
[ "MIT" ]
null
null
null
q025.py
sjf/project_euler
8514710e2018136ba8a087ae58cba35370700f6f
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 f=1 fprev=1 n=2 while f < 10**999: f,fprev = f + fprev,f n += 1 print(n)
11.111111
23
0.58
69146b024afb8e8179d7794495072111c92f9cf1
532
py
Python
Modulo3/aula18.py
Werberty/Curso-em-Video-Python3
24c0299edd635fb9c2db2ecbaf8532d292f92d49
[ "MIT" ]
1
2022-03-06T11:37:47.000Z
2022-03-06T11:37:47.000Z
Modulo3/aula18.py
Werberty/Curso-em-Video-Python3
24c0299edd635fb9c2db2ecbaf8532d292f92d49
[ "MIT" ]
null
null
null
Modulo3/aula18.py
Werberty/Curso-em-Video-Python3
24c0299edd635fb9c2db2ecbaf8532d292f92d49
[ "MIT" ]
null
null
null
test = list() test.append('Werberty') test.append(21) galera = list() galera.append(test[:]) test[0] = 'Maria' test[1] = 22 galera.append(test[:]) print(galera) pessoal = [['joão', 19], ['Ana', 33], ['Joaquim', 13], ['Maria', 45]] print(pessoal[1]) print(pessoal[2][1]) for p in pessoal: print(f'{p[0]} tem {p[1]} anos de idade.') galerinha = list() dado = list() for c in range(0, 3): dado.append(str(input('Nome: '))) dado.append(int(input('idade: '))) galerinha.append(dado[:]) dado.clear() print(galerinha)
22.166667
69
0.614662
6914bfcaeef4fb7954973326e68cefa7ccd0e8c9
4,667
py
Python
summariser/ngram_vector/vector_generator.py
UKPLab/ijcai2019-relis
8a40762dcfa90c075a4f6591cbdceb468026ef17
[ "MIT" ]
5
2019-06-30T14:45:12.000Z
2020-07-26T12:59:36.000Z
summariser/ngram_vector/vector_generator.py
UKPLab/ijcai2019-relis
8a40762dcfa90c075a4f6591cbdceb468026ef17
[ "MIT" ]
1
2020-07-11T10:47:57.000Z
2020-09-16T10:53:36.000Z
summariser/ngram_vector/vector_generator.py
UKPLab/ijcai2019-relis
8a40762dcfa90c075a4f6591cbdceb468026ef17
[ "MIT" ]
2
2019-12-24T02:10:42.000Z
2020-04-27T05:39:49.000Z
from summariser.ngram_vector.base import Sentence from summariser.utils.data_helpers import * from nltk.stem.porter import PorterStemmer from summariser.ngram_vector.state_type import * import random class Vectoriser: def __init__(self,docs,sum_len=100,no_stop_words=True,stem=True,block=1,base=200,lang='english'): self.docs = docs self.without_stopwords = no_stop_words self.stem = stem self.block_num = block self.base_length = base self.language = lang self.sum_token_length = sum_len self.stemmer = PorterStemmer() self.stoplist = set(stopwords.words(self.language)) self.sim_scores = {} self.stemmed_sentences_list = [] self.load_data() def sampleRandomReviews(self,num,heuristic_reward=True,rouge_reward=True,models=None): heuristic_list = [] rouge_list = [] act_list = [] for ii in range(num): state = State(self.sum_token_length, self.base_length, len(self.sentences), self.block_num, self.language) while state.available_sents != [0]: new_id = random.choice(state.available_sents) if new_id == 0: continue if new_id > 0 and len(self.sentences[new_id-1].untokenized_form.split(' ')) > self.sum_token_length: continue state.updateState(new_id-1,self.sentences) actions = state.historical_actions act_list.append(actions) if heuristic_reward: rew = state.getTerminalReward(self.sentences,self.stemmed_sentences_list,self.sent2tokens,self.sim_scores) heuristic_list.append(rew) if rouge_reward: assert models is not None r_dic = {} for model in models: model_name = model[0].split('/')[-1].strip() rew = state.getOptimalTerminalRougeScores(model) r_dic[model_name] = rew rouge_list.append(r_dic) return act_list, heuristic_list, rouge_list def getSummaryVectors(self,summary_acts_list): vector_list = [] for act_list in summary_acts_list: state = State(self.sum_token_length, self.base_length, len(self.sentences), self.block_num, self.language) for i, act in enumerate(act_list): state.updateState(act, self.sentences, read=True) vector = state.getSelfVector(self.top_ngrams_list, 
self.sentences) vector_list.append(vector) return vector_list def sent2tokens(self, sent_str): if self.without_stopwords and self.stem: return sent2stokens_wostop(sent_str, self.stemmer, self.stoplist, self.language) elif self.without_stopwords == False and self.stem: return sent2stokens(sent_str, self.stemmer, self.language) elif self.without_stopwords and self.stem == False: return sent2tokens_wostop(sent_str, self.stoplist, self.language) else: # both false return sent2tokens(sent_str, self.language) def load_data(self): self.sentences = [] for doc_id, doc in enumerate(self.docs): doc_name, doc_sents = doc doc_tokens_list = [] for sent_id, sent_text in enumerate(doc_sents): token_sent = word_tokenize(sent_text, self.language) current_sent = Sentence(token_sent, doc_id, sent_id + 1) untokenized_form = untokenize(token_sent) current_sent.untokenized_form = untokenized_form current_sent.length = len(untokenized_form.split(' ')) self.sentences.append(current_sent) sent_tokens = self.sent2tokens(untokenized_form) doc_tokens_list.extend(sent_tokens) stemmed_form = ' '.join(sent_tokens) self.stemmed_sentences_list.append(stemmed_form) #print('total sentence num: ' + str(len(self.sentences))) self.state_length_computer = StateLengthComputer(self.block_num, self.base_length, len(self.sentences)) self.top_ngrams_num = self.state_length_computer.getStatesLength(self.block_num) self.vec_length = self.state_length_computer.getTotalLength() sent_list = [] for sent in self.sentences: sent_list.append(sent.untokenized_form) self.top_ngrams_list = getTopNgrams(sent_list, self.stemmer, self.language, self.stoplist, 2, self.top_ngrams_num)
42.045045
122
0.630812
69153ffe65a563075cabcea48239e1f649f663fd
320
py
Python
setup.py
onidzelskyi/python_betbright_test
5de60c10857829bc3d326558132fa512776e952d
[ "MIT" ]
null
null
null
setup.py
onidzelskyi/python_betbright_test
5de60c10857829bc3d326558132fa512776e952d
[ "MIT" ]
null
null
null
setup.py
onidzelskyi/python_betbright_test
5de60c10857829bc3d326558132fa512776e952d
[ "MIT" ]
null
null
null
from setuptools import setup setup(name='betbright_test', version='0.1', description='Python product category classification kit', url='', author='Oleksii Nidzelskyi', author_email='alexey.education@gmail.com', license='MIT', packages=['betbright_test'], zip_safe=False)
26.666667
63
0.6625
69157e91f425c2893ef06beb1c157d48d39855f1
198
py
Python
docsie_universal_importer/providers/google_cloud_storage/urls.py
Zarif99/test-universal
062972ed64d9f048de702ab1edf4025cffca2abb
[ "BSD-3-Clause" ]
null
null
null
docsie_universal_importer/providers/google_cloud_storage/urls.py
Zarif99/test-universal
062972ed64d9f048de702ab1edf4025cffca2abb
[ "BSD-3-Clause" ]
null
null
null
docsie_universal_importer/providers/google_cloud_storage/urls.py
Zarif99/test-universal
062972ed64d9f048de702ab1edf4025cffca2abb
[ "BSD-3-Clause" ]
null
null
null
from docsie_universal_importer.providers.base.urls import default_urlpatterns from .import_provider import GoogleCloudStorageProvider urlpatterns = default_urlpatterns(GoogleCloudStorageProvider)
33
77
0.89899
691732087c532907538627537cd1a0c66892dfed
2,182
py
Python
example/multicut/performance/simple_experiments.py
abailoni/elf
e3b459b4dcc1d72a8dd3d38eabbf3b7183285372
[ "MIT" ]
23
2019-09-07T23:02:08.000Z
2022-03-19T09:50:58.000Z
example/multicut/performance/simple_experiments.py
abailoni/elf
e3b459b4dcc1d72a8dd3d38eabbf3b7183285372
[ "MIT" ]
34
2019-08-07T11:54:01.000Z
2022-03-29T09:50:54.000Z
example/multicut/performance/simple_experiments.py
abailoni/elf
e3b459b4dcc1d72a8dd3d38eabbf3b7183285372
[ "MIT" ]
12
2019-08-23T09:01:29.000Z
2021-11-09T01:21:20.000Z
import argparse import json import os import time from elf.segmentation.multicut import get_multicut_solver, _to_objective from elf.segmentation.utils import load_multicut_problem def simple_performance_experiments(problem, solvers): os.makedirs("problems", exist_ok=True) path = f"./problems/{problem}" sample, size = problem.split("_") graph, costs = load_multicut_problem(sample, size, path) objective = _to_objective(graph, costs) results = {} print("Measure performance for sample:", problem) for solver_name in solvers: # get the mode for RAMA solvers if solver_name.startswith("rama"): _, mode = solver_name.split("_") solver = get_multicut_solver("rama") kwargs = {"mode": mode} else: solver = get_multicut_solver(solver_name) kwargs = {} t0 = time.time() node_labels = solver(graph, costs, **kwargs) t0 = time.time() - t0 energy = objective.evalNodeLabels(node_labels) print("Solver", solver_name, "runtime:", t0, "s, energy:", energy) results[solver_name] = (energy, t0) return results # TODO add large problems! where decomp should shine... def main(): parser = argparse.ArgumentParser() # default_solvers = ["decomposition", "kernighan-lin", "greedy-additive", "greedy-fixation"] default_solvers = ["decomposition", "kernighan-lin", "greedy-additive", "greedy-fixation", "rama_P", "rama_PD+"] parser.add_argument("--solvers", "-s", default=default_solvers) default_problems = ["A_small", "B_small", "C_small", "A_medium", "B_medium", "C_medium"] parser.add_argument("--problems", "-p", default=default_problems) # TODO save as a single csv instead print("Simple multicut performance experiments:") args = parser.parse_args() for problem in args.problems: res = simple_performance_experiments(problem, args.solvers) res_path = f"./results_{problem}.json" with open(res_path, "w") as f: json.dump(res, f, sort_keys=True, indent=2) if __name__ == "__main__": main()
34.634921
96
0.648029
6917abd481963fb432ab3aee8ef82db2c9fb0a45
104
py
Python
app/utils/Constants.py
jonzxz/project-piscator
588c8b1ac9355f9a82ac449fdbeaa1ef7eb441ef
[ "MIT" ]
null
null
null
app/utils/Constants.py
jonzxz/project-piscator
588c8b1ac9355f9a82ac449fdbeaa1ef7eb441ef
[ "MIT" ]
null
null
null
app/utils/Constants.py
jonzxz/project-piscator
588c8b1ac9355f9a82ac449fdbeaa1ef7eb441ef
[ "MIT" ]
1
2021-02-18T03:08:21.000Z
2021-02-18T03:08:21.000Z
IMAP_GMAIL = 'imap.gmail.com' IMAP_OUTLOOK = 'outlook.office365.com' IMAP_YAHOO = 'imap.mail.yahoo.com'
26
38
0.759615
691961c351775c6d1643e0e7e934a4b430e01308
35
py
Python
tests/__init__.py
anrputina/oadds
f2e058f48a9edfbdd9ee7229b4f303cdd4543190
[ "MIT" ]
5
2020-12-25T19:38:32.000Z
2021-06-12T20:39:05.000Z
tests/__init__.py
anrputina/ods-anomalydetection
f2e058f48a9edfbdd9ee7229b4f303cdd4543190
[ "MIT" ]
63
2020-08-04T12:08:09.000Z
2020-10-09T12:08:09.000Z
tests/__init__.py
anrputina/ods-anomalydetection
f2e058f48a9edfbdd9ee7229b4f303cdd4543190
[ "MIT" ]
null
null
null
"""Unit test package for oadds."""
17.5
34
0.657143
69212d7ab58acd86f0a6a7a1366fa6f42c3e9584
1,296
py
Python
src/openprocurement/tender/openua/procedure/models/award.py
ProzorroUKR/openprocurement.api
2855a99aa8738fb832ee0dbad4e9590bd3643511
[ "Apache-2.0" ]
10
2020-02-18T01:56:21.000Z
2022-03-28T00:32:57.000Z
src/openprocurement/tender/openua/procedure/models/award.py
quintagroup/openprocurement.api
2855a99aa8738fb832ee0dbad4e9590bd3643511
[ "Apache-2.0" ]
26
2018-07-16T09:30:44.000Z
2021-02-02T17:51:30.000Z
src/openprocurement/tender/openua/procedure/models/award.py
ProzorroUKR/openprocurement.api
2855a99aa8738fb832ee0dbad4e9590bd3643511
[ "Apache-2.0" ]
15
2019-08-08T10:50:47.000Z
2022-02-05T14:13:36.000Z
from schematics.types import StringType, BooleanType, MD5Type, BaseType from schematics.exceptions import ValidationError from schematics.types.compound import ModelType from openprocurement.api.models import ListType from openprocurement.tender.core.procedure.models.award import ( Award as BaseAward, PatchAward as BasePatchAward, PostAward as BasePostAward, ) from openprocurement.tender.core.procedure.models.milestone import QualificationMilestoneListMixin from openprocurement.tender.openua.procedure.models.item import Item class Award(QualificationMilestoneListMixin, BaseAward): complaints = BaseType() items = ListType(ModelType(Item, required=True)) qualified = BooleanType(default=False) eligible = BooleanType(default=False) def validate_qualified(self, data, qualified): if data["status"] == "active" and not qualified: raise ValidationError("This field is required.") def validate_eligible(self, data, eligible): if data["status"] == "active" and not eligible: raise ValidationError("This field is required.") class PatchAward(BasePatchAward): items = ListType(ModelType(Item, required=True)) qualified = BooleanType() eligible = BooleanType() class PostAward(BasePostAward): pass
35.027027
98
0.759259
6922080e0ef28e36cb9344b32948cabc43d991c9
4,448
py
Python
tests/test_exch_uniform.py
computationalmodelling/fidimag
07a275c897a44ad1e0d7e8ef563f10345fdc2a6e
[ "BSD-2-Clause" ]
53
2016-02-27T09:40:21.000Z
2022-01-19T21:37:44.000Z
tests/test_exch_uniform.py
computationalmodelling/fidimag
07a275c897a44ad1e0d7e8ef563f10345fdc2a6e
[ "BSD-2-Clause" ]
132
2016-02-26T13:18:58.000Z
2021-12-01T21:52:42.000Z
tests/test_exch_uniform.py
computationalmodelling/fidimag
07a275c897a44ad1e0d7e8ef563f10345fdc2a6e
[ "BSD-2-Clause" ]
32
2016-02-26T13:21:40.000Z
2022-03-08T08:54:51.000Z
from __future__ import print_function import numpy as np from fidimag.atomistic import Sim from fidimag.common import CuboidMesh from fidimag.atomistic import UniformExchange def init_m(pos): x, y, z = pos return (x - 0.5, y - 0.5, z - 0.5) def test_exch_1d(): """ Test the x component of the exchange field in a 1D mesh, with the spin ordering: 0 1 2 3 4 5 """ mesh = CuboidMesh(nx=5, ny=1, nz=1) sim = Sim(mesh) exch = UniformExchange(1) sim.add(exch) sim.set_m(init_m, normalise=False) field = exch.compute_field() assert field[0] == 1 assert field[1 * 3] == 2 assert field[2 * 3] == 4 assert field[3 * 3] == 6 assert field[4 * 3] == 3 assert np.max(field[2::3]) == 0 assert np.max(field[1::3]) == 0 def test_exch_1d_pbc(): mesh = CuboidMesh(nx=5, ny=1, nz=1, periodicity=(True, False, False)) sim = Sim(mesh) exch = UniformExchange(1) sim.add(exch) sim.set_m(init_m, normalise=False) field = exch.compute_field() assert field[0] == 1 + 4 assert field[3] == 2 assert field[6] == 4 assert field[9] == 6 assert field[12] == 3 + 0 assert np.max(field[2::3]) == 0 assert np.max(field[1::3]) == 0 def test_exch_2d(): mesh = CuboidMesh(nx=5, ny=2, nz=1) sim = Sim(mesh) exch = UniformExchange(1) sim.add(exch) sim.set_m(init_m, normalise=False) field = exch.compute_field() assert np.max(field[2::3]) == 0 assert field[0] == 1 assert field[3] == 2 + 1 assert field[6] == 1 + 2 + 3 assert field[9] == 2 + 3 + 4 assert field[12] == 3 + 4 def test_exch_2d_pbc2d(): """ Test the exchange field components in a 2D mesh with PBCs The mesh sites: 3 4 5 --> (0,1,0) (1,1,0) (2,1,0) y ^ 0 1 2 (0,0,0) (1,0,0) (2,0,0) | x --> The expected components are in increasing order along x """ mesh = CuboidMesh(nx=3, ny=2, nz=1, periodicity=(True, True, False)) print(mesh.neighbours) sim = Sim(mesh) exch = UniformExchange(1) sim.add(exch) sim.set_m(init_m, normalise=False) field = exch.compute_field() expected_x = np.array([3, 4, 5, 3, 4, 5]) expected_y = np.array([2, 2, 2, 2, 2, 2]) # Since the field ordering is 
now: fx1 fy1 fz1 fx2 ... # We extract the x components jumping in steps of 3 assert np.max(abs(field[::3] - expected_x)) == 0 # For the y component is similar, now we start at the 1th # entry and jump in steps of 3 assert np.max(abs(field[1::3] - expected_y)) == 0 # Similar fot he z component assert np.max(field[2::3]) == 0 def test_exch_3d(): """ Test the exchange field of the spins in this 3D mesh: bottom layer: 8 9 10 11 4 5 6 7 x 2 0 1 2 3 Assertions are according to the mx component of the spins, since J is set to 1 Spin components are given according to the (i, j) index position in the lattice: i lattice site [[ 0. 0. 0.] --> 0 j=0 [ 1. 0. 0.] --> 1 [ 2. 0. 0.] --> 2 [ 3. 0. 0.] --> 3 [ 0. 1. 0.] --> 4 j=1 [ 1. 1. 0.] ... Remember the field ordering: fx0, fy0, fz0, fx1, ... """ mesh = CuboidMesh(nx=4, ny=3, nz=2) sim = Sim(mesh) exch = UniformExchange(1) sim.add(exch) sim.set_m(init_m, normalise=False) field = exch.compute_field() # print field # Exchange from 0th spin assert field[0] == 1 # Exchange from 1st spin # spin: 2 0 5 13 # mx: 2 0 1 1 assert field[3] == 2 + 0 + 1 + 1 # Exchange from 2nd spin # spin: 3 1 6 14 # mx: 3 1 2 2 assert field[6] == 3 + 1 + 2 + 2 # ... assert field[9] == 2 + 3 + 3 assert field[4 * 3] == 1 assert field[5 * 3] == 5 assert field[6 * 3] == 10 assert field[7 * 3] == 11 def test_exch_energy_1d(): mesh = CuboidMesh(nx=2, ny=1, nz=1) sim = Sim(mesh) exch = UniformExchange(1.23) sim.add(exch) sim.set_m((0, 0, 1)) energy = exch.compute_energy() assert energy == -1.23 if __name__ == '__main__': # test_exch_1d() # test_exch_1d_pbc() # test_exch_2d() test_exch_2d_pbc2d() # test_exch_3d() # test_exch_energy_1d()
23.046632
77
0.546088
6922c9f5f58bcc2458ccc430c20fbd3d35a3615f
2,475
py
Python
burin/dxftypes.py
matthewSorensen/plotterstuff
5e694c21d3cc49690f8b20cc9c06b3365612ee12
[ "BSD-2-Clause" ]
1
2020-10-15T04:59:16.000Z
2020-10-15T04:59:16.000Z
burin/dxftypes.py
matthewSorensen/plotterstuff
5e694c21d3cc49690f8b20cc9c06b3365612ee12
[ "BSD-2-Clause" ]
null
null
null
burin/dxftypes.py
matthewSorensen/plotterstuff
5e694c21d3cc49690f8b20cc9c06b3365612ee12
[ "BSD-2-Clause" ]
null
null
null
from geomdl import BSpline import numpy as np import math import ezdxf import burin.types class Spline: def __init__(self,degree, control, knots): self.degree = degree self.control = np.array(control) self.knots = np.array(knots) def length_upper_bound(self): acc = 0 n, _ = self.control.shape for i in range(1,n): delta = self.control[i] - self.control[i - 1] acc += np.sqrt(delta.dot(delta)) return acc def render_to_tolerance(self,tolerance): curve = BSpline.Curve() curve.degree = self.degree curve.ctrlpts = self.control.tolist() curve.knotvector = self.knots.tolist() curve.sample_size = max(2, math.ceil(self.length_upper_bound() / tolerance)) return burin.types.BSpline(curve, tolerance) @staticmethod def from_dxf(s): return Spline(s.dxf.degree, s.control_points, s.knots) class Polyline: def __init__(self, points): self.points = points def render_to_tolerance(self, tolerance): # Here for parallelism with Splines - doesn't apply the tolerance return burin.types.Polyline(self.points[:,0:2]) @staticmethod def from_dxf(line): vertices = [v.dxf.location for v in line.vertices] if line.dxf.flags & 1: vertices.append(vertices[0]) return Polyline(np.array(vertices)) class Point: def __init__(self, coords): self.coords = coords def render_to_tolerance(self, _): a,b = self.coords[0:2] return burin.types.Point((a,b)) @staticmethod def from_dxf(point): return Point(np.array(point.dxf.location)) class Arc: def __init__(self, start, end, center): self.start = start self.end = end self.center = center def render_to_tolerance(self, _): return burin.types.Arc(self.start, self.end, self.center, False) @staticmethod def from_dxf(circle): """ Is it an arc? Is it a circle? Who knows? """ center, radius = np.array(circle.dxf.center)[0:2], circle.dxf.radius if isinstance(circle, ezdxf.entities.Arc): start = np.array(circle.start_point)[0:2] end = np.array(circle.end_point)[0:2] return Arc(start, end, center) point = center + np.array([radius, 0]) return Arc(point, point, center)
27.197802
84
0.606869
692480274c65e40eecef76118c129488a11778d0
161
py
Python
backend/microservices/audio-generator/config/routes/authentication.py
MuhamedAbdalla/Automatic-Audio-Book-Based-On-Emotion-Detection
72130ad037b900461af5be6d80b27ab29c81de5e
[ "MIT" ]
3
2021-04-26T00:17:14.000Z
2021-07-04T15:30:09.000Z
backend/microservices/audio-generator/config/routes/authentication.py
MuhamedAbdalla/Automatic-Audio-Book-Based-On-Emotion-Detection
72130ad037b900461af5be6d80b27ab29c81de5e
[ "MIT" ]
null
null
null
backend/microservices/audio-generator/config/routes/authentication.py
MuhamedAbdalla/Automatic-Audio-Book-Based-On-Emotion-Detection
72130ad037b900461af5be6d80b27ab29c81de5e
[ "MIT" ]
null
null
null
API_ROOT = 'http://localhost:6000' #'https://bookbeat.herokuapp.com' TOKEN_VALIDATION_ABS_ENDPOINT = API_ROOT + '/validate-token' REQ_USER_ID_KEY_NAME = 'id'
40.25
69
0.757764
6924fcbecc439543091a0747fd24dab3169d855c
238
py
Python
output/models/nist_data/list_pkg/date/schema_instance/nistschema_sv_iv_list_date_min_length_3_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
1
2021-08-14T17:59:21.000Z
2021-08-14T17:59:21.000Z
output/models/nist_data/list_pkg/date/schema_instance/nistschema_sv_iv_list_date_min_length_3_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
4
2020-02-12T21:30:44.000Z
2020-04-15T20:06:46.000Z
output/models/nist_data/list_pkg/date/schema_instance/nistschema_sv_iv_list_date_min_length_3_xsd/__init__.py
tefra/xsdata-w3c-tests
b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f
[ "MIT" ]
null
null
null
from output.models.nist_data.list_pkg.date.schema_instance.nistschema_sv_iv_list_date_min_length_3_xsd.nistschema_sv_iv_list_date_min_length_3 import NistschemaSvIvListDateMinLength3 __all__ = [ "NistschemaSvIvListDateMinLength3", ]
39.666667
182
0.886555
6925a6eef582f0d830aa78315162927b6a168d1b
2,717
py
Python
face_follow/src/follower.py
andrewtron3000/hacdc-ros-pkg
f3178ebc5a71889066db102e3b57beb0dcc1a204
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
face_follow/src/follower.py
andrewtron3000/hacdc-ros-pkg
f3178ebc5a71889066db102e3b57beb0dcc1a204
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
face_follow/src/follower.py
andrewtron3000/hacdc-ros-pkg
f3178ebc5a71889066db102e3b57beb0dcc1a204
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
#!/usr/bin/env python #********************************************************************* # Software License Agreement (BSD License) # # Copyright (c) 2011 andrewtron3000 # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # * Neither the name of the Willow Garage nor the names of its # contributors may be used to endorse or promote products derived # from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
#********************************************************************/ import roslib; roslib.load_manifest('face_follow') import rospy from geometry_msgs.msg import Point from geometry_msgs.msg import PointStamped from pid_control import PID from pid_control.msg import PIDDiagnostics from geometry_msgs.msg import Twist from geometry_msgs.msg import Vector3 pid_controller = PID.PIDController('face_follow', -0.2, 0.2, -0.2, 0.2) twist_publisher = rospy.Publisher("/cmd_vel", Twist) def listener(): rospy.init_node('listener', anonymous=True) while True: try: data = rospy.wait_for_message("face_coords", PointStamped, timeout=0.5) control = pid_controller.update(352.0/2.0 - data.point.x, data.point.x) twist_publisher.publish(Twist(angular = Vector3(x = 0.0, y = 0.0, z = control))) except: twist_publisher.publish(Twist(angular = Vector3(x = 0.0, y = 0.0, z = 0.0))) rospy.sleep(1.0) if __name__ == '__main__': listener()
45.283333
90
0.703717
69279b7254cd9d2ee23d0e66b83b60fdbe51cae2
4,359
py
Python
fangyu.py
gaogaotiantian/tybuild
f97a1fc5e15c35dd887dbcd8a5018e424ee79e78
[ "MIT" ]
null
null
null
fangyu.py
gaogaotiantian/tybuild
f97a1fc5e15c35dd887dbcd8a5018e424ee79e78
[ "MIT" ]
null
null
null
fangyu.py
gaogaotiantian/tybuild
f97a1fc5e15c35dd887dbcd8a5018e424ee79e78
[ "MIT" ]
null
null
null
# coding:utf-8 import matplotlib.pyplot as plt import matplotlib as mpl # Def def GetDefRate(d): if d/(3360.0+d) > 0.9: return 0.9 return d/(3360.0+d) def GetDamageRate(d): return 1-GetDefRate(d) def GetDefImp(currDef, prevDef, pofang): return 1 - GetDamageRate(currDef*(1-pofang)) / GetDamageRate(prevDef*(1-pofang)) # HP def GetHPImp(currHP, prevHP): return 1 - float(prevHP) / currHP # CriDef def GetCriDefRate(d): if d/(4410.0+d) > 0.9: return 0.9 return d/(4410.0+d) def GetCriDefImp(currCriDef, prevCriDef, Cri, CriPlus): prevRate = GetCriDefRate(prevCriDef) currRate = GetCriDefRate(currCriDef) deltaRate = currRate - prevRate return deltaRate / ((Cri-prevRate) + 1/CriPlus) # CriDec def GetCriDecRate(d): return d/(1050+d) def GetCriDecImp(currCriDec, prevCriDec, Cri, CriPlus): prevRate = GetCriDecRate(prevCriDec) currRate = GetCriDecRate(currCriDec) deltaRate = currRate - prevRate return deltaRate / ((CriPlus - prevRate) + 1/Cri) # Dodge def GetDodgeRate(d): return d/(4350.0+d*0.7) def GetRealDodgeRate(d, cri): if d+cri < 1: return d else: return d/(d+cri) def GetDodgeImp(currDodge, prevDodge, cri, criDam): currDodgeRate = GetRealDodgeRate(currDodge, cri) prevDodgeRate = GetRealDodgeRate(prevDodge, cri) return (currDodgeRate - prevDodgeRate)*1.7 / (1 + cri*criDam - prevDodgeRate * 0.7) # Yuling def GetYulingDamageRate(d): return 1-d/(d+3112.5) def GetYulingImp(currYuling, prevYuling): return 1- GetYulingDamageRate(currYuling) / GetYulingDamageRate(prevYuling) def GetVal(xo, yo, x, const): ret_x = [] ret_y = [] assert(len(xo) == len(yo)) lastx = xo[0] for x_curr in x: for i in range(1, len(xo)): thisx = xo[i] lastx = xo[i-1] if lastx <= x_curr <= thisx or lastx >= x_curr >= thisx: ret_x.append(x_curr) ret_y.append((((yo[i] - yo[i-1])/(xo[i]-xo[i-1]))*(x_curr-lastx)+yo[i-1])*const) return [ret_x, ret_y] if __name__ == "__main__": print mpl.matplotlib_fname() DEF_CONST = 120 HP_CONST = 432 CRI_DEF_CONST = 54 DODGE_CONST = 54 YULING_CONST = 54 fig = plt.figure() ax1 = 
fig.add_subplot(111) xa = [] defense_0 = [] defense_75 = [] HP = [] d = [] defCri_s = [] defCri_l = [] dodge = [] yuling = [] lastp = 0 first = True for p in range(1, 170, 1): if first != True: xa.append(p) defense_0.append(GetDefImp(p*DEF_CONST, lastp*DEF_CONST, 0)) defense_75.append(GetDefImp(p*DEF_CONST, lastp*DEF_CONST, 0.75)) HP.append(GetHPImp(p*HP_CONST, lastp*HP_CONST)) defCri_s.append(GetCriDefImp(p*CRI_DEF_CONST, lastp*CRI_DEF_CONST, 0.3, 1.0)) defCri_l.append(GetCriDefImp(p*CRI_DEF_CONST, lastp*CRI_DEF_CONST, 0.75, 1.9)) dodge.append(GetDodgeImp(p*DODGE_CONST, lastp*DODGE_CONST, 0.7, 1.9)) yuling.append(GetYulingImp(p*YULING_CONST, lastp*YULING_CONST)) else: first = False lastp = p print "d0:",GetVal(defense_0, xa, [x*0.001 for x in range(0, 20, 1)], DEF_CONST) print "d75:",GetVal(defense_75, xa, [x*0.001 for x in range(0, 20, 1)], DEF_CONST) print "hp:",GetVal(HP, xa, [x*0.001 for x in range(0, 20, 1)], HP_CONST) print "fb1:",GetVal(defCri_s, xa, [x*0.001 for x in range(0, 20, 1)], CRI_DEF_CONST) print "fb2:",GetVal(defCri_l, xa, [x*0.001 for x in range(0, 20, 1)], CRI_DEF_CONST) print "sb:",GetVal(dodge, xa, [x*0.001 for x in range(0, 20, 1)], DODGE_CONST) print "yl:",GetVal(yuling, xa, [x*0.001 for x in range(0, 20, 1)], YULING_CONST) ax1.set_xlabel(u"收益") fy1, = ax1.plot(defense_0, xa, label=u"防御(0%):" + str(DEF_CONST)) fy2, = ax1.plot(defense_75, xa, label=u"防御(75%):" + str(DEF_CONST)) hp, = ax1.plot(HP, xa, label=u"血量:" + str(HP_CONST)) dc1, = ax1.plot(defCri_s, xa, label=u"暴防(30%, 100%):" + str(CRI_DEF_CONST)) dc2, = ax1.plot(defCri_l, xa, label=u"暴防(75%, 190%):" + str(CRI_DEF_CONST)) ds, = ax1.plot(dodge, xa, label=u"闪避:" + str(DODGE_CONST)) yl, = ax1.plot(yuling, xa, label=u"御灵:" + str(YULING_CONST)) ax1.set_xlim([0, 0.025]) plt.legend(handles=[fy1,fy2,hp,dc1,dc2,ds,yl]) plt.show()
34.054688
96
0.620096
692850aadfd57c9c530354c065381240dc631282
759
py
Python
portfolio_app/migrations/0005_auto_20210309_1606.py
ashraffaris42/faris-Personal-Portfolio
ea2a84cdefbefeb703037dd2eb80299ca24c42f4
[ "BSD-3-Clause" ]
2
2021-05-04T20:52:56.000Z
2021-05-05T12:50:05.000Z
portfolio_app/migrations/0005_auto_20210309_1606.py
ashraffaris42/faris-Personal-Portfolio
ea2a84cdefbefeb703037dd2eb80299ca24c42f4
[ "BSD-3-Clause" ]
null
null
null
portfolio_app/migrations/0005_auto_20210309_1606.py
ashraffaris42/faris-Personal-Portfolio
ea2a84cdefbefeb703037dd2eb80299ca24c42f4
[ "BSD-3-Clause" ]
null
null
null
# Generated by Django 3.1.7 on 2021-03-09 16:06 from django.db import migrations, models import djrichtextfield.models class Migration(migrations.Migration): dependencies = [ ('portfolio_app', '0004_delete_service'), ] operations = [ migrations.AddField( model_name='project', name='date', field=models.DateField(auto_now_add=True, null=True), ), migrations.AlterField( model_name='blog', name='description', field=djrichtextfield.models.RichTextField(), ), migrations.AlterField( model_name='project', name='description', field=djrichtextfield.models.RichTextField(), ), ]
25.3
65
0.59025
6928610238b2e0cae526f79687ff41ce7a474164
4,882
py
Python
Competing_Algorithm/demo.py
ZhiQiu976/project-Indian-Buffet-Process
a4817550f2ca1778333066fa03ec6bb5b9cb4240
[ "MIT" ]
null
null
null
Competing_Algorithm/demo.py
ZhiQiu976/project-Indian-Buffet-Process
a4817550f2ca1778333066fa03ec6bb5b9cb4240
[ "MIT" ]
null
null
null
Competing_Algorithm/demo.py
ZhiQiu976/project-Indian-Buffet-Process
a4817550f2ca1778333066fa03ec6bb5b9cb4240
[ "MIT" ]
1
2020-04-30T17:26:26.000Z
2020-04-30T17:26:26.000Z
"""Demo for latent factor model""" from __future__ import division import numpy as np import numpy.random as nr import matplotlib.pyplot as plt from IBPFM import IBPFM from utils.tracePlot import trace from utils.scaledimage import scaledimage N = 100 chain = 1000 K_finite = 6 # # read the keyboard input for the number of images # N = raw_input("Enter the number of noisy images for learning features: ") # try: # N = int(N) # except ValueError: # print "Not a number" # sys.exit('Try again') # # read the keyboard input for the number of MCMC chain # chain = raw_input("Enter the number of MCMC chain: ") # try: # chain = int(chain) # except ValueError: # print "Not a number" # sys.exit('Try again') # # read the keyboard input for the number of finite K # K_finite = raw_input("Enter the finite number (upper bound) of features K: ") # try: # K_finite = int(K_finite) # except ValueError: # print "Not a number" # sys.exit('Try again') # ------------------------------------------------------------------------------ # Model parameter (alpha, alpha_a, alpha_b) = (1., 1., 1.) (sigma_x, sigma_xa, sigma_xb) = (.5, 1., 1.) (sigma_a, sigma_aa, sigma_ab) = (1., 1., 1.) 
# ------------------------------------------------------------------------------ # Generate image data from the known features feature1 = np.array([[0,1,0,0,0,0],[1,1,1,0,0,0],[0,1,0,0,0,0],\ [0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0]]) feature2 = np.array([[0,0,0,1,1,1],[0,0,0,1,0,1],[0,0,0,1,1,1],\ [0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0]]) feature3 = np.array([[0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0],\ [1,0,0,0,0,0],[1,1,0,0,0,0],[1,1,1,0,0,0]]) feature4 = np.array([[0,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0],\ [0,0,0,1,1,1],[0,0,0,0,1,0],[0,0,0,0,1,0]]) D = 36 f1 = feature1.reshape(D) f2 = feature2.reshape(D) f3 = feature3.reshape(D) f4 = feature4.reshape(D) trueWeights = np.vstack((f1, f2, f3, f4)) # ------------------------------------------------------------------------------ # Generate noisy image data K = 4 sig_x_true = 0.5 A = np.vstack((f1, f2, f3, f4)).astype(np.float) Z_orig = nr.binomial(1, 0.5, (N, K)).astype(np.float) V_orig = nr.normal(0, 1, size=(N, K)) # V_orig = nr.exponential(1, size=(N, K)) Z_orig = np.multiply(Z_orig, V_orig) X = np.dot(Z_orig, A) noise = nr.normal(0, sig_x_true, (N, D)) X += noise # ------------------------------------------------------------------------------ # Return MCMC result (K_save, alpha_save, sigma_x_save, sigma_a_save, loglikelihood_save, Z_save, A_save) = \ IBPFM(iteration=chain, data=X, upperbound_K=K_finite, alpha=(alpha, alpha_a, alpha_b), sigma_x=(sigma_x, sigma_xa, sigma_xb), sigma_a=(sigma_a, sigma_aa, sigma_ab), realvaluedZ=True, proposeNewfeature=True, updateAlpha=True, updateSigma_x=True, updateSigma_a=True, initZ=None, stdData=False) # Save trace plots trace(K_save, alpha_save, sigma_x_save, sigma_a_save, loglikelihood_save) # Save true latent feature plot (orig, sub) = plt.subplots(1, 4) for sa in sub.flatten(): sa.set_visible(False) orig.suptitle('True Latent Features') for (i, true) in enumerate(trueWeights): ax = sub[i] ax.set_visible(True) scaledimage(true.reshape(6, 6), pixwidth=3, ax=ax) 
orig.set_size_inches(13, 3) orig.savefig('Original_Latent_Features.png') plt.close() # Save some of example figures from data X examples = X[0:4, :] (ex, sub) = plt.subplots(1, 4) for sa in sub.flatten(): sa.set_visible(False) ex.suptitle('Image Examples') for (i, true) in enumerate(examples): ax = sub[i] ax.set_visible(True) scaledimage(true.reshape(6, 6), pixwidth=3, ax=ax) ex.set_size_inches(13, 3) ex.savefig('Image_Examples.png') plt.close() # Show and save result lastZ = Z_save[:, :, chain] mcount = (lastZ != 0).astype(np.int).sum(axis=0) index = np.where(mcount > 0) lastK = K_save[chain].astype(np.int) lastA = A_save[index, :, chain] A = lastA.reshape(len(index[0]), D) A_row = A.shape[0] for i in range(A_row): cur_row = A[i, :].tolist() abs_row = [abs(j) for j in cur_row] max_index = abs_row.index(max(abs_row)) if cur_row[max_index] < 0: A[i, :] = -np.array(cur_row) K = max(len(trueWeights), len(A)) (fig, subaxes) = plt.subplots(2, K) for sa in subaxes.flatten(): sa.set_visible(False) fig.suptitle('Ground truth (top) vs learned factors (bottom)') for (idx, trueFactor) in enumerate(trueWeights): ax = subaxes[0, idx] ax.set_visible(True) scaledimage(trueFactor.reshape(6, 6), pixwidth=3, ax=ax) for (idx, learnedFactor) in enumerate(A): ax = subaxes[1, idx] scaledimage(learnedFactor.reshape(6, 6), pixwidth=3, ax=ax) ax.set_visible(True) #fig.savefig("IBP_meanA.png") plt.show()
30.135802
88
0.599549
6928d12a9070c21f90e40630002456e946b09b39
1,391
py
Python
ex3_1_dnn_mnist_cl.py
yoongon/keraspp
4950e2e78bfd19095b88fd3a1ca74ffedba819a5
[ "MIT" ]
null
null
null
ex3_1_dnn_mnist_cl.py
yoongon/keraspp
4950e2e78bfd19095b88fd3a1ca74ffedba819a5
[ "MIT" ]
null
null
null
ex3_1_dnn_mnist_cl.py
yoongon/keraspp
4950e2e78bfd19095b88fd3a1ca74ffedba819a5
[ "MIT" ]
null
null
null
# 기본 파라미터 설정 ######################### Nin = 784 Nh_l = [100, 50] number_of_class = 10 Nout = number_of_class # 분류 DNN 모델 구현 ######################## from keras import layers, models class DNN(models.Sequential): def __init__(self, Nin, Nh_l, Nout): super().__init__() self.add(layers.Dense(Nh_l[0], activation='relu', input_shape=(Nin,), name='Hidden-1')) self.add(layers.Dense(Nh_l[1], activation='relu', name='Hidden-2')) self.add(layers.Dense(Nout, activation='softmax')) self.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy']) # 데이터 준비 ############################## import numpy as np from keras import datasets from keras.utils import np_utils (X_train, y_train), (X_test, y_test) = datasets.mnist.load_data() Y_train = np_utils.to_categorical(y_train) Y_test = np_utils.to_categorical(y_test) L, W, H = X_train.shape X_train = X_train.reshape(-1, W * H) X_test = X_test.reshape(-1, W * H) X_train = X_train / 255.0 X_test = X_test / 255.0 # 분류 DNN 학습 및 테스팅 #################### model = DNN(Nin, Nh_l, Nout) history = model.fit(X_train, Y_train, epochs=5, batch_size=100, validation_split=0.2) performace_test = model.evaluate(X_test, Y_test, batch_size=100) print('Test Loss and Accuracy ->', performace_test)
33.119048
95
0.608914
69290e1920360e574d9ff10f85dbac38fc7fe420
492
py
Python
notes-n-resources/Data-Structures-N-Algo/_DS-n-Algos/Interview-Problems/HackerRank/Data Structures/Dynamic-Array.py
side-projects-42/INTERVIEW-PREP-COMPLETE
627a3315cee4bbc38a0e81c256f27f928eac2d63
[ "MIT" ]
13
2021-03-11T00:25:22.000Z
2022-03-19T00:19:23.000Z
notes-n-resources/Data-Structures-N-Algo/_DS-n-Algos/Interview-Problems/HackerRank/Data Structures/Dynamic-Array.py
side-projects-42/INTERVIEW-PREP-COMPLETE
627a3315cee4bbc38a0e81c256f27f928eac2d63
[ "MIT" ]
160
2021-04-26T19:04:15.000Z
2022-03-26T20:18:37.000Z
notes-n-resources/Data-Structures-N-Algo/_DS-n-Algos/Interview-Problems/HackerRank/Data Structures/Dynamic-Array.py
side-projects-42/INTERVIEW-PREP-COMPLETE
627a3315cee4bbc38a0e81c256f27f928eac2d63
[ "MIT" ]
12
2021-04-26T19:43:01.000Z
2022-01-31T08:36:29.000Z
def dynamicArray(n, queries): Q = len(queries) seqList = [[] for _ in range(n)] lastAnswer = 0 la = [] for i in range(Q): t, x, y = queries[i][0], queries[i][1], queries[i][2] if t == 1: seqList[(x ^ lastAnswer) % n].append(y) else: lastAnswer = seqList[(x ^ lastAnswer) % n][ y % len(seqList[(x ^ lastAnswer) % n]) ] print(lastAnswer) la.append(lastAnswer) return la
28.941176
61
0.477642
6929df2927459d70fbbc2605690e202e9fada472
649
py
Python
MLGame/mlgame/crosslang/exceptions.py
Liuian/1092_INTRODUCTION-TO-MACHINE-LEARNING-AND-ITS-APPLICATION-TO-GAMING
f4a58d0d9f5832a77a4a86352e084065dc7bae50
[ "MIT" ]
null
null
null
MLGame/mlgame/crosslang/exceptions.py
Liuian/1092_INTRODUCTION-TO-MACHINE-LEARNING-AND-ITS-APPLICATION-TO-GAMING
f4a58d0d9f5832a77a4a86352e084065dc7bae50
[ "MIT" ]
null
null
null
MLGame/mlgame/crosslang/exceptions.py
Liuian/1092_INTRODUCTION-TO-MACHINE-LEARNING-AND-ITS-APPLICATION-TO-GAMING
f4a58d0d9f5832a77a4a86352e084065dc7bae50
[ "MIT" ]
null
null
null
""" The exceptions for the crosslang module """ class CompilationError(Exception): """ Exception raised when failed to compile the user script """ def __init__(self, file, reason): self.file = file self.reason = reason def __str__(self): return "Failed to compile '{}':\n{}".format(self.file, self.reason) class MLClientExecutionError(Exception): """ Exception raised when an error occurred while running non-python ml script """ def __init__(self, message): """ Constructor """ self.message = message def __str__(self): return self.message
23.178571
78
0.624037
692aef8917d87dca4ac44be30bc2c59f6597251b
663
py
Python
meirin/db/metapolicy.py
sinsong/Meirin
ac536b3905d258dbdf5e1013d56d2f906efce58e
[ "BSD-2-Clause" ]
null
null
null
meirin/db/metapolicy.py
sinsong/Meirin
ac536b3905d258dbdf5e1013d56d2f906efce58e
[ "BSD-2-Clause" ]
null
null
null
meirin/db/metapolicy.py
sinsong/Meirin
ac536b3905d258dbdf5e1013d56d2f906efce58e
[ "BSD-2-Clause" ]
null
null
null
from sqlalchemy import Column, Sequence from sqlalchemy import Integer, String, Text from meirin.db.base_class import Base class MetaPolicy(Base): __tablename__ = 'metapolicy' # helper metapolicy_id_seq = Sequence('metapolicy_id_seq', metadata=Base.metadata) # columns id = Column(Integer, metapolicy_id_seq, server_default=metapolicy_id_seq.next_value(), primary_key = True) name = Column(String(64)) match = Column(Text) mode = Column(String(64)) # TODO: index https://docs.sqlalchemy.org/en/14/orm/extensions/indexable.html def __repr__(self): return f"MetaPolicy(id={self.id!r}, name={self.name!r})"
27.625
112
0.714932
692bcf4ca6a37b28d7cd78b37196366ac2f21606
115
py
Python
lib/extensions/inplace_abn_1/__init__.py
shampooma/openseg.pytorch
d1da408a1e870d52c058c359583bc098f7f3d9e2
[ "MIT" ]
2,671
2019-04-09T16:20:16.000Z
2022-03-31T19:21:29.000Z
lib/extensions/inplace_abn_1/__init__.py
shampooma/openseg.pytorch
d1da408a1e870d52c058c359583bc098f7f3d9e2
[ "MIT" ]
245
2019-05-02T05:10:55.000Z
2022-03-31T07:14:44.000Z
lib/extensions/inplace_abn_1/__init__.py
shampooma/openseg.pytorch
d1da408a1e870d52c058c359583bc098f7f3d9e2
[ "MIT" ]
730
2019-04-16T14:45:56.000Z
2022-03-31T08:59:22.000Z
from .bn import ABN, InPlaceABN, InPlaceABNSync from .functions import ACT_RELU, ACT_LEAKY_RELU, ACT_ELU, ACT_NONE
38.333333
66
0.826087
692ec0bb7422b9bf134882c71709eab791047862
779
py
Python
infinisdk/core/utils/environment.py
Infinidat/infinisdk
a5a3fbe1e6c649bca85b8a3fc3bd28fb820a90e4
[ "BSD-3-Clause" ]
5
2019-02-26T20:11:43.000Z
2021-03-10T08:45:38.000Z
infinisdk/core/utils/environment.py
Infinidat/infinisdk
a5a3fbe1e6c649bca85b8a3fc3bd28fb820a90e4
[ "BSD-3-Clause" ]
11
2017-11-15T19:20:23.000Z
2021-09-14T18:17:47.000Z
infinisdk/core/utils/environment.py
Infinidat/infinisdk
a5a3fbe1e6c649bca85b8a3fc3bd28fb820a90e4
[ "BSD-3-Clause" ]
2
2017-11-16T11:59:05.000Z
2019-02-25T20:44:23.000Z
import os import pkg_resources from mitba import cached_function @cached_function def get_logged_in_username(): try: import pwd user_id = os.getuid() os_info = pwd.getpwuid(user_id) return os_info.pw_name except (ImportError, KeyError): # ImportError: For windows users # KeyError: In case getpwuid fails to retrieve the user information return os.environ.get('USERNAME') or os.environ.get('USER', 'unknown') @cached_function def get_hostname(): import socket return socket.getfqdn() @cached_function def get_infinisdk_version(): try: return pkg_resources.get_distribution('infinisdk').version # pylint: disable=no-member except pkg_resources.DistributionNotFound: return 'N/A'
25.966667
94
0.703466
69301dd6f35763c4b558452e2a75490a7e95b5cb
1,176
py
Python
ChestnutPatcher/inject-lib.py
chestnut-sandbox/Chestnut
b42b9eb902e0928e8b549339788f83bb009290c1
[ "Zlib" ]
7
2020-12-08T02:00:14.000Z
2021-05-10T13:12:35.000Z
ChestnutPatcher/inject-lib.py
cc0x1f/Chestnut
b42b9eb902e0928e8b549339788f83bb009290c1
[ "Zlib" ]
2
2022-01-03T13:51:48.000Z
2022-01-26T15:42:44.000Z
ChestnutPatcher/inject-lib.py
cc0x1f/Chestnut
b42b9eb902e0928e8b549339788f83bb009290c1
[ "Zlib" ]
2
2021-05-15T03:06:07.000Z
2021-08-06T18:11:35.000Z
import sys import lief import json import struct import os def filter_file(fname): f = fname.replace("/", "_") + ".json" if f[0] == ".": f = f[1:] return f def main(fname): # load filter ffname = "policy_%s" % filter_file(fname) filters = None try: filters = json.loads(open(ffname).read()) except: print("[-] Could not load filter file %s" % ffname) return 1 print("[+] Allowed syscalls: %d" % len(filters["syscalls"])) # inject sandboxing library binary = lief.parse(fname) binary.add_library("libchestnut.so") # add seccomp library as well binary.add_library("libseccomp.so.2") binary.write("%s_patched" % fname) with open("%s_patched" % fname, "ab") as elf: filter_data = json.dumps(filters).encode() elf.write(filter_data) elf.write(struct.pack("I", len(filter_data))) os.chmod("%s_patched" % fname, 0o755); #print(binary) print("[+] Saved patched binary as %s_patched" % fname) if __name__ == "__main__": if len(sys.argv) != 2: print("Usage: %s <binary>" % sys.argv[0]) else: main(sys.argv[1])
25.021277
64
0.590986
69331d5c6bc354bd17e9f9d5696da9f4cbff069b
5,826
py
Python
scoff/parsers/generic.py
brunosmmm/scoff
e1a0b5f98dd9e60f41f3f7cfcda9038ffd80e138
[ "MIT" ]
null
null
null
scoff/parsers/generic.py
brunosmmm/scoff
e1a0b5f98dd9e60f41f3f7cfcda9038ffd80e138
[ "MIT" ]
1
2020-03-20T13:57:52.000Z
2021-03-11T17:25:25.000Z
scoff/parsers/generic.py
brunosmmm/scoff
e1a0b5f98dd9e60f41f3f7cfcda9038ffd80e138
[ "MIT" ]
null
null
null
"""Generic regex-based parser.""" import re from collections import deque from typing import Union, Any, List, Deque, Tuple, Dict, Callable from scoff.parsers.linematch import MatcherError, LineMatcher EMPTY_LINE = re.compile(b"\s*$") class ParserError(Exception): """Parser error.""" class DataParser: """Simple data parser. Tokens are regular expression-based """ def __init__( self, initial_state: Union[str, int, None] = None, consume_spaces: bool = False, **kwargs, ): """Initialize. :param initial_state: Initial state of the parser :param consume_spaces: Consume stray space characters """ self._state_hooks = {} super().__init__(**kwargs) self._state_stack: Deque[Union[str, int, None]] = deque() self._state = initial_state self._consume = consume_spaces self._current_position = 1 self._current_line = 1 self._data = None self._abort = False @property def state(self): """Get current state.""" return self._state def add_state_hook(self, state: Union[str, int], hook: Callable): """Add state hook (callback). A callback will be called when the parser reaches a specified state. 
:param state: The parser state to add a callback to :param hook: The callback to be added """ if not callable(hook): raise TypeError("hook must be callable") if state not in self.states: print(self.states) raise ParserError(f"unknown state '{state}'") if state not in self._state_hooks: self._state_hooks[state] = {hook} else: self._state_hooks[state] |= {hook} def _handle_match(self, candidate): """Handle candidate match.""" def _handle_options(self, **options: Any): """Handle candidate options.""" def _try_parse( self, candidates: List[LineMatcher], position: int ) -> Tuple[int, LineMatcher, Dict[str, str]]: if self._consume: m = EMPTY_LINE.match(self._data, position) if m is not None: # an empty line, consume return (m.span()[1], None, None) for candidate in candidates: try: if not isinstance(candidate, LineMatcher): raise TypeError("candidate must be LineMatcher object") size, fields = candidate.parse_first(self._data, position) except MatcherError: continue options = candidate.options.copy() change_state = options.pop("change_state", None) push_state = options.pop("push_state", None) pop_state = options.pop("pop_state", None) if change_state is not None: self._change_state(change_state) elif push_state is not None: self._push_state(push_state) elif pop_state is not None: self._pop_state(pop_state) # handle other options self._handle_options(**options) # handle other custom options self._handle_match(candidate) # advance position self._current_position += size # advance line self._current_line += ( self._data.count(b"\n", position, position + size) + 1 ) return (size, candidate, fields) raise ParserError("could not parse data") def _current_state_function(self, position: int) -> int: if not hasattr(self, "_state_{}".format(self._state)): raise RuntimeError(f"in unknown state: {self._state}") size, stmt, fields = getattr(self, "_state_{}".format(self._state))( position ) # call hooks if self._state in self._state_hooks: for hook in self._state_hooks[self._state]: 
hook(self._state, stmt, fields) return size def _abort_parser(self): """Stop parsing.""" self._abort = True @property def current_pos(self): """Get current position.""" return self._current_position @property def current_line(self): """Get current line.""" return self._current_line @property def states(self): """Get possible states.""" return [ attr_name.split("_")[2] for attr_name in dir(self) if attr_name.startswith("_state") ] def parse(self, data: str) -> int: """Parse data. :param data: Textual data to be parsed :return: Current position in data """ self._data = data.encode() self._current_position = 1 self._current_line = 1 current_pos = 0 while current_pos < len(data): if self._abort is True: break size = self._current_state_function(current_pos) # consume data current_pos += size + 1 return current_pos def _state_change_handler(self, old_state, new_state): """State change handler.""" def _change_state(self, new_state): """Change state.""" old_state = self._state self._state = new_state # call state change handler self._state_change_handler(old_state, new_state) def _push_state(self, new_state): """Push into state stack and change state.""" self._state_stack.append(self._state) self._change_state(new_state) def _pop_state(self, count): """Pop from state stack and change state.""" for num in range(count): state = self._state_stack.popleft() self._change_state(state)
31.15508
76
0.583934
6933f6dc14a4268b062d6716b8b6fce13e8b8ff9
34,279
py
Python
cfg/model/CfgModel.py
sdnellen/ordt-config-tool
30cc7342c5bc0f574b2a4a8d207230e1fa527615
[ "Apache-2.0" ]
1
2019-12-06T19:11:28.000Z
2019-12-06T19:11:28.000Z
utils/cfgtool/cfg/model/CfgModel.py
mytoys/open-register-design-tool
5d6dea268f77546a9a786a16603f50e974d87050
[ "Apache-2.0" ]
null
null
null
utils/cfgtool/cfg/model/CfgModel.py
mytoys/open-register-design-tool
5d6dea268f77546a9a786a16603f50e974d87050
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python3 ''' @author: snellenbach Config sequence model ''' from enum import Enum, unique import re from cfg.model.RegModelWrapper import RegModelWrapper from cfg.model.Utils import MsgUtils from cfg.output.OutBuilder import OutBuilder as ob # ------- config model node classes class BaseCfgNode: _nodeStack = [] # stack of active config nodes for auto-add _outBuilder = None def __init__(self, sourceAstNode=None, comment=''): self.sourceAstNode = sourceAstNode self.comment = comment self.children = [] self.parent = None self.allowedTags = set() # set of allowed versions for this level (parser allows currently allows in class, method) # add this node to parent (top of stack) if __class__._nodeStack: self.parent = __class__._nodeStack[-1] self.parent.addChild(self) def addChild(self, child): self.children.append(child) def popChild(self): ''' pop last added child from this node ''' if self.children: self.children.pop() def display(self, indent = 0): ''' display config model node info recursively ''' print(' '*indent + 'base:') for child in self.children: child.display(indent+1) @staticmethod def finishNode(omit): ''' Pop current node from the active model stack. Optionally, remove this node if omit is set. 
''' __class__.popNode() if omit: parent = __class__._nodeStack[-1] parent.popChild() @staticmethod def popNode(): ''' pop cfg node from top of the stack ''' return __class__._nodeStack.pop() @staticmethod def peekNode(): ''' return cfg node at top of the stack ''' return __class__._nodeStack[-1] def hierDisplay(self, indent, s): ''' display config model node info recursively ''' print(' '*indent + s) for child in self.children: child.display(indent+1) def resolvePaths(self): ''' resolve all paths in this config model node info recursively ''' for child in self.children: child.resolvePaths() def setOutBuilder(self, outBuilder): ''' set specified output builder ''' #print(f'BaseCfgNode setOutBuilder: called in {type(self)}, outBuilder type={type(outBuilder)}') BaseCfgNode._outBuilder = outBuilder def generateOutput(self): ''' generate specified output for this config model recursively ''' #print(f'BaseCfgNode generateOutput: called in {type(self)}') for child in self.children: child.generateOutput() class HierCfgNode(BaseCfgNode): ''' hierarchical node (pushed to node stack on create) ''' def __init__(self, sourceAstNode = None, comment=''): BaseCfgNode.__init__(self, sourceAstNode, comment) # append this node to the stack __class__._nodeStack.append(self) self.vars = {} # dict of vars defined in this node scope def whatami(self): return 'unspecified hierarchy' def findVar(self, varName, allowInputs = True): ''' find a variable by name traversing from current node thru ancestors ''' if self.vars.__contains__(varName): retVar = self.vars[varName] if allowInputs or (type(retVar) is not CfgInputVariable): return retVar MsgUtils.errorExit('input variable ' + varName + ' can not be assigned a value.') return None elif self.parent is None: return None else: return self.parent.findVar(varName) def getInputList(self): return {k: v for k, v in self.vars.items() if type(v) is CfgInputVariable} def verifyInputParms(self, inputListStr, callingNode): ''' check that a list of call 
parameter strings matches inputs for this hier and return the list of resolved inputs ''' if type(inputListStr) is not str: MsgUtils.errorExit(f'misformed input list found when in call of {self.whatami()} {self.name}') inputList = [] if not inputListStr else inputListStr.split(',') inputCount = len(inputList) inputParms = self.getInputList() inputParmCount = len(inputParms) #print(f"HierCfgNode verifyInputParms: inputList={inputList}, in len={inputCount}, vars=({', '.join(str(e) for e in inputParms.values())}), parm len={inputParmCount}, callNode type={type(callingNode)}") if inputCount != inputParmCount: MsgUtils.errorExit(f'incorrect number of input parameters (found {inputCount}, expected {inputParmCount}) in call of {self.whatami()} {self.name}') # loop and resolve inputs CfgVariable.resolveRhsExpression(className, CfgClassNode, False, True) resolvedInputList = [] for inVal, inParm in zip(inputList, inputParms.values()): resolvedInputList.append(CfgVariable.resolveRhsExpression(inVal, inParm.vartype, True, True)) return resolvedInputList class CfgClassNode(HierCfgNode): _classes = {} _current = None def __init__(self, name, sourceAstNode = None, comment=''): HierCfgNode.__init__(self, sourceAstNode, comment) self.name = name self.methods = {} __class__._classes[self.name] = self __class__._current = self #print('creating class node, name=', self.name) def whatami(self): return 'class' @staticmethod def getCurrent(): ''' return last created CfgClassNode ''' return __class__._current @staticmethod def findClass(className): ''' return a CfgClassNode by name ''' return None if className not in __class__._classes else __class__._classes[className] def findMethod(self, methodName): ''' return a CfgMethodNode in this class by name ''' return None if methodName not in self.methods else self.methods[methodName] def display(self, indent = 0): inParms = self.getInputList() self.hierDisplay(indent, f"class: {self.name}, vars=({', '.join(str(e) for e in 
self.vars.values())}), inputs=({', '.join(str(e) for e in inParms.values())}), allowed versions='{self.allowedTags}") def generateOutput(self): ''' generate specified output for this class node ''' #print(f'CfgClassNode generateOutput: called in {type(self)}') BaseCfgNode._outBuilder.enterClass(self) for child in self.children: child.generateOutput() BaseCfgNode._outBuilder.exitClass(self) class CfgMethodNode(HierCfgNode): def __init__(self, name, sourceAstNode = None, comment=''): HierCfgNode.__init__(self, sourceAstNode, comment) self.name = name self.args = [] # add method to dict in current class scope parent = BaseCfgNode._nodeStack[-2] parent.methods[self.name] = self #print('creating method node, name=', self.name) def whatami(self): return 'method' def display(self, indent = 0): inParms = self.getInputList() self.hierDisplay(indent, f"method: {self.name}, vars=({', '.join(str(e) for e in self.vars.values())}), inputs=({', '.join(str(e) for e in inParms.values())})") def generateOutput(self): ''' generate specified output for this method node ''' #print(f'CfgMethodNode generateOutput: called in {type(self)}') BaseCfgNode._outBuilder.enterMethod(self) for child in self.children: child.generateOutput() BaseCfgNode._outBuilder.exitMethod(self) @unique class ConfigAssignType(Enum): UNSUPPORTED = 0 EQ = 1 def isSupported(self): return type(self) is not ConfigAssignType.UNSUPPORTED @staticmethod def resolve(opStr): ''' convert a string to ConfigAssignType ''' if type(opStr) is ConfigAssignType: # if type is already correct, just return input return opStr if opStr == '=': return ConfigAssignType.EQ else: return ConfigAssignType.UNSUPPORTED class CfgAssign(BaseCfgNode): def __init__(self, left=None, op=ConfigAssignType.UNSUPPORTED, right=None, sourceAstNode = None): BaseCfgNode.__init__(self, sourceAstNode) self.op = ConfigAssignType.resolve(op) self.left = left # TODO - resolve here and remove checks from builder or allow default var create? 
self.right = right # maybe pass target type into assign? or verify type match? def isValid(self): if self.op.isSupported() and (self.left is not None) and (self.right is not None): return True return False def isRead(self): ''' return True if assign involves a reg read ''' return (type(self.right) is CfgReadNode) def display(self, indent = 0): self.hierDisplay(indent, f'assign: {self.left} {self.op.name} {self.right}') def resolvePaths(self): if self.isRead(): self.right.resolvePaths() class CfgMethodCall(BaseCfgNode): def __init__(self, className, methodName, parmList, sourceAstNode = None): BaseCfgNode.__init__(self, sourceAstNode) # if className specified in call path resolve class as a variable, else use current class if className: cfgClassVar = CfgVariable.resolveRhsExpression(className, CfgClassNode, False, True) # find the class variable #self.cfgClass = CfgClassNode.getCurrent() # TODO add findVar option for non-none className self.cfgClass = CfgClassNode.findClass(cfgClassVar.val[0].name) # TODO - saved call name structure shoul be fixed else: self.cfgClass = CfgClassNode.getCurrent() #if not cfgClass: # MsgUtils.errorExit('unable to resolve cfgClass ' + str(className) + ' in call of method ' + methodName) self.cfgMethod = self.cfgClass.findMethod(methodName) if not self.cfgMethod: MsgUtils.errorExit(f'unable to resolve method {methodName} in cfgClass {self.cfgClass.name}') self.parmList = self.cfgMethod.verifyInputParms(parmList, self.parent) def display(self, indent = 0): self.hierDisplay(indent, f'call: cfgClass={self.cfgClass.name}, method={self.cfgMethod.name}, parms={self.parmList}') class CfgCaseNode(HierCfgNode): def __init__(self, selectVar, sourceAstNode = None): HierCfgNode.__init__(self, sourceAstNode) self.selectVar = HierCfgNode.findVar(self, selectVar) #print('creating case node, select var=' + str(self.selectVar)) def display(self, indent = 0): self.hierDisplay(indent, f'case: select var={self.selectVar}') class 
CfgCaseBlockNode(HierCfgNode): _currentChoices = set() # init current choice set def __init__(self, sourceAstNode = None): HierCfgNode.__init__(self, sourceAstNode) self.selectVals = set(__class__._currentChoices) # copy current set of choices __class__._currentChoices.clear() # clear current choices #print('creating case block node, choices=' + str(self.selectVals)) def display(self, indent = 0): self.hierDisplay(indent, f'case block: choices={self.selectVals}') @staticmethod def addChoice(choiceName): __class__._currentChoices.add(choiceName) class CfgNumericForNode(HierCfgNode): def __init__(self, name, rangeStart, rangeEnd, sourceAstNode = None): HierCfgNode.__init__(self, sourceAstNode) self.forVar = CfgVariable(name, CfgNumDataType) self.rangeStart = CfgVariable.resolveRhsExpression(rangeStart, CfgNumDataType) self.rangeEnd = CfgVariable.resolveRhsExpression(rangeEnd, CfgNumDataType) #print('creating numeric for loop node, iterator var=' + str(self.forVar) + ' rangeStart=' + str(self.rangeStart) + ' rangeEnd=' + str(self.rangeEnd)) def display(self, indent = 0): self.hierDisplay(indent, f'for (numeric): iterator={self.forVar} rangeStart={self.rangeStart} rangeEnd={self.rangeEnd}') class CfgPathForNode(HierCfgNode): def __init__(self, name, path, sourceAstNode = None): HierCfgNode.__init__(self, sourceAstNode) self.forVar = CfgVariable(name, CfgPathDataType) self.path = CfgVariable.resolveRhsExpression(path, CfgPathDataType) # create path range self.forVar.val = self.path # assign path to loop var so full path prefix can be extracted recursively using var #print('creating path for loop node, iterator var=' + str(self.forVar) + ' path=' + str(self.path)) def display(self, indent = 0): self.hierDisplay(indent, f'for (path): iterator={self.forVar}, range={self.path}') def resolvePaths(self): ''' resolve paths in this for node ''' print(f'resolve CfgPathForNode path: {self.path}') # TODO if type(self.path) is CfgPathDataType: 
self.path.resolvePath(self.allowedTags) #TODO - any checks for a var?, how is version resolve handled? # resolve paths in child nodes for child in self.children: child.resolvePaths() class CfgPrintNode(BaseCfgNode): def __init__(self, form, form_vars, sourceAstNode = None): BaseCfgNode.__init__(self, sourceAstNode) self.form = form # form can also be a list of comma separated args self.form_vars = form_vars #print('creating display node, form=', self.form, 'form_vars=', self.form_vars) def display(self, indent = 0): self.hierDisplay(indent, 'print: ' + str(self.form) + ', vars=' + str(self.form_vars)) class CfgWaitNode(BaseCfgNode): def __init__(self, time, sourceAstNode = None): BaseCfgNode.__init__(self, sourceAstNode) self.time = time # time in ms #print('creating wait node, time=', self.time) def display(self, indent = 0): self.hierDisplay(indent, 'wait: ' + str(self.time)) class CfgWriteNode(BaseCfgNode): def __init__(self, path, value, wtype, isRmw = False, sourceAstNode = None): BaseCfgNode.__init__(self, sourceAstNode) self.path = CfgVariable.resolveRhsExpression(path, CfgPathDataType) self.wtype = CfgPathHierType.resolve(wtype) self.value = CfgVariable.resolveRhsExpression(value, CfgNumDataType) self.isRmw = isRmw #print('creating write node, path=', str(self.path), 'value=', str(self.value)) def display(self, indent = 0): self.hierDisplay(indent, 'write: ' + str(self.path) + ', wtype=' + str(self.wtype) + ', value=' + str(self.value) + ', rmw=' + str(self.isRmw)) pass def resolvePaths(self): ''' resolve paths in this write node ''' print(f'resolve CfgWriteNode path: {self.path}, wtype: {self.wtype}, rwm: {self.isRmw} --- self.path type={type(self.path)}') # TODO if type(self.path) is CfgPathDataType: self.path.resolvePath(self.allowedTags, self.wtype) #TODO - any checks for a var?, how is version resolve handled? 
def generateOutput(self): ''' generate specified output for this write node ''' #print(f'CfgWriteNode generateOutput: called in {type(self)}') if self.wtype.isReg(): BaseCfgNode._outBuilder.doRegWrite(self) else: BaseCfgNode._outBuilder.doFieldWrite(self) class CfgWhileNode(HierCfgNode): def __init__(self, compare, delay = 1, timeout = None, sourceAstNode = None): HierCfgNode.__init__(self, sourceAstNode) self.compare = compare self.delay = delay self.timeout = timeout #print('creating poll node, compare=', self.compare, 'delay=', self.delay) CfgWaitNode(self.delay) def display(self, indent = 0): prefix = 'poll ' if self.compare.isPoll() else '' self.hierDisplay(indent, prefix + 'while: ' + str(self.compare) + ' timeout=' + str(self.timeout)) def isPoll(self): ''' return True if compare involves a reg read ''' return self.compare.isPoll() def resolvePaths(self): ''' resolve paths in this for while node ''' if self.isPoll(): self.compare.resolvePaths() for child in self.children: child.resolvePaths() # ------- config model support classes (not BaseCfgNode children) @unique class CfgPathHierType(Enum): UNKNOWN = 0 REGSET = 1 REG = 2 FIELDSET = 3 FIELD = 4 @staticmethod def resolve(hierStr): ''' convert a string to CfgPathHierType ''' if type(hierStr) is CfgPathHierType: # if type is already correct, just return input return hierStr if 'RegSet' in hierStr: return CfgPathHierType.REGSET elif 'FieldSet' in hierStr: return CfgPathHierType.FIELDSET elif 'Reg' in hierStr: return CfgPathHierType.REG elif 'Field' in hierStr: return CfgPathHierType.FIELD else: return CfgPathHierType.UNKNOWN def isReg(self): return self is CfgPathHierType.REG def matchesRegModelType(self, regModType): if self is CfgPathHierType.UNKNOWN: return True #print(f' -> CfgPathHierType matchesRegModelType: self type={self.name}, regModType={regModType.name}') # TODO if self.name == regModType.name: return True return False class CfgReadNode(): def __init__(self, path, rtype = CfgPathHierType.UNKNOWN, 
sourceAstNode = None): self.path = CfgVariable.resolveRhsExpression(path, CfgPathDataType) self.rtype = CfgPathHierType.resolve(rtype) self.sourceAstNode = sourceAstNode # TODO - change to srcInfo #print('creating read node, path=', self.path) def __str__(self): return f'read {self.path}, rtype={self.rtype}' def resolvePaths(self): ''' resolve paths in this read ''' print(f'resolve CfgReadNode path: {self.path}, rtype={self.rtype}') # TODO if type(self.path) is CfgPathDataType: self.path.resolvePath(set(), self.rtype) # read node has no allowed tag override, TODO - any checks for a var?, how is version resolve handled? # ------- config model data classes class CfgDataType(): def __init__(self): pass def isValid(self): return hasattr(self, 'val') and (self.val is not None) class CfgBoolDataType(CfgDataType): def __init__(self): pass class CfgNumDataType(CfgDataType): def __init__(self, s): self.size = None self.hasSize = False intval = __class__.strToInt(s) if intval is not None: self.val = intval @staticmethod def strToInt(s): ''' convert str to int if possible, else return None ''' try: out = int(s, 0) return out except ValueError: return None def __str__(self): return str(self.val) + (('(size=' + str(self.size) + ')') if self.size else '') if self.isValid() else 'invalid num' def needsSize(self): return self.hasSize and self.size is None class CfgEnumDataType(CfgNumDataType): # FIXME use separate type def __init__(self): pass class CfgPathDataElement(): def __init__(self, pelemstr): self.name = None # invalid if name is None self.start = None self.end = None self.isIndexed = False self.hasRange = False # is element indexed with start not equal to end self.annotations = {} if '[' in pelemstr: # detect an array self.isIndexed = True pat = re.compile('(\\w+)\\s*\\[(.*)\\]') mat = pat.match(pelemstr) if mat: self.name = mat.group(1) arraystr = mat.group(2) if ':' in arraystr: self.hasRange = True pat = re.compile('(\\w+|\\*)\\s*:\\s*(\\w+|\\*)') mat = 
pat.match(arraystr) if mat: leftstr = mat.group(1) rightstr = mat.group(2) if leftstr == '*': self.hasRange = True else: self.start = leftstr if rightstr == '*': self.hasRange = True else: self.end = rightstr #else: # print('CfgPathDataElement array match failed for s=' + arraystr) elif '*' in arraystr: # detect full range wildcard self.hasRange = True else: self.start = arraystr # single index case self.end = arraystr else: self.name = pelemstr # scalar, so just save the name def isVar(self): ''' return true if this path element is a path variable ''' return hasattr(self, 'baseVar') def isRootVar(self): ''' return true if this path element is a path variable representing root of the reg model ''' return self.isVar() and (self.name == 'root') def needsResolution(self): return self.isIndexed and ((self.start is None) or (self.end is None)) def getElementString(self, unrollBase, leftIdx, rightIdx=None): if unrollBase and self.isVar() and not self.isRootVar(): return self.baseVar.val.genFullPathStr() if not self.isIndexed: return self.name if not rightIdx or (rightIdx == leftIdx): return f'{self.name}[{leftIdx}]' return f'{self.name}[{leftIdx}:{rightIdx}]' def getFullElementString(self): # TODO ''' return full element string ''' startStr = str(self.start) if self.start else '*' endStr = str(self.end) if self.end else '*' return self.getElementString(True, startStr, endStr) def getRawElementString(self): ''' return raw element string ''' startStr = str(self.start) if self.start else '*' endStr = str(self.end) if self.end else '*' return self.getElementString(False, startStr, endStr) def getSampleElementString(self): ''' return sample element string for model lookup with indices set to 0 ''' return self.getElementString(True, 0) def __str__(self): return self.getRawElementString() class CfgPathDataType(CfgDataType): def __init__(self, pathstr): self.htype = CfgPathHierType.UNKNOWN # resolved path type is unknown by default self.call = None # default to no call 
basepathstr = '' if '(' in pathstr: # detect a call and remove from path pat = re.compile('(.*)\\.(\\w+)') mat = pat.match(pathstr) if mat: basepathstr = mat.group(1) self.call = mat.group(2) #print(f'found call match path={self.val}, call={self.call}') else: basepathstr = pathstr # TODO - store as path elem tuples? also TODO allow range wildcards # create a list of path elements self.val = [] newlist = basepathstr.split('.') for elemstr in newlist: elem = CfgPathDataElement(elemstr) self.val.append(elem) # check for valid path var extract if not self.val: MsgUtils.errorExit(f'unable create path from string={pathstr}') firstPathElement = self.getBasePathElem() # check for valid path base variable baseVar = CfgVariable.resolveLhsExpression(firstPathElement.name, CfgPathDataType, False, False) # check for existing base path variable if not baseVar: MsgUtils.errorExit(f'unable to resolve root of path {pathstr}') firstPathElement.baseVar = baseVar # save the referenced path variable in first element def genFullPathStr(self): ''' return path with base var unrolled ''' return '.'.join([ elem.getFullElementString() for elem in self.getPathList() ]) def genRawPathStr(self): ''' return raw path (no base var unroll) ''' return '.'.join([ elem.getRawElementString() for elem in self.getPathList() ]) def genSamplePathStr(self): ''' return sample path for model lookup with all indices set to 0 ''' return '.'.join([ elem.getSampleElementString() for elem in self.getPathList() ]) def hasCall(self): return self.call is not None def setRegset(self): self.htype = CfgPathHierType.REGSET def setReg(self): self.htype = CfgPathHierType.REG def setFieldset(self): self.htype = CfgPathHierType.FIELDSET def setField(self): self.htype = CfgPathHierType.FIELD def getBasePathElem(self): ''' return the base path element ''' return self.getPathList()[0] def getBasePathVar(self): ''' return the base path variable ''' return self.getBasePathElem().baseVar def needsResolution(self): if not 
self.getBasePathVar(): # or self.getBasePath().needsResolution(): # TODO - need variable needsResolution method? return True for elem in self.getPathList(): # check to see if any path elems are unresolved if elem.needsResolution(): return True return False def isMultiPath(self): for elem in self.getPathList(): # check to see if any path elems have more than single element range if elem.hasRange: return True return False def resolvePath(self, allowedTags, targetType=CfgPathHierType.UNKNOWN): # TODO also pass in allowedTags ''' resolve path type and any path index wildcards by referencing the regmodel ''' print(f' -> resolvePath CfgPathDataType raw path: {self} full path: {self.genFullPathStr()} sample path: {self.genSamplePathStr()}') # TODO regModel = RegModelWrapper.getRegModelRoot() if not regModel: if self.needsResolution(): MsgUtils.errorExit(f'Path {self} has unresolved info, but no register model is defined.') return # if no model and resolved we're done # extract valid version tags and annotate path elements for each validTags = RegModelWrapper.getValidTags(allowedTags) print(f' -> resolvePath CfgPathDataType: allowedTags={allowedTags}, regmod tags: {RegModelWrapper.getRegModelTags()} valid tags: {validTags}') # TODO for tag in validTags: plist = regModel.get_path_instance_list(tag, self.genSamplePathStr()) if 'error' in plist: MsgUtils.errorExit(f'Path {self.genRawPathStr()} was not found in register model using tag="{tag}".') if not targetType.matchesRegModelType(plist['type']): # check that path type returned from model matches target MsgUtils.errorExit(f'Expected type of path {self.genRawPathStr()} ({targetType}) does not match returned register model type ({plist["type"]}).') # TODO - check that MultPath elems are allowed self.annotatePath(tag, plist['instances']) #print(f' -> resolvePath CfgPathDataType model returns: {plist}') def annotatePath(self, tag, regModelPath): # extract the full path by expanding lead path vars expandedPath = 
self.getExpandedPathList() #print(f' -> CfgPathDataType annotatePath: this path len={len(self.getPathList())}, expanded path len={len(expandedPath)}, regmod path len={len(regModelPath)}, path={regModelPath}') if len(expandedPath) != len(regModelPath): MsgUtils.errorExit(f'Path {self.genRawPathStr()} does not match form of returned register model path.') # now loop and append regmodel info to local (non expanded) path elements localIndex = len(expandedPath) - len(self.getPathList()) for pathElem, regModElem in zip(self.getPathList(), regModelPath[localIndex:]): # only annotate local path elements print(f' -> CfgPathDataType annotatePath: element annotation, tag={tag}, elem={pathElem.name}, mod elem type={type(regModElem)}') annotation = RegModelWrapper.createAnnotation(regModElem) pathElem.annotations[tag] = annotation # annotate pathElem by tag def getPathList(self): ''' return non-expanded path list ''' return self.val def getExpandedPathList(self): ''' generate full path list by unrolling base path variable ''' if self.getBasePathElem().isRootVar(): return self.getPathList() else: if len(self.getPathList()) > 1: return self.getBasePathElem().baseVar.val.getExpandedPathList() + self.getPathList()[1:] # remove lead element and append remainder else: return self.getBasePathElem().val.getExpandedPathList() def __str__(self): return f'ptype={self.htype.name}, path={self.genRawPathStr()}, needsResolution={self.needsResolution()}' # ------- variable classes class CfgVariable: def __init__(self, name, vartype = CfgNumDataType): self.name = name self.vartype = vartype self.val = None # add var in current scope parent = BaseCfgNode._nodeStack[-1] if parent.findVar(self.name): MsgUtils.errorExit('variable ' + self.name + ' is already defined.') if not name.isalnum(): MsgUtils.errorExit('variable name ' + self.name + ' is not valid.') parent.vars[self.name] = self #print (f'--- cfg_model CfgVariable: adding var {self.name}, parent type is {type(parent)}') def __str__(self): 
return self.vartype.__name__ + ' ' + self.name @staticmethod def resolveRhsExpression(inVal, targetVarType, allowInstCreate = True, exitOnFail = True): # targetVarType is valid CfgDataType ''' given an unknown rhs expression, return an existing variable or instance (new from str or existing) of specified target data type ''' if type(inVal) is targetVarType: # already target type so done return inVal if (type(inVal) is CfgVariable) and (inVal.vartype is targetVarType): # already a variable so done return inVal if type(inVal) is str: # try to find an existing variable foundVar = HierCfgNode.peekNode().findVar(inVal) if (foundVar is not None) and (foundVar.vartype is targetVarType): return foundVar # else try creating new target instance if allowInstCreate: newVal = targetVarType(inVal) if newVal.isValid(): return newVal if exitOnFail: MsgUtils.errorExit('unable to resolve rhs expression ' + str(inVal) + ' to a value or variable.') @staticmethod def resolveLhsExpression(inVar, targetVarType, allowVarCreate = True, exitOnFail = True): # targetVarType is valid CfgDataType ''' given an unknown lhs expression, return an existing variable or create a new variable of specified target data type from str ''' if (type(inVar) is CfgVariable) and (inVar.vartype is targetVarType): # already a variable so done return inVar if type(inVar) is str: # try to find an existing (non-input) variable foundVar = HierCfgNode.peekNode().findVar(inVar, False) # input variables are not allowed on lhs if (foundVar is not None) and (foundVar.vartype is targetVarType): return foundVar # else create a new var of target type if allowVarCreate: return CfgVariable(inVar, targetVarType) if exitOnFail: MsgUtils.errorExit('unable to resolve lhs expression ' + str(inVar) + ' to a variable.') class CfgInputVariable(CfgVariable): def __str__(self): return 'input ' + self.vartype.__name__ + ' ' + self.name # ------- config model compare class @unique class ConfigCompareType(Enum): UNSUPPORTED = 0 EQ = 1 NE = 
2 GT = 3 LT = 4 GE = 5 LE = 6 def isSupported(self): return type(self) is not ConfigCompareType.UNSUPPORTED @staticmethod def resolve(opStr): ''' convert a string to ConfigCompareType ''' if type(opStr) is ConfigCompareType: # if type is already correct, just return input return opStr if opStr == '==': return ConfigCompareType.EQ elif opStr == '!=': return ConfigCompareType.NE elif opStr == '>': return ConfigCompareType.GT elif opStr == '<': return ConfigCompareType.LT elif opStr == '>=': return ConfigCompareType.GE elif opStr == '<=': return ConfigCompareType.LE else : return ConfigCompareType.UNSUPPORTED class CfgCompare(): def __init__(self, left=None, op=ConfigCompareType.UNSUPPORTED, right=None): self.op = op if type(op) is ConfigCompareType else ConfigCompareType.resolve(op) self.left = left if type(left) is CfgReadNode else left # TODO - extract into val or variable self.right = right if type(right) is CfgReadNode else right # TODO - extract into val or variable def isValid(self): if self.op.isSupported() and (self.left is not None) and (self.right is not None): return True return False def leftIsPoll(self): return type(self.left) is CfgReadNode def rightIsPoll(self): return type(self.right) is CfgReadNode def isPoll(self): ''' return True if compare involves a reg read ''' return self.leftIsPoll() or self.rightIsPoll() def __str__(self): return f'l=({self.left}) op={self.op.name} r=({self.right})' def resolvePaths(self): ''' resolve paths in this compare node ''' if self.leftIsPoll(): self.left.resolvePaths() if self.rightIsPoll(): self.right.resolvePaths() # ------ config model visitor TODO
47.675939
210
0.632603
693467a0acaed0c7690c50885524596a1996895e
922
py
Python
TERCEIRO MUNDO - THIRD WORLD/Um print especial - 97.py
MatheusKlebson/Python-Course
c1c5404095601733057bd91a96b5b4c45f0b5b9a
[ "MIT" ]
null
null
null
TERCEIRO MUNDO - THIRD WORLD/Um print especial - 97.py
MatheusKlebson/Python-Course
c1c5404095601733057bd91a96b5b4c45f0b5b9a
[ "MIT" ]
1
2020-11-25T15:47:38.000Z
2020-11-25T15:47:38.000Z
TERCEIRO MUNDO - THIRD WORLD/Um print especial - 97.py
MatheusKlebson/Python-Course
c1c5404095601733057bd91a96b5b4c45f0b5b9a
[ "MIT" ]
null
null
null
# Exercício Python 097: Faça um programa que tenha uma função chamada escreva(), # que receba um texto qualquer como parâmetro e mostre uma mensagem com tamanho adaptável. # Ex: # escreva(‘Olá, Mundo!’) Saída: # ~~~~~~~~~ # Olá, Mundo! # ~~~~~~~~~ def write(text): size = len(text) + 4 print("="*size) print(f" {text}") print("="*size) write("HELLO WORLD") write("I AM PROGRAMMER")
61.466667
167
0.273319
69357d4b61dce4eb42c2cf196abf2f132d27de5f
2,061
py
Python
tests/test_dict_qtable.py
fgka/reinforcement-learning-py
e4c582d192b36a270efce5e1512596b72466c8f7
[ "MIT" ]
null
null
null
tests/test_dict_qtable.py
fgka/reinforcement-learning-py
e4c582d192b36a270efce5e1512596b72466c8f7
[ "MIT" ]
null
null
null
tests/test_dict_qtable.py
fgka/reinforcement-learning-py
e4c582d192b36a270efce5e1512596b72466c8f7
[ "MIT" ]
null
null
null
#!/usr/bin/env python # vim: ai:sw=4:ts=4:sta:et:fo=croql # coding=utf-8 import pytest # Uncomment to run test in debug mode # import pudb; pudb.set_trace() from reinforcement_learning.dict_qtable import DictQTable from test_qaction import QActionTest from test_qstate import QStateTest """ DictQTable """ @pytest.mark.incremental class TestDictQTable(object): action_a = QActionTest(3) action_b = QActionTest(4) action_c = QActionTest(5) state_a = QStateTest([action_a, action_b]) state_b = QStateTest([action_c]) value_a = 123.1 value_b = 234.5 def test_set_value(self): # given obj = DictQTable() obj.set_value(self.state_a, self.action_a, self.value_a) # when stored_states = obj.get_all_stored_states() # then assert stored_states is not None, 'Table: {}'.format(obj) assert len(stored_states) is 1, 'Table: {}'.format(obj) assert stored_states[0] is self.state_a, 'Table: {}'.format(obj) value = obj.get_value(self.state_a, self.action_a) assert value is not None, 'Table: {}'.format(obj) assert value is self.value_a, 'Table: {}'.format(obj) def test_get_stored_action_values(self): # given obj = DictQTable() obj.set_value(self.state_a, self.action_a, self.value_a) obj.set_value(self.state_a, self.action_b, self.value_b) # when stored_action_values = obj.get_stored_action_values(self.state_a) # then assert stored_action_values is not None, 'Table: {}'.format(obj) assert len(stored_action_values) is 2, 'Table: {}'.format(obj) assert self.action_a in stored_action_values.keys(), \ 'Table: {}'.format(obj) assert stored_action_values[self.action_a] is self.value_a, \ 'Table: {}'.format(obj) assert self.action_b in stored_action_values.keys(), \ 'Table: {}'.format(obj) assert stored_action_values[self.action_b] is self.value_b, \ 'Table: {}'.format(obj)
33.786885
73
0.654051
693580d8911168f775041c87c4274d3d07d8d2de
2,851
py
Python
handlers/acceptmember.py
micjerry/groupservice
807e5d53533897ac36d9bf1cce30aee09979ea9f
[ "Apache-2.0" ]
1
2015-12-14T08:31:30.000Z
2015-12-14T08:31:30.000Z
handlers/acceptmember.py
micjerry/groupservice
807e5d53533897ac36d9bf1cce30aee09979ea9f
[ "Apache-2.0" ]
null
null
null
handlers/acceptmember.py
micjerry/groupservice
807e5d53533897ac36d9bf1cce30aee09979ea9f
[ "Apache-2.0" ]
null
null
null
import tornado.web import tornado.gen import json import io import logging import motor from bson.objectid import ObjectId import mickey.userfetcher from mickey.basehandler import BaseHandler class AcceptMemberHandler(BaseHandler): @tornado.web.asynchronous @tornado.gen.coroutine def post(self): coll = self.application.db.groups publish = self.application.publish token = self.request.headers.get("Authorization", "") data = json.loads(self.request.body.decode("utf-8")) groupid = data.get("groupid", "") inviteid = data.get("invite_id", self.p_userid) members = data.get("members", []) logging.info("begin to add members to group %s" % groupid) if not groupid or not members: logging.error("invalid request") self.set_status(403) self.finish() return result = yield coll.find_one({"_id":ObjectId(groupid)}) if not result: logging.error("group %s does not exist" % groupid) self.set_status(404) self.finish() return if result.get("owner", "") != self.p_userid: logging.error("%s are not the owner" % self.p_userid) self.set_status(403) self.finish() return; #get exist members exist_ids = [x.get("id", "") for x in result.get("members", [])] # get members and the receivers add_members = list(filter(lambda x: x not in exist_ids, [x.get("id", "") for x in members])) notify = {} notify["name"] = "mx.group.authgroup_invited" notify["pub_type"] = "any" notify["nty_type"] = "device" notify["msg_type"] = "other" notify["groupid"] = groupid notify["groupname"] = result.get("name", "") notify["userid"] = inviteid opter_info = yield mickey.userfetcher.getcontact(inviteid, token) if opter_info: notify["username"] = opter_info.get("name", "") else: logging.error("get user info failed %s" % inviteid) adddb_members = list(filter(lambda x: x.get("id", "") in add_members, members)) append_result = yield coll.find_and_modify({"_id":ObjectId(groupid)}, { "$addToSet":{"appendings":{"$each": adddb_members}}, "$unset": {"garbage": 1} }) if append_result: self.set_status(200) 
publish.publish_multi(add_members, notify) else: self.set_status(500) logging.error("add user failed %s" % groupid) return self.finish()
33.541176
105
0.54437
6938f0e86ee3f8439ad8093f9adb38fd142480f6
1,366
py
Python
ois_api_client/v2_0/dto/AdditionalQueryParams.py
peterkulik/ois_api_client
51dabcc9f920f89982c4419bb058f5a88193cee0
[ "MIT" ]
7
2020-10-22T08:15:29.000Z
2022-01-27T07:59:39.000Z
ois_api_client/v3_0/dto/AdditionalQueryParams.py
peterkulik/ois_api_client
51dabcc9f920f89982c4419bb058f5a88193cee0
[ "MIT" ]
null
null
null
ois_api_client/v3_0/dto/AdditionalQueryParams.py
peterkulik/ois_api_client
51dabcc9f920f89982c4419bb058f5a88193cee0
[ "MIT" ]
null
null
null
from typing import Optional from dataclasses import dataclass from .InvoiceAppearance import InvoiceAppearance from .InvoiceCategory import InvoiceCategory from .PaymentMethod import PaymentMethod from .Source import Source @dataclass class AdditionalQueryParams: """Additional params of the invoice query :param tax_number: Tax number of the supplier or the customer of the invoice (the search criteria depends on the value of the invoiceDirection tag) :param group_member_tax_number: Tax number of group member of the supplier or the customer of the invoice (the search criteria depends on the value of the invoiceDirection tag) :param name: Query param of the supplier or the customer of the invoice for leading match pattern (the search criteria depends on the value of the invoiceDirection tag) :param invoice_category: Type of invoice :param payment_method: Method of payment :param invoice_appearance: Form of appearance of the invoice :param source: Data exchange source :param currency: Currency of the invoice """ tax_number: Optional[str] group_member_tax_number: Optional[str] name: Optional[str] invoice_category: Optional[InvoiceCategory] payment_method: Optional[PaymentMethod] invoice_appearance: Optional[InvoiceAppearance] source: Optional[Source] currency: Optional[str]
44.064516
180
0.786237
6939aa03ac383f4a97a326eec7719fca0c79b4b8
1,213
py
Python
ingestion/testdir/testperf.py
jaysisodiya/relevare
b4e7b547b7372cee132e84bbd5d92cd69cdbdb63
[ "Apache-2.0" ]
null
null
null
ingestion/testdir/testperf.py
jaysisodiya/relevare
b4e7b547b7372cee132e84bbd5d92cd69cdbdb63
[ "Apache-2.0" ]
null
null
null
ingestion/testdir/testperf.py
jaysisodiya/relevare
b4e7b547b7372cee132e84bbd5d92cd69cdbdb63
[ "Apache-2.0" ]
null
null
null
# # GDELT data will be published to Kafka # Topics will be created - one topic per country using the Alpha-2 country codes # from kafka import KafkaProducer from datetime import datetime import pickle # # Initialize the producer # brokerlist='ec2-54-186-208-110.us-west-2.compute.amazonaws.com:9092,ec2-52-11-172-126.us-west-2.compute.amazonaws.com:9092,ec2-52-88-204-111.us-west-2.compute.amazonaws.com:9092,ec2-52-35-101-204.us-west-2.compute.amazonaws.com:9092' producer = KafkaProducer(bootstrap_servers=brokerlist) cnt = 0 payload = [ '0123456789', '01234567890123456789012345678901234567890123456789', '0123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789' ] msgcnt = [ 1000, 10000, 1000000 ] def testit(pload,n): beg_time = datetime.now() for i in range(n): sendmsg = pickle.dumps(pload) producer.send('foobar',sendmsg) return datetime.now() - beg_time for testload in payload: for n in msgcnt: timetaken = testit(testload,n) lenpayload = len(testload) print('Payload:\t%d\tMsgCnt:\t%d\tTime:\t%d secs %d microsecs' % (lenpayload, n, timetaken.seconds, timetaken.microseconds)) producer.flush()
34.657143
233
0.746908
693a26318273b86a08a21c3e6ee0d27d5cef1837
529
py
Python
ope-backend/create_db.py
clauddio-silva/ope-1
8be8a10c0db3d2f17e8e90a594abdf5770d53fa3
[ "CC0-1.0" ]
null
null
null
ope-backend/create_db.py
clauddio-silva/ope-1
8be8a10c0db3d2f17e8e90a594abdf5770d53fa3
[ "CC0-1.0" ]
null
null
null
ope-backend/create_db.py
clauddio-silva/ope-1
8be8a10c0db3d2f17e8e90a594abdf5770d53fa3
[ "CC0-1.0" ]
null
null
null
from src.infra.config import * from src.infra.db_entities import * from src.main.utils import hash_password db_conn = DBConnectionHandler() engine = db_conn.get_engine() Base.metadata.create_all(engine) with DBConnectionHandler() as db: try: new_user = Users(name='admin', role=1, email='admin@suzy.com', password=hash_password('admin')) db.session.add(new_user) db.session.commit() except Exception as ex: print(ex) db.session.rollback() finally: db.session.close()
27.842105
103
0.688091
693bf1abd077feaa155858e8c51deee32624b50d
14,767
py
Python
example/hermes_bot.py
azalio/python-icq-bot
b5ab8306d2abf8c259da71db1a3195c842d51110
[ "MIT" ]
null
null
null
example/hermes_bot.py
azalio/python-icq-bot
b5ab8306d2abf8c259da71db1a3195c842d51110
[ "MIT" ]
null
null
null
example/hermes_bot.py
azalio/python-icq-bot
b5ab8306d2abf8c259da71db1a3195c842d51110
[ "MIT" ]
null
null
null
import logging.config import random import re from collections import defaultdict from datetime import datetime from enum import Enum import requests from example.util import log_call from icq.bot import ICQBot, FileNotFoundException from icq.constant import TypingStatus from icq.filter import MessageFilter from icq.handler import MessageHandler try: from urllib import parse except ImportError: import urlparse as parse logging.config.fileConfig("logging.ini") log = logging.getLogger(__name__) NAME = "Hermes Bot" VERSION = "0.0.2" TOKEN = "000.0000000000.0000000000:000000000" PHRASES = ( "Sweet lion of Zion!", "Sweet manatee of Galilee!", "Sweet llamas of the Bahamas!", "Sweet something... of... someplace...", "Great cow of Moscow!", "Sweet giant anteater of Santa Anita!", "Sweet ghost of Babylon!", "Sacred boa of West and Eastern Samoa!", "Sacred hog of Prague!", "Cursed bacteria of Liberia!", "Sweet guinea pig of Winnipeg!", "Great bonda of Uganda!", "Sweet three-toed sloth of the ice planet Hoth!", "Sweet honey bee of infinity!", "Sweet yeti of the Serengeti!", "Sweet bongo of the Congo!", "Sweet squid of Madrid!", "Sweet kookaburra of Edinburgh!", "Sweet topology of cosmology!", "Sweet coincidence of Port-au-Prince!", "Sweet orca of Mallorca!", "Sweet candelabra of Le Havre, LaBarbara!" 
) def logging_iterator(name, iterable): for item in iterable: log.debug("Processing line ({name}): '{item}'.".format(name=name, item=item)) yield item class HTTPMethod(Enum): GET = "GET" POST = "POST" HEAD = "HEAD" OPTIONS = "OPTIONS" PUT = "PUT" DELETE = "DELETE" TRACE = "TRACE" CONNECT = "CONNECT" PATCH = "PATCH" class HTTPRequest(object): pattern = re.compile(r"^Connected to (?P<host>\S+) \((?P<ip>[^)]+)\) port (?P<port>\d+) \(#\d+\)$", re.IGNORECASE) _pattern_request_line = re.compile( r"^(?P<method>" + "|".join(m.value for m in HTTPMethod) + r")\s(?P<uri>/\S*)\sHTTP/(?P<version>\d\.\d)$", flags=re.IGNORECASE ) _pattern_http_header = re.compile( r"^\s*(?P<name>X-[^:]*?|Host|User-Agent|Accept|Accept-Encoding|Connection|Content-Length|Content-Type|Expect|If" r"-None-Match)\s*:\s*(?P<value>.*?)\s*$", flags=re.IGNORECASE ) @log_call def __init__(self, ip, method, url, version, headers, data): super(HTTPRequest, self).__init__() self.ip = ip self.method = method self.url = url self.version = version self.headers = headers self.data = data @staticmethod @log_call def parse(match, lines): for line in lines: request_line_match = HTTPRequest._pattern_request_line.search(line) if request_line_match: log.debug("Line matched with 'HTTPRequest._pattern_request_line' pattern.") break else: raise ParseException("Can't find request line!") headers = defaultdict(list) for line in lines: header_match = re.search(HTTPRequest._pattern_http_header, line) if header_match: headers[header_match.group("name")].append(header_match.group("value")) else: break method = HTTPMethod(request_line_match.group("method")) # Crutch for handling "Expect" request. 
if "Expect" in headers: if len(headers["Expect"]) != 1 and headers["Expect"][0] != "100-continue": raise ParseException("Unknown 'Expect' request header value ('{}')!".format(headers["Expect"])) line = next(lines) if line != "HTTP/1.1 100 Continue": raise ParseException("Unknown status line ('{}') for 'Expect' response!".format(line)) line = next(lines) if line == "We are completely uploaded and fine": # No data, seems like client logging bug. data = None else: data = line else: if method is HTTPMethod.GET: data = None elif method is HTTPMethod.POST: data = next(lines) else: raise ParseException("Unsupported HTTP method ('{}')!".format(method)) return HTTPRequest( ip=match.group("ip"), method=method, url=parse.urlparse("{scheme}://{host}{uri}".format( scheme={80: "HTTP", 443: "HTTPS"}[int(match.group("port"))], host=match.group("host"), uri=request_line_match.group("uri") )), version=request_line_match.group("version"), headers=headers, data=data ) def __repr__(self): return ( "HTTPRequest(method='{self.method}', url='{self.url}', version='{self.version}', headers='{self.headers}', " "data='{self.data}')".format(self=self) ) class HTTPResponse(object): pattern = re.compile(r"^HTTP/(?P<version>\d\.\d)\s(?P<status_code>\d{3})\s(?P<reason_phrase>.+)$", re.IGNORECASE) _pattern_http_header = re.compile( r"^\s*(?P<name>X-[^:]*?|Server|Date|Content-Type|Content-Length|Content-Encoding|Connection|Keep-Alive|Access-C" r"ontrol-Allow-Origin|Transfer-Encoding|Pragma|Cache-Control|ETag|Strict-Transport-Security|Set-Cookie)\s*:\s*(" r"?P<value>.*?)\s*$", re.IGNORECASE ) _pattern_elapsed = re.compile(r"^Completed in (?P<elapsed>\d+) ms$", re.IGNORECASE) @log_call def __init__(self, version, status_code, reason_phrase, headers, data, elapsed): super(HTTPResponse, self).__init__() self.version = version self.status_code = status_code self.reason_phrase = reason_phrase self.headers = headers self.data = data self.elapsed = elapsed @staticmethod @log_call def parse(match, lines): 
headers = defaultdict(list) for line in lines: (key, value) = map(lambda s: s.strip(), line.split(":", 1)) headers[key].append(value) data = next(lines) for line in lines: elapsed_match = re.search(HTTPResponse._pattern_elapsed, line) if elapsed_match: log.debug("Line matched with 'HTTPResponse._pattern_elapsed' pattern.") elapsed = elapsed_match.group("elapsed") break else: raise ParseException("Can't find elapsed time!") return HTTPResponse( version=match.group("version"), status_code=match.group("status_code"), reason_phrase=match.group("reason_phrase"), headers=headers, data=data, elapsed=elapsed ) def __repr__(self): return ( "HTTPResponse(version='{self.version}', status_code='{self.status_code}', reason_phrase='{self.reason_phras" "e}', headers='{self.headers}', data='{self.data}', elapsed='{self.elapsed}')".format(self=self) ) class LogRecord(object): pattern = re.compile( r"^\[(?P<week_day>Sun|Mon|Tue|Wed|Thu|Fri|Sat)\s(?P<month>Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s{1," r"2}(?P<day>\d{1,2})\s(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})\s(?P<year>\d+)\.(?P<microsecond>\d{1," r"3})\]\.\[(?:0x)?[0-9a-fA-F]+\]\s*$", re.IGNORECASE ) @log_call def __init__(self, date_time, request=None, response=None): super(LogRecord, self).__init__() self.date_time = date_time self.request = request self.response = response @staticmethod @log_call def parse(match, lines): date_time = datetime( year=int(match.group("year")), month=int(datetime.strptime(match.group("month"), "%b").month), day=int(match.group("day")), hour=int(match.group("hour")), minute=int(match.group("minute")), second=int(match.group("second")), microsecond=int(match.group("microsecond")) * 1000, ) for line in lines: request_match = HTTPRequest.pattern.search(line) if request_match: log.debug("Line matched with 'HTTPRequest.pattern' pattern.") buffer = [] # noinspection PyAssignmentToLoopOrWithParameter for line in lines: response_match = re.search(HTTPResponse.pattern, line) if 
response_match: log.debug("Line matched with 'HTTPResponse.pattern' pattern.") return LogRecord( date_time=date_time, request=HTTPRequest.parse(request_match, logging_iterator(HTTPRequest.__name__, buffer)), response=HTTPResponse.parse( response_match, logging_iterator(HTTPResponse.__name__, list(lines)) ) ) else: buffer.append(line) return LogRecord(date_time=date_time) def fix_log(lines): status_line_regexp = re.compile(r"^(?P<body>.*)(?P<status_line>HTTP/\d\.\d\s\d{3}\s.+)$", re.IGNORECASE) connection_left_regexp = re.compile(r"^.*Connection #\d+ to host \S+ left intact$", re.IGNORECASE) upload_sent_regexp = re.compile(r"^.*upload completely sent off: \d+ out of \d+ bytes$", re.IGNORECASE) prev_line = None for line in lines: log.debug("Processing line: '{}'.".format(line)) if prev_line == "HTTP/1.1 100 Continue": match = re.search(status_line_regexp, line) if match: log.debug("Fixing '100-continue' problem line.") yield match.group("body") yield match.group("status_line") elif re.search(connection_left_regexp, line): log.debug("Fixing 'Connection blah-blah left intact' problem line.") # yield re.split(connection_left_split_regexp, line)[0] elif re.search(upload_sent_regexp, line): log.debug("Fixing 'Upload completely sent blah-blah' problem line.") # result = re.split(upload_sent_split_regexp, line)[0] else: yield line prev_line = line def iterate_log(lines): buffer = [] match = None for line in lines: m = re.search(LogRecord.pattern, line) if m: log.debug("Line matched with 'LogRecord.pattern' pattern.") if buffer and match: yield LogRecord.parse(match, logging_iterator(LogRecord.__name__, buffer)) buffer = [] match = m else: buffer.append(line) def file_callback(bot, event): source_uin = event.data["source"]["aimId"] message = event.data["message"] try: bot.set_typing(target=source_uin, typing_status=TypingStatus.TYPING) # Getting info for file in message. 
path = parse.urlsplit(message.strip()).path file_id = path.rsplit("/", 1).pop() file_info_response = bot.get_file_info(file_id=file_id) if file_info_response.status_code == requests.codes.not_found: raise FileNotFoundException url = file_info_response.json()["file_list"].pop()["dlink"] # Starting file download. file_response = bot.http_session.get(url, stream=True) if file_response.encoding is None: file_response.encoding = "utf-8" # Downloading file and calculating stats. stats = defaultdict(int) status_codes = defaultdict(int) for log_record in iterate_log(fix_log( line for line in file_response.iter_lines(chunk_size=1024, decode_unicode=True) if line )): if log_record.request: stats["requests_count"] += 1 if log_record.request.url.path == "/aim/startSession": stats["start_session_count"] += 1 if log_record.request.url.path == "/genToken": stats["gen_token_count"] += 1 if log_record.response: key = log_record.response.status_code + " " + log_record.response.reason_phrase status_codes[key] += 1 else: stats["no_response_count"] += 1 bot.send_im( target=source_uin, message=( "Total requests: {requests_count}\n /aim/startSession: {start_session_count}\n /genToken: {gen_to" "ken_count}\n\nResponse count by status code:\n{status_codes}\n\nFound problems:\n{problems}\n\n{phrase" "}" ).format( requests_count=stats["requests_count"], start_session_count=stats["start_session_count"], gen_token_count=stats["gen_token_count"], status_codes="\n".join([ " {code}: {count}".format( code=code, count=count ) for (code, count) in sorted(status_codes.items()) ]), problems=" Requests without response: {no_response_count}".format( no_response_count=stats["no_response_count"] ), phrase=random.choice(PHRASES) ) ) except FileNotFoundException: bot.send_im(target=source_uin, message=random.choice(PHRASES) + " Give me your log right now!") except ParseException as e: bot.send_im( target=source_uin, message="{phrase} Log format is not supported! 
Error: '{error}'.".format( phrase=random.choice(PHRASES), error=e ) ) raise except Exception: bot.send_im(target=source_uin, message=random.choice(PHRASES) + " Something has gone wrong!") raise finally: bot.set_typing(target=source_uin, typing_status=TypingStatus.NONE) class ParseException(Exception): pass def main(): # Creating a new bot instance. bot = ICQBot(token=TOKEN, name=NAME, version=VERSION) # Registering message handlers. bot.dispatcher.add_handler(MessageHandler( callback=file_callback, filters=MessageFilter.file & ~(MessageFilter.image | MessageFilter.video | MessageFilter.audio) )) # Starting a polling thread watching for new events from server. This is a non-blocking call. bot.start_polling() # Blocking the current thread while the bot is working until SIGINT, SIGTERM or SIGABRT is received. bot.idle() if __name__ == "__main__": main()
36.825436
120
0.590709
693d787b8fc4cb803c97288d52b6f488a6db0a75
2,920
py
Python
qstat_live.py
romeromig/qstat_live
dde8ceb956dc0689a1c40c06ff20d58990488765
[ "MIT" ]
null
null
null
qstat_live.py
romeromig/qstat_live
dde8ceb956dc0689a1c40c06ff20d58990488765
[ "MIT" ]
null
null
null
qstat_live.py
romeromig/qstat_live
dde8ceb956dc0689a1c40c06ff20d58990488765
[ "MIT" ]
null
null
null
#! /usr/bin/python3 import curses import sys import subprocess def main_menu(stdscr): k = 0 cursor_x = 0 cursor_y = 0 # Start colors in curses curses.start_color() curses.init_pair(1, curses.COLOR_CYAN, curses.COLOR_BLACK) curses.init_pair(2, curses.COLOR_RED, curses.COLOR_BLACK) curses.init_pair(3, curses.COLOR_BLACK, curses.COLOR_WHITE) # Set mode switch = 0 # Loop where k is the last character pressed while True: if k == ord('q'): sys.exit() # Respond if the switch was pressed if k == ord('.'): if switch == 0: switch = 1 else: switch = 0 k = -1 # Initialization curses.curs_set(False) stdscr.nodelay(True) stdscr.clear() height, width = stdscr.getmaxyx() # Call qstat if switch == 0: process = subprocess.Popen("qstat -u '*'", stdout=subprocess.PIPE, shell=True) else: process = subprocess.Popen('qstat', stdout=subprocess.PIPE) stdout, stderr = process.communicate() qstat = str(stdout)[2:-1].split('\\n')[:-1] # Strings statusbarstr = " github.com/miferg | '.' to toggle all or user | 'q' to exit " if switch == 0: title = " qstat all users, {} jobs".format(len(qstat)-2) title_empty = " qstat all users, no jobs" if switch == 1: title = " qstat current user, {} jobs".format(len(qstat)-2) title_empty = " qstat current user, no jobs" # Centering calculations start_x_title = int((width // 2) - (len(title) // 2) - len(title) % 2) # Render status bar stdscr.attron(curses.color_pair(3)) stdscr.addstr(height-1, 0, statusbarstr) stdscr.addstr(height-1, len(statusbarstr), " " * (width - len(statusbarstr) - 1)) stdscr.attroff(curses.color_pair(3)) # Rendering title stdscr.attron(curses.color_pair(3)) stdscr.attron(curses.A_BOLD) if len(qstat)-2 == -2: stdscr.addstr(0, 0, title_empty) stdscr.addstr(0, len(title_empty), " " * (width - len(title) - 1)) else: stdscr.addstr(0, 0, title) stdscr.addstr(0, len(title), " " * (width - len(title) - 1)) stdscr.attroff(curses.color_pair(3)) # Turning off attributes for title stdscr.attroff(curses.color_pair(2)) stdscr.attroff(curses.A_BOLD) # 
Print the qstat report, line by line until the screen is filled for i in range(0, min(len(qstat),height-3)): stdscr.addstr(i+1, 0, qstat[i]) # Refresh the screen stdscr.refresh() curses.napms(100) # Wait for next input k = stdscr.getch() def main(): curses.wrapper(main_menu) if __name__ == "__main__": main()
28.627451
90
0.564726
693fc462e0aaf1cceaf2297cb92e001c5129520c
2,458
py
Python
nwbwidgets/utils/plotly.py
NeurodataWithoutBorders/nwb-jupyter-widgets
0d11e5d7b193c53d744b13c6404186ac84f4a5c1
[ "BSD-3-Clause-LBNL" ]
35
2019-03-10T23:39:17.000Z
2021-11-16T11:50:33.000Z
nwbwidgets/utils/plotly.py
catalystneuro/nwb-jupyter-widgets
0d11e5d7b193c53d744b13c6404186ac84f4a5c1
[ "BSD-3-Clause-LBNL" ]
158
2019-03-12T21:40:24.000Z
2022-03-16T14:35:55.000Z
nwbwidgets/utils/plotly.py
catalystneuro/nwb-jupyter-widgets
0d11e5d7b193c53d744b13c6404186ac84f4a5c1
[ "BSD-3-Clause-LBNL" ]
20
2019-03-08T14:30:27.000Z
2021-11-08T16:31:26.000Z
import plotly.graph_objects as go import numpy as np def multi_trace(x, y, color, label=None, fig=None, insert_nans=False): """Create multiple traces that are associated with a single legend label Parameters ---------- x: array-like y: array-like color: str label: str, optional fig: go.FigureWidget Returns ------- """ if fig is None: fig = go.FigureWidget() if insert_nans: y_nans = [] x_nans = [] for xx,yy in zip(x,y): y_nans.append(np.append(yy,np.nan)) x_nans.append(np.append(xx, np.nan)) y_plot = np.concatenate(y_nans,axis=0) x_plot = np.concatenate(x_nans, axis=0) fig.add_scattergl( x=x_plot, y=y_plot, name=label, line={"color": color}, ) return fig else: for i, yy in enumerate(y): if label is not None and i: showlegend = False else: showlegend = True fig.add_scattergl( x=x, y=yy, legendgroup=label, name=label, showlegend=showlegend, line={"color": color}, ) return fig def event_group( times_list, offset=0, color="Black", label=None, fig=None, marker=None, line_width=None, ): """Create an event raster that are all associated with a single legend label Parameters ---------- times_list: list of array-like offset: float, optional label: str, optional fig: go.FigureWidget optional, passed to go.Scatter.marker: marker: str line_width: str color: str default: Black Returns ------- """ if fig is None: fig = go.FigureWidget() if label is not None: showlegend = True else: showlegend = False for i, times in enumerate(times_list): if len(times): fig.add_scattergl( x=times, y=np.ones_like(times) * (i + offset), marker=dict( color=color, line_width=line_width, symbol=marker, line_color=color ), legendgroup=str(label), name=label, showlegend=showlegend, mode="markers", ) showlegend = False return fig
22.345455
87
0.513832
69417b7d7a3a4bf350d3fdf5bf9bebda6e608488
5,997
py
Python
django_python3_saml/saml_settings.py
IronCountySchoolDistrict/django-python3-saml
06d6198ed6c2b9ebfbfe4d6782715d91b6a468d8
[ "BSD-3-Clause" ]
6
2018-04-16T16:38:59.000Z
2022-02-10T09:02:11.000Z
django_python3_saml/saml_settings.py
IronCountySchoolDistrict/django-python3-saml
06d6198ed6c2b9ebfbfe4d6782715d91b6a468d8
[ "BSD-3-Clause" ]
1
2018-10-18T20:59:11.000Z
2018-10-19T13:42:43.000Z
django_python3_saml/saml_settings.py
IronCountySchoolDistrict/django-python3-saml
06d6198ed6c2b9ebfbfe4d6782715d91b6a468d8
[ "BSD-3-Clause" ]
6
2018-04-16T17:06:12.000Z
2020-05-06T11:32:39.000Z
from django.conf import settings class SAMLServiceProviderSettings(object): contact_info = { # Contact information template, it is recommended to suply a # technical and support contacts. "technical": { "givenName": settings.SAML['CONTACT_INFO']['TECHNICAL']['GIVEN_NAME'], "emailAddress": settings.SAML['CONTACT_INFO']['TECHNICAL']['EMAIL'], }, "support": { "givenName": settings.SAML['CONTACT_INFO']['SUPPORT']['GIVEN_NAME'], "emailAddress": settings.SAML['CONTACT_INFO']['SUPPORT']['EMAIL'], } } organization_info = { # Organization information template, the info in en_US lang is # recommended, add more if required. "en-US": { "name": settings.SAML['ORGANIZATION_INFO']['EN_US']['NAME'], "displayname": settings.SAML['ORGANIZATION_INFO']['EN_US']['DISPLAY_NAME'], "url": settings.SAML['ORGANIZATION_INFO']['EN_US']['URL'], } } def __init__(self, debug=False, strict=True, sp_metadata_url=None, sp_login_url=None, sp_logout_url=None, sp_x509cert=None, sp_private_key=None, # Service provider settings (e.g. us) idp_metadata_url=None, idp_sso_url=None, idp_slo_url=None, idp_x509cert=None, idp_x509_fingerprint=None, # Identify provider settings (e.g. onelogin) ): super(SAMLServiceProviderSettings, self).__init__() self.settings = default_settings = { # If strict is True, then the Python Toolkit will reject unsigned # or unencrypted messages if it expects them to be signed or encrypted. # Also it will reject the messages if the SAML standard is not strictly # followed. Destination, NameId, Conditions ... are validated too. "strict": strict, # Enable debug mode (outputs errors). "debug": debug, # Service Provider Data that we are deploying. "sp": { # Identifier of the SP entity (must be a URI) "entityId": sp_metadata_url, # Specifies info about where and how the <AuthnResponse> message MUST be # returned to the requester, in this case our SP. 
"assertionConsumerService": { # URL Location where the <Response> from the IdP will be returned "url": sp_login_url, # SAML protocol binding to be used when returning the <Response> # message. OneLogin Toolkit supports this endpoint for the # HTTP-POST binding only. "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST" }, # Specifies info about where and how the <Logout Response> message MUST be # returned to the requester, in this case our SP. "singleLogoutService": { # URL Location where the <Response> from the IdP will be returned "url": sp_logout_url, # SAML protocol binding to be used when returning the <Response> # message. OneLogin Toolkit supports the HTTP-Redirect binding # only for this endpoint. "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, # Specifies the constraints on the name identifier to be used to # represent the requested subject. # Take a look on src/onelogin/saml2/constants.py to see the NameIdFormat that are supported. "NameIDFormat": "urn:oasis:names:tc:SAML:2.0:nameid-format:unspecified", # Usually x509cert and privateKey of the SP are provided by files placed at # the certs folder. But we can also provide them with the following parameters 'x509cert': sp_x509cert, 'privateKey': sp_private_key }, # Identity Provider Data that we want connected with our SP. "idp": { # Identifier of the IdP entity (must be a URI) "entityId": idp_metadata_url, # SSO endpoint info of the IdP. (Authentication Request protocol) "singleSignOnService": { # URL Target of the IdP where the Authentication Request Message # will be sent. "url": idp_sso_url, # SAML protocol binding to be used when returning the <Response> # message. OneLogin Toolkit supports the HTTP-Redirect binding # only for this endpoint. "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, # SLO endpoint info of the IdP. "singleLogoutService": { # URL Location of the IdP where SLO Request will be sent. 
"url": idp_slo_url, # SAML protocol binding to be used when returning the <Response> # message. OneLogin Toolkit supports the HTTP-Redirect binding # only for this endpoint. "binding": "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect" }, # Public x509 certificate of the IdP "x509cert": idp_x509cert, # Instead of use the whole x509cert you can use a fingerprint # (openssl x509 -noout -fingerprint -in "idp.crt" to generate it) "certFingerprint": idp_x509_fingerprint }, "organization": self.organization_info, 'contactPerson': self.contact_info, } if not idp_x509cert: del self.settings['idp']['x509cert'] if not idp_x509_fingerprint: del self.settings['idp']['certFingerprint']
51.698276
167
0.571786
6943f352d6732b6ea4e8c626dd8012e42b34ad09
25,972
py
Python
heat/engine/parser.py
citrix-openstack-build/heat
fa31873529481472e037e3ce157b87f8057fe622
[ "Apache-2.0" ]
null
null
null
heat/engine/parser.py
citrix-openstack-build/heat
fa31873529481472e037e3ce157b87f8057fe622
[ "Apache-2.0" ]
null
null
null
heat/engine/parser.py
citrix-openstack-build/heat
fa31873529481472e037e3ce157b87f8057fe622
[ "Apache-2.0" ]
null
null
null
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import functools import re from oslo.config import cfg from heat.engine import environment from heat.common import exception from heat.engine import dependencies from heat.common import identifier from heat.engine import resource from heat.engine import resources from heat.engine import scheduler from heat.engine import template from heat.engine import timestamp from heat.engine import update from heat.engine.parameters import Parameters from heat.engine.template import Template from heat.engine.clients import Clients from heat.db import api as db_api from heat.openstack.common import log as logging from heat.openstack.common.gettextutils import _ from heat.common.exception import StackValidationFailed logger = logging.getLogger(__name__) (PARAM_STACK_NAME, PARAM_REGION) = ('AWS::StackName', 'AWS::Region') class Stack(object): ACTIONS = (CREATE, DELETE, UPDATE, ROLLBACK, SUSPEND, RESUME ) = ('CREATE', 'DELETE', 'UPDATE', 'ROLLBACK', 'SUSPEND', 'RESUME') STATUSES = (IN_PROGRESS, FAILED, COMPLETE ) = ('IN_PROGRESS', 'FAILED', 'COMPLETE') created_time = timestamp.Timestamp(functools.partial(db_api.stack_get, show_deleted=True), 'created_at') updated_time = timestamp.Timestamp(functools.partial(db_api.stack_get, show_deleted=True), 'updated_at') _zones = None def __init__(self, context, stack_name, tmpl, env=None, stack_id=None, action=None, status=None, status_reason='', timeout_mins=60, 
resolve_data=True, disable_rollback=True, parent_resource=None, owner_id=None): ''' Initialise from a context, name, Template object and (optionally) Environment object. The database ID may also be initialised, if the stack is already in the database. ''' if owner_id is None: if re.match("[a-zA-Z][a-zA-Z0-9_.-]*$", stack_name) is None: raise ValueError(_('Invalid stack name %s' ' must contain only alphanumeric or ' '\"_-.\" characters, must start with alpha' ) % stack_name) self.id = stack_id self.owner_id = owner_id self.context = context self.clients = Clients(context) self.t = tmpl self.name = stack_name self.action = action self.status = status self.status_reason = status_reason self.timeout_mins = timeout_mins self.disable_rollback = disable_rollback self.parent_resource = parent_resource self._resources = None self._dependencies = None resources.initialise() self.env = env or environment.Environment({}) self.parameters = Parameters(self.name, self.t, user_params=self.env.params) self._set_param_stackid() if resolve_data: self.outputs = self.resolve_static_data(self.t[template.OUTPUTS]) else: self.outputs = {} @property def resources(self): if self._resources is None: template_resources = self.t[template.RESOURCES] self._resources = dict((name, resource.Resource(name, data, self)) for (name, data) in template_resources.items()) return self._resources @property def dependencies(self): if self._dependencies is None: self._dependencies = self._get_dependencies( self.resources.itervalues()) return self._dependencies def reset_dependencies(self): self._dependencies = None @property def root_stack(self): ''' Return the root stack if this is nested (otherwise return self). ''' if (self.parent_resource and self.parent_resource.stack): return self.parent_resource.stack.root_stack return self def total_resources(self): ''' Total number of resources in a stack, including nested stacks below. 
''' total = 0 for res in iter(self.resources.values()): if hasattr(res, 'nested') and res.nested(): total += res.nested().total_resources() total += 1 return total def _set_param_stackid(self): ''' Update self.parameters with the current ARN which is then provided via the Parameters class as the AWS::StackId pseudo parameter ''' # This can fail if constructor called without a valid context, # as it is in many tests try: stack_arn = self.identifier().arn() except (AttributeError, ValueError, TypeError): logger.warning("Unable to set parameters StackId identifier") else: self.parameters.set_stack_id(stack_arn) @staticmethod def _get_dependencies(resources): '''Return the dependency graph for a list of resources.''' deps = dependencies.Dependencies() for resource in resources: resource.add_dependencies(deps) return deps @classmethod def load(cls, context, stack_id=None, stack=None, resolve_data=True, parent_resource=None, show_deleted=True): '''Retrieve a Stack from the database.''' if stack is None: stack = db_api.stack_get(context, stack_id, show_deleted=show_deleted) if stack is None: message = 'No stack exists with id "%s"' % str(stack_id) raise exception.NotFound(message) template = Template.load(context, stack.raw_template_id) env = environment.Environment(stack.parameters) stack = cls(context, stack.name, template, env, stack.id, stack.action, stack.status, stack.status_reason, stack.timeout, resolve_data, stack.disable_rollback, parent_resource, owner_id=stack.owner_id) return stack def store(self, backup=False): ''' Store the stack in the database and return its ID If self.id is set, we update the existing stack ''' s = { 'name': self._backup_name() if backup else self.name, 'raw_template_id': self.t.store(self.context), 'parameters': self.env.user_env_as_dict(), 'owner_id': self.owner_id, 'username': self.context.username, 'tenant': self.context.tenant_id, 'action': self.action, 'status': self.status, 'status_reason': self.status_reason, 'timeout': 
self.timeout_mins, 'disable_rollback': self.disable_rollback, } if self.id: db_api.stack_update(self.context, self.id, s) else: # Create a context containing a trust_id and trustor_user_id # if trusts are enabled if cfg.CONF.deferred_auth_method == 'trusts': trust_context = self.clients.keystone().create_trust_context() new_creds = db_api.user_creds_create(trust_context) else: new_creds = db_api.user_creds_create(self.context) s['user_creds_id'] = new_creds.id new_s = db_api.stack_create(self.context, s) self.id = new_s.id self._set_param_stackid() return self.id def _backup_name(self): return '%s*' % self.name def identifier(self): ''' Return an identifier for this stack. ''' return identifier.HeatIdentifier(self.context.tenant_id, self.name, self.id) def __iter__(self): ''' Return an iterator over this template's resources in the order that they should be started. ''' return iter(self.dependencies) def __reversed__(self): ''' Return an iterator over this template's resources in the order that they should be stopped. 
''' return reversed(self.dependencies) def __len__(self): '''Return the number of resources.''' return len(self.resources) def __getitem__(self, key): '''Get the resource with the specified name.''' return self.resources[key] def __setitem__(self, key, value): '''Set the resource with the specified name to a specific value.''' self.resources[key] = value def __contains__(self, key): '''Determine whether the stack contains the specified resource.''' return key in self.resources def keys(self): '''Return a list of resource keys for the stack.''' return self.resources.keys() def __str__(self): '''Return a human-readable string representation of the stack.''' return 'Stack "%s"' % self.name def resource_by_refid(self, refid): ''' Return the resource in this stack with the specified refid, or None if not found ''' for r in self.resources.values(): if r.state in ( (r.CREATE, r.IN_PROGRESS), (r.CREATE, r.COMPLETE), (r.RESUME, r.IN_PROGRESS), (r.RESUME, r.COMPLETE), (r.UPDATE, r.IN_PROGRESS), (r.UPDATE, r.COMPLETE)) and r.FnGetRefId() == refid: return r def validate(self): ''' http://docs.amazonwebservices.com/AWSCloudFormation/latest/\ APIReference/API_ValidateTemplate.html ''' # TODO(sdake) Should return line number of invalid reference # Check duplicate names between parameters and resources dup_names = set(self.parameters.keys()) & set(self.resources.keys()) if dup_names: logger.debug("Duplicate names %s" % dup_names) raise StackValidationFailed(message="Duplicate names %s" % dup_names) for res in self: try: result = res.validate() except exception.Error as ex: logger.exception(ex) raise ex except Exception as ex: logger.exception(ex) raise StackValidationFailed(message=str(ex)) if result: raise StackValidationFailed(message=result) def requires_deferred_auth(self): ''' Returns whether this stack may need to perform API requests during its lifecycle using the configured deferred authentication method. 
''' return any(res.requires_deferred_auth for res in self) def state_set(self, action, status, reason): '''Update the stack state in the database.''' if action not in self.ACTIONS: raise ValueError("Invalid action %s" % action) if status not in self.STATUSES: raise ValueError("Invalid status %s" % status) self.action = action self.status = status self.status_reason = reason if self.id is None: return stack = db_api.stack_get(self.context, self.id) stack.update_and_save({'action': action, 'status': status, 'status_reason': reason}) @property def state(self): '''Returns state, tuple of action, status.''' return (self.action, self.status) def timeout_secs(self): ''' Return the stack creation timeout in seconds, or None if no timeout should be used. ''' if self.timeout_mins is None: return None return self.timeout_mins * 60 def create(self): ''' Create the stack and all of the resources. ''' def rollback(): if not self.disable_rollback and self.state == (self.CREATE, self.FAILED): self.delete(action=self.ROLLBACK) creator = scheduler.TaskRunner(self.stack_task, action=self.CREATE, reverse=False, post_func=rollback) creator(timeout=self.timeout_secs()) @scheduler.wrappertask def stack_task(self, action, reverse=False, post_func=None): ''' A task to perform an action on the stack and all of the resources in forward or reverse dependency order as specfifed by reverse ''' self.state_set(action, self.IN_PROGRESS, 'Stack %s started' % action) stack_status = self.COMPLETE reason = 'Stack %s completed successfully' % action.lower() res = None def resource_action(r): # Find e.g resource.create and call it action_l = action.lower() handle = getattr(r, '%s' % action_l) return handle() action_task = scheduler.DependencyTaskGroup(self.dependencies, resource_action, reverse) try: yield action_task() except exception.ResourceFailure as ex: stack_status = self.FAILED reason = 'Resource %s failed: %s' % (action.lower(), str(ex)) except scheduler.Timeout: stack_status = self.FAILED 
reason = '%s timed out' % action.title() self.state_set(action, stack_status, reason) if callable(post_func): post_func() def _backup_stack(self, create_if_missing=True): ''' Get a Stack containing any in-progress resources from the previous stack state prior to an update. ''' s = db_api.stack_get_by_name(self.context, self._backup_name(), owner_id=self.id) if s is not None: logger.debug('Loaded existing backup stack') return self.load(self.context, stack=s) elif create_if_missing: prev = type(self)(self.context, self.name, self.t, self.env, owner_id=self.id) prev.store(backup=True) logger.debug('Created new backup stack') return prev else: return None def update(self, newstack): ''' Compare the current stack with newstack, and where necessary create/update/delete the resources until this stack aligns with newstack. Note update of existing stack resources depends on update being implemented in the underlying resource types Update will fail if it exceeds the specified timeout. The default is 60 minutes, set in the constructor ''' updater = scheduler.TaskRunner(self.update_task, newstack) updater() @scheduler.wrappertask def update_task(self, newstack, action=UPDATE): if action not in (self.UPDATE, self.ROLLBACK): logger.error("Unexpected action %s passed to update!" 
% action) self.state_set(self.UPDATE, self.FAILED, "Invalid action %s" % action) return if self.status != self.COMPLETE: if (action == self.ROLLBACK and self.state == (self.UPDATE, self.IN_PROGRESS)): logger.debug("Starting update rollback for %s" % self.name) else: self.state_set(action, self.FAILED, 'State invalid for %s' % action) return self.state_set(self.UPDATE, self.IN_PROGRESS, 'Stack %s started' % action) oldstack = Stack(self.context, self.name, self.t, self.env) backup_stack = self._backup_stack() try: update_task = update.StackUpdate(self, newstack, backup_stack, rollback=action == self.ROLLBACK) updater = scheduler.TaskRunner(update_task) self.env = newstack.env self.parameters = newstack.parameters try: updater.start(timeout=self.timeout_secs()) yield while not updater.step(): yield finally: self.reset_dependencies() if action == self.UPDATE: reason = 'Stack successfully updated' else: reason = 'Stack rollback completed' stack_status = self.COMPLETE except scheduler.Timeout: stack_status = self.FAILED reason = 'Timed out' except exception.ResourceFailure as e: reason = str(e) stack_status = self.FAILED if action == self.UPDATE: # If rollback is enabled, we do another update, with the # existing template, so we roll back to the original state if not self.disable_rollback: yield self.update_task(oldstack, action=self.ROLLBACK) return else: logger.debug('Deleting backup stack') backup_stack.delete() self.state_set(action, stack_status, reason) # flip the template to the newstack values # Note we do this on success and failure, so the current # stack resources are stored, even if one is in a failed # state (otherwise we won't remove them on delete) self.t = newstack.t template_outputs = self.t[template.OUTPUTS] self.outputs = self.resolve_static_data(template_outputs) self.store() def delete(self, action=DELETE): ''' Delete all of the resources, and then the stack itself. 
The action parameter is used to differentiate between a user initiated delete and an automatic stack rollback after a failed create, which amount to the same thing, but the states are recorded differently. ''' if action not in (self.DELETE, self.ROLLBACK): logger.error("Unexpected action %s passed to delete!" % action) self.state_set(self.DELETE, self.FAILED, "Invalid action %s" % action) return stack_status = self.COMPLETE reason = 'Stack %s completed successfully' % action.lower() self.state_set(action, self.IN_PROGRESS, 'Stack %s started' % action) backup_stack = self._backup_stack(False) if backup_stack is not None: backup_stack.delete() if backup_stack.status != backup_stack.COMPLETE: errs = backup_stack.status_reason failure = 'Error deleting backup resources: %s' % errs self.state_set(action, self.FAILED, 'Failed to %s : %s' % (action, failure)) return action_task = scheduler.DependencyTaskGroup(self.dependencies, resource.Resource.destroy, reverse=True) try: scheduler.TaskRunner(action_task)(timeout=self.timeout_secs()) except exception.ResourceFailure as ex: stack_status = self.FAILED reason = 'Resource %s failed: %s' % (action.lower(), str(ex)) except scheduler.Timeout: stack_status = self.FAILED reason = '%s timed out' % action.title() self.state_set(action, stack_status, reason) if stack_status != self.FAILED: # If we created a trust, delete it stack = db_api.stack_get(self.context, self.id) user_creds = db_api.user_creds_get(stack.user_creds_id) trust_id = user_creds.get('trust_id') if trust_id: self.clients.keystone().delete_trust(trust_id) # delete the stack db_api.stack_delete(self.context, self.id) self.id = None def suspend(self): ''' Suspend the stack, which invokes handle_suspend for all stack resources waits for all resources to become SUSPEND_COMPLETE then declares the stack SUSPEND_COMPLETE. 
Note the default implementation for all resources is to do nothing other than move to SUSPEND_COMPLETE, so the resources must implement handle_suspend for this to have any effect. ''' sus_task = scheduler.TaskRunner(self.stack_task, action=self.SUSPEND, reverse=True) sus_task(timeout=self.timeout_secs()) def resume(self): ''' Resume the stack, which invokes handle_resume for all stack resources waits for all resources to become RESUME_COMPLETE then declares the stack RESUME_COMPLETE. Note the default implementation for all resources is to do nothing other than move to RESUME_COMPLETE, so the resources must implement handle_resume for this to have any effect. ''' sus_task = scheduler.TaskRunner(self.stack_task, action=self.RESUME, reverse=False) sus_task(timeout=self.timeout_secs()) def output(self, key): ''' Get the value of the specified stack output. ''' value = self.outputs[key].get('Value', '') return self.resolve_runtime_data(value) def restart_resource(self, resource_name): ''' stop resource_name and all that depend on it start resource_name and all that depend on it ''' deps = self.dependencies[self[resource_name]] failed = False for res in reversed(deps): try: scheduler.TaskRunner(res.destroy)() except exception.ResourceFailure as ex: failed = True logger.error('delete: %s' % str(ex)) for res in deps: if not failed: try: res.state_reset() scheduler.TaskRunner(res.create)() except exception.ResourceFailure as ex: logger.exception('create') failed = True else: res.state_set(res.CREATE, res.FAILED, 'Resource restart aborted') # TODO(asalkeld) if any of this fails we Should # restart the whole stack def get_availability_zones(self): if self._zones is None: self._zones = [ zone.zoneName for zone in self.clients.nova().availability_zones.list(detailed=False)] return self._zones def resolve_static_data(self, snippet): return resolve_static_data(self.t, self, self.parameters, snippet) def resolve_runtime_data(self, snippet): return resolve_runtime_data(self.t, 
self.resources, snippet) def resolve_static_data(template, stack, parameters, snippet): ''' Resolve static parameters, map lookups, etc. in a template. Example: >>> from heat.common import template_format >>> template_str = '# JSON or YAML encoded template' >>> template = Template(template_format.parse(template_str)) >>> parameters = Parameters('stack', template, {'KeyName': 'my_key'}) >>> resolve_static_data(template, None, parameters, {'Ref': 'KeyName'}) 'my_key' ''' return transform(snippet, [functools.partial(template.resolve_param_refs, parameters=parameters), functools.partial(template.resolve_availability_zones, stack=stack), functools.partial(template.resolve_resource_facade, stack=stack), template.resolve_find_in_map, template.reduce_joins]) def resolve_runtime_data(template, resources, snippet): return transform(snippet, [functools.partial(template.resolve_resource_refs, resources=resources), functools.partial(template.resolve_attributes, resources=resources), template.resolve_split, template.resolve_member_list_to_map, template.resolve_select, template.resolve_joins, template.resolve_replace, template.resolve_base64]) def transform(data, transformations): ''' Apply each of the transformation functions in the supplied list to the data in turn. ''' for t in transformations: data = t(data) return data
37.262554
79
0.578354
69453942628ce1c37639781c43bed0432a313dc3
1,547
py
Python
src/puzzle_1_you_will_all_conform/my/please_conform_squared.py
foryourselfand/mit_6_S095_programming_for_the_puzzled
88371bd8461709011acbed6066ac4f40c5cde29e
[ "MIT" ]
null
null
null
src/puzzle_1_you_will_all_conform/my/please_conform_squared.py
foryourselfand/mit_6_S095_programming_for_the_puzzled
88371bd8461709011acbed6066ac4f40c5cde29e
[ "MIT" ]
null
null
null
src/puzzle_1_you_will_all_conform/my/please_conform_squared.py
foryourselfand/mit_6_S095_programming_for_the_puzzled
88371bd8461709011acbed6066ac4f40c5cde29e
[ "MIT" ]
null
null
null
from typing import List from please_conform import PleaseConform from structures import Interval class PleaseConformSquared(PleaseConform): def please_conform(self, caps: List[str]) -> List[Interval]: if len(caps) == 0: return list() caps: List[str] = caps.copy() caps.append('end') interval_inputs: List[Interval] = list() count_forward: int = 0 count_backward: int = 0 index_previous: int = 0 for index_current in range(1, len(caps)): cap_current = caps[index_current] cap_previous = caps[index_previous] if cap_current != cap_previous: interval_input = Interval(start=index_previous, end=index_current - 1, cap_type=cap_previous) interval_inputs.append(interval_input) if cap_previous == 'F': count_forward += 1 else: count_backward += 1 index_previous = index_current cap_to_flip: str if count_forward < count_backward: cap_to_flip = 'F' else: cap_to_flip = 'B' interval_results: List[Interval] = list() for interval_input in interval_inputs: if interval_input.cap_type == cap_to_flip: interval_result: Interval = interval_input interval_results.append(interval_result) return interval_results
30.333333
64
0.564318
694611c01663b1d27e4ebc26f84bb2603c45ff7c
5,784
py
Python
apps/almoxarifado/apps/cont/forms.py
mequetrefe-do-subtroco/web_constel
57b5626fb17b4fefc740cbe1ac95fd4ab90147bc
[ "MIT" ]
1
2020-06-18T09:03:53.000Z
2020-06-18T09:03:53.000Z
apps/almoxarifado/apps/cont/forms.py
gabrielhjs/web_constel
57b5626fb17b4fefc740cbe1ac95fd4ab90147bc
[ "MIT" ]
33
2020-06-16T18:59:33.000Z
2021-08-12T21:33:17.000Z
apps/almoxarifado/apps/cont/forms.py
gabrielhjs/web_constel
57b5626fb17b4fefc740cbe1ac95fd4ab90147bc
[ "MIT" ]
null
null
null
from django import forms from .models import * class FormCadastraModelo(forms.ModelForm): class Meta: model = Modelo fields = ['nome', 'descricao', ] def __init__(self, *args, **kwargs): super(FormCadastraModelo, self).__init__(*args, **kwargs) for key in self.fields.keys(): self.fields[key].widget.attrs.update({'class': 'form-control'}) class FormCadastraSecao(forms.ModelForm): class Meta: model = Secao fields = ['nome', 'descricao', ] def __init__(self, *args, **kwargs): super(FormCadastraSecao, self).__init__(*args, **kwargs) for key in self.fields.keys(): self.fields[key].widget.attrs.update({'class': 'form-control'}) class FormEntradaOnt1(forms.Form): modelo = forms.ChoiceField() secao = forms.ChoiceField() def __init__(self, *args, **kwargs): super(FormEntradaOnt1, self).__init__(*args, **kwargs) modelos = Modelo.objects.all().order_by('nome') modelos_name = [(i.id, i.nome.upper()) for i in modelos] self.fields['modelo'] = forms.ChoiceField( choices=modelos_name, label='Modelo', help_text='Modelo das ONT\'s a serem inseridas', ) secoes = Secao.objects.all().order_by('nome') secoes_name = [(i.id, i.nome.upper()) for i in secoes] self.fields['secao'] = forms.ChoiceField( choices=secoes_name, label='Seção', help_text='Atividade de destino das ONT\'s a serem inseridas', ) for key in self.fields.keys(): self.fields[key].widget.attrs.update({'class': 'form-control'}) class NonstickyCharfield(forms.TextInput): """Custom text input widget that's "non-sticky" (i.e. does not remember submitted values). """ def get_context(self, name, value, attrs): value = None # Clear the submitted value. 
return super().get_context(name, value, attrs) class FormEntradaOnt2(forms.Form): serial = forms.CharField(required=True, widget=NonstickyCharfield()) def __init__(self, *args, **kwargs): super(FormEntradaOnt2, self).__init__(*args, **kwargs) self.fields['serial'].widget.attrs.update( {'autofocus': 'autofocus', 'required': 'required'} ) for key in self.fields.keys(): self.fields[key].widget.attrs.update({'class': 'form-control'}) def clean(self): form_data = super().clean() serial = form_data['serial'].upper() if serial.find('4857544', 0, 7) >= 0: if len(serial) != 16: self.errors['serial'] = ['Serial de Ont Huawei inválido'] return form_data elif serial.find('ZNTS', 0, 5) >= 0: if len(serial) != 12: self.errors['serial'] = ['Serial de Ont Zhone inválido'] return form_data else: self.errors['serial'] = ['Serial de Ont inválido'] return form_data class FormOntFechamento(forms.Form): serial = forms.CharField(required=True, widget=NonstickyCharfield()) def __init__(self, *args, **kwargs): super(FormOntFechamento, self).__init__(*args, **kwargs) self.fields['serial'].widget.attrs.update( {'autofocus': 'autofocus', 'required': 'required'} ) for key in self.fields.keys(): self.fields[key].widget.attrs.update({'class': 'form-control'}) def clean(self): form_data = super().clean() serial = form_data['serial'].upper() if Ont.objects.filter(codigo=serial).exists(): form_data['serial'] = Ont.objects.get(codigo=serial) else: self.errors['serial'] = ['Ont não cadastrada no sistema, cadastre-a para registrá-la como com defeito'] return form_data class FormOntManutencao1(forms.Form): modelo = forms.ChoiceField() def __init__(self, *args, **kwargs): super(FormOntManutencao1, self).__init__(*args, **kwargs) modelos = Modelo.objects.all().order_by('nome') modelos_name = [(i.id, i.nome.upper()) for i in modelos] self.fields['modelo'] = forms.ChoiceField( choices=modelos_name, label='Modelo', ) for key in self.fields.keys(): self.fields[key].widget.attrs.update({'class': 
'form-control'}) class FormPswLogin(forms.Form): """ Formulário de login de usuário no psw """ username = forms.CharField(max_length=150, label='Chave da Copel') password = forms.CharField(widget=forms.PasswordInput) widgets = { 'password': forms.PasswordInput(), } def __init__(self, *args, **kwargs): super(FormPswLogin, self).__init__(*args, **kwargs) for key in self.fields.keys(): self.fields[key].widget.attrs.update({'class': 'form-control'}) class FormPswContrato(forms.Form): """ Formulário de busca de contrato no psw """ contratos = forms.CharField( label='Contratos', widget=forms.TextInput( attrs={'placeholder': 'Ex: 1234567,1234568, 1234569'} ) ) def __init__(self, *args, **kwargs): super(FormPswContrato, self).__init__(*args, **kwargs) for key in self.fields.keys(): self.fields[key].widget.attrs.update({'class': 'form-control'}) class FormSerial(forms.Form): """ Formulário de busca de serial """ serial = forms.CharField(label='Serial', required=False) def __init__(self, *args, **kwargs): super(FormSerial, self).__init__(*args, **kwargs) for key in self.fields.keys(): self.fields[key].widget.attrs.update({'class': 'form-control'})
28.492611
115
0.602006
69469bc9f4f19c9f16e8cc58a6b94958c3abce9d
1,980
py
Python
DMD/pyDMD.py
yusovm/GEMSEC
d9abd43d27e05607e7b1ea8c99fcc736abd204fd
[ "MIT" ]
null
null
null
DMD/pyDMD.py
yusovm/GEMSEC
d9abd43d27e05607e7b1ea8c99fcc736abd204fd
[ "MIT" ]
null
null
null
DMD/pyDMD.py
yusovm/GEMSEC
d9abd43d27e05607e7b1ea8c99fcc736abd204fd
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Mon Mar 2 16:43:45 2020 @author: micha """ import matplotlib.pyplot as plt import numpy as np import pandas as pd import seaborn as sns from MD_Analysis import Angle_Calc from pydmd import DMD pdb="pdbs/WT_295K_200ns_50ps_0_run.pdb" #Extract phi, psi angles AC=Angle_Calc(pdb) Angle_DF=AC.get_phi_psi() def cossin(data): cols = data.columns data = data.to_numpy() coss = np.cos(data/180.*np.pi) sins = np.sin(data/180.*np.pi) res=pd.DataFrame() for i in range(len(cols)): res[cols[i]+"_cos"] = coss[:,i] res[cols[i]+"_sin"] = sins[:,i] return res def halftime(data): dropindex = [1+2*i for i in (range(int(data.shape[0]/2)))] return data.drop(dropindex) #half = halftime(Angle_DF) angle_cossin = cossin(Angle_DF) angle_cossin_full = angle_cossin.copy() angle_cossin_full.drop(angle_cossin_full.tail(1).index,inplace=True) f=angle_cossin_full.to_numpy() dt=50*(10**-12) xi=np.linspace(np.min(f),np.max(f),f.shape[0]) t=np.linspace(0,f.shape[0],f.shape[1])*dt #+200*10**-9 Xgrid,T=np.meshgrid(xi,t) dmd = DMD(svd_rank=40) dmd.fit(f.T) xl=np.linspace(0,4000*dt,2000) yl=range(40) xlabel,ylabel=np.meshgrid(xl,yl) #Actual fig = plt.figure(figsize=(17,6)) plt.pcolor(xl, yl, f.real.T) plt.yticks([]) plt.title('Actual Data') plt.colorbar() plt.show() fig.savefig("PyDMD Actual Data.png") #Reconstructed fig2 = plt.figure(figsize=(17,6)) plt.pcolor(xl, yl, dmd.reconstructed_data.real) plt.yticks([]) plt.title('Reconstructed Data') plt.colorbar() plt.show() fig2.savefig("PyDMD Reconstructed Data.png") #Error fig3 = plt.figure(figsize=(17,6)) plt.pcolor(xl, yl, (np.sqrt(f.T-dmd.reconstructed_data)**2).real) plt.yticks([]) plt.title('RMSE Error') plt.colorbar() plt.show() fig3.savefig("PyDMD Error.png") #Eigenvalues dmd.plot_eigs(show_axes=True, show_unit_circle=True)
21.758242
69
0.663636
69473144068a69e9d819b7ace65fa14ac4780508
2,629
py
Python
litreview/apps/accounts/migrations/0001_initial.py
josayko/litreview
7bbc08eb635897a763f3882f0f42c76b76a5f127
[ "MIT" ]
null
null
null
litreview/apps/accounts/migrations/0001_initial.py
josayko/litreview
7bbc08eb635897a763f3882f0f42c76b76a5f127
[ "MIT" ]
null
null
null
litreview/apps/accounts/migrations/0001_initial.py
josayko/litreview
7bbc08eb635897a763f3882f0f42c76b76a5f127
[ "MIT" ]
null
null
null
# Generated by Django 4.0.1 on 2022-02-18 10:26 from django.conf import settings import django.core.validators from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='Review', fields=[ ('id', models.AutoField(primary_key=True, serialize=False)), ('headline', models.CharField(max_length=128)), ('body', models.CharField(blank=True, max_length=8192)), ('rating', models.PositiveSmallIntegerField(validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(5)])), ('time_created', models.DateTimeField(auto_now_add=True)), ], ), migrations.CreateModel( name='Ticket', fields=[ ('id', models.AutoField(primary_key=True, serialize=False)), ('title', models.CharField(max_length=128)), ('description', models.CharField(max_length=8192)), ('time_created', models.DateTimeField(auto_now_add=True)), ('review_id', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.review')), ('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], ), migrations.AddField( model_name='review', name='ticket_id', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='accounts.ticket'), ), migrations.AddField( model_name='review', name='user', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), migrations.CreateModel( name='UserFollow', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('followed_user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='followed_user', to=settings.AUTH_USER_MODEL)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', 
to=settings.AUTH_USER_MODEL)), ], options={ 'unique_together': {('user', 'followed_user')}, }, ), ]
43.098361
164
0.613922
6948cabfaee34f9d09b3487a9f8788c36f0c90c4
24,914
py
Python
bot.py
StickBrush/RecommenderBot
a9e9ad5900e016cfeb1cba144cac5faa84607522
[ "WTFPL" ]
null
null
null
bot.py
StickBrush/RecommenderBot
a9e9ad5900e016cfeb1cba144cac5faa84607522
[ "WTFPL" ]
null
null
null
bot.py
StickBrush/RecommenderBot
a9e9ad5900e016cfeb1cba144cac5faa84607522
[ "WTFPL" ]
null
null
null
from telegram.ext import Updater, CommandHandler
import re
import os
import logging

from model import Anime
from gateway import *


def _send_recommendation(bot, update, anime):
    """Log one prettified anime dict and send it as a photo with a caption.

    ``anime`` is consumed: 'Image' becomes the photo, 'Title' the first
    caption line, and every remaining key/value pair becomes a caption line.
    """
    # Bug fix: the original called 'Answer: '.format(anime) with no '{}'
    # placeholder, so the answer was never actually logged.
    logging.debug('Answer: {}'.format(anime))
    chat_id = update.message.chat_id
    image = anime.pop('Image')
    caption = '{}\n'.format(anime.pop('Title'))
    for key in anime:
        caption += '{}: {}\n'.format(key, anime[key])
    bot.send_photo(chat_id=chat_id, photo=image, caption=caption)


def recommend_anime(bot, update):
    """Handle /recommend: best-rated anime overall."""
    logging.debug('Received recommendation request')
    _send_recommendation(bot, update, get_best_anime().to_prettified_dict())


def recommend_airing_anime(bot, update):
    """Handle /airing: best-rated currently airing anime."""
    logging.debug('Received airing request')
    _send_recommendation(
        bot, update, get_best_anime(subtype=Subtype.AIRING).to_prettified_dict())


def recommend_movie(bot, update):
    """Handle /movie: best-rated anime movie."""
    logging.debug('Received movie request')
    _send_recommendation(
        bot, update, get_best_anime(subtype=Subtype.MOVIE).to_prettified_dict())


def _genre_handler(genre, label):
    """Return a handler recommending the best anime of ``genre``.

    ``label`` is the human-readable name used in the debug log, matching the
    wording of the original hand-written handlers.
    """
    def handler(bot, update):
        logging.debug('Received {} request'.format(label))
        _send_recommendation(
            bot, update, get_anime_by_genre(genre).to_prettified_dict())
    return handler


# One module-level handler per genre. The names are preserved exactly from
# the original ~40 copy-pasted functions so any external reference still works.
recommend_action_anime = _genre_handler(Genre.ACTION, 'action')
recommend_adventure_anime = _genre_handler(Genre.ADVENTURE, 'adventure')
recommend_cars_anime = _genre_handler(Genre.CARS, 'cars')
recommend_comedy_anime = _genre_handler(Genre.COMEDY, 'comedy')
recommend_dementia_anime = _genre_handler(Genre.DEMENTIA, 'dementia')
recommend_demons_anime = _genre_handler(Genre.DEMONS, 'demons')
recommend_mystery_anime = _genre_handler(Genre.MYSTERY, 'mystery')
recommend_drama_anime = _genre_handler(Genre.DRAMA, 'drama')
recommend_ecchi_anime = _genre_handler(Genre.ECCHI, 'ecchi')
recommend_fantasy_anime = _genre_handler(Genre.FANTASY, 'fantasy')
recommend_game_anime = _genre_handler(Genre.GAME, 'game')
recommend_hentai_anime = _genre_handler(Genre.HENTAI, 'hentai')
recommend_historical_anime = _genre_handler(Genre.HISTORICAL, 'historical')
recommend_horror_anime = _genre_handler(Genre.HORROR, 'horror')
recommend_magic_anime = _genre_handler(Genre.MAGIC, 'magic')
recommend_martial_arts_anime = _genre_handler(Genre.MARTIAL_ARTS, 'martial arts')
recommend_mecha_anime = _genre_handler(Genre.MECHA, 'mecha')
recommend_music_anime = _genre_handler(Genre.MUSIC, 'music')
recommend_parody_anime = _genre_handler(Genre.PARODY, 'parody')
recommend_samurai_anime = _genre_handler(Genre.SAMURAI, 'samurai')
recommend_romance_anime = _genre_handler(Genre.ROMANCE, 'romance')
recommend_school_anime = _genre_handler(Genre.SCHOOL, 'school')
recommend_sci_fi_anime = _genre_handler(Genre.SCI_FI, 'sci-fi')
recommend_shojo_anime = _genre_handler(Genre.SHOJO, 'shojo')
recommend_shojo_ai_anime = _genre_handler(Genre.SHOJO_AI, 'shojo-ai')
recommend_shonen_anime = _genre_handler(Genre.SHONEN, 'shonen')
recommend_shonen_ai_anime = _genre_handler(Genre.SHONEN_AI, 'shonen-ai')
recommend_space_anime = _genre_handler(Genre.SPACE, 'space')
recommend_sports_anime = _genre_handler(Genre.SPORTS, 'sports')
recommend_super_power_anime = _genre_handler(Genre.SUPER_POWER, 'super power')
recommend_vampire_anime = _genre_handler(Genre.VAMPIRE, 'vampire')
recommend_yaoi_anime = _genre_handler(Genre.YAOI, 'yaoi')
recommend_yuri_anime = _genre_handler(Genre.YURI, 'yuri')
recommend_harem_anime = _genre_handler(Genre.HAREM, 'harem')
recommend_slice_of_life_anime = _genre_handler(Genre.SLICE_OF_LIFE, 'slice of life')
recommend_supernatural_anime = _genre_handler(Genre.SUPERNATURAL, 'supernatural')
recommend_military_anime = _genre_handler(Genre.MILITARY, 'military')
recommend_police_anime = _genre_handler(Genre.POLICE, 'police')
recommend_psychological_anime = _genre_handler(Genre.PSYCHOLOGICAL, 'psychological')
recommend_thriller_anime = _genre_handler(Genre.THRILLER, 'thriller')
recommend_seinen_anime = _genre_handler(Genre.SEINEN, 'seinen')
recommend_josei_anime = _genre_handler(Genre.JOSEI, 'josei')

# (command, handler) pairs; several genres keep both romanizations as aliases
# (shojo/shoujo, shonen/shounen, ...), exactly as originally registered.
_COMMANDS = (
    ('recommend', recommend_anime),
    ('airing', recommend_airing_anime),
    ('movie', recommend_movie),
    ('action', recommend_action_anime),
    ('adventure', recommend_adventure_anime),
    ('cars', recommend_cars_anime),
    ('comedy', recommend_comedy_anime),
    ('dementia', recommend_dementia_anime),
    ('demons', recommend_demons_anime),
    ('mystery', recommend_mystery_anime),
    ('drama', recommend_drama_anime),
    ('ecchi', recommend_ecchi_anime),
    ('fantasy', recommend_fantasy_anime),
    ('game', recommend_game_anime),
    ('hentai', recommend_hentai_anime),
    ('historical', recommend_historical_anime),
    ('horror', recommend_horror_anime),
    ('magic', recommend_magic_anime),
    ('martialarts', recommend_martial_arts_anime),
    ('mecha', recommend_mecha_anime),
    ('music', recommend_music_anime),
    ('parody', recommend_parody_anime),
    ('samurai', recommend_samurai_anime),
    ('romance', recommend_romance_anime),
    ('school', recommend_school_anime),
    ('scifi', recommend_sci_fi_anime),
    ('shojo', recommend_shojo_anime),
    ('shoujo', recommend_shojo_anime),
    ('shojoai', recommend_shojo_ai_anime),
    ('shoujoai', recommend_shojo_ai_anime),
    ('shonen', recommend_shonen_anime),
    ('shounen', recommend_shonen_anime),
    ('shonenai', recommend_shonen_ai_anime),
    ('shounenai', recommend_shonen_ai_anime),
    ('space', recommend_space_anime),
    ('sports', recommend_sports_anime),
    ('superpower', recommend_super_power_anime),
    ('vampire', recommend_vampire_anime),
    ('yaoi', recommend_yaoi_anime),
    ('yuri', recommend_yuri_anime),
    ('harem', recommend_harem_anime),
    ('sliceoflife', recommend_slice_of_life_anime),
    ('supernatural', recommend_supernatural_anime),
    ('military', recommend_military_anime),
    ('police', recommend_police_anime),
    ('psychological', recommend_psychological_anime),
    ('thriller', recommend_thriller_anime),
    ('seinen', recommend_seinen_anime),
    ('josei', recommend_josei_anime),
)


def main():
    """Configure logging, register every command handler and start polling."""
    updater = Updater(os.environ.get("API_KEY"))
    # Setting DEBUG_LOG (to any value) enables debug-level logging.
    debug_log = os.environ.get('DEBUG_LOG', None)
    logging.basicConfig(level=logging.DEBUG if debug_log is not None else logging.INFO)
    dispatcher = updater.dispatcher
    for command, handler in _COMMANDS:
        dispatcher.add_handler(CommandHandler(command, handler))
    updater.start_polling()
    updater.idle()


if __name__ == '__main__':
    main()
44.017668
90
0.702537
69496f7144482f3f124f969247fa77f335fc5db1
695
py
Python
models/t_compensate_event_definition.py
THM-MA/XSDATA-waypoint
dd94442f9d6677c525bf3ebb03c15fec52fa1079
[ "MIT" ]
null
null
null
models/t_compensate_event_definition.py
THM-MA/XSDATA-waypoint
dd94442f9d6677c525bf3ebb03c15fec52fa1079
[ "MIT" ]
null
null
null
models/t_compensate_event_definition.py
THM-MA/XSDATA-waypoint
dd94442f9d6677c525bf3ebb03c15fec52fa1079
[ "MIT" ]
null
null
null
from dataclasses import dataclass, field from typing import Optional from xml.etree.ElementTree import QName from .t_event_definition import TEventDefinition __NAMESPACE__ = "http://www.omg.org/spec/BPMN/20100524/MODEL" @dataclass class TCompensateEventDefinition(TEventDefinition): class Meta: name = "tCompensateEventDefinition" wait_for_completion: Optional[bool] = field( default=None, metadata={ "name": "waitForCompletion", "type": "Attribute", } ) activity_ref: Optional[QName] = field( default=None, metadata={ "name": "activityRef", "type": "Attribute", } )
24.821429
61
0.640288
694980cd79dd13058b0c41b67c32eb322a3674e4
822
py
Python
plugins.py
ddfabbro/translatorbot
a14a442ec840d81e3d8bbc6faa15e52f68145655
[ "Unlicense" ]
null
null
null
plugins.py
ddfabbro/translatorbot
a14a442ec840d81e3d8bbc6faa15e52f68145655
[ "Unlicense" ]
null
null
null
plugins.py
ddfabbro/translatorbot
a14a442ec840d81e3d8bbc6faa15e52f68145655
[ "Unlicense" ]
null
null
null
import html from googletrans import Translator from slackbot.bot import default_reply, respond_to, listen_to translator = Translator() def translate(message): msg_in = html.unescape(message.body["text"]) if msg_in != "": if translator.detect(msg_in).lang == "en": text = translator.translate(msg_in, dest = "ja").text else: text = translator.translate(msg_in, dest = "en").text msg_out = "```{}```".format(text) if message.thread_ts == message.body["event_ts"]: message.send(msg_out) else: message.reply(msg_out) @default_reply def my_default_handler(message): translate(message) @respond_to(".*") def all_replies(message): translate(message) @listen_to(".*") def all_messages(message): translate(message)
24.176471
65
0.647202
6949c8d294973b9c15f0cdaf6df462ee0fe3f120
22
py
Python
utils/db_api/__init__.py
zotov-vs/tg_shop
e640e7cfaeac0af1de33a62fb5e6da28d8843651
[ "MIT" ]
1
2021-12-16T10:41:16.000Z
2021-12-16T10:41:16.000Z
utils/db_api/__init__.py
zotov-vs/tg_shop
e640e7cfaeac0af1de33a62fb5e6da28d8843651
[ "MIT" ]
6
2021-10-11T06:03:48.000Z
2021-10-17T09:42:05.000Z
App(BE)/main/models/__init__.py
osamhack2021/AI_APP_handylib_devlib
62cf67e6df280217e3715e2aa425636cefa7dd6f
[ "MIT" ]
null
null
null
from . import database
22
22
0.818182
694a62087121f0e7a903137a5cded3d86b3d17e4
1,324
py
Python
app.py
muelletm/search
3087dcfd26861b1386c38575b53cb026cb1045f8
[ "Apache-2.0" ]
1
2022-03-25T19:14:53.000Z
2022-03-25T19:14:53.000Z
app.py
muelletm/search
3087dcfd26861b1386c38575b53cb026cb1045f8
[ "Apache-2.0" ]
null
null
null
app.py
muelletm/search
3087dcfd26861b1386c38575b53cb026cb1045f8
[ "Apache-2.0" ]
4
2022-03-10T18:40:44.000Z
2022-03-10T19:20:30.000Z
import collections import os from pathlib import Path from typing import List import streamlit as st from sentence_transformers import SentenceTransformer from search.engine import Engine, Result from search.model import load_minilm_model from search.utils import get_memory_usage os.environ["TOKENIZERS_PARALLELISM"] = "false" _DATA_DIR = os.environ.get("DATA_DIR", "data/people_pm_minilm") st.set_page_config(page_title="Search Engine", layout="wide") st.markdown( """ <style> .big-font { font-size:20px; } </style> """, unsafe_allow_html=True, ) @st.cache(allow_output_mutation=True) def load_engine() -> Engine: engine = Engine( data_dir=Path(_DATA_DIR), ) return engine @st.cache(allow_output_mutation=True) def load_model() -> SentenceTransformer: return load_minilm_model() engine = load_engine() model = load_model() st.error("Create a text input for the query.") st.error("Create a slider with the number of results to retrieve.") with st.spinner("Querying index ..."): st.error("Get query embedding.") st.error("Search results (engine.search).") # Show the results. # You can use st.markdown to render markdown. # e.g. st.markdown("**text**") will add text in bold font. st.error("Render results") st.markdown(f"**Mem Usage**: {get_memory_usage()}MB")
21.354839
67
0.728852
694b3b51b65fa886685be715d3c914e309e0c1fe
1,596
py
Python
interface/exemplos1/04.py
ell3a/estudos-python
09808a462aa3e73ad433501acb11f62217548af8
[ "MIT" ]
null
null
null
interface/exemplos1/04.py
ell3a/estudos-python
09808a462aa3e73ad433501acb11f62217548af8
[ "MIT" ]
null
null
null
interface/exemplos1/04.py
ell3a/estudos-python
09808a462aa3e73ad433501acb11f62217548af8
[ "MIT" ]
null
null
null
from tkinter import * class EditBoxWindow: def __init__(self, parent = None): if parent == None: parent = Tk() self.myParent = parent self.top_frame = Frame(parent) # Criando a barra de rolagem scrollbar = Scrollbar(self.top_frame) self.editbox = Text(self.top_frame, yscrollcommand=scrollbar.set) scrollbar.pack(side=RIGHT, fill=Y) scrollbar.config(command=self.editbox.yview) # Área do texto self.editbox.pack(anchor=CENTER, fill=BOTH) self.top_frame.pack(side=TOP) # Texto a procurar self.bottom_left_frame = Frame(parent) self.textfield = Entry(self.bottom_left_frame) self.textfield.pack(side=LEFT, fill=X, expand=1) # Botão Find buttonSearch = Button(self.bottom_left_frame, text='Find', command=self.find) buttonSearch.pack(side=RIGHT) self.bottom_left_frame.pack(side=LEFT, expand=1) self.bottom_right_frame = Frame(parent) def find(self): self.editbox.tag_remove('found', '1.0', END) s = self.textfield.get() if s: idx = '1.0' while True: idx =self.editbox.search(s, idx, nocase=1, stopindex=END) if not idx: break lastidx = '%s+%dc' % (idx, len(s)) self.editbox.tag_add('found', idx, lastidx) idx = lastidx self.editbox.tag_config('found', foreground='red') if __name__=="__main__": root = Tk() myapp = EditBoxWindow(root)
32.571429
85
0.58396
694cb06a76643cd64ded70df62959d8318b7af93
426
py
Python
app/app.py
cagriozkurt/EksiSansur
071f5e136d58f7fdd5ba32c8387904b2710d04a5
[ "MIT" ]
null
null
null
app/app.py
cagriozkurt/EksiSansur
071f5e136d58f7fdd5ba32c8387904b2710d04a5
[ "MIT" ]
null
null
null
app/app.py
cagriozkurt/EksiSansur
071f5e136d58f7fdd5ba32c8387904b2710d04a5
[ "MIT" ]
1
2022-03-22T13:50:41.000Z
2022-03-22T13:50:41.000Z
import psycopg from flask import Flask, render_template from flask_compress import Compress app = Flask(__name__) DATABASE_URL = "" Compress(app) @app.route("/") def index(): with psycopg.connect(DATABASE_URL, sslmode="require") as conn: with conn.cursor() as cur: cur.execute("SELECT * FROM topics;") items = cur.fetchall() return render_template("index.html", items=items)
25.058824
66
0.671362
69512ed9252aea21d648e173c9d6e12c14061403
1,404
py
Python
2020/07/ape.py
notxenonbox/adventofcode
82cd8fafdf21c988bd7383f2b6d71cec04282e65
[ "Unlicense" ]
null
null
null
2020/07/ape.py
notxenonbox/adventofcode
82cd8fafdf21c988bd7383f2b6d71cec04282e65
[ "Unlicense" ]
null
null
null
2020/07/ape.py
notxenonbox/adventofcode
82cd8fafdf21c988bd7383f2b6d71cec04282e65
[ "Unlicense" ]
null
null
null
import re class Bag: def __init__(self, _name, _contents): self.name = _name self.contents = _contents self.c_cache = None self.has_cache = {} def hasBagType(self, _name, bags): try: return self.has_cache[_name] except: if _name != self.name: for i in self.contents: if bags[i[1]].hasBagType(_name, bags): break else: return False return True else: return True def children_count(self): if self.c_cache != None: return self.c_cache count = 0 for i in self.contents: count += i[0] + (i[0] * bags[i[1]].children_count()) self.c_cache = count return count input_lines = [] with open('input.txt') as f: input_lines = f.readlines() input_lines = list(filter(None, input_lines)) bags = {} for i in input_lines: bag, contents = re.search(r'^((?:[\w]+ ){2})bags contain ([\S\s]+)', i).groups() if contents.strip() == "no other bags.": bag = bag.strip() bags[bag] = Bag(bag, []) continue contents = contents.split(', ') contents = list(map(lambda x: re.search(r'(\d)+ ((?:[\w]+ ){2})', x).groups(), contents)) # cleaning up contents = list(map(lambda x: (int(x[0]), x[1].strip()), contents)) bag = bag.strip() bags[bag] = Bag(bag, contents) part1 = -1 for i in bags.values(): if i.hasBagType("shiny gold", bags): part1 += 1 print(f'part 1: {part1}') print(f'part 2: {bags["shiny gold"].children_count()}')
22.285714
90
0.623219
69517bd1fdcbc759ae3114b27d1f3038e73dc9c5
3,209
py
Python
src/drive.py
Matej-Chmel/pydrive-chat
551504335bcebbeed239f1961b7bffa3f45d220d
[ "Apache-2.0", "CC0-1.0" ]
null
null
null
src/drive.py
Matej-Chmel/pydrive-chat
551504335bcebbeed239f1961b7bffa3f45d220d
[ "Apache-2.0", "CC0-1.0" ]
null
null
null
src/drive.py
Matej-Chmel/pydrive-chat
551504335bcebbeed239f1961b7bffa3f45d220d
[ "Apache-2.0", "CC0-1.0" ]
null
null
null
from datetime import datetime, timedelta from io import BytesIO from pathlib import Path from time import altzone, daylight, localtime, timezone from pydrive.auth import GoogleAuth, AuthenticationRejected from pydrive.drive import GoogleDrive as Drive, GoogleDriveFile as File from requests import patch from .auth import gauth from ._this import ENDL, res_ CHAT_LOG: File = None FILE_TYPE = 'application/vnd.google-apps.file' FOLDER_TYPE = 'application/vnd.google-apps.folder' LAST_READ: datetime = None UTC_OFFSET_SECS = -(altzone if daylight and localtime().tm_isdst > 0 else timezone) drive: Drive = None def setup_gauth(): path = res_('client_secrets.json') if not Path(path).is_file(): raise FileNotFoundError GoogleAuth.DEFAULT_SETTINGS['client_config_file'] = path def empty_contents_of_(file): patch( f"https://www.googleapis.com/upload/drive/v3/files/{file['id']}?uploadType=multipart", headers={'Authorization': f"Bearer {gauth.credentials.token_response['access_token']}"}, files={ 'data': ('metadata', '{}', 'application/json'), 'file': BytesIO() } ) def ensure_item(title: str, mime_type=None, parents=None, trashed=False): query = f"title='{title}'" if mime_type: query += f" and mimeType='{mime_type}'" if parents: query += f""" and { ' and '.join(f"'{item['id']}' in parents" for item in parents) }""" if type(parents) is list else f" and '{parents['id']}' in parents" if trashed is not None: query += f' and trashed={str(trashed).lower()}' try: return drive.ListFile({'q': query}).GetList()[0] except IndexError: metadata = {'title': title} if mime_type: metadata['mimeType'] = mime_type if parents: metadata['parents'] = [ {'id': item['id']} for item in parents ] if type(parents) is list else [{'id': parents['id']}] file = drive.CreateFile(metadata) file.Upload() return file def log_into_drive(): creds_path = res_('creds.json') if Path(creds_path).is_file(): gauth.LoadCredentialsFile(creds_path) else: try: gauth.LocalWebserverAuth() gauth.SaveCredentialsFile(creds_path) 
except: return None return Drive(gauth) def login_and_init(): global CHAT_LOG, drive drive = log_into_drive() if drive is None: return False app_data = ensure_item('AppData', FOLDER_TYPE) app_folder = ensure_item('pydrive-chat', FOLDER_TYPE, app_data) CHAT_LOG = ensure_item('chat_log.txt', parents=app_folder) return True def append_to_log(text): CHAT_LOG.SetContentString(f'{CHAT_LOG.GetContentString()}{text}{ENDL}') CHAT_LOG.Upload() def overwrite_log(text=None): if not text: empty_contents_of_(CHAT_LOG) CHAT_LOG.Upload() CHAT_LOG.SetContentString('') else: CHAT_LOG.SetContentString(text) CHAT_LOG.Upload() def read_log(): return CHAT_LOG.GetContentString() def read_if_modified(): global LAST_READ, LINES_READ def was_modified(): modified_at = when_modified() if LAST_READ < modified_at: LAST_READ = modified_at return True return False if LAST_READ is None or was_modified(): return CHAT_LOG.GetContentString() return None def when_modified(): return datetime.strptime(CHAT_LOG['modifiedDate'], '%Y-%m-%dT%H:%M:%S.%fZ') + timedelta(seconds=UTC_OFFSET_SECS)
26.966387
113
0.727641
69550175d4982933c72091480c87edac34bffafc
1,967
py
Python
tests/test_yaml_files.py
graeme-winter/data
6e359b169c35d1a6569fd316f7b7ab19fa5812b8
[ "BSD-3-Clause" ]
null
null
null
tests/test_yaml_files.py
graeme-winter/data
6e359b169c35d1a6569fd316f7b7ab19fa5812b8
[ "BSD-3-Clause" ]
null
null
null
tests/test_yaml_files.py
graeme-winter/data
6e359b169c35d1a6569fd316f7b7ab19fa5812b8
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import absolute_import, division, print_function import pkg_resources import pytest import string import yaml definition_yamls = { fn for fn in pkg_resources.resource_listdir("dials_data", "definitions") if fn.endswith(".yml") } hashinfo_yamls = { fn for fn in pkg_resources.resource_listdir("dials_data", "hashinfo") if fn.endswith(".yml") } def is_valid_name(filename): if not filename.endswith(".yml") or len(filename) <= 4: return False allowed_characters = frozenset(string.ascii_letters + string.digits + "_") return all(c in allowed_characters for c in filename[:-4]) @pytest.mark.parametrize("yaml_file", definition_yamls) def test_yaml_file_is_valid_definition(yaml_file): assert is_valid_name(yaml_file) definition = yaml.safe_load( pkg_resources.resource_stream("dials_data", "definitions/" + yaml_file).read() ) fields = set(definition) required = {"name", "data", "description"} optional = {"license", "url", "author"} assert fields >= required, "Required fields missing: " + str( sorted(required - fields) ) assert fields <= (required | optional), "Unknown fields present: " + str( sorted(fields - required - optional) ) @pytest.mark.parametrize("yaml_file", hashinfo_yamls) def test_yaml_file_is_valid_hashinfo(yaml_file): assert is_valid_name(yaml_file) assert ( yaml_file in definition_yamls ), "hashinfo file present without corresponding definition file" hashinfo = yaml.safe_load( pkg_resources.resource_stream("dials_data", "hashinfo/" + yaml_file).read() ) fields = set(hashinfo) required = {"definition", "formatversion", "verify"} assert fields >= required, "Required fields missing: " + str( sorted(required - fields) ) assert fields <= required, "Unknown fields present: " + str( sorted(fields - required) )
31.222222
86
0.68785