hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3912705dbc54803f8d227de8fae5fd64aab3961f | 8,414 | py | Python | models/vgg.py | enjoy-the-science/brain-texts | 2f90cff6b7efd610791b278579c62ba802eb0f02 | [
"MIT"
] | null | null | null | models/vgg.py | enjoy-the-science/brain-texts | 2f90cff6b7efd610791b278579c62ba802eb0f02 | [
"MIT"
] | null | null | null | models/vgg.py | enjoy-the-science/brain-texts | 2f90cff6b7efd610791b278579c62ba802eb0f02 | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torchvision
class VGG(nn.Module):
    """VGG19-style 2D convolutional network with batch normalisation and
    LeakyReLU activations, for single-channel input.

    Conv counts per stage are 2-2-4-4-4 with a 2x2/stride-2 max-pool
    after each stage, followed by a 7x7 adaptive average pool and a
    three-layer MLP head projecting to ``combine_dim`` features.
    """

    def __init__(self, combine_dim, from_pretrained=False):
        super().__init__()

        # Stage plan: integers are conv output channels, 'M' marks a
        # max-pool.  Building the stack in a loop yields exactly the
        # same module order (hence the same state_dict keys and the
        # same RNG-driven initialisation) as an explicit Sequential.
        plan = [64, 64, 'M', 128, 128, 'M',
                256, 256, 256, 256, 'M',
                512, 512, 512, 512, 'M',
                512, 512, 512, 512, 'M']

        layers = []
        in_channels = 1
        for step in plan:
            if step == 'M':
                layers.append(nn.MaxPool2d(kernel_size=2, stride=2,
                                           padding=0, dilation=1,
                                           ceil_mode=False))
            else:
                layers.append(nn.Conv2d(in_channels, step, kernel_size=3,
                                        stride=1, padding=1))
                layers.append(nn.BatchNorm2d(step, eps=1e-05, momentum=0.1,
                                             affine=True,
                                             track_running_stats=True))
                layers.append(nn.LeakyReLU(inplace=True))
                in_channels = step
        self.features = nn.Sequential(*layers)

        self.avgpool = nn.AdaptiveAvgPool2d(output_size=7)

        # 512 channels * 7 * 7 = 25088 inputs to the classifier head.
        self.classifier = nn.Sequential(
            nn.Linear(in_features=25088, out_features=4096, bias=True),
            nn.LeakyReLU(inplace=True),
            nn.Dropout(p=0.5),
            nn.Linear(in_features=4096, out_features=4096, bias=True),
            nn.LeakyReLU(inplace=True),
            nn.Dropout(p=0.5),
            nn.Linear(in_features=4096, out_features=combine_dim, bias=True)
        )

    def forward(self, x):
        x = self.features(x)
        x = self.avgpool(x)
        x = torch.flatten(x, 1)  # same as x.view(x.size(0), -1) here
        return self.classifier(x)
class VGG11(nn.Module):
    """VGG11-style 3D convolutional network with batch normalisation and
    LeakyReLU activations, for single-channel volumetric input.

    Conv counts per stage are 1-1-2-2-2 with a 2x2x2/stride-2 max-pool
    after each stage, followed by a 3x3x3 adaptive average pool and a
    single linear layer projecting to ``combine_dim`` features.
    """

    def __init__(self, combine_dim, from_pretrained=False):
        super().__init__()

        # Stage plan: integers are conv output channels, 'M' marks a
        # max-pool.  Loop construction reproduces the original module
        # order exactly (same state_dict keys, same init order).
        plan = [64, 'M', 128, 'M', 256, 256, 'M',
                512, 512, 'M', 512, 512, 'M']

        layers = []
        in_channels = 1
        for step in plan:
            if step == 'M':
                layers.append(nn.MaxPool3d(kernel_size=2, stride=2,
                                           padding=0, dilation=1,
                                           ceil_mode=False))
            else:
                layers.append(nn.Conv3d(in_channels, step, kernel_size=3,
                                        stride=1, padding=1))
                layers.append(nn.BatchNorm3d(step, eps=1e-05, momentum=0.1,
                                             affine=True,
                                             track_running_stats=True))
                layers.append(nn.LeakyReLU(inplace=True))
                in_channels = step
        self.features = nn.Sequential(*layers)

        self.avgpool = nn.AdaptiveAvgPool3d(output_size=3)

        # 512 channels * 3 * 3 * 3 = 13824 inputs to the head.
        self.classifier = nn.Sequential(
            nn.Linear(in_features=13824, out_features=combine_dim, bias=True)
        )

    def forward(self, x):
        x = self.features(x)
        x = self.avgpool(x)
        x = torch.flatten(x, 1)  # same as x.view(x.size(0), -1) here
        return self.classifier(x)
| 48.356322 | 78 | 0.555146 | 1,076 | 8,414 | 4.22119 | 0.067844 | 0.073976 | 0.09247 | 0.135623 | 0.966094 | 0.966094 | 0.961471 | 0.961471 | 0.942096 | 0.937252 | 0 | 0.095745 | 0.318517 | 8,414 | 173 | 79 | 48.635838 | 0.696373 | 0.02579 | 0 | 0.806452 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.025806 | false | 0 | 0.019355 | 0 | 0.070968 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
39298784d3e8723ef9aa93d4bba67bb01a9e8140 | 187 | py | Python | exercicios/ex002.py | CinatitBR/exercicios-phyton | 16d9c14a83c9dbd6f7bda5477d665848bcd91184 | [
"MIT"
] | null | null | null | exercicios/ex002.py | CinatitBR/exercicios-phyton | 16d9c14a83c9dbd6f7bda5477d665848bcd91184 | [
"MIT"
] | null | null | null | exercicios/ex002.py | CinatitBR/exercicios-phyton | 16d9c14a83c9dbd6f7bda5477d665848bcd91184 | [
"MIT"
] | null | null | null | nome = str(input('Digite seu nome: ')).strip()
print('\033[1;31m-=-\033[m' * 10)
print(f'\033[1;30mSeja muito bem-vindo\033[m, \033[1;35m{nome}\033[m!')
print('\033[1;31m-=-\033[m' * 10) | 37.4 | 71 | 0.614973 | 37 | 187 | 3.108108 | 0.486486 | 0.13913 | 0.156522 | 0.208696 | 0.313043 | 0.313043 | 0.313043 | 0 | 0 | 0 | 0 | 0.233918 | 0.085562 | 187 | 5 | 72 | 37.4 | 0.438596 | 0 | 0 | 0.5 | 0 | 0.25 | 0.617021 | 0.12234 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.75 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 7 |
1a32c166af9e57e7fe68c3c39230344b9dfa131e | 120 | py | Python | jupyterlabpymolpysnips/Settings/listSettings2.py | MooersLab/pymolpysnips | 50a89c85adf8006d85c1d6cd3f8aad7e440a0b92 | [
"MIT"
] | null | null | null | jupyterlabpymolpysnips/Settings/listSettings2.py | MooersLab/pymolpysnips | 50a89c85adf8006d85c1d6cd3f8aad7e440a0b92 | [
"MIT"
] | null | null | null | jupyterlabpymolpysnips/Settings/listSettings2.py | MooersLab/pymolpysnips | 50a89c85adf8006d85c1d6cd3f8aad7e440a0b92 | [
"MIT"
] | null | null | null | cmd.do('list = setting.get_name_list();[print("%s => %s" % (name, setting.get_setting_text(name))) for name in list];')
| 60 | 119 | 0.666667 | 20 | 120 | 3.8 | 0.55 | 0.263158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.1 | 120 | 1 | 120 | 120 | 0.703704 | 0 | 0 | 0 | 0 | 1 | 0.908333 | 0.55 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
1a614e954dccdba37e0fba1e5dc85ea9bea3871a | 4,172 | py | Python | src/systemParameterIdentification/preSourcedDataID.py | FlaminMad/researchProject | 309577602c0974c402a3f7c9cf1ba3e443e963b5 | [
"MIT"
] | null | null | null | src/systemParameterIdentification/preSourcedDataID.py | FlaminMad/researchProject | 309577602c0974c402a3f7c9cf1ba3e443e963b5 | [
"MIT"
] | 2 | 2018-02-12T18:34:01.000Z | 2018-02-12T18:34:33.000Z | src/systemParameterIdentification/preSourcedDataID.py | FlaminMad/researchProject | 309577602c0974c402a3f7c9cf1ba3e443e963b5 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Author: Alexander David Leech
Date: 16/12/2015
Rev: 2
Lang: Python 2.7
Deps: Numpy, RLS (Internal, see MVC)
Desc: System Parameter identification using RLS based on already sourced data
Use comma delimted file format in excel and open in a text editor for
easy copy and paste data access.
"""
from RLS import RLS
import numpy as np
class preSourcedDataID:
    """Identify system parameters with recursive least squares (RLS)
    over a hard-coded, pre-sourced data set.

    NOTE(review): this class is Python 2 code (``print`` statements);
    it will not run under Python 3 without conversion.
    """

    def __init__(self):
        # Initialise the Recursive Least Squares solver (project-local
        # RLS module; see MVC per the module header).
        self.rls = RLS()

    def run(self):
        # Load the hard-coded data set into self.X / self.Z, then feed
        # each column pair through the RLS solver.
        self.testData()
        self.rls.setup(self.X,np.matrix.transpose(self.Z))
        # NOTE(review): while(True) is vestigial — the unconditional
        # break at the bottom means the loop body runs exactly once.
        while(True):
            # Start at column 3 so the solver has a few samples of
            # history; stop one short of the final (padded) column.
            for i in range(3, self.Z.size-1):
                x = self.X[:,i]
                y = self.Z[:,i]
                self.rls.solve(x,y)
                print self.rls.sysID
            print "Done, SysID is:"
            print self.rls.sysID
            break

    def testData(self):
        # Alternative (commented-out) data sets that were previously
        # kept inline here have been removed; retrieve them from
        # version control history if needed.
        # U: input signal, Y: measured output, Z: output shifted one
        # sample ahead (last entry zero-padded).
        U = np.array([[47.3,47.4,48.3,48.3,48.1,48.3,48.2,48.5,48.3,48.3,48.5,49.2,50,50.3,51.2,51.3,51.5,52.3,53.8,54.5,55.2,55.5,57.1,57.2,57.7,57.8,57.8,58.3,58.7,58.8,60.3,60.5,60.6,60.5,60.8]])
        Y = np.array([[40,40,40,40,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5,42.5]])
        self.Z = np.array([[47.4,48.3,48.3,48.1,48.3,48.2,48.5,48.3,48.3,48.5,49.2,50,50.3,51.2,51.3,51.5,52.3,53.8,54.5,55.2,55.5,57.1,57.2,57.7,57.8,57.8,58.3,58.7,58.8,60.3,60.5,60.6,60.5,60.8,0]])
        # Regressor matrix: stacked measured output and input rows.
        self.X = np.concatenate((Y,U), axis=0)
def main():
    """Entry point: run RLS identification over the bundled data set."""
    identifier = preSourcedDataID()
    identifier.run()
if __name__ == '__main__':main() | 71.931034 | 646 | 0.63255 | 1,006 | 4,172 | 2.611332 | 0.247515 | 0.317472 | 0.472783 | 0.625809 | 0.723258 | 0.723258 | 0.723258 | 0.723258 | 0.723258 | 0.723258 | 0 | 0.52938 | 0.122963 | 4,172 | 58 | 647 | 71.931034 | 0.188576 | 0.582694 | 0 | 0.076923 | 0 | 0 | 0.016312 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.076923 | null | null | 0.115385 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
1a7404d2c0864a3e097cfad9ae824be1533484b1 | 1,486 | py | Python | ulltma/ulltmaforms.py | aselisdev/aselis.ulltma.app | 8778bcf61b5e4d21615373ade6a9eb1a6dd0a9e9 | [
"MIT"
] | null | null | null | ulltma/ulltmaforms.py | aselisdev/aselis.ulltma.app | 8778bcf61b5e4d21615373ade6a9eb1a6dd0a9e9 | [
"MIT"
] | null | null | null | ulltma/ulltmaforms.py | aselisdev/aselis.ulltma.app | 8778bcf61b5e4d21615373ade6a9eb1a6dd0a9e9 | [
"MIT"
] | null | null | null | from django import forms
from django.contrib.auth.forms import UserCreationForm
class SignInForm(forms.Form):
    """Registration form: first/last name, e-mail with confirmation,
    and password with confirmation.

    All widgets carry the 'formbox' CSS class for styling.
    NOTE(review): e-mail/password confirmation matching is presumably
    enforced in the view or a clean() method elsewhere — confirm.
    """

    firstname = forms.CharField(label = '', max_length=100, widget=forms.TextInput(attrs = {'class' : 'formbox'}))
    lastname = forms.CharField(label = '', max_length=100, widget=forms.TextInput(attrs = {'class' : 'formbox'}))
    email = forms.CharField(label = '', max_length=100, widget=forms.TextInput(attrs = {'class' : 'formbox'}))
    emailconfirm = forms.CharField(label = '', max_length=100, widget=forms.TextInput(attrs = {'class' : 'formbox'}))
    pword = forms.CharField(label = '', max_length=100, widget=forms.PasswordInput(attrs = {'class' : 'formbox'}))
    pconfirm = forms.CharField(label = '', max_length=100, widget=forms.PasswordInput(attrs = {'class' : 'formbox'}))
class ChangePWordForm(forms.Form):
    """Password-change form for an authenticated user: current password
    plus the new password entered twice.  Widgets share the 'formbox'
    CSS class."""

    pcurrent = forms.CharField(label = '', max_length=100, widget=forms.PasswordInput(attrs = {'class' : 'formbox'}))
    pword = forms.CharField(label = '', max_length=100, widget=forms.PasswordInput(attrs = {'class' : 'formbox'}))
    pconfirm = forms.CharField(label = '', max_length=100, widget=forms.PasswordInput(attrs = {'class' : 'formbox'}))
class ChangePWordLoginForm(forms.Form):
    """Password-reset form for a user identified by e-mail: e-mail
    address plus the new password entered twice."""

    # NOTE(review): unlike every other field in this module, this one
    # has no TextInput widget with the 'formbox' class, and its
    # max_length (150) differs from the SignInForm email (100) —
    # confirm both are intentional.
    email = forms.CharField(label = '', max_length=150)
    pword = forms.CharField(label = '', max_length=100, widget=forms.PasswordInput(attrs = {'class' : 'formbox'}))
    pconfirm = forms.CharField(label = '', max_length=100, widget=forms.PasswordInput(attrs = {'class' : 'formbox'}))
| 61.916667 | 114 | 0.713997 | 173 | 1,486 | 6.063584 | 0.17341 | 0.160153 | 0.21735 | 0.251668 | 0.822688 | 0.822688 | 0.786463 | 0.786463 | 0.786463 | 0.786463 | 0 | 0.027129 | 0.106999 | 1,486 | 23 | 115 | 64.608696 | 0.763376 | 0 | 0 | 0.352941 | 0 | 0 | 0.088889 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.411765 | 0.117647 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 10 |
1ac27c95339ca83c3e06689df63881a82fcde0fa | 1,754 | py | Python | src/tools/fasta.py | cowboysmall/rosalind | 021e4392a8fc946b97bbf86bbb8227b28bb5e462 | [
"MIT"
] | null | null | null | src/tools/fasta.py | cowboysmall/rosalind | 021e4392a8fc946b97bbf86bbb8227b28bb5e462 | [
"MIT"
] | null | null | null | src/tools/fasta.py | cowboysmall/rosalind | 021e4392a8fc946b97bbf86bbb8227b28bb5e462 | [
"MIT"
] | null | null | null | from collections import defaultdict
def read_ordered(file_path):
    """Read a FASTA file and return its sequences in file order.

    :param file_path: path to a FASTA-formatted file
    :return: list of sequence strings, one per '>' header line, in the
             order the headers appear (duplicate labels repeat their
             merged sequence, matching the historical behaviour)
    """
    # Accumulate per-label line chunks and join once at the end instead
    # of repeated string += (which is quadratic in sequence length).
    chunks = defaultdict(list)
    labels = []

    with open(file_path) as file:
        label = None

        for line in file:
            line = line.strip()

            if line.startswith('>'):
                label = line[1:]
                labels.append(label)
            else:
                # Lines before the first header fall under label=None
                # and are silently excluded from the result.
                chunks[label].append(line)

    return [''.join(chunks[label]) for label in labels]
def read_ordered_from(obj):
    """Parse FASTA records from an iterable of lines.

    :param obj: iterable yielding FASTA-formatted lines
    :return: list of sequence strings, one per '>' header, in the order
             the headers appear
    """
    chunks = defaultdict(list)
    order = []
    current = None

    for raw in obj:
        stripped = raw.strip()
        if stripped.startswith('>'):
            current = stripped[1:]
            order.append(current)
        else:
            chunks[current].append(stripped)

    return [''.join(chunks[name]) for name in order]
def read(file_path):
    """Read a FASTA file into a mapping of label to sequence.

    :param file_path: path to a FASTA-formatted file
    :return: ``defaultdict(str)`` mapping each '>' header label to its
             concatenated sequence (missing keys look up as '', as in
             the historical implementation)
    """
    # Accumulate per-label chunks and join once, avoiding quadratic
    # string += concatenation for long sequences.
    chunks = defaultdict(list)

    with open(file_path) as file:
        label = None

        for line in file:
            line = line.strip()

            if line.startswith('>'):
                label = line[1:]
            else:
                # Lines before the first header are stored under None,
                # matching the original behaviour.
                chunks[label].append(line)

    strings = defaultdict(str)
    for label, parts in chunks.items():
        strings[label] = ''.join(parts)

    return strings
def read_from(obj):
    """Parse FASTA records from an iterable of lines into a mapping.

    :param obj: iterable yielding FASTA-formatted lines
    :return: ``defaultdict(str)`` mapping each header label to its
             concatenated sequence
    """
    strings = defaultdict(str)
    current = None

    for raw in obj:
        stripped = raw.strip()
        if stripped.startswith('>'):
            current = stripped[1:]
        else:
            strings[current] += stripped

    return strings
def read_one(file_path):
    """Read a FASTA file and return all sequence data as one string.

    Every non-header line in the file is concatenated, so a multi-record
    file yields all records merged together.

    :param file_path: path to a FASTA-formatted file
    :return: concatenation of every non-header line
    """
    # Collect lines and join once rather than quadratic string +=.
    parts = []
    with open(file_path) as file:
        for line in file:
            line = line.strip()
            if not line.startswith('>'):
                parts.append(line)
    return ''.join(parts)
def read_one_from(obj):
    """Concatenate every non-header line from an iterable of FASTA lines.

    :param obj: iterable yielding FASTA-formatted lines
    :return: all non-header line content joined into one string
    """
    return ''.join(
        stripped
        for stripped in (line.strip() for line in obj)
        if not stripped.startswith('>')
    )
| 18.860215 | 47 | 0.518244 | 196 | 1,754 | 4.571429 | 0.158163 | 0.080357 | 0.060268 | 0.100446 | 0.882813 | 0.75558 | 0.731027 | 0.731027 | 0.703125 | 0.703125 | 0 | 0.00367 | 0.378563 | 1,754 | 92 | 48 | 19.065217 | 0.818349 | 0 | 0 | 0.887097 | 0 | 0 | 0.003423 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.096774 | false | 0 | 0.016129 | 0 | 0.209677 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
20352d00e269db063a00865edb27f318f1b3d375 | 67,910 | py | Python | BacklogPy/api/projects.py | vortexkd/BacklogPy | 6d66932fcc90afda1bb3201d52c0a481bacab822 | [
"MIT"
] | null | null | null | BacklogPy/api/projects.py | vortexkd/BacklogPy | 6d66932fcc90afda1bb3201d52c0a481bacab822 | [
"MIT"
] | null | null | null | BacklogPy/api/projects.py | vortexkd/BacklogPy | 6d66932fcc90afda1bb3201d52c0a481bacab822 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
This file was created by Backlog APIGenerator
"""
from __future__ import unicode_literals, absolute_import
from deprecated import deprecated
from BacklogPy.base import BacklogBase
class Projects(BacklogBase):
    def __init__(self, space_id, api_key):
        """
        :param str space_id: Backlog space ID (subdomain)
        :param str api_key: API key used to authenticate requests
        """
        # BacklogBase stores the credentials and provides _request().
        super(Projects, self).__init__(space_id, api_key)
def add_category_raw(self, project_id_or_key, form_parameters):
"""
Adds new Category to the project.
:param str project_id_or_key: Project ID or Project Key
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/categories'.format(project_id_or_key),
method='POST',
form_parameters=form_parameters)
def add_category(self, project_id_or_key, name):
"""
Adds new Category to the project.
:param str project_id_or_key: Project ID or Project Key
:param str name: Category name
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name
}
return self._request(
'/projects/{}/categories'.format(project_id_or_key),
method='POST',
form_parameters=form_parameters)
def add_custom_field_raw(self, project_id_or_key, form_parameters):
"""
Adds new Custom Field to the project.
:param str project_id_or_key: Project ID or Project Key
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/customFields'.format(project_id_or_key),
method='POST',
form_parameters=form_parameters)
def add_custom_field(
self,
project_id_or_key,
type_id,
name,
applicable_issue_types=None,
description=None,
required=None):
"""
Adds new Custom Field to the project.
:param str project_id_or_key: Project ID or Project Key
:param int type_id: Type ID of Custom field
:param str name: Name
:param list[int] or int applicable_issue_types: Type ID to enable Custom fields
:param str description: Description
:param bool required: True to make the Custom field required
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'typeId': type_id,
'name': name,
'applicableIssueTypes[]': applicable_issue_types,
'description': description,
'required': self._bool_to_str(required)
}
return self._request(
'/projects/{}/customFields'.format(project_id_or_key),
method='POST',
form_parameters=form_parameters)
def add_issue_type_raw(self, project_id_or_key, form_parameters):
"""
Adds new Issue Type to the project.
:param str project_id_or_key: Project ID or Project Key
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/issueTypes'.format(project_id_or_key),
method='POST',
form_parameters=form_parameters)
def add_issue_type(
self,
project_id_or_key,
name,
color,
template_summary=None,
template_description=None):
"""
Adds new Issue Type to the project.
:param str project_id_or_key: Project ID or Project Key
:param str name: Issue Type name
:param str color: Background color
:param str template_summary: Subject
:param str template_description: Description
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name,
'color': color,
'templateSummary': template_summary,
'templateDescription': template_description
}
return self._request(
'/projects/{}/issueTypes'.format(project_id_or_key),
method='POST',
form_parameters=form_parameters)
def add_list_item_for_list_type_custom_field_raw(
self, project_id_or_key, _id, form_parameters):
"""
Adds new list item for list type custom field. Only administrator can call this API if the option “Add items in adding or editing issues” is disabled in settings. Calling API fails if specified custom field’s type is not a list.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Custom field ID
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/customFields/{}/items'.format(
project_id_or_key,
_id),
method='POST',
form_parameters=form_parameters)
def add_list_item_for_list_type_custom_field(
self, project_id_or_key, _id, name=None):
"""
Adds new list item for list type custom field. Only administrator can call this API if the option “Add items in adding or editing issues” is disabled in settings. Calling API fails if specified custom field’s type is not a list.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Custom field ID
:param str name: List item name
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name
}
return self._request(
'/projects/{}/customFields/{}/items'.format(
project_id_or_key,
_id),
method='POST',
form_parameters=form_parameters)
def add_project_raw(self, form_parameters):
"""
Adds new project.
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects',
method='POST',
form_parameters=form_parameters)
def add_project(
self,
name,
key,
chart_enabled,
subtasking_enabled,
text_formatting_rule,
project_leader_can_edit_project_leader=None):
"""
Adds new project.
:param str name: Project Name
:param str key: Project Key (Uppercase letters (A-Z), numbers (0-9) and underscore (_) can be used.)
:param bool chart_enabled: Enable chart
:param bool project_leader_can_edit_project_leader: Allow project administrators to manage each other
:param bool subtasking_enabled: Enable subtasking
:param str text_formatting_rule: Formatting rules “backlog” or “markdown”
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name,
'key': key,
'chartEnabled': self._bool_to_str(chart_enabled),
'projectLeaderCanEditProjectLeader': self._bool_to_str(project_leader_can_edit_project_leader),
'subtaskingEnabled': self._bool_to_str(subtasking_enabled),
'textFormattingRule': text_formatting_rule}
return self._request(
'/projects',
method='POST',
form_parameters=form_parameters)
def add_project_administrator_raw(
self, project_id_or_key, form_parameters):
"""
Adds “Project Administrator” role to user
:param str project_id_or_key: Project ID or Project Key
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/administrators'.format(project_id_or_key),
method='POST',
form_parameters=form_parameters)
def add_project_administrator(self, project_id_or_key, user_id=None):
"""
Adds “Project Administrator” role to user
:param str project_id_or_key: Project ID or Project Key
:param int user_id: User ID
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'userId': user_id
}
return self._request(
'/projects/{}/administrators'.format(project_id_or_key),
method='POST',
form_parameters=form_parameters)
@deprecated(reason="This API has been deprecated and is no longer recommended for use. Please replace it with Add Project Team.https://developer.nulab.com/docs/backlog/api/2/add-project-team/")
def add_project_group_raw(self, project_id_or_key, form_parameters):
"""
Add group to project. ※ Deprecated API. https://developer.nulab.com/docs/backlog/api/2/add-project-team/
:param str project_id_or_key: Project ID or Project Key
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/groups'.format(project_id_or_key),
method='POST',
form_parameters=form_parameters)
@deprecated(reason="This API has been deprecated and is no longer recommended for use. Please replace it with Add Project Team.https://developer.nulab.com/docs/backlog/api/2/add-project-team/")
def add_project_group(self, project_id_or_key, group_id=None):
    """
    Add a group to the project.

    Deprecated: use Add Project Team instead.
    https://developer.nulab.com/docs/backlog/api/2/add-project-team/

    :param str project_id_or_key: Project ID or Project Key
    :param int group_id: Group ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/groups'.format(project_id_or_key)
    return self._request(path, method='POST',
                         form_parameters={'groupId': group_id})
def add_project_team_raw(self, project_id_or_key, form_parameters):
    """
    Add a team to the project (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/teams'.format(project_id_or_key)
    return self._request(path, method='POST',
                         form_parameters=form_parameters)
def add_project_team(self, project_id_or_key, team_id=None):
    """
    Add a team to the project.

    :param str project_id_or_key: Project ID or Project Key
    :param int team_id: Team ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/teams'.format(project_id_or_key)
    return self._request(path, method='POST',
                         form_parameters={'teamId': team_id})
def add_project_user_raw(self, project_id_or_key, form_parameters):
    """
    Add a user to the project's member list (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/users'.format(project_id_or_key)
    return self._request(path, method='POST',
                         form_parameters=form_parameters)
def add_project_user(self, project_id_or_key, user_id=None):
    """
    Add a user to the project's member list.

    :param str project_id_or_key: Project ID or Project Key
    :param int user_id: User ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/users'.format(project_id_or_key)
    return self._request(path, method='POST',
                         form_parameters={'userId': user_id})
def add_pull_request_raw(
        self,
        project_id_or_key,
        repo_id_or_name,
        form_parameters):
    """
    Create a pull request (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests'.format(
        project_id_or_key, repo_id_or_name)
    return self._request(path, method='POST',
                         form_parameters=form_parameters)
def add_pull_request(
        self,
        project_id_or_key,
        repo_id_or_name,
        summary,
        description,
        base,
        branch,
        issue_id=None,
        assignee_id=None,
        notified_user_id=None,
        attachment_id=None):
    """
    Create a pull request.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param str summary: Summary of pull request
    :param str description: Description of pull request
    :param str base: Branch name of merge base
    :param str branch: Name of merging branch
    :param int issue_id: Related issue's ID
    :param int assignee_id: Assignee's ID of pull request
    :param list[int] or int notified_user_id: User ID to send notification
        when pull request is added
    :param list[int] or int attachment_id: ID returned by
        "Post Attachment File" API
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests'.format(
        project_id_or_key, repo_id_or_name)
    params = {
        'summary': summary,
        'description': description,
        'base': base,
        'branch': branch,
        'issueId': issue_id,
        'assigneeId': assignee_id,
        'notifiedUserId[]': notified_user_id,
        'attachmentId[]': attachment_id,
    }
    return self._request(path, method='POST', form_parameters=params)
def add_pull_request_comment_raw(
        self,
        project_id_or_key,
        repo_id_or_name,
        number,
        form_parameters):
    """
    Add a comment to a pull request (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param int number: Pull request number
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests/{}/comments'.format(
        project_id_or_key, repo_id_or_name, number)
    return self._request(path, method='POST',
                         form_parameters=form_parameters)
def add_pull_request_comment(
        self,
        project_id_or_key,
        repo_id_or_name,
        number,
        content,
        notified_user_id=None):
    """
    Add a comment to a pull request.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param int number: Pull request number
    :param str content: Comment
    :param list[int] or int notified_user_id: User ID to send notification
        when comment is added
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests/{}/comments'.format(
        project_id_or_key, repo_id_or_name, number)
    params = {
        'content': content,
        'notifiedUserId[]': notified_user_id,
    }
    return self._request(path, method='POST', form_parameters=params)
def add_status_raw(self, project_id_or_key, form_parameters):
    """
    Add a new Status to the project (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/statuses'.format(project_id_or_key)
    return self._request(path, method='POST',
                         form_parameters=form_parameters)
def add_status(self, project_id_or_key, name, color):
    """
    Add a new Status to the project.

    :param str project_id_or_key: Project ID or Project Key
    :param str name: Status name
    :param str color: Background color (one of the colors the API accepts)
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/statuses'.format(project_id_or_key)
    return self._request(path, method='POST',
                         form_parameters={'name': name, 'color': color})
def add_version_milestone_raw(self, project_id_or_key, form_parameters):
    """
    Add a new Version/Milestone to the project (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/versions'.format(project_id_or_key)
    return self._request(path, method='POST',
                         form_parameters=form_parameters)
def add_version_milestone(
        self,
        project_id_or_key,
        name,
        description=None,
        start_date=None,
        release_due_date=None):
    """
    Add a new Version/Milestone to the project.

    :param str project_id_or_key: Project ID or Project Key
    :param str name: Version name
    :param str description: Version description
    :param str start_date: Start Date (yyyy-MM-dd)
    :param str release_due_date: End Date (yyyy-MM-dd)
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/versions'.format(project_id_or_key)
    params = {
        'name': name,
        'description': description,
        'startDate': start_date,
        'releaseDueDate': release_due_date,
    }
    return self._request(path, method='POST', form_parameters=params)
def add_webhook_raw(self, project_id_or_key, form_parameters):
    """
    Register a new webhook (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/webhooks'.format(project_id_or_key)
    return self._request(path, method='POST',
                         form_parameters=form_parameters)
def add_webhook(
        self,
        project_id_or_key,
        name=None,
        description=None,
        hook_url=None,
        all_event=None,
        activity_type_ids=None):
    """
    Register a new webhook.

    :param str project_id_or_key: Project ID or Project Key
    :param str name: Name
    :param str description: Description
    :param str hook_url: hook URL
    :param bool all_event: True to be notified of all events
    :param list[int] or int activity_type_ids: Event ID to be notified
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/webhooks'.format(project_id_or_key)
    params = {
        'name': name,
        'description': description,
        'hookUrl': hook_url,
        'allEvent': self._bool_to_str(all_event),
        'activityTypeIds[]': activity_type_ids,
    }
    return self._request(path, method='POST', form_parameters=params)
def delete_category(self, project_id_or_key, _id):
    """
    Delete a Category.

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Category ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/categories/{}'.format(project_id_or_key, _id)
    return self._request(path, method='DELETE')
def delete_custom_field(self, project_id_or_key, _id):
    """
    Delete a Custom Field.

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Custom field ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/customFields/{}'.format(project_id_or_key, _id)
    return self._request(path, method='DELETE')
def delete_issue_type_raw(self, project_id_or_key, _id, form_parameters):
    """
    Delete an Issue Type (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Issue Type ID
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/issueTypes/{}'.format(project_id_or_key, _id)
    return self._request(path, method='DELETE',
                         form_parameters=form_parameters)
def delete_issue_type(
        self,
        project_id_or_key,
        _id,
        substitute_issue_type_id):
    """
    Delete an Issue Type, reassigning linked issues to a substitute type.

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Issue Type ID
    :param int substitute_issue_type_id: type ID to change linked issue
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/issueTypes/{}'.format(project_id_or_key, _id)
    params = {'substituteIssueTypeId': substitute_issue_type_id}
    return self._request(path, method='DELETE', form_parameters=params)
def delete_list_item_for_list_type_custom_field(
        self, project_id_or_key, _id, item_id):
    """
    Delete a list item of a list-type custom field.

    The API call fails if the specified custom field's type is not a list.

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Custom field ID
    :param int item_id: List item ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/customFields/{}/items/{}'.format(
        project_id_or_key, _id, item_id)
    return self._request(path, method='DELETE')
def delete_project(self, project_id_or_key):
    """
    Delete a project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}'.format(project_id_or_key)
    return self._request(path, method='DELETE')
def delete_project_administrator_raw(
        self, project_id_or_key, form_parameters):
    """
    Remove the Project Administrator role from a user (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/administrators'.format(project_id_or_key)
    return self._request(path, method='DELETE',
                         form_parameters=form_parameters)
def delete_project_administrator(self, project_id_or_key, user_id=None):
    """
    Remove the Project Administrator role from a user.

    :param str project_id_or_key: Project ID or Project Key
    :param int user_id: User ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/administrators'.format(project_id_or_key)
    return self._request(path, method='DELETE',
                         form_parameters={'userId': user_id})
@deprecated(reason="This API has been deprecated and is no longer recommended for use. Please replace it with Delete Project Team.https://developer.nulab.com/docs/backlog/api/2/delete-project-team/")
def delete_project_group_raw(self, project_id_or_key, form_parameters):
    """
    Remove a group from the project (raw form parameters).

    Deprecated: use Delete Project Team instead.
    https://developer.nulab.com/docs/backlog/api/2/delete-project-team/

    :param str project_id_or_key: Project ID or Project Key
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/groups'.format(project_id_or_key)
    return self._request(path, method='DELETE',
                         form_parameters=form_parameters)
@deprecated(reason="This API has been deprecated and is no longer recommended for use. Please replace it with Delete Project Team.https://developer.nulab.com/docs/backlog/api/2/delete-project-team/")
def delete_project_group(self, project_id_or_key, group_id=None):
    """
    Remove a group from the project.

    Deprecated: use Delete Project Team instead.
    https://developer.nulab.com/docs/backlog/api/2/delete-project-team/

    :param str project_id_or_key: Project ID or Project Key
    :param int group_id: Group ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/groups'.format(project_id_or_key)
    return self._request(path, method='DELETE',
                         form_parameters={'groupId': group_id})
def delete_project_team_raw(self, project_id_or_key, form_parameters):
    """
    Remove a team from the project (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/teams'.format(project_id_or_key)
    return self._request(path, method='DELETE',
                         form_parameters=form_parameters)
def delete_project_team(self, project_id_or_key, team_id=None):
    """
    Remove a team from the project.

    :param str project_id_or_key: Project ID or Project Key
    :param int team_id: Team ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/teams'.format(project_id_or_key)
    return self._request(path, method='DELETE',
                         form_parameters={'teamId': team_id})
def delete_project_user_raw(self, project_id_or_key, form_parameters):
    """
    Remove a user from the project's member list (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/users'.format(project_id_or_key)
    return self._request(path, method='DELETE',
                         form_parameters=form_parameters)
def delete_project_user(self, project_id_or_key, user_id=None):
    """
    Remove a user from the project's member list.

    :param str project_id_or_key: Project ID or Project Key
    :param int user_id: User ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/users'.format(project_id_or_key)
    return self._request(path, method='DELETE',
                         form_parameters={'userId': user_id})
def delete_pull_request_attachments(
        self,
        project_id_or_key,
        repo_id_or_name,
        number,
        attachment_id):
    """
    Delete a file attached to a pull request.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param int number: Pull request number
    :param int attachment_id: Attached file's ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = ('/projects/{}/git/repositories/{}'
            '/pullRequests/{}/attachments/{}').format(
        project_id_or_key, repo_id_or_name, number, attachment_id)
    return self._request(path, method='DELETE')
def delete_status_raw(self, project_id_or_key, _id, form_parameters):
    """
    Delete a Status (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Status ID
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/statuses/{}'.format(project_id_or_key, _id)
    return self._request(path, method='DELETE',
                         form_parameters=form_parameters)
def delete_status(self, project_id_or_key, _id, substitute_status_id):
    """
    Delete a Status, moving linked issues to a substitute status.

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Status ID
    :param int substitute_status_id: Status ID to replace linked issues'
        statuses.
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/statuses/{}'.format(project_id_or_key, _id)
    params = {'substituteStatusId': substitute_status_id}
    return self._request(path, method='DELETE', form_parameters=params)
def delete_version(self, project_id_or_key, _id):
    """
    Delete a Version.

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Version ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/versions/{}'.format(project_id_or_key, _id)
    return self._request(path, method='DELETE')
def delete_webhook(self, project_id_or_key, webhook_id):
    """
    Delete a webhook.

    :param str project_id_or_key: Project ID or Project Key
    :param str webhook_id: Webhook ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/webhooks/{}'.format(project_id_or_key, webhook_id)
    return self._request(path, method='DELETE')
def download_pull_request_attachment(
        self,
        project_id_or_key,
        repo_id_or_name,
        number,
        attachment_id):
    """
    Download a file attached to a pull request.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param int number: Pull request number
    :param int attachment_id: Attached file's ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = ('/projects/{}/git/repositories/{}'
            '/pullRequests/{}/attachments/{}').format(
        project_id_or_key, repo_id_or_name, number, attachment_id)
    return self._request(path, method='GET')
def get_category_list(self, project_id_or_key):
    """
    Fetch the list of Categories in the project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/categories'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_custom_field_list(self, project_id_or_key):
    """
    Fetch the list of Custom Fields in the project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/customFields'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_file(self, project_id_or_key, _id):
    """
    Download a shared file.

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: File ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/files/{}'.format(project_id_or_key, _id)
    return self._request(path, method='GET')
def get_git_repository(self, project_id_or_key, repo_id_or_name):
    """
    Fetch a single Git repository.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}'.format(
        project_id_or_key, repo_id_or_name)
    return self._request(path, method='GET')
def get_issue_type_list(self, project_id_or_key):
    """
    Fetch the list of Issue Types in the project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/issueTypes'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_list_of_git_repositories(self, project_id_or_key):
    """
    Fetch the list of Git repositories in the project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_list_of_project_administrators(self, project_id_or_key):
    """
    Fetch the list of users holding the Project Administrator role.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/administrators'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_list_of_pull_request_attachment(
        self, project_id_or_key, repo_id_or_name, number):
    """
    Fetch the list of files attached to a pull request.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param int number: Pull request number
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests/{}/attachments'.format(
        project_id_or_key, repo_id_or_name, number)
    return self._request(path, method='GET')
def get_list_of_shared_files_raw(
        self,
        project_id_or_key,
        path,
        query_parameters):
    """
    Fetch the list of Shared Files (raw query parameters).

    :param str project_id_or_key: Project ID or Project key
    :param str path: Directory path
    :param dict query_parameters: query_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    endpoint = '/projects/{}/files/metadata/{}'.format(
        project_id_or_key, path)
    return self._request(endpoint, method='GET',
                         query_parameters=query_parameters)
def get_list_of_shared_files(
        self,
        project_id_or_key,
        path,
        order=None,
        offset=None,
        count=None):
    """
    Fetch the list of Shared Files.

    :param str project_id_or_key: Project ID or Project key
    :param str path: Directory path
    :param str order: "asc" or "desc" (default "desc")
    :param int offset: offset
    :param int count: number of records to retrieve (1-100, default 20)
    :return: requests Response object
    :rtype: requests.Response
    """
    endpoint = '/projects/{}/files/metadata/{}'.format(
        project_id_or_key, path)
    params = {
        'order': order,
        'offset': offset,
        'count': count,
    }
    return self._request(endpoint, method='GET', query_parameters=params)
def get_list_of_webhooks(self, project_id_or_key):
    """
    Fetch the list of webhooks registered on the project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/webhooks'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_number_of_pull_request_comments(
        self, project_id_or_key, repo_id_or_name, number):
    """
    Fetch the number of comments on a pull request.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param int number: Pull request number
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests/{}/comments/count'.format(
        project_id_or_key, repo_id_or_name, number)
    return self._request(path, method='GET')
def get_number_of_pull_requests(self, project_id_or_key, repo_id_or_name):
    """
    Fetch the number of pull requests in a repository.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests/count'.format(
        project_id_or_key, repo_id_or_name)
    return self._request(path, method='GET')
def get_project(self, project_id_or_key):
    """
    Fetch information about a project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_project_disk_usage(self, project_id_or_key):
    """
    Fetch disk-usage information for a project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/diskUsage'.format(project_id_or_key)
    return self._request(path, method='GET')
@deprecated(reason="This API has been deprecated and is no longer recommended for use. Please replace it with Get Project Team List.https://developer.nulab.com/docs/backlog/api/2/get-project-team-list/")
def get_project_group_list(self, project_id_or_key):
    """
    Fetch the list of project groups.

    Deprecated: use Get Project Team List instead.
    https://developer.nulab.com/docs/backlog/api/2/get-project-team-list/

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/groups'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_project_icon(self, project_id_or_key):
    """
    Download the project icon image.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/image'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_project_list_raw(self, query_parameters):
    """
    Fetch the list of projects (raw query parameters).

    :param dict query_parameters: query_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    return self._request('/projects', method='GET',
                         query_parameters=query_parameters)
def get_project_list(self, archived=None, all=None):
    """
    Fetch the list of projects.

    :param bool archived: When omitted, all projects are returned; 'false'
        returns unarchived projects only; 'true' returns archived projects only.
    :param bool all: Administrators only. 'true' returns every project;
        'false' (the default) returns only projects the user has joined.
    :return: requests Response object
    :rtype: requests.Response
    """
    # NOTE(review): parameter name `all` shadows the builtin, but renaming
    # it would break keyword callers, so it is kept as-is.
    params = {
        'archived': self._bool_to_str(archived),
        'all': self._bool_to_str(all),
    }
    return self._request('/projects', method='GET',
                         query_parameters=params)
def get_project_recent_updates_raw(
        self, project_id_or_key, query_parameters):
    """
    Fetch recent updates in the project (raw query parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict query_parameters: query_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/activities'.format(project_id_or_key)
    return self._request(path, method='GET',
                         query_parameters=query_parameters)
def get_project_recent_updates(
        self,
        project_id_or_key,
        activity_type_id=None,
        min_id=None,
        max_id=None,
        count=None,
        order=None):
    """
    Fetch recent updates in the project.

    :param str project_id_or_key: Project ID or Project Key
    :param list[int] or int activity_type_id: type (1-26)
    :param int min_id: minimum ID
    :param int max_id: maximum ID
    :param int count: number of records to retrieve (1-100, default 20)
    :param str order: "asc" or "desc" (default "desc")
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/activities'.format(project_id_or_key)
    params = {
        'activityTypeId[]': activity_type_id,
        'minId': min_id,
        'maxId': max_id,
        'count': count,
        'order': order,
    }
    return self._request(path, method='GET', query_parameters=params)
def get_project_team_list(self, project_id_or_key):
    """
    Fetch the list of project teams.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/teams'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_project_user_list_raw(self, project_id_or_key, query_parameters):
    """
    Fetch the list of project members (raw query parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param dict query_parameters: query_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/users'.format(project_id_or_key)
    return self._request(path, method='GET',
                         query_parameters=query_parameters)
def get_project_user_list(
        self,
        project_id_or_key,
        exclude_group_members=None):
    """
    Fetch the list of project members.

    :param bool exclude_group_members: True to exclude members that belong
        to project groups; False to get all members. Default is False.
    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/users'.format(project_id_or_key)
    params = {
        'excludeGroupMembers': self._bool_to_str(exclude_group_members),
    }
    return self._request(path, method='GET', query_parameters=params)
def get_pull_request(self, project_id_or_key, repo_id_or_name, number):
    """
    Fetch a single pull request.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param int number: Pull request number
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests/{}'.format(
        project_id_or_key, repo_id_or_name, number)
    return self._request(path, method='GET')
def get_pull_request_comment(
        self,
        project_id_or_key,
        repo_id_or_name,
        number):
    """
    Fetch the list of comments on a pull request.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param int number: Pull request number
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests/{}/comments'.format(
        project_id_or_key, repo_id_or_name, number)
    return self._request(path, method='GET')
def get_pull_request_list_raw(
        self,
        project_id_or_key,
        repo_id_or_name,
        query_parameters):
    """
    Fetch the list of pull requests (raw query parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param dict query_parameters: query_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests'.format(
        project_id_or_key, repo_id_or_name)
    return self._request(path, method='GET',
                         query_parameters=query_parameters)
def get_pull_request_list(
        self,
        project_id_or_key,
        repo_id_or_name,
        status_id=None,
        assignee_id=None,
        issue_id=None,
        created_user_id=None,
        offset=None,
        count=None):
    """
    Fetch the list of pull requests.

    :param str project_id_or_key: Project ID or Project Key
    :param str repo_id_or_name: Repository ID or Repository name
    :param list[int] or int status_id: Status ID
    :param list[int] or int assignee_id: Assignee ID
    :param list[int] or int issue_id: Issue ID
    :param list[int] or int created_user_id: Created User ID
    :param int offset: offset
    :param int count: number of records to retrieve (1-100, default 20)
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/git/repositories/{}/pullRequests'.format(
        project_id_or_key, repo_id_or_name)
    params = {
        'statusId[]': status_id,
        'assigneeId[]': assignee_id,
        'issueId[]': issue_id,
        'createdUserId[]': created_user_id,
        'offset': offset,
        'count': count,
    }
    return self._request(path, method='GET', query_parameters=params)
def get_status_list_of_project(self, project_id_or_key):
    """
    Fetch the list of statuses defined in the project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/statuses'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_version_milestone_list(self, project_id_or_key):
    """
    Fetch the list of Versions/Milestones in the project.

    :param str project_id_or_key: Project ID or Project Key
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/versions'.format(project_id_or_key)
    return self._request(path, method='GET')
def get_webhook(self, project_id_or_key, webhook_id):
    """
    Fetch information about a webhook.

    :param str project_id_or_key: Project ID or Project Key
    :param str webhook_id: Webhook ID
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/webhooks/{}'.format(project_id_or_key, webhook_id)
    return self._request(path, method='GET')
def update_category_raw(self, project_id_or_key, _id, form_parameters):
    """
    Update a Category (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Category ID
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/categories/{}'.format(project_id_or_key, _id)
    return self._request(path, method='PATCH',
                         form_parameters=form_parameters)
def update_category(self, project_id_or_key, _id, name=None):
    """
    Update a Category.

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Category ID
    :param str name: Category name
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/categories/{}'.format(project_id_or_key, _id)
    return self._request(path, method='PATCH',
                         form_parameters={'name': name})
def update_custom_field_raw(self, project_id_or_key, _id, form_parameters):
    """
    Update a Custom Field (raw form parameters).

    :param str project_id_or_key: Project ID or Project Key
    :param int _id: Custom Field ID
    :param dict form_parameters: form_parameters
    :return: requests Response object
    :rtype: requests.Response
    """
    path = '/projects/{}/customFields/{}'.format(project_id_or_key, _id)
    return self._request(path, method='PATCH',
                         form_parameters=form_parameters)
def update_custom_field(
self,
project_id_or_key,
_id,
name=None,
applicable_issue_types=None,
description=None,
required=None):
"""
Updates Custom Field.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Custom Field ID
:param str name: Name
:param list[int] or int applicable_issue_types: Type ID to enable Custom fields
:param str description: Description
:param bool required: True to make the Custom field required
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name,
'applicableIssueTypes[]': applicable_issue_types,
'description': description,
'required': self._bool_to_str(required)
}
return self._request(
'/projects/{}/customFields/{}'.format(
project_id_or_key,
_id),
method='PATCH',
form_parameters=form_parameters)
def update_issue_type_raw(self, project_id_or_key, _id, form_parameters):
"""
Updates information about Issue Type.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Issue Type ID
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/issueTypes/{}'.format(
project_id_or_key,
_id),
method='PATCH',
form_parameters=form_parameters)
def update_issue_type(
self,
project_id_or_key,
_id,
name=None,
color=None,
template_summary=None,
template_description=None):
"""
Updates information about Issue Type.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Issue Type ID
:param str name: Issue Type Name
:param str color: Background color : available
:param str template_summary: Subject
:param str template_description: Description
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name,
'color': color,
'templateSummary': template_summary,
'templateDescription': template_description
}
return self._request(
'/projects/{}/issueTypes/{}'.format(
project_id_or_key,
_id),
method='PATCH',
form_parameters=form_parameters)
def update_list_item_for_list_type_custom_field(
self, project_id_or_key, _id, item_id):
"""
Updates list item for list type custom field. Calling API fails if specified custom field’s type is not a list.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Custom field ID
:param int item_id: List item ID
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/customFields/{}/items/{}'.format(
project_id_or_key, _id, item_id), method='PATCH')
def update_order_of_status_raw(self, project_id_or_key, form_parameters):
"""
Updates order about Status.
:param str project_id_or_key: Project ID or Project Key
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/statuses/updateDisplayOrder'.format(project_id_or_key),
method='PATCH',
form_parameters=form_parameters)
def update_order_of_status(self, project_id_or_key, status_id=None):
"""
Updates order about Status.
:param str project_id_or_key: Project ID or Project Key
:param list[int] or int status_id: Status ID List to order them. You have to send all status of project. It has following restrictions as below.
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'statusId[]': status_id
}
return self._request(
'/projects/{}/statuses/updateDisplayOrder'.format(project_id_or_key),
method='PATCH',
form_parameters=form_parameters)
def update_project_raw(self, project_id_or_key, form_parameters):
"""
Updates information about project.
:param str project_id_or_key: Project ID or Project Key
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}'.format(project_id_or_key),
method='PATCH',
form_parameters=form_parameters)
def update_project(
self,
project_id_or_key,
name=None,
key=None,
chart_enabled=None,
subtasking_enabled=None,
project_leader_can_edit_project_leader=None,
text_formatting_rule=None,
archived=None):
"""
Updates information about project.
:param str project_id_or_key: Project ID or Project Key
:param str name: Project Name
:param str key: Project Key
:param bool chart_enabled: Enable chart
:param bool subtasking_enabled: Enable subtasking
:param bool project_leader_can_edit_project_leader: Allow project administrators to manage each other
:param str text_formatting_rule: Formatting rules “backlog” or “markdown”
:param bool archived: archive
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name,
'key': key,
'chartEnabled': self._bool_to_str(chart_enabled),
'subtaskingEnabled': self._bool_to_str(subtasking_enabled),
'projectLeaderCanEditProjectLeader': self._bool_to_str(project_leader_can_edit_project_leader),
'textFormattingRule': text_formatting_rule,
'archived': self._bool_to_str(archived)}
return self._request(
'/projects/{}'.format(project_id_or_key),
method='PATCH',
form_parameters=form_parameters)
def update_pull_request_raw(
self,
project_id_or_key,
repo_id_or_name,
number,
form_parameters):
"""
Updates pull requests.
:param str project_id_or_key: Project ID or Project Key
:param str repo_id_or_name: Repository ID or Repository name
:param int number: Pull request number
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/git/repositories/{}/pullRequests/{}'.format(
project_id_or_key,
repo_id_or_name,
number),
method='PATCH',
form_parameters=form_parameters)
def update_pull_request(
self,
project_id_or_key,
repo_id_or_name,
number,
summary=None,
description=None,
issue_id=None,
assignee_id=None,
notified_user_id=None,
comment=None):
"""
Updates pull requests.
:param str project_id_or_key: Project ID or Project Key
:param str repo_id_or_name: Repository ID or Repository name
:param int number: Pull request number
:param str summary: Summary of pull request
:param str description: Description of pull request
:param int issue_id: Related issue’s ID
:param int assignee_id: Assignee’s ID of pull request
:param list[int] or int notified_user_id: User ID to send notification when pull request is added
:param str comment: Comment
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'summary': summary,
'description': description,
'issueId': issue_id,
'assigneeId': assignee_id,
'notifiedUserId[]': notified_user_id,
'comment': comment
}
return self._request(
'/projects/{}/git/repositories/{}/pullRequests/{}'.format(
project_id_or_key,
repo_id_or_name,
number),
method='PATCH',
form_parameters=form_parameters)
def update_pull_request_comment_information_raw(
self,
project_id_or_key,
repo_id_or_name,
number,
comment_id,
form_parameters):
"""
Updates pull request comment information.
:param str project_id_or_key: Project ID or Project Key
:param str repo_id_or_name: Repository ID or Repository name
:param int number: Pull request number
:param int comment_id: Comment’s ID
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/git/repositories/{}/pullRequests/{}/comments/{}'.format(
project_id_or_key,
repo_id_or_name,
number,
comment_id),
method='PATCH',
form_parameters=form_parameters)
def update_pull_request_comment_information(
self,
project_id_or_key,
repo_id_or_name,
number,
comment_id,
content=None):
"""
Updates pull request comment information.
:param str project_id_or_key: Project ID or Project Key
:param str repo_id_or_name: Repository ID or Repository name
:param int number: Pull request number
:param int comment_id: Comment’s ID
:param str content: Comment’s body
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'content': content
}
return self._request(
'/projects/{}/git/repositories/{}/pullRequests/{}/comments/{}'.format(
project_id_or_key,
repo_id_or_name,
number,
comment_id),
method='PATCH',
form_parameters=form_parameters)
def update_status_raw(self, project_id_or_key, _id, form_parameters):
"""
Updates information about Status.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Status ID
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/statuses/{}'.format(
project_id_or_key,
_id),
method='PATCH',
form_parameters=form_parameters)
def update_status(self, project_id_or_key, _id, name=None, color=None):
"""
Updates information about Status.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Status ID
:param str name: Status Name
:param str color: Background color : available
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name,
'color': color
}
return self._request(
'/projects/{}/statuses/{}'.format(
project_id_or_key,
_id),
method='PATCH',
form_parameters=form_parameters)
def update_version_milestone_raw(
self, project_id_or_key, _id, form_parameters):
"""
Updates information about Version/Milestone.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Version ID
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/versions/{}'.format(
project_id_or_key,
_id),
method='PATCH',
form_parameters=form_parameters)
def update_version_milestone(
self,
project_id_or_key,
_id,
name,
description=None,
start_date=None,
release_due_date=None,
archived=None):
"""
Updates information about Version/Milestone.
:param str project_id_or_key: Project ID or Project Key
:param int _id: Version ID
:param str name: Version Name
:param str description: Version Description
:param str start_date: Start Date (yyyy-MM-dd)
:param str release_due_date: End Date (yyyy-MM-dd)
:param bool archived: archived
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name,
'description': description,
'startDate': start_date,
'releaseDueDate': release_due_date,
'archived': self._bool_to_str(archived)
}
return self._request(
'/projects/{}/versions/{}'.format(
project_id_or_key,
_id),
method='PATCH',
form_parameters=form_parameters)
def update_webhook_raw(
self,
project_id_or_key,
webhook_id,
form_parameters):
"""
Updates information about webhook.
:param str project_id_or_key: Project ID or Project Key
:param str webhook_id: Webhook ID
:param dict form_parameters: form_parameters
:return: requests Response object
:rtype: requests.Response
"""
return self._request(
'/projects/{}/webhooks/{}'.format(
project_id_or_key,
webhook_id),
method='PATCH',
form_parameters=form_parameters)
def update_webhook(
self,
project_id_or_key,
webhook_id,
name=None,
description=None,
hook_url=None,
all_event=None,
activity_type_ids=None):
"""
Updates information about webhook.
:param str project_id_or_key: Project ID or Project Key
:param str webhook_id: Webhook ID
:param str name: Name
:param str description: Description
:param str hook_url: hook URL
:param bool all_event: True to make all events notified
:param list[int] or int activity_type_ids: Event ID to be notified
:return: requests Response object
:rtype: requests.Response
"""
form_parameters = {
'name': name,
'description': description,
'hookUrl': hook_url,
'allEvent': self._bool_to_str(all_event),
'activityTypeIds[]': activity_type_ids
}
return self._request(
'/projects/{}/webhooks/{}'.format(
project_id_or_key,
webhook_id),
method='PATCH',
form_parameters=form_parameters)
| 31.468953 | 236 | 0.599411 | 7,623 | 67,910 | 5.069133 | 0.039223 | 0.047203 | 0.109311 | 0.104342 | 0.92829 | 0.920397 | 0.909528 | 0.891181 | 0.875886 | 0.853113 | 0 | 0.000736 | 0.319658 | 67,910 | 2,157 | 237 | 31.483542 | 0.835527 | 0.372979 | 0 | 0.755191 | 0 | 0.005464 | 0.140106 | 0.070108 | 0 | 0 | 0 | 0 | 0 | 1 | 0.110383 | false | 0 | 0.003279 | 0 | 0.224044 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
204cc2e6b9d9bf1685d45571bc21454123745731 | 14,898 | py | Python | graphs/forms.py | KristenZuber/COVID-19 | c5aae96b4272c2a2f34ddead109195b92ac25468 | [
"MIT"
] | null | null | null | graphs/forms.py | KristenZuber/COVID-19 | c5aae96b4272c2a2f34ddead109195b92ac25468 | [
"MIT"
] | null | null | null | graphs/forms.py | KristenZuber/COVID-19 | c5aae96b4272c2a2f34ddead109195b92ac25468 | [
"MIT"
] | null | null | null | from django import forms
from .models import (
AlPredict,
AkPredict,
AzPredict,
ArPredict,
CaPredict,
CoPredict,
CtPredict,
DePredict,
FlPredict,
GaPredict,
HiPredict,
IdPredict,
IlPredict,
InPredict,
IaPredict,
KsPredict,
KyPredict,
LaPredict,
MePredict,
MdPredict,
MaPredict,
MiPredict,
MnPredict,
MsPredict,
MoPredict,
MtPredict,
NePredict,
NvPredict,
NhPredict,
NjPredict,
NmPredict,
NyPredict,
NcPredict,
NdPredict,
OhPredict,
OkPredict,
OrPredict,
PaPredict,
RiPredict,
ScPredict,
SdPredict,
TnPredict,
TxPredict,
UtPredict,
VtPredict,
VaPredict,
WaPredict,
WvPredict,
WiPredict,
WyPredict
)
class AlForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = AlPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class AkForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = AkPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class AzForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = AzPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class ArForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = ArPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class CaForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = CaPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class CoForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = CoPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class CtForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = CtPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class DeForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = DePredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class FlForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = FlPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class GaForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = GaPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class HiForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = HiPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class IdForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = IdPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class IlForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = IlPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class InForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = InPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class IaForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = IaPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class KsForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = KsPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class KyForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = KyPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class LaForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = LaPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class MeForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = MePredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class MdForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = MdPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class MaForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = MaPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class MiForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = MiPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class MnForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = MnPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class MsForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = MsPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class MoForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = MoPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class MtForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = MtPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class NeForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = NePredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class NvForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = NvPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class NhForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = NhPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class NjForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = NjPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class NmForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = NmPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class NyForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = NyPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class NcForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = NcPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class NdForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = NdPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class OhForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = OhPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class OkForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = OkPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class OrForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = OrPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class PaForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = PaPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class RiForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = RiPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class ScForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = ScPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class SdForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = SdPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class TnForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = TnPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class TxForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = TxPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class UtForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = UtPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class VtForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = VtPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class VaForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = VaPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class WaForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = WaPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class WvForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = WvPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class WiForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = WiPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
class WyForm(forms.ModelForm):
x = forms.FloatField(label = "Enter a temperature in °F to get a prediction: ", required = False)
class Meta:
model = WyPredict
labels = {
'x':'Enter a temperature to get a prediction: ',
}
fields = ['x']
| 26.891697 | 101 | 0.586455 | 1,807 | 14,898 | 4.862756 | 0.069175 | 0.068283 | 0.193468 | 0.182087 | 0.858655 | 0.858655 | 0.858655 | 0.858655 | 0.858655 | 0.858655 | 0 | 0 | 0.302658 | 14,898 | 553 | 102 | 26.940326 | 0.840986 | 0 | 0 | 0.551876 | 0 | 0 | 0.302054 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.004415 | 0 | 0.335541 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
646f23b56b149d37ce165bcacbad99584fb68157 | 1,189 | py | Python | test/imputation/cs/test_central_tendency.py | xyz8983/impyute | fd2a75790ce2e1fa72ff47cc86840da63a8b3b2c | [
"MIT"
] | null | null | null | test/imputation/cs/test_central_tendency.py | xyz8983/impyute | fd2a75790ce2e1fa72ff47cc86840da63a8b3b2c | [
"MIT"
] | null | null | null | test/imputation/cs/test_central_tendency.py | xyz8983/impyute | fd2a75790ce2e1fa72ff47cc86840da63a8b3b2c | [
"MIT"
] | 1 | 2019-07-30T15:43:39.000Z | 2019-07-30T15:43:39.000Z | """test_averagings.py"""
import numpy as np
import impyute as impy
mask = np.zeros((5, 5), dtype=bool)
data_c = impy.dataset.test_data(mask=mask)
mask[0][0] = True
data_m = impy.dataset.test_data(mask=mask)
def test_mean_return_type():
""" Check return type, should return an np.ndarray"""
imputed = impy.mode(data_m)
assert isinstance(imputed, np.ndarray)
def test_mode_return_type():
""" Check return type, should return an np.ndarray"""
imputed = impy.mode(data_m)
assert isinstance(imputed, np.ndarray)
def test_median_return_type():
""" Check return type, should return an np.ndarray"""
imputed = impy.mode(data_m)
assert isinstance(imputed, np.ndarray)
def test_mean_impute_missing_values():
""" After imputation, no Nan's should exist"""
imputed = impy.mean(data_m)
assert not np.isnan(imputed).any()
def test_mode_impute_missing_values():
""" After imputation, no NaN's should exist"""
imputed = impy.mode(data_m)
assert not np.isnan(imputed).any()
def test_median_impute_missing_values():
""" After imputation, no NaN's should exist"""
imputed = impy.median(data_m)
assert not np.isnan(imputed).any()
| 30.487179 | 57 | 0.707317 | 179 | 1,189 | 4.519553 | 0.251397 | 0.043263 | 0.081582 | 0.093943 | 0.830655 | 0.830655 | 0.758962 | 0.758962 | 0.720643 | 0.720643 | 0 | 0.004024 | 0.164003 | 1,189 | 38 | 58 | 31.289474 | 0.809859 | 0.234651 | 0 | 0.416667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 1 | 0.25 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
64a986bd316e33e481b70766f2f08e6b2f1e38fa | 141,719 | py | Python | Autocoders/src/generators/templates/component/hpp.py | dstockhouse/eaglesat-fprime | e640b3faea0000e1ca8acab4d6ff66150196c32b | [
"Apache-2.0"
] | null | null | null | Autocoders/src/generators/templates/component/hpp.py | dstockhouse/eaglesat-fprime | e640b3faea0000e1ca8acab4d6ff66150196c32b | [
"Apache-2.0"
] | null | null | null | Autocoders/src/generators/templates/component/hpp.py | dstockhouse/eaglesat-fprime | e640b3faea0000e1ca8acab4d6ff66150196c32b | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
import builtins as builtin
except ImportError:
import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1554930752.386722
__CHEETAH_genTimestamp__ = 'Wed Apr 10 14:12:32 2019'
__CHEETAH_src__ = 'hpp.tmpl'
__CHEETAH_srcLastModified__ = 'Wed Apr 10 11:25:47 2019'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'
if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
raise AssertionError(
'This template was compiled with Cheetah version'
' %s. Templates compiled before version %s must be recompiled.'%(
__CHEETAH_version__, RequiredCheetahVersion))
##################################################
## CLASSES
class hpp(Template):
##################################################
## CHEETAH GENERATED METHODS
def __init__(self, *args, **KWs):
super(hpp, self).__init__(*args, **KWs)
if not self._CHEETAH__instanceInitialized:
cheetahKWArgs = {}
allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
for k,v in KWs.items():
if k in allowedKWs: cheetahKWArgs[k] = v
self._initCheetahInstance(**cheetahKWArgs)
def respond(self, trans=None):
## CHEETAH: main method generated for this template
if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
trans = self.transaction # is None unless self.awake() was called
if not trans:
trans = DummyTransaction()
_dummyTrans = True
else: _dummyTrans = False
write = trans.response().write
SL = self._CHEETAH__searchList
_filter = self._CHEETAH__currentFilter
########################################
## START - generated method body
write(u'''// ======================================================================
// \\title ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 2, col 12
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 2, col 12.
write(u'''ComponentAc.hpp
// \\author Auto-generated
// \\brief hpp file for ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 4, col 25
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 4, col 25.
write(u''' component base class
//
// \\copyright
// Copyright 2009-2015, by the California Institute of Technology.
// ALL RIGHTS RESERVED. United States Government Sponsorship
// acknowledged. Any commercial use must be negotiated with the Office
// of Technology Transfer at the California Institute of Technology.
//
// This software may be subject to U.S. export control laws and
// regulations. By accepting this document, the user agrees to comply
// with all U.S. export laws and regulations. User has the
// responsibility to obtain export licenses, or other export authority
// as may be required before exporting such information to foreign
// countries or providing access to foreign persons.
// ======================================================================
#ifndef ''')
_v = VFN(VFSL([locals()]+SL+[globals(), builtin],"name",True),"upper",False)() # u'${name.upper()}' on line 20, col 10
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper()}')) # from line 20, col 10.
write(u'''_COMP_HPP_
#define ''')
_v = VFN(VFSL([locals()]+SL+[globals(), builtin],"name",True),"upper",False)() # u'${name.upper()}' on line 21, col 10
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper()}')) # from line 21, col 10.
write(u'''_COMP_HPP_
#include <Fw/Cfg/Config.hpp>
#include <Fw/Port/InputSerializePort.hpp>
#include <Fw/Port/OutputSerializePort.hpp>
#include <Fw/Comp/ActiveComponentBase.hpp>
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_guarded_ports",True) or VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 27, col 1
write(u'''#include <Os/Mutex.hpp>
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_commands",True) or VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 30, col 1
write(u'''#include <Fw/Cmd/CmdString.hpp>
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_channels",True): # generated from line 33, col 1
write(u'''#include <Fw/Tlm/TlmString.hpp>
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_channels",True) or VFSL([locals()]+SL+[globals(), builtin],"has_events",True): # generated from line 36, col 1
write(u'''#include <Fw/Time/TimePortAc.hpp>
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 39, col 1
write(u'''#include <Fw/Prm/PrmString.hpp>
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_events",True): # generated from line 42, col 1
write(u'''#include <Fw/Log/LogString.hpp>
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_internal_interfaces",True): # generated from line 45, col 1
write(u'''#include <Fw/Types/InternalInterfaceString.hpp>
''')
for t in VFSL([locals()]+SL+[globals(), builtin],"types_includes",True): # generated from line 48, col 1
write(u'''#include <''')
_v = VFSL([locals()]+SL+[globals(), builtin],"t",True) # u'$t' on line 49, col 12
if _v is not None: write(_filter(_v, rawExpr=u'$t')) # from line 49, col 12.
write(u'''>
''')
for t in VFSL([locals()]+SL+[globals(), builtin],"c_includes",True): # generated from line 51, col 1
write(u'''#include <''')
_v = VFSL([locals()]+SL+[globals(), builtin],"t",True) # u'$t' on line 52, col 12
if _v is not None: write(_filter(_v, rawExpr=u'$t')) # from line 52, col 12.
write(u'''>
''')
for t in VFSL([locals()]+SL+[globals(), builtin],"port_includes",True): # generated from line 54, col 1
if 'LogText' in VFSL([locals()]+SL+[globals(), builtin],"t",True): # generated from line 55, col 3
write(u'''#if FW_ENABLE_TEXT_LOGGING == 1
''')
write(u'''#include <''')
_v = VFSL([locals()]+SL+[globals(), builtin],"t",True) # u'$t' on line 58, col 12
if _v is not None: write(_filter(_v, rawExpr=u'$t')) # from line 58, col 12.
write(u'''>
''')
if 'LogText' in VFSL([locals()]+SL+[globals(), builtin],"t",True): # generated from line 59, col 3
write(u'''#endif
''')
for t in VFSL([locals()]+SL+[globals(), builtin],"ser_includes",True): # generated from line 63, col 1
write(u'''#include <''')
_v = VFSL([locals()]+SL+[globals(), builtin],"t",True) # u'$t' on line 64, col 12
if _v is not None: write(_filter(_v, rawExpr=u'$t')) # from line 64, col 12.
write(u'''>
''')
write(u'''
''')
if VFSL([locals()]+SL+[globals(), builtin],"namespace_list",True) != None: # generated from line 67, col 1
for namespace in VFSL([locals()]+SL+[globals(), builtin],"namespace_list",True): # generated from line 68, col 2
write(u'''namespace ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"namespace",True) # u'${namespace}' on line 69, col 11
if _v is not None: write(_filter(_v, rawExpr=u'${namespace}')) # from line 69, col 11.
write(u''' {
''')
write(u'''
//! \\class ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 73, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 73, col 14.
write(u'''ComponentBase
//! \\brief Auto-generated base for ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 74, col 38
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 74, col 38.
write(u''' component
//!
class ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 76, col 9
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 76, col 9.
write(u'''ComponentBase :
''')
if VFSL([locals()]+SL+[globals(), builtin],"kind",True) == "passive": # generated from line 77, col 1
write(u''' public Fw::PassiveComponentBase
''')
elif VFSL([locals()]+SL+[globals(), builtin],"kind",True) == "queued": # generated from line 79, col 1
write(u''' public Fw::QueuedComponentBase
''')
else: # generated from line 81, col 1
write(u''' public Fw::ActiveComponentBase
''')
write(u''' {
// ----------------------------------------------------------------------
// Friend classes
// ----------------------------------------------------------------------
//! Friend class for white-box testing
//!
friend class ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 92, col 18
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 92, col 18.
write(u'''ComponentBaseFriend;
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_typed_input_ports",True): # generated from line 94, col 1
write(u''' public:
// ----------------------------------------------------------------------
// Getters for typed input ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, full, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"typed_input_ports",True): # generated from line 101, col 3
namespace = VFSL([locals()]+SL+[globals(), builtin],"port_namespaces",True)[VFSL([locals()]+SL+[globals(), builtin],"type",True)]
write(u''' //! Get input port at index
//!
//! \\return ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 105, col 17
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 105, col 17.
write(u'''[portNum]
//!
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"namespace",True) # u'$namespace' on line 107, col 5
if _v is not None: write(_filter(_v, rawExpr=u'$namespace')) # from line 107, col 5.
write(u'''::Input''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'${type}' on line 107, col 22
if _v is not None: write(_filter(_v, rawExpr=u'${type}')) # from line 107, col 22.
write(u'''Port* get_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 107, col 39
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 107, col 39.
write(u'''_InputPort(
NATIVE_INT_TYPE portNum ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 108, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 108, col 33.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_serial_input_ports",True): # generated from line 113, col 1
write(u''' public:
// ----------------------------------------------------------------------
// Get serial input ports
// ----------------------------------------------------------------------
''')
for instance, sync, priority, full, max_num in VFSL([locals()]+SL+[globals(), builtin],"serial_input_ports",True): # generated from line 120, col 3
write(u''' //! Get input port at index
//!
//! \\return ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 123, col 17
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 123, col 17.
write(u'''[portNum]
//!
Fw::InputSerializePort* get_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 125, col 33
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 125, col 33.
write(u'''_InputPort(
NATIVE_INT_TYPE portNum ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 126, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 126, col 33.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_typed_output_ports",True): # generated from line 131, col 1
write(u''' public:
// ----------------------------------------------------------------------
// Connect typed input ports to typed output ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"typed_output_ports",True): # generated from line 138, col 3
namespace = VFSL([locals()]+SL+[globals(), builtin],"port_namespaces",True)[VFSL([locals()]+SL+[globals(), builtin],"type",True)]
if VFSL([locals()]+SL+[globals(), builtin],"role",True) == "LogTextEvent": # generated from line 140, col 5
write(u'''#if FW_ENABLE_TEXT_LOGGING == 1
''')
write(u''' //! Connect port to ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 143, col 25
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 143, col 25.
write(u'''[portNum]
//!
void set_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 145, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 145, col 14.
write(u'''_OutputPort(
NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 146, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 146, col 34.
write(u'''
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"namespace",True) # u'$namespace' on line 147, col 9
if _v is not None: write(_filter(_v, rawExpr=u'$namespace')) # from line 147, col 9.
write(u'''::Input''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'${type}' on line 147, col 26
if _v is not None: write(_filter(_v, rawExpr=u'${type}')) # from line 147, col 26.
write(u'''Port *port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port") # u'$doxygen_post_comment("The port")' on line 147, col 44
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port")')) # from line 147, col 44.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"role",True) == "LogTextEvent": # generated from line 149, col 5
write(u'''#endif
''')
write(u'''
''')
write(u'''#if FW_PORT_SERIALIZATION
public:
// ----------------------------------------------------------------------
// Connect serialization input ports to typed output ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"typed_output_ports",True): # generated from line 162, col 3
if VFSL([locals()]+SL+[globals(), builtin],"role",True) == "LogTextEvent": # generated from line 163, col 5
write(u'''#if FW_ENABLE_TEXT_LOGGING == 1
''')
write(u''' //! Connect port to ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 166, col 25
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 166, col 25.
write(u'''[portNum]
//!
void set_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 168, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 168, col 14.
write(u'''_OutputPort(
NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 169, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 169, col 34.
write(u'''
Fw::InputSerializePort *port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port") # u'$doxygen_post_comment("The port")' on line 170, col 38
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port")')) # from line 170, col 38.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"role",True) == "LogTextEvent": # generated from line 172, col 5
write(u'''#endif
''')
write(u'''
''')
write(u'''#endif
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_serial_output_ports",True): # generated from line 180, col 1
write(u''' public:
// ----------------------------------------------------------------------
// Connect serial input ports to serial output ports
// ----------------------------------------------------------------------
#if FW_PORT_SERIALIZATION
''')
for instance, sync, priority, max_num in VFSL([locals()]+SL+[globals(), builtin],"serial_output_ports",True): # generated from line 189, col 3
write(u''' //! Connect port to ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 190, col 25
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 190, col 25.
write(u'''[portNum]
//!
void set_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 192, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 192, col 14.
write(u'''_OutputPort(
NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 193, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 193, col 34.
write(u'''
Fw::InputSerializePort *port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port") # u'$doxygen_post_comment("The port")' on line 194, col 38
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port")')) # from line 194, col 38.
write(u'''
);
''')
write(u'''
public:
// ----------------------------------------------------------------------
// Connect serialization input ports to serial output ports
// ----------------------------------------------------------------------
''')
for instance, sync, priority, max_num in VFSL([locals()]+SL+[globals(), builtin],"serial_output_ports",True): # generated from line 205, col 3
write(u''' //! Connect port to ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 206, col 25
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 206, col 25.
write(u'''[portNum]
//!
void set_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 208, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 208, col 14.
write(u'''_OutputPort(
NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 209, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 209, col 34.
write(u'''
Fw::InputPortBase *port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port") # u'$doxygen_post_comment("The port")' on line 210, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port")')) # from line 210, col 33.
write(u'''
);
''')
write(u'''#endif
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_commands",True) or VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 217, col 1
write(u''' public:
// ----------------------------------------------------------------------
// Command registration
// ----------------------------------------------------------------------
//! \\brief Register commands with the Command Dispatcher
//!
//! Connect the dispatcher first
//!
void regCommands(void);
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 231, col 1
write(u''' public:
// ----------------------------------------------------------------------
// Parameter loading
// ----------------------------------------------------------------------
//! \\brief Load the parameters from a parameter source
//!
//! Connect the parameter first
//!
void loadParameters(void);
''')
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Component construction, initialization, and destruction
// ----------------------------------------------------------------------
#if FW_OBJECT_NAMES == 1
//! Construct a ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 252, col 21
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 252, col 21.
write(u'''ComponentBase object
//!
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 254, col 5
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 254, col 5.
write(u'''ComponentBase(
const char* compName ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("Component name") # u'$doxygen_post_comment("Component name")' on line 255, col 30
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("Component name")')) # from line 255, col 30.
write(u'''
);
#else
//! Construct a ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 258, col 21
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 258, col 21.
write(u'''ComponentBase object
//!
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 260, col 5
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 260, col 5.
write(u'''ComponentBase(void);
#endif
//! Initialize a ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 263, col 22
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 263, col 22.
write(u'''ComponentBase object
//!
void init(
''')
if VFSL([locals()]+SL+[globals(), builtin],"kind",True) == "passive": # generated from line 266, col 1
write(u''' NATIVE_INT_TYPE instance = 0 ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The instance number") # u'$doxygen_post_comment("The instance number")' on line 267, col 38
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The instance number")')) # from line 267, col 38.
write(u'''
''')
elif VFSL([locals()]+SL+[globals(), builtin],"needs_msg_size",True): # generated from line 268, col 1
write(u''' NATIVE_INT_TYPE queueDepth, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The queue depth") # u'$doxygen_post_comment("The queue depth")' on line 269, col 37
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The queue depth")')) # from line 269, col 37.
write(u'''
NATIVE_INT_TYPE msgSize, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The message size") # u'$doxygen_post_comment("The message size")' on line 270, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The message size")')) # from line 270, col 34.
write(u'''
NATIVE_INT_TYPE instance = 0 ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The instance number") # u'$doxygen_post_comment("The instance number")' on line 271, col 38
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The instance number")')) # from line 271, col 38.
write(u'''
''')
else: # generated from line 272, col 1
write(u''' NATIVE_INT_TYPE queueDepth, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The queue depth") # u'$doxygen_post_comment("The queue depth")' on line 273, col 37
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The queue depth")')) # from line 273, col 37.
write(u'''
NATIVE_INT_TYPE instance = 0 ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The instance number") # u'$doxygen_post_comment("The instance number")' on line 274, col 38
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The instance number")')) # from line 274, col 38.
write(u'''
''')
write(u''' );
//! Destroy a ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 278, col 19
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 278, col 19.
write(u'''ComponentBase object
//!
virtual ~''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 280, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 280, col 14.
write(u'''ComponentBase(void);
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_guarded_ports",True): # generated from line 282, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
//! Mutex operations for guarded ports.
// ----------------------------------------------------------------------
//! You can override these operations to provide more sophisticated
//! synchronization.
// ----------------------------------------------------------------------
//! Lock the guarded mutex
//!
virtual void lock(void);
//! Unlock the guarded mutex
//!
virtual void unLock(void);
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"handlers_typed",True)) > 0: # generated from line 301, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Handlers to implement for typed input ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, full, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"handlers_typed",True): # generated from line 308, col 3
params = VFSL([locals()]+SL+[globals(), builtin],"port_params",True)[VFSL([locals()]+SL+[globals(), builtin],"instance",True)]
return_type = VFSL([locals()]+SL+[globals(), builtin],"port_return_type_strs",True)[VFSL([locals()]+SL+[globals(), builtin],"instance",True)]
write(u''' //! Handler for input port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 311, col 32
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 311, col 32.
write(u'''
//
virtual ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"return_type",True) # u'${return_type}' on line 313, col 13
if _v is not None: write(_filter(_v, rawExpr=u'${return_type}')) # from line 313, col 13.
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 313, col 27
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 313, col 27.
write(u'''_handler(
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"params",True)) == 0: # generated from line 314, col 5
write(u''' NATIVE_INT_TYPE portNum ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 315, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 315, col 33.
write(u'''
''')
else: # generated from line 316, col 5
write(u''' NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 317, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 317, col 34.
write(u'''
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"emit_port_params",False)(8, VFSL([locals()]+SL+[globals(), builtin],"params",True)) # u'$emit_port_params(8, $params)' on line 318, col 1
if _v is not None: write(_filter(_v, rawExpr=u'$emit_port_params(8, $params)')) # from line 318, col 1.
write(u'''
''')
write(u''' ) = 0;
''')
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Port handler base-class functions for typed input ports.
// ----------------------------------------------------------------------
// Call these functions directly to bypass the corresponding ports.
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, full, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"handlers_typed",True): # generated from line 331, col 3
params = VFSL([locals()]+SL+[globals(), builtin],"port_params",True)[VFSL([locals()]+SL+[globals(), builtin],"instance",True)]
return_type = VFSL([locals()]+SL+[globals(), builtin],"port_return_type_strs",True)[VFSL([locals()]+SL+[globals(), builtin],"instance",True)]
write(u''' //! Handler base-class function for input port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 334, col 52
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 334, col 52.
write(u'''
//!
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"return_type",True) # u'${return_type}' on line 336, col 5
if _v is not None: write(_filter(_v, rawExpr=u'${return_type}')) # from line 336, col 5.
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 336, col 19
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 336, col 19.
write(u'''_handlerBase(
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"params",True)) == 0: # generated from line 337, col 5
write(u''' NATIVE_INT_TYPE portNum ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 338, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 338, col 33.
write(u'''
''')
else: # generated from line 339, col 5
write(u''' NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 340, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 340, col 34.
write(u'''
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"emit_port_params",False)(8, VFSL([locals()]+SL+[globals(), builtin],"params",True)) # u'$emit_port_params(8, $params)' on line 341, col 1
if _v is not None: write(_filter(_v, rawExpr=u'$emit_port_params(8, $params)')) # from line 341, col 1.
write(u'''
''')
write(u''' );
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"handlers_serial",True)) > 0: # generated from line 347, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Handlers to implement for serial input ports
// ----------------------------------------------------------------------
''')
for instance, sync, priority, full, max_num in VFSL([locals()]+SL+[globals(), builtin],"handlers_serial",True): # generated from line 354, col 3
write(u''' //! Handler for input port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 355, col 32
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 355, col 32.
write(u'''
//!
virtual void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 357, col 18
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 357, col 18.
write(u'''_handler(
NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 358, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 358, col 34.
write(u'''
Fw::SerializeBufferBase &Buffer ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The serialization buffer") # u'$doxygen_post_comment("The serialization buffer")' on line 359, col 41
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The serialization buffer")')) # from line 359, col 41.
write(u'''
) = 0;
''')
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Port handler base-class functions for serial input ports.
// ----------------------------------------------------------------------
// Call these functions directly to bypass the corresponding ports.
// ----------------------------------------------------------------------
''')
for instance, sync, priority, full, max_num in VFSL([locals()]+SL+[globals(), builtin],"handlers_serial",True): # generated from line 371, col 3
write(u''' //! Handler base-class function for input port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 372, col 52
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 372, col 52.
write(u'''
//!
void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 374, col 10
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 374, col 10.
write(u'''_handlerBase(
NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 375, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 375, col 34.
write(u'''
Fw::SerializeBufferBase &Buffer ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The serialization buffer") # u'$doxygen_post_comment("The serialization buffer")' on line 376, col 41
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The serialization buffer")')) # from line 376, col 41.
write(u'''
);
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"pre_message_hooks_typed",True)) > 0: # generated from line 381, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Pre-message hooks for typed async input ports.
// ----------------------------------------------------------------------
// Each of these functions is invoked just before processing a message
// on the corresponding port. By default they do nothing. You can
// override them to provide specific pre-message behavior.
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, full, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"pre_message_hooks_typed",True): # generated from line 392, col 3
params = VFSL([locals()]+SL+[globals(), builtin],"port_params",True)[VFSL([locals()]+SL+[globals(), builtin],"instance",True)]
write(u''' //! Pre-message hook for async input port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 394, col 47
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 394, col 47.
write(u'''
//!
virtual void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 396, col 18
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 396, col 18.
write(u'''_preMsgHook(
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"params",True)) == 0: # generated from line 397, col 5
write(u''' NATIVE_INT_TYPE portNum ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 398, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 398, col 33.
write(u'''
''')
else: # generated from line 399, col 5
write(u''' NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 400, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 400, col 34.
write(u'''
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"emit_port_params",False)(8, VFSL([locals()]+SL+[globals(), builtin],"params",True)) # u'$emit_port_params(8, $params)' on line 401, col 1
if _v is not None: write(_filter(_v, rawExpr=u'$emit_port_params(8, $params)')) # from line 401, col 1.
write(u'''
''')
write(u''' );
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"pre_message_hooks_serial",True)) > 0: # generated from line 407, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Pre-message hooks for serial async input ports.
// ----------------------------------------------------------------------
// Each of these functions is invoked just before processing a message
// on the corresponding port. By default they do nothing. You can
// override them to provide specific pre-message behavior.
// ----------------------------------------------------------------------
''')
for instance, sync, priority, full, max_num in VFSL([locals()]+SL+[globals(), builtin],"pre_message_hooks_serial",True): # generated from line 418, col 3
if VFSL([locals()]+SL+[globals(), builtin],"sync",True) == "async": # generated from line 419, col 5
write(u''' //! Pre-message hook for async input port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 420, col 47
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 420, col 47.
write(u'''
//!
virtual void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 422, col 18
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 422, col 18.
write(u'''_preMsgHook(
NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 423, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 423, col 34.
write(u'''
Fw::SerializeBufferBase &Buffer ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The serialization buffer") # u'$doxygen_post_comment("The serialization buffer")' on line 424, col 41
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The serialization buffer")')) # from line 424, col 41.
write(u'''
);
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"typed_invocation_functions",True)) > 0: # generated from line 430, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Invocation functions for typed output ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"typed_invocation_functions",True): # generated from line 437, col 3
params = VFSL([locals()]+SL+[globals(), builtin],"port_params",True)[VFSL([locals()]+SL+[globals(), builtin],"instance",True)]
return_type = VFSL([locals()]+SL+[globals(), builtin],"port_return_type_strs",True)[VFSL([locals()]+SL+[globals(), builtin],"instance",True)]
write(u''' //! Invoke output port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 440, col 28
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 440, col 28.
write(u'''
//!
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"return_type",True) # u'${return_type}' on line 442, col 5
if _v is not None: write(_filter(_v, rawExpr=u'${return_type}')) # from line 442, col 5.
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 442, col 19
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 442, col 19.
write(u'''_out(
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"params",True)) == 0: # generated from line 443, col 5
write(u''' NATIVE_INT_TYPE portNum ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 444, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 444, col 33.
write(u'''
''')
else: # generated from line 445, col 5
write(u''' NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 446, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 446, col 34.
write(u'''
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"emit_port_params",False)(8, VFSL([locals()]+SL+[globals(), builtin],"params",True)) # u'$emit_port_params(8, $params)' on line 447, col 1
if _v is not None: write(_filter(_v, rawExpr=u'$emit_port_params(8, $params)')) # from line 447, col 1.
write(u'''
''')
write(u''' );
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"serial_invocation_functions",True)) > 0: # generated from line 453, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Invocation functions for serial output ports
// ----------------------------------------------------------------------
''')
for instance, sync, priority, max_num in VFSL([locals()]+SL+[globals(), builtin],"serial_invocation_functions",True): # generated from line 460, col 3
write(u''' //! Invoke output port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 461, col 28
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 461, col 28.
write(u'''
//!
void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 463, col 10
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 463, col 10.
write(u'''_out(
NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 464, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 464, col 34.
write(u'''
Fw::SerializeBufferBase &Buffer ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The serialization buffer") # u'$doxygen_post_comment("The serialization buffer")' on line 465, col 41
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The serialization buffer")')) # from line 465, col 41.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_input_ports",True): # generated from line 470, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Getters for numbers of input ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, full, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"input_ports",True): # generated from line 477, col 3
write(u''' //! Get the number of ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 478, col 27
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 478, col 27.
write(u''' input ports
//!
//! \\return The number of ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 480, col 31
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 480, col 31.
write(u''' input ports
//!
NATIVE_INT_TYPE getNum_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 482, col 28
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 482, col 28.
write(u'''_InputPorts(void);
''')
write(u'''
// ----------------------------------------------------------------------
// Enumerations for number of ports
// ----------------------------------------------------------------------
enum {
''')
for instance, type, sync, priority, full, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"input_ports",True): # generated from line 491, col 3
write(u''' NUM_''')
_v = VFN(VFSL([locals()]+SL+[globals(), builtin],"instance",True),"upper",False)() # u'${instance.upper()}' on line 492, col 12
if _v is not None: write(_filter(_v, rawExpr=u'${instance.upper()}')) # from line 492, col 12.
write(u'''_INPUT_PORTS = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"max_num",True) # u'$max_num' on line 492, col 46
if _v is not None: write(_filter(_v, rawExpr=u'$max_num')) # from line 492, col 46.
write(u''',
''')
write(u''' };
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_output_ports",True): # generated from line 497, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Getters for numbers of output ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"output_ports",True): # generated from line 504, col 3
if VFSL([locals()]+SL+[globals(), builtin],"role",True) == "LogTextEvent": # generated from line 505, col 5
write(u'''#if FW_ENABLE_TEXT_LOGGING == 1
''')
write(u''' //! Get the number of ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 508, col 27
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 508, col 27.
write(u''' output ports
//!
//! \\return The number of ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 510, col 31
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 510, col 31.
write(u''' output ports
//!
NATIVE_INT_TYPE getNum_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 512, col 28
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 512, col 28.
write(u'''_OutputPorts(void);
''')
if VFSL([locals()]+SL+[globals(), builtin],"role",True) =="LogTextEvent": # generated from line 513, col 5
write(u'''#endif
''')
write(u'''
''')
write(u'''
enum {
''')
for instance, type, sync, priority, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"output_ports",True): # generated from line 520, col 3
write(u''' NUM_''')
_v = VFN(VFSL([locals()]+SL+[globals(), builtin],"instance",True),"upper",False)() # u'${instance.upper()}' on line 521, col 12
if _v is not None: write(_filter(_v, rawExpr=u'${instance.upper()}')) # from line 521, col 12.
write(u'''_OUTPUT_PORTS = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"max_num",True) # u'$max_num' on line 521, col 47
if _v is not None: write(_filter(_v, rawExpr=u'$max_num')) # from line 521, col 47.
write(u''',
''')
write(u''' };
PROTECTED:
// ----------------------------------------------------------------------
// Connection status queries for output ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"output_ports",True): # generated from line 531, col 3
if VFSL([locals()]+SL+[globals(), builtin],"role",True) == "LogTextEvent": # generated from line 532, col 5
write(u'''#if FW_ENABLE_TEXT_LOGGING == 1
''')
write(u''' //! Check whether port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 535, col 28
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 535, col 28.
write(u''' is connected
//!
//! \\return Whether port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 537, col 30
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 537, col 30.
write(u''' is connected
//!
bool isConnected_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 539, col 22
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 539, col 22.
write(u'''_OutputPort(
NATIVE_INT_TYPE portNum ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 540, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 540, col 33.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"role",True) == "LogTextEvent": # generated from line 542, col 5
write(u'''#endif
''')
write(u'''
''')
if VFSL([locals()]+SL+[globals(), builtin],"kind",True) == "queued": # generated from line 548, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Message dispatch
// ----------------------------------------------------------------------
//! Dispatch a message on the queue
//!
virtual MsgDispatchStatus doDispatch(void);
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"command_enums",True)) > 0: # generated from line 560, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Command enums
// ----------------------------------------------------------------------
''')
for enum_type, enum_members in VFSL([locals()]+SL+[globals(), builtin],"command_enums",True): # generated from line 567, col 3
write(u''' typedef enum {
''')
for member, init, comment in VFSL([locals()]+SL+[globals(), builtin],"enum_members",True): # generated from line 569, col 5
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"member",True) # u'${member}' on line 570, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${member}')) # from line 570, col 7.
_v = VFSL([locals()]+SL+[globals(), builtin],"init",True) # u'${init}' on line 570, col 16
if _v is not None: write(_filter(_v, rawExpr=u'${init}')) # from line 570, col 16.
write(u''', /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 570, col 28
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 570, col 28.
write(u''' */
''')
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'${enum_type}' on line 572, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${enum_type}')) # from line 572, col 7.
write(u'''_MAX
} ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'$enum_type' on line 573, col 7
if _v is not None: write(_filter(_v, rawExpr=u'$enum_type')) # from line 573, col 7.
write(u''';
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_commands",True) or VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 577, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Command opcodes
// ----------------------------------------------------------------------
enum {
''')
for mnemonic, opcodes, sync, priority,full,comment in VFSL([locals()]+SL+[globals(), builtin],"commands",True): # generated from line 585, col 3
inst = 0
for opcode in VFSL([locals()]+SL+[globals(), builtin],"opcodes",True): # generated from line 587, col 5
if len(VFSL([locals()]+SL+[globals(), builtin],"opcodes",True)) > 1: # generated from line 588, col 7
write(u''' OPCODE_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"mnemonic.upper",True) # u'${mnemonic.upper}' on line 589, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${mnemonic.upper}')) # from line 589, col 14.
write(u'''_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"inst",True) # u'$inst' on line 589, col 32
if _v is not None: write(_filter(_v, rawExpr=u'$inst')) # from line 589, col 32.
write(u''' = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"opcode",True) # u'${opcode}' on line 589, col 40
if _v is not None: write(_filter(_v, rawExpr=u'${opcode}')) # from line 589, col 40.
write(u''', /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 589, col 54
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 589, col 54.
write(u''' */
''')
else: # generated from line 590, col 7
write(u''' OPCODE_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"mnemonic.upper",True) # u'${mnemonic.upper}' on line 591, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${mnemonic.upper}')) # from line 591, col 14.
write(u''' = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"opcode",True) # u'${opcode}' on line 591, col 34
if _v is not None: write(_filter(_v, rawExpr=u'${opcode}')) # from line 591, col 34.
write(u''', /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 591, col 48
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 591, col 48.
write(u''' */
''')
inst = VFSL([locals()]+SL+[globals(), builtin],"inst",True) + 1
for ids, name, type, osets, osavs, size, default, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"parameters",True): # generated from line 596, col 3
inst = 0
for opcode in VFSL([locals()]+SL+[globals(), builtin],"osets",True): # generated from line 598, col 5
if len(VFSL([locals()]+SL+[globals(), builtin],"osets",True)) > 1: # generated from line 599, col 7
write(u''' OPCODE_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 600, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 600, col 14.
write(u'''_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"inst",True) # u'${inst}' on line 600, col 28
if _v is not None: write(_filter(_v, rawExpr=u'${inst}')) # from line 600, col 28.
write(u'''_SET = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"opcode",True) # u'${opcode}' on line 600, col 42
if _v is not None: write(_filter(_v, rawExpr=u'${opcode}')) # from line 600, col 42.
write(u''', //!< opcode to set parameter ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 600, col 82
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 600, col 82.
write(u''' for instance ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"inst",True) # u'$inst' on line 600, col 101
if _v is not None: write(_filter(_v, rawExpr=u'$inst')) # from line 600, col 101.
write(u'''
''')
else: # generated from line 601, col 7
write(u''' OPCODE_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 602, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 602, col 14.
write(u'''_SET = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"opcode",True) # u'${opcode}' on line 602, col 34
if _v is not None: write(_filter(_v, rawExpr=u'${opcode}')) # from line 602, col 34.
write(u''', //!< opcode to set parameter ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 602, col 74
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 602, col 74.
write(u'''
''')
inst = VFSL([locals()]+SL+[globals(), builtin],"inst",True) + 1
inst = 0
for opcode in VFSL([locals()]+SL+[globals(), builtin],"osavs",True): # generated from line 607, col 5
if len(VFSL([locals()]+SL+[globals(), builtin],"osavs",True)) > 1: # generated from line 608, col 7
write(u''' OPCODE_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 609, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 609, col 14.
write(u'''_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"inst",True) # u'${inst}' on line 609, col 28
if _v is not None: write(_filter(_v, rawExpr=u'${inst}')) # from line 609, col 28.
write(u'''_SAVE = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"opcode",True) # u'${opcode}' on line 609, col 43
if _v is not None: write(_filter(_v, rawExpr=u'${opcode}')) # from line 609, col 43.
write(u''', //!< opcode to save parameter ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 609, col 84
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 609, col 84.
write(u''' for instance ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"inst",True) # u'$inst' on line 609, col 103
if _v is not None: write(_filter(_v, rawExpr=u'$inst')) # from line 609, col 103.
write(u'''
''')
else: # generated from line 610, col 7
write(u''' OPCODE_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 611, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 611, col 14.
write(u'''_SAVE = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"opcode",True) # u'${opcode}' on line 611, col 35
if _v is not None: write(_filter(_v, rawExpr=u'${opcode}')) # from line 611, col 35.
write(u''', //!< opcode to save parameter ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 611, col 76
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 611, col 76.
write(u'''
''')
inst = VFSL([locals()]+SL+[globals(), builtin],"inst",True) + 1
write(u''' };
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_commands",True): # generated from line 619, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Command handlers to implement
// ----------------------------------------------------------------------
''')
for mnemonic, opcode, sync, priority,full,comment in VFSL([locals()]+SL+[globals(), builtin],"commands",True): # generated from line 626, col 3
params = VFSL([locals()]+SL+[globals(), builtin],"command_params",True)[VFSL([locals()]+SL+[globals(), builtin],"mnemonic",True)]
write(u''' //! Handler for command ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"mnemonic",True) # u'$mnemonic' on line 628, col 29
if _v is not None: write(_filter(_v, rawExpr=u'$mnemonic')) # from line 628, col 29.
write(u'''
/* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 629, col 8
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 629, col 8.
write(u''' */
virtual void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"mnemonic",True) # u'${mnemonic}' on line 630, col 18
if _v is not None: write(_filter(_v, rawExpr=u'${mnemonic}')) # from line 630, col 18.
write(u'''_cmdHandler(
FwOpcodeType opCode, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The opcode") # u'$doxygen_post_comment("The opcode")' on line 631, col 30
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The opcode")')) # from line 631, col 30.
write(u'''
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"params",True)) == 0: # generated from line 632, col 5
write(u''' U32 cmdSeq ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The command sequence number") # u'$doxygen_post_comment("The command sequence number")' on line 633, col 20
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The command sequence number")')) # from line 633, col 20.
write(u'''
''')
else: # generated from line 634, col 5
write(u''' U32 cmdSeq, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The command sequence number") # u'$doxygen_post_comment("The command sequence number")' on line 635, col 21
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The command sequence number")')) # from line 635, col 21.
write(u'''
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"emit_non_port_params",False)(8, VFSL([locals()]+SL+[globals(), builtin],"params",True)) # u'$emit_non_port_params(8, $params)' on line 636, col 1
if _v is not None: write(_filter(_v, rawExpr=u'$emit_non_port_params(8, $params)')) # from line 636, col 1.
write(u'''
''')
write(u''' ) = 0;
''')
mnemonics = [ VFSL([locals()]+SL+[globals(), builtin],"mnemonic",True) for mnemonic, opcode, sync, priority, full, comment in VFSL([locals()]+SL+[globals(), builtin],"commands",True) if VFSL([locals()]+SL+[globals(), builtin],"sync",True) == "async" ]
if len(VFSL([locals()]+SL+[globals(), builtin],"mnemonics",True)) > 0: # generated from line 647, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Pre-message hooks for async commands.
// ----------------------------------------------------------------------
// Each of these functions is invoked just before processing the
// corresponding command. By default they do nothing. You can
// override them to provide specific pre-command behavior.
// ----------------------------------------------------------------------
''')
for mnemonic in VFSL([locals()]+SL+[globals(), builtin],"mnemonics",True): # generated from line 658, col 3
write(u''' //! Pre-message hook for command ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"mnemonic",True) # u'$mnemonic' on line 659, col 38
if _v is not None: write(_filter(_v, rawExpr=u'$mnemonic')) # from line 659, col 38.
write(u'''
//!
virtual void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"mnemonic",True) # u'${mnemonic}' on line 661, col 18
if _v is not None: write(_filter(_v, rawExpr=u'${mnemonic}')) # from line 661, col 18.
write(u'''_preMsgHook(
FwOpcodeType opCode, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The opcode") # u'$doxygen_post_comment("The opcode")' on line 662, col 30
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The opcode")')) # from line 662, col 30.
write(u'''
U32 cmdSeq ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The command sequence number") # u'$doxygen_post_comment("The command sequence number")' on line 663, col 20
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The command sequence number")')) # from line 663, col 20.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_commands",True): # generated from line 668, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Command handler base-class functions.
// Call these functions directly to bypass the command input port.
// ----------------------------------------------------------------------
''')
for mnemonic, opcode, sync, priority, full, comment in VFSL([locals()]+SL+[globals(), builtin],"commands",True): # generated from line 676, col 3
write(u''' //! Base-class handler function for command ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"mnemonic",True) # u'$mnemonic' on line 677, col 49
if _v is not None: write(_filter(_v, rawExpr=u'$mnemonic')) # from line 677, col 49.
write(u'''
//!
void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"mnemonic",True) # u'${mnemonic}' on line 679, col 10
if _v is not None: write(_filter(_v, rawExpr=u'${mnemonic}')) # from line 679, col 10.
write(u'''_cmdHandlerBase(
FwOpcodeType opCode, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The opcode") # u'$doxygen_post_comment("The opcode")' on line 680, col 30
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The opcode")')) # from line 680, col 30.
write(u'''
U32 cmdSeq, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The command sequence number") # u'$doxygen_post_comment("The command sequence number")' on line 681, col 21
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The command sequence number")')) # from line 681, col 21.
write(u'''
Fw::CmdArgBuffer &args ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The command argument buffer") # u'$doxygen_post_comment("The command argument buffer")' on line 682, col 32
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The command argument buffer")')) # from line 682, col 32.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_commands",True) or VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 687, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Command response
// ----------------------------------------------------------------------
//! Emit command response
//!
void cmdResponse_out(
FwOpcodeType opCode, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The opcode") # u'$doxygen_post_comment("The opcode")' on line 697, col 30
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The opcode")')) # from line 697, col 30.
write(u'''
U32 cmdSeq, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The command sequence number") # u'$doxygen_post_comment("The command sequence number")' on line 698, col 21
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The command sequence number")')) # from line 698, col 21.
write(u'''
Fw::CommandResponse response ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The command response") # u'$doxygen_post_comment("The command response")' on line 699, col 38
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The command response")')) # from line 699, col 38.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_events",True): # generated from line 703, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Event IDs
// ----------------------------------------------------------------------
enum {
''')
for ids, name, severity, format_string, throttle, comment in VFSL([locals()]+SL+[globals(), builtin],"events",True): # generated from line 711, col 3
inst = 0
for id in VFSL([locals()]+SL+[globals(), builtin],"ids",True): # generated from line 713, col 7
if len(VFSL([locals()]+SL+[globals(), builtin],"ids",True)) == 1: # generated from line 714, col 9
write(u''' EVENTID_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 715, col 15
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 715, col 15.
write(u''' = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"id",True) # u'$id' on line 715, col 31
if _v is not None: write(_filter(_v, rawExpr=u'$id')) # from line 715, col 31.
write(u''', /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 715, col 39
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 715, col 39.
write(u''' */
''')
else: # generated from line 716, col 9
write(u''' EVENTID_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 717, col 15
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 717, col 15.
write(u'''_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"inst",True) # u'${inst}' on line 717, col 29
if _v is not None: write(_filter(_v, rawExpr=u'${inst}')) # from line 717, col 29.
write(u''' = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"id",True) # u'$id' on line 717, col 39
if _v is not None: write(_filter(_v, rawExpr=u'$id')) # from line 717, col 39.
write(u''', /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 717, col 47
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 717, col 47.
write(u''' */
''')
inst = VFSL([locals()]+SL+[globals(), builtin],"inst",True) + 1
write(u''' };
''')
throttle_values = [ (VFSL([locals()]+SL+[globals(), builtin],"name",True), VFSL([locals()]+SL+[globals(), builtin],"throttle",True)) for ids, name, severity, format_string, throttle, comment in VFSL([locals()]+SL+[globals(), builtin],"events",True) if VFSL([locals()]+SL+[globals(), builtin],"throttle",True) != None ]
if len(VFSL([locals()]+SL+[globals(), builtin],"throttle_values",True)) > 0: # generated from line 729, col 3
write(u''' // ----------------------------------------------------------------------
// Event Throttle values - sets initial value of countdown variable
// ----------------------------------------------------------------------
enum {
''')
for name, throttle in VFSL([locals()]+SL+[globals(), builtin],"throttle_values",True): # generated from line 735, col 5
write(u''' EVENTID_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 736, col 15
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 736, col 15.
write(u'''_THROTTLE = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"throttle",True) # u'$throttle' on line 736, col 40
if _v is not None: write(_filter(_v, rawExpr=u'$throttle')) # from line 736, col 40.
write(u''', ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("Throttle reset count for " + VFSL([locals()]+SL+[globals(), builtin],"name",True)) # u'$doxygen_post_comment("Throttle reset count for " + $name)' on line 736, col 51
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("Throttle reset count for " + $name)')) # from line 736, col 51.
write(u'''
''')
write(u''' };
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"event_enums",True)) > 0: # generated from line 742, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Event enums
// ----------------------------------------------------------------------
''')
for enum_type, enum_members in VFSL([locals()]+SL+[globals(), builtin],"event_enums",True): # generated from line 749, col 3
write(u''' typedef enum {
''')
for member, init, comment in VFSL([locals()]+SL+[globals(), builtin],"enum_members",True): # generated from line 751, col 5
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"member",True) # u'${member}' on line 752, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${member}')) # from line 752, col 7.
_v = VFSL([locals()]+SL+[globals(), builtin],"init",True) # u'${init}' on line 752, col 16
if _v is not None: write(_filter(_v, rawExpr=u'${init}')) # from line 752, col 16.
write(u''', /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 752, col 28
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 752, col 28.
write(u''' */
''')
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'${enum_type}' on line 754, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${enum_type}')) # from line 754, col 7.
write(u'''_MAX
} ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'$enum_type' on line 755, col 7
if _v is not None: write(_filter(_v, rawExpr=u'$enum_type')) # from line 755, col 7.
write(u''';
''')
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Event logging functions
// ----------------------------------------------------------------------
''')
for ids, name, severity, format_string, throttle, comment in VFSL([locals()]+SL+[globals(), builtin],"events",True): # generated from line 765, col 1
params = VFSL([locals()]+SL+[globals(), builtin],"event_params",True)[VFSL([locals()]+SL+[globals(), builtin],"name",True)]
write(u''' //! Log event ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 767, col 19
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 767, col 19.
write(u'''
//!
''')
if not VFSL([locals()]+SL+[globals(), builtin],"comment",True) is None: # generated from line 769, col 3
write(u''' /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 770, col 8
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 770, col 8.
write(u''' */
''')
write(u''' void log_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"severity",True) # u'${severity}' on line 772, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${severity}')) # from line 772, col 14.
write(u'''_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 772, col 26
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 772, col 26.
write(u'''(
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"emit_non_port_params",False)(8, VFSL([locals()]+SL+[globals(), builtin],"params",True)) # u'$emit_non_port_params(8, $params)' on line 773, col 1
if _v is not None: write(_filter(_v, rawExpr=u'$emit_non_port_params(8, $params)')) # from line 773, col 1.
write(u'''
);
''')
if VFSL([locals()]+SL+[globals(), builtin],"throttle",True) != None: # generated from line 776, col 3
write(u''' // reset throttle value for ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 777, col 33
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 777, col 33.
write(u'''
void log_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"severity",True) # u'${severity}' on line 778, col 14
if _v is not None: write(_filter(_v, rawExpr=u'${severity}')) # from line 778, col 14.
write(u'''_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 778, col 26
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 778, col 26.
write(u'''_ThrottleClear(void);
''')
write(u'''
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_telemetry",True): # generated from line 782, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Channel IDs
// ----------------------------------------------------------------------
enum {
''')
for ids, name, type, size, update, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"channels",True): # generated from line 790, col 3
inst = 0
for id in VFSL([locals()]+SL+[globals(), builtin],"ids",True): # generated from line 792, col 6
if len(VFSL([locals()]+SL+[globals(), builtin],"ids",True)) == 1: # generated from line 793, col 8
write(u''' CHANNELID_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 794, col 17
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 794, col 17.
write(u''' = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"id",True) # u'$id' on line 794, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$id')) # from line 794, col 33.
write(u''', //!< Channel ID for ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 794, col 58
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 794, col 58.
write(u'''
''')
else: # generated from line 795, col 8
write(u''' CHANNELID_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 796, col 17
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 796, col 17.
write(u'''_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"inst",True) # u'${inst}' on line 796, col 31
if _v is not None: write(_filter(_v, rawExpr=u'${inst}')) # from line 796, col 31.
write(u''' = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"id",True) # u'$id' on line 796, col 41
if _v is not None: write(_filter(_v, rawExpr=u'$id')) # from line 796, col 41.
write(u''', //!< Channel ID for ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 796, col 66
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 796, col 66.
write(u''', instance ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"inst",True) # u'$inst' on line 796, col 82
if _v is not None: write(_filter(_v, rawExpr=u'$inst')) # from line 796, col 82.
write(u'''
''')
inst = VFSL([locals()]+SL+[globals(), builtin],"inst",True) + 1
write(u''' };
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"channel_enums",True)) > 0: # generated from line 804, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Channel enums
// ----------------------------------------------------------------------
''')
for enum_type, enum_members in VFSL([locals()]+SL+[globals(), builtin],"channel_enums",True): # generated from line 811, col 3
write(u''' typedef enum {
''')
for member, init, comment in VFSL([locals()]+SL+[globals(), builtin],"enum_members",True): # generated from line 813, col 5
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"member",True) # u'${member}' on line 814, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${member}')) # from line 814, col 7.
_v = VFSL([locals()]+SL+[globals(), builtin],"init",True) # u'${init}' on line 814, col 16
if _v is not None: write(_filter(_v, rawExpr=u'${init}')) # from line 814, col 16.
write(u''', /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 814, col 28
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 814, col 28.
write(u''' */
''')
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'${enum_type}' on line 816, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${enum_type}')) # from line 816, col 7.
write(u'''_MAX
} ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'$enum_type' on line 817, col 7
if _v is not None: write(_filter(_v, rawExpr=u'$enum_type')) # from line 817, col 7.
write(u''';
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_telemetry",True): # generated from line 821, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Telemetry write functions
// ----------------------------------------------------------------------
''')
for ids, name, type, size, update, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"channels",True): # generated from line 828, col 3
write(u''' //! Write telemetry channel ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 829, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 829, col 33.
write(u'''
//!
''')
if not VFSL([locals()]+SL+[globals(), builtin],"comment",True) is None: # generated from line 831, col 5
write(u''' /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 832, col 8
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 832, col 8.
write(u''' */
''')
if VFSL([locals()]+SL+[globals(), builtin],"type",True) == "string": # generated from line 834, col 5
write(u''' void tlmWrite_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 835, col 19
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 835, col 19.
write(u'''(
Fw::TlmString& arg ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The telemetry value") # u'$doxygen_post_comment("The telemetry value")' on line 836, col 28
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The telemetry value")')) # from line 836, col 28.
write(u'''
);
''')
else: # generated from line 838, col 5
if VFSL([locals()]+SL+[globals(), builtin],"typeinfo",True) == "enum": # generated from line 839, col 7
write(u''' void tlmWrite_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 840, col 19
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 840, col 19.
write(u'''(
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'${type}' on line 841, col 9
if _v is not None: write(_filter(_v, rawExpr=u'${type}')) # from line 841, col 9.
write(u''' arg ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The telemetry value") # u'$doxygen_post_comment("The telemetry value")' on line 841, col 21
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The telemetry value")')) # from line 841, col 21.
write(u'''
);
''')
else: # generated from line 843, col 7
if VFSL([locals()]+SL+[globals(), builtin],"typeinfo",True) == "user": # generated from line 844, col 9
write(u''' void tlmWrite_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 845, col 19
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 845, col 19.
write(u'''(
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'${type}' on line 846, col 9
if _v is not None: write(_filter(_v, rawExpr=u'${type}')) # from line 846, col 9.
write(u'''& arg ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The telemetry value") # u'$doxygen_post_comment("The telemetry value")' on line 846, col 22
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The telemetry value")')) # from line 846, col 22.
write(u'''
);
''')
else: # generated from line 848, col 9
write(u''' void tlmWrite_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 849, col 19
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 849, col 19.
write(u'''(
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'${type}' on line 850, col 9
if _v is not None: write(_filter(_v, rawExpr=u'${type}')) # from line 850, col 9.
write(u''' arg ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The telemetry value") # u'$doxygen_post_comment("The telemetry value")' on line 850, col 21
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The telemetry value")')) # from line 850, col 21.
write(u'''
);
''')
write(u'''
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_telemetry",True) or VFSL([locals()]+SL+[globals(), builtin],"has_events",True): # generated from line 858, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Time
// ----------------------------------------------------------------------
//! Get the time
//!
//! \\return The current time
//!
Fw::Time getTime(void);
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"parameter_enums",True)) > 0: # generated from line 872, col 1
for enum_type, enum_members in VFSL([locals()]+SL+[globals(), builtin],"parameter_enums",True): # generated from line 873, col 3
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Parameter enums
// ----------------------------------------------------------------------
typedef enum {
''')
for member, init, comment in VFSL([locals()]+SL+[globals(), builtin],"enum_members",True): # generated from line 881, col 5
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"member",True) # u'${member}' on line 882, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${member}')) # from line 882, col 7.
_v = VFSL([locals()]+SL+[globals(), builtin],"init",True) # u'${init}' on line 882, col 16
if _v is not None: write(_filter(_v, rawExpr=u'${init}')) # from line 882, col 16.
write(u''', /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 882, col 28
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 882, col 28.
write(u''' */
''')
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'${enum_type}' on line 884, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${enum_type}')) # from line 884, col 7.
write(u'''_MAX
} ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'$enum_type' on line 885, col 7
if _v is not None: write(_filter(_v, rawExpr=u'$enum_type')) # from line 885, col 7.
write(u''';
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 889, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Parameter IDs
// ----------------------------------------------------------------------
enum {
''')
for ids, name, type, osets, osavs, size, default, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"parameters",True): # generated from line 897, col 3
inst = 0
for id in VFSL([locals()]+SL+[globals(), builtin],"ids",True): # generated from line 899, col 5
if len(VFSL([locals()]+SL+[globals(), builtin],"ids",True)) > 1: # generated from line 900, col 7
write(u''' PARAMID_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 901, col 15
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 901, col 15.
write(u'''_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"inst",True) # u'${inst}' on line 901, col 29
if _v is not None: write(_filter(_v, rawExpr=u'${inst}')) # from line 901, col 29.
write(u''' = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"id",True) # u'${id}' on line 901, col 39
if _v is not None: write(_filter(_v, rawExpr=u'${id}')) # from line 901, col 39.
write(u''',
''')
else: # generated from line 902, col 7
write(u''' PARAMID_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 903, col 15
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 903, col 15.
write(u''' = ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"id",True) # u'${id}' on line 903, col 31
if _v is not None: write(_filter(_v, rawExpr=u'${id}')) # from line 903, col 31.
write(u''',
''')
inst = VFSL([locals()]+SL+[globals(), builtin],"inst",True) + 1
for ids, name, type, osets, osavs, size, default, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"parameters",True): # generated from line 908, col 3
if len(VFSL([locals()]+SL+[globals(), builtin],"ids",True)) > 1: # generated from line 909, col 5
write(u''' PARAMID_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name.upper",True) # u'${name.upper}' on line 910, col 15
if _v is not None: write(_filter(_v, rawExpr=u'${name.upper}')) # from line 910, col 15.
write(u''', // for notifications
''')
write(u''' };
PROTECTED:
// ----------------------------------------------------------------------
// Parameter update hook
// ----------------------------------------------------------------------
//! \\brief Called whenever a parameter is updated
//!
//! This function does nothing by default. You may override it.
//!
virtual void parameterUpdated(
FwPrmIdType id ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The parameter ID") # u'$doxygen_post_comment("The parameter ID")' on line 926, col 24
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The parameter ID")')) # from line 926, col 24.
write(u'''
);
// ----------------------------------------------------------------------
// Parameter load hook
// ----------------------------------------------------------------------
//! \\brief Called whenever a parameters are loaded
//!
//! This function does nothing by default. You may override it.
//!
virtual void parametersLoaded();
PROTECTED:
// ----------------------------------------------------------------------
// Parameter get functions
// ----------------------------------------------------------------------
''')
for ids, name, type, osets, osavs, size, default, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"parameters",True): # generated from line 945, col 3
write(u''' //! Get parameter ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 946, col 23
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 946, col 23.
write(u'''
//!
''')
if not VFSL([locals()]+SL+[globals(), builtin],"comment",True) is None: # generated from line 948, col 5
write(u''' /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 949, col 8
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 949, col 8.
write(u''' */
''')
write(u''' //! \\return The parameter value
//!
''')
if VFSL([locals()]+SL+[globals(), builtin],"type",True) == "string": # generated from line 953, col 5
write(u''' Fw::ParamString paramGet_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 954, col 30
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 954, col 30.
write(u'''(
Fw::ParamValid& valid ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("Whether the parameter is valid") # u'$doxygen_post_comment("Whether the parameter is valid")' on line 955, col 31
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("Whether the parameter is valid")')) # from line 955, col 31.
write(u'''
);
''')
else: # generated from line 957, col 5
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'$type' on line 958, col 5
if _v is not None: write(_filter(_v, rawExpr=u'$type')) # from line 958, col 5.
write(u''' paramGet_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 958, col 20
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 958, col 20.
write(u'''(
Fw::ParamValid& valid ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("Whether the parameter is valid") # u'$doxygen_post_comment("Whether the parameter is valid")' on line 959, col 31
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("Whether the parameter is valid")')) # from line 959, col 31.
write(u'''
);
''')
write(u'''
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"internal_interface_enums",True)) > 0: # generated from line 965, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Internal interface enums
// ----------------------------------------------------------------------
''')
for enum_type, enum_members in VFSL([locals()]+SL+[globals(), builtin],"internal_interface_enums",True): # generated from line 972, col 3
write(u''' typedef enum {
''')
for member, init, comment in VFSL([locals()]+SL+[globals(), builtin],"enum_members",True): # generated from line 974, col 5
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"member",True) # u'${member}' on line 975, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${member}')) # from line 975, col 7.
_v = VFSL([locals()]+SL+[globals(), builtin],"init",True) # u'${init}' on line 975, col 16
if _v is not None: write(_filter(_v, rawExpr=u'${init}')) # from line 975, col 16.
write(u''', /* ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"comment",True) # u'$comment' on line 975, col 28
if _v is not None: write(_filter(_v, rawExpr=u'$comment')) # from line 975, col 28.
write(u''' */
''')
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'${enum_type}' on line 977, col 7
if _v is not None: write(_filter(_v, rawExpr=u'${enum_type}')) # from line 977, col 7.
write(u'''_MAX
} ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"enum_type",True) # u'$enum_type' on line 978, col 7
if _v is not None: write(_filter(_v, rawExpr=u'$enum_type')) # from line 978, col 7.
write(u''';
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_internal_interfaces",True): # generated from line 982, col 1
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Internal interface handlers
// ----------------------------------------------------------------------
''')
for name, priority, full in VFSL([locals()]+SL+[globals(), builtin],"internal_interfaces",True): # generated from line 989, col 3
params = VFSL([locals()]+SL+[globals(), builtin],"internal_interface_params",True)[VFSL([locals()]+SL+[globals(), builtin],"name",True)]
write(u''' //! Internal Interface handler for ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 991, col 40
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 991, col 40.
write(u'''
//!
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"params",True)) == 0: # generated from line 993, col 5
write(u''' virtual void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 994, col 18
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 994, col 18.
write(u'''_internalInterfaceHandler(void) = 0;
''')
else: # generated from line 995, col 5
write(u''' virtual void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 996, col 18
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 996, col 18.
write(u'''_internalInterfaceHandler(
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"emit_non_port_params",False)(8, VFSL([locals()]+SL+[globals(), builtin],"params",True)) # u'$emit_non_port_params(8, $params)' on line 997, col 1
if _v is not None: write(_filter(_v, rawExpr=u'$emit_non_port_params(8, $params)')) # from line 997, col 1.
write(u'''
) = 0;
''')
write(u'''
''')
write(u''' PROTECTED:
// ----------------------------------------------------------------------
// Internal interface base-class functions
// ----------------------------------------------------------------------
''')
for name, priority, full in VFSL([locals()]+SL+[globals(), builtin],"internal_interfaces",True): # generated from line 1008, col 3
params = VFSL([locals()]+SL+[globals(), builtin],"internal_interface_params",True)[VFSL([locals()]+SL+[globals(), builtin],"name",True)]
write(u''' //! Base class function for ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 1010, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 1010, col 33.
write(u'''
//!
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"params",True)) == 0: # generated from line 1012, col 5
write(u''' void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1013, col 10
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1013, col 10.
write(u'''_internalInterfaceInvoke(void);
''')
else: # generated from line 1014, col 5
write(u''' void ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1015, col 10
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1015, col 10.
write(u'''_internalInterfaceInvoke(
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"emit_non_port_params",False)(8, VFSL([locals()]+SL+[globals(), builtin],"params",True)) # u'$emit_non_port_params(8, $params)' on line 1016, col 1
if _v is not None: write(_filter(_v, rawExpr=u'$emit_non_port_params(8, $params)')) # from line 1016, col 1.
write(u'''
);
''')
write(u'''
''')
if VFSL([locals()]+SL+[globals(), builtin],"needs_msg_size",True): # generated from line 1022, col 1
write(u'''
PRIVATE:
NATIVE_INT_TYPE m_msgSize; //!< store max message size
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_typed_input_ports",True): # generated from line 1027, col 1
write(u'''
PRIVATE:
// ----------------------------------------------------------------------
// Typed input ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, full, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"typed_input_ports",True): # generated from line 1035, col 3
namespace = VFSL([locals()]+SL+[globals(), builtin],"port_namespaces",True)[VFSL([locals()]+SL+[globals(), builtin],"type",True)]
write(u''' //! Input port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 1037, col 20
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 1037, col 20.
write(u'''
//!
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"namespace",True) # u'$namespace' on line 1039, col 5
if _v is not None: write(_filter(_v, rawExpr=u'$namespace')) # from line 1039, col 5.
write(u'''::Input''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'${type}' on line 1039, col 22
if _v is not None: write(_filter(_v, rawExpr=u'${type}')) # from line 1039, col 22.
write(u'''Port m_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 1039, col 36
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 1039, col 36.
write(u'''_InputPort[NUM_''')
_v = VFN(VFSL([locals()]+SL+[globals(), builtin],"instance",True),"upper",False)() # u'${instance.upper()}' on line 1039, col 62
if _v is not None: write(_filter(_v, rawExpr=u'${instance.upper()}')) # from line 1039, col 62.
write(u'''_INPUT_PORTS];
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_serial_input_ports",True): # generated from line 1043, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Serial input ports
// ----------------------------------------------------------------------
''')
for instance, sync, priority, full, max_num in VFSL([locals()]+SL+[globals(), builtin],"serial_input_ports",True): # generated from line 1050, col 3
write(u''' //! Input port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 1051, col 20
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 1051, col 20.
write(u'''
//!
Fw::InputSerializePort m_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 1053, col 30
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 1053, col 30.
write(u'''_InputPort[NUM_''')
_v = VFN(VFSL([locals()]+SL+[globals(), builtin],"instance",True),"upper",False)() # u'${instance.upper()}' on line 1053, col 56
if _v is not None: write(_filter(_v, rawExpr=u'${instance.upper()}')) # from line 1053, col 56.
write(u'''_INPUT_PORTS];
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_typed_output_ports",True): # generated from line 1057, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Typed output ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"typed_output_ports",True): # generated from line 1064, col 3
namespace = VFSL([locals()]+SL+[globals(), builtin],"port_namespaces",True)[VFSL([locals()]+SL+[globals(), builtin],"type",True)]
if VFSL([locals()]+SL+[globals(), builtin],"role",True) == "LogTextEvent": # generated from line 1066, col 5
write(u'''#if FW_ENABLE_TEXT_LOGGING == 1
''')
write(u''' //! Output port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 1069, col 21
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 1069, col 21.
write(u'''
//!
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"namespace",True) # u'$namespace' on line 1071, col 5
if _v is not None: write(_filter(_v, rawExpr=u'$namespace')) # from line 1071, col 5.
write(u'''::Output''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'${type}' on line 1071, col 23
if _v is not None: write(_filter(_v, rawExpr=u'${type}')) # from line 1071, col 23.
write(u'''Port m_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 1071, col 37
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 1071, col 37.
write(u'''_OutputPort[NUM_''')
_v = VFN(VFSL([locals()]+SL+[globals(), builtin],"instance",True),"upper",False)() # u'${instance.upper()}' on line 1071, col 64
if _v is not None: write(_filter(_v, rawExpr=u'${instance.upper()}')) # from line 1071, col 64.
write(u'''_OUTPUT_PORTS];
''')
if VFSL([locals()]+SL+[globals(), builtin],"role",True) == "LogTextEvent": # generated from line 1072, col 5
write(u'''#endif
''')
write(u'''
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_serial_output_ports",True): # generated from line 1078, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Serial output ports
// ----------------------------------------------------------------------
''')
for instance, sync, priority, max_num in VFSL([locals()]+SL+[globals(), builtin],"serial_output_ports",True): # generated from line 1085, col 3
write(u''' //! Output port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 1086, col 21
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 1086, col 21.
write(u'''
//!
Fw::OutputSerializePort m_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 1088, col 31
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 1088, col 31.
write(u'''_OutputPort[NUM_''')
_v = VFN(VFSL([locals()]+SL+[globals(), builtin],"instance",True),"upper",False)() # u'${instance.upper()}' on line 1088, col 58
if _v is not None: write(_filter(_v, rawExpr=u'${instance.upper()}')) # from line 1088, col 58.
write(u'''_OUTPUT_PORTS];
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_typed_input_ports",True): # generated from line 1092, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Calls for messages received on typed input ports
// ----------------------------------------------------------------------
''')
for instance, type, sync, priority, full, role, max_num in VFSL([locals()]+SL+[globals(), builtin],"typed_input_ports",True): # generated from line 1099, col 3
params = VFSL([locals()]+SL+[globals(), builtin],"port_params",True)[VFSL([locals()]+SL+[globals(), builtin],"instance",True)]
return_type = VFSL([locals()]+SL+[globals(), builtin],"port_return_type_strs",True)[VFSL([locals()]+SL+[globals(), builtin],"instance",True)]
write(u''' //! Callback for port ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'$instance' on line 1102, col 27
if _v is not None: write(_filter(_v, rawExpr=u'$instance')) # from line 1102, col 27.
write(u'''
//!
static ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"return_type",True) # u'${return_type}' on line 1104, col 12
if _v is not None: write(_filter(_v, rawExpr=u'${return_type}')) # from line 1104, col 12.
write(u'''m_p_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 1104, col 30
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 1104, col 30.
write(u'''_in(
Fw::PassiveComponentBase* callComp, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The component instance") # u'$doxygen_post_comment("The component instance")' on line 1105, col 45
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The component instance")')) # from line 1105, col 45.
write(u'''
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"params",True)) == 0: # generated from line 1106, col 5
write(u''' NATIVE_INT_TYPE portNum ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 1107, col 33
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 1107, col 33.
write(u'''
''')
else: # generated from line 1108, col 5
write(u''' NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 1109, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 1109, col 34.
write(u'''
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"emit_port_params",False)(8, VFSL([locals()]+SL+[globals(), builtin],"params",True)) # u'$emit_port_params(8, $params)' on line 1110, col 1
if _v is not None: write(_filter(_v, rawExpr=u'$emit_port_params(8, $params)')) # from line 1110, col 1.
write(u'''
''')
write(u''' );
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_serializable_ports",True): # generated from line 1116, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Call for messages received on serial input ports
// ----------------------------------------------------------------------
#if FW_PORT_SERIALIZATION
''')
for instance, sync, priority, full, max_num in VFSL([locals()]+SL+[globals(), builtin],"serial_input_ports",True): # generated from line 1125, col 3
write(u'''
//! Serial port callback
//!
static void m_p_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"instance",True) # u'${instance}' on line 1129, col 21
if _v is not None: write(_filter(_v, rawExpr=u'${instance}')) # from line 1129, col 21.
write(u'''_in(
Fw::PassiveComponentBase* callComp, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The component instance") # u'$doxygen_post_comment("The component instance")' on line 1130, col 45
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The component instance")')) # from line 1130, col 45.
write(u'''
NATIVE_INT_TYPE portNum, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The port number") # u'$doxygen_post_comment("The port number")' on line 1131, col 34
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The port number")')) # from line 1131, col 34.
write(u'''
Fw::SerializeBufferBase &Buffer ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The serialization buffer") # u'$doxygen_post_comment("The serialization buffer")' on line 1132, col 41
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The serialization buffer")')) # from line 1132, col 41.
write(u'''
);
''')
write(u'''
#endif
''')
if VFSL([locals()]+SL+[globals(), builtin],"kind",True) == "active": # generated from line 1139, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Message dispatch functions
// ----------------------------------------------------------------------
//! Called in the message loop to dispatch a message from the queue
//!
virtual MsgDispatchStatus doDispatch(void);
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_guarded_ports",True) or VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 1151, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Mutexes
// ----------------------------------------------------------------------
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_guarded_ports",True): # generated from line 1159, col 1
write(u''' //! Mutex for guarded ports
//!
Os::Mutex m_guardedPortMutex;
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 1165, col 1
write(u''' //! Mutex for locking parameters during sets and saves
//!
Os::Mutex m_paramLock;
''')
if VFSL([locals()]+SL+[globals(), builtin],"has_parameters",True): # generated from line 1171, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Parameter validity flags
// ----------------------------------------------------------------------
''')
for ids, name, type, osets, osavs, size, default, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"parameters",True): # generated from line 1178, col 3
write(u''' //! True if parameter ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 1179, col 27
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 1179, col 27.
write(u''' was successfully received
//!
Fw::ParamValid m_param_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1181, col 28
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1181, col 28.
write(u'''_valid;
''')
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Parameter variables
// ----------------------------------------------------------------------
''')
for ids, name, type, osets, osavs, size, default, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"parameters",True): # generated from line 1190, col 3
write(u''' //! Parameter ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 1191, col 19
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 1191, col 19.
write(u'''
//!
''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_pre_comment",False)(VFSL([locals()]+SL+[globals(), builtin],"comment",True)) # u'$doxygen_pre_comment($comment)' on line 1193, col 5
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_pre_comment($comment)')) # from line 1193, col 5.
write(u'''
''')
if VFSL([locals()]+SL+[globals(), builtin],"type",True) == "string": # generated from line 1194, col 5
write(u''' Fw::ParamString m_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1195, col 23
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1195, col 23.
write(u''';
''')
else: # generated from line 1196, col 5
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'${type}' on line 1197, col 5
if _v is not None: write(_filter(_v, rawExpr=u'${type}')) # from line 1197, col 5.
write(u''' m_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1197, col 15
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1197, col 15.
write(u''';
''')
write(u'''
''')
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Private parameter get function
// ----------------------------------------------------------------------
//! Get a parameter by ID
//!
//! \\return Whether the parameter is valid
//!
Fw::ParamValid getParam(
FwPrmIdType id, ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The ID") # u'$doxygen_post_comment("The ID")' on line 1212, col 25
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The ID")')) # from line 1212, col 25.
write(u'''
Fw::ParamBuffer& buff ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The paramter value") # u'$doxygen_post_comment("The paramter value")' on line 1213, col 31
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The paramter value")')) # from line 1213, col 31.
write(u'''
);
PRIVATE:
// ----------------------------------------------------------------------
// Parameter set functions
// ----------------------------------------------------------------------
''')
for ids, name, type, osets, osavs, size, default, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"parameters",True): # generated from line 1222, col 3
write(u''' //! Set parameter ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 1223, col 23
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 1223, col 23.
write(u'''
//!
//! \\return The command response
//!
Fw::CommandResponse paramSet_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1227, col 34
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1227, col 34.
write(u'''(
Fw::SerializeBufferBase &val ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"doxygen_post_comment",False)("The serialization buffer") # u'$doxygen_post_comment("The serialization buffer")' on line 1228, col 38
if _v is not None: write(_filter(_v, rawExpr=u'$doxygen_post_comment("The serialization buffer")')) # from line 1228, col 38.
write(u'''
);
''')
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Parameter save functions
// ----------------------------------------------------------------------
''')
for ids, name, type, osets, osavs, size, default, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"parameters",True): # generated from line 1238, col 3
write(u''' //! Save parameter ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 1239, col 24
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 1239, col 24.
write(u'''
//!
//! \\return The command response
//!
Fw::CommandResponse paramSave_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1243, col 35
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1243, col 35.
write(u'''(void);
''')
update_channels = [ (VFSL([locals()]+SL+[globals(), builtin],"name",True), VFSL([locals()]+SL+[globals(), builtin],"type",True)) for ids, name, type, size, update, comment, typeinfo in VFSL([locals()]+SL+[globals(), builtin],"channels",True) if VFSL([locals()]+SL+[globals(), builtin],"update",True) != None and VFSL([locals()]+SL+[globals(), builtin],"update",True) != "always" ]
if len(VFSL([locals()]+SL+[globals(), builtin],"update_channels",True)) > 0: # generated from line 1252, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// First update flags for telemetry channels
// ----------------------------------------------------------------------
''')
for name, type in VFSL([locals()]+SL+[globals(), builtin],"update_channels",True): # generated from line 1259, col 3
write(u''' //! Initialized to true; cleared when channel ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 1260, col 51
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 1260, col 51.
write(u''' is first updated
//!
bool m_first_update_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1262, col 25
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1262, col 25.
write(u''';
''')
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Last value storage for telemetry channels
// ----------------------------------------------------------------------
''')
for name, type in VFSL([locals()]+SL+[globals(), builtin],"update_channels",True): # generated from line 1271, col 3
write(u''' //! Records the last emitted value for channel ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'$name' on line 1272, col 52
if _v is not None: write(_filter(_v, rawExpr=u'$name')) # from line 1272, col 52.
write(u'''
//!
''')
if VFSL([locals()]+SL+[globals(), builtin],"type",True) == "string": # generated from line 1274, col 5
write(u''' Fw::TlmString m_last_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1275, col 26
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1275, col 26.
write(u''';
''')
else: # generated from line 1276, col 5
write(u''' ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"type",True) # u'$type' on line 1277, col 5
if _v is not None: write(_filter(_v, rawExpr=u'$type')) # from line 1277, col 5.
write(u''' m_last_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1277, col 18
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1277, col 18.
write(u''';
''')
write(u'''
''')
write(u'''
''')
if len(VFSL([locals()]+SL+[globals(), builtin],"events",True)) > 0: # generated from line 1283, col 1
write(u''' PRIVATE:
// ----------------------------------------------------------------------
// Counter values for event throttling
// ----------------------------------------------------------------------
''')
for ids, name, severity, format_string, throttle, comment in VFSL([locals()]+SL+[globals(), builtin],"events",True): # generated from line 1288, col 1
if VFSL([locals()]+SL+[globals(), builtin],"throttle",True) != None: # generated from line 1289, col 3
write(u''' NATIVE_UINT_TYPE m_''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1290, col 24
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1290, col 24.
write(u'''Throttle; //!< throttle for ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"name",True) # u'${name}' on line 1290, col 59
if _v is not None: write(_filter(_v, rawExpr=u'${name}')) # from line 1290, col 59.
write(u'''
''')
write(u'''
};
''')
if VFSL([locals()]+SL+[globals(), builtin],"namespace_list",True) != None: # generated from line 1297, col 1
for namespace in VFSL([locals()]+SL+[globals(), builtin],"namespace_list",True): # generated from line 1298, col 2
write(u'''} // end namespace ''')
_v = VFSL([locals()]+SL+[globals(), builtin],"namespace",True) # u'$namespace' on line 1299, col 20
if _v is not None: write(_filter(_v, rawExpr=u'$namespace')) # from line 1299, col 20.
write(u'''
''')
write(u'''#endif
''')
########################################
## END - generated method body
return _dummyTrans and trans.response().getvalue() or ""
##################################################
## CHEETAH GENERATED ATTRIBUTES
## NOTE(review): auto-generated Cheetah template attributes -- do not edit by
## hand. They record the compiler version and source-file provenance of this
## compiled template so the runtime can detect a stale compilation.
_CHEETAH__instanceInitialized = False
_CHEETAH_version = __CHEETAH_version__
_CHEETAH_versionTuple = __CHEETAH_versionTuple__
_CHEETAH_genTime = __CHEETAH_genTime__
_CHEETAH_genTimestamp = __CHEETAH_genTimestamp__
_CHEETAH_src = __CHEETAH_src__
_CHEETAH_srcLastModified = __CHEETAH_srcLastModified__
# name of the main generated render method on this template class
_mainCheetahMethod_for_hpp= 'respond'
## END CLASS DEFINITION
# Auto-generated Cheetah bootstrap: attach the template-API plumbing methods to
# the compiled class, but only if they have not been added already.
if not hasattr(hpp, '_initCheetahAttributes'):
    templateAPIClass = getattr(hpp, '_CHEETAH_templateClass', Template)
    templateAPIClass._addCheetahPlumbingCodeToClass(hpp)
# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/
##################################################
## if run from command line:
# Allow the compiled template to be rendered directly from the command line.
if __name__ == '__main__':
    from Cheetah.TemplateCmdLineIface import CmdLineIface
    CmdLineIface(templateObj=hpp()).run()
| 60.980637 | 393 | 0.506495 | 16,728 | 141,719 | 4.158477 | 0.057209 | 0.075184 | 0.09022 | 0.142849 | 0.855944 | 0.832468 | 0.809137 | 0.796688 | 0.780185 | 0.766083 | 0 | 0.036112 | 0.264333 | 141,719 | 2,323 | 394 | 61.006888 | 0.631107 | 0.173329 | 0 | 0.818357 | 0 | 0 | 0.318545 | 0.109113 | 0 | 0 | 0 | 0 | 0.000481 | 1 | 0.000961 | false | 0.003844 | 0.00865 | 0 | 0.014416 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b3d69ea1fc55218d29987528529a5ce1beb4582f | 8,122 | py | Python | uninas/utils/generate/networks/manually/mixnet.py | cogsys-tuebingen/uninas | 06729b9cf517ec416fb798ae387c5bd9c3a278ac | [
"MIT"
] | 18 | 2020-11-22T16:03:08.000Z | 2022-03-15T12:11:46.000Z | uninas/utils/generate/networks/manually/mixnet.py | cogsys-tuebingen/uninas | 06729b9cf517ec416fb798ae387c5bd9c3a278ac | [
"MIT"
] | 2 | 2022-01-04T08:10:17.000Z | 2022-01-05T08:13:14.000Z | uninas/utils/generate/networks/manually/mixnet.py | cogsys-tuebingen/uninas | 06729b9cf517ec416fb798ae387c5bd9c3a278ac | [
"MIT"
] | 6 | 2021-03-08T07:08:52.000Z | 2022-02-24T12:00:43.000Z | """
MixConv: Mixed depthwise convolutional kernels
https://arxiv.org/abs/1907.09595
manually engineering as a search space according to the paper has
- expansion groups {1, 2} for in/out 1x1 convs
- expansion size (probably {3, 6}?)
- squeeze-excitation (probably {None, 0.25, 0.5}?)
- mixed kernel size {3, 3.5, 3.5.7, 3.5.7.9, 3.5.7.9.11}
- activation function? relu/swish are used
which are at least 120 options per block in a naive implementation
(although repeated blocks suggest that they are topology-grouped within stages)
"""
import torch.nn as nn
from uninas.modules.networks.stackedcells import StackedCellsNetworkBody
from uninas.modules.stems.mobilenet import MobileNetV2Stem
from uninas.modules.layers.mobilenet import MobileInvertedConvLayer
from uninas.modules.heads.cnn import FeatureMixClassificationHead
from uninas.utils.shape import Shape
from uninas.utils.generate.networks.manually.abstract import get_stem_instance, get_head_instance,\
get_passthrough_partials, get_network
def get_mixnet_s(s_in=Shape([3, 224, 224]), s_out=Shape([1000])) -> nn.Module:
    """Assemble the MixNet-S architecture as a fixed stack of cells.

    :param s_in: input shape (channels, height, width)
    :param s_out: output shape (number of classes)
    :return: the assembled network module
    """

    def se(c_mul: float) -> dict:
        # squeeze-excitation attention config; only the squeeze ratio differs
        return dict(att_cls='SqueezeExcitationChannelModule', use_c_substitute=True,
                    c_mul=c_mul, squeeze_act='swish', excite_act='sigmoid',
                    squeeze_bias=True, excite_bias=True, squeeze_bn=False)

    def block(features: int, **overrides) -> tuple:
        # one mobile inverted-bottleneck cell spec, sharing the common defaults
        return features, MobileInvertedConvLayer, defaults, overrides

    stem = get_stem_instance(MobileNetV2Stem, features=16, features1=16, act_fun='relu', act_fun1='relu')
    head = get_head_instance(FeatureMixClassificationHead, features=1536, act_fun='relu')
    defaults = dict(k_size=(3,), k_size_in=1, k_size_out=1, padding='same', dilation=1,
                    bn_affine=True, act_inplace=True, att_dict=None)
    se25, se5 = se(0.25), se(0.5)
    cell_partials, cell_order = get_passthrough_partials([
        block(24, stride=2, expansion=6, act_fun='relu', k_size=(3,), k_size_in=(1, 1), k_size_out=(1, 1)),
        block(24, stride=1, expansion=3, act_fun='relu', k_size=(3,), k_size_in=(1, 1), k_size_out=(1, 1)),
        block(40, stride=2, expansion=6, act_fun='swish', k_size=(3, 5, 7), att_dict=se5),
        block(40, stride=1, expansion=6, act_fun='swish', k_size=(3, 5), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(40, stride=1, expansion=6, act_fun='swish', k_size=(3, 5), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(40, stride=1, expansion=6, act_fun='swish', k_size=(3, 5), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(80, stride=2, expansion=6, act_fun='swish', k_size=(3, 5, 7), k_size_out=(1, 1), att_dict=se25),
        block(80, stride=1, expansion=6, act_fun='swish', k_size=(3, 5), k_size_out=(1, 1), att_dict=se25),
        block(80, stride=1, expansion=6, act_fun='swish', k_size=(3, 5), k_size_out=(1, 1), att_dict=se25),
        block(120, stride=1, expansion=6, act_fun='swish', k_size=(3, 5, 7), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(120, stride=1, expansion=3, act_fun='swish', k_size=(3, 5, 7, 9), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(120, stride=1, expansion=3, act_fun='swish', k_size=(3, 5, 7, 9), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(200, stride=2, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9, 11), att_dict=se5),
        block(200, stride=1, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), k_size_out=(1, 1), att_dict=se5),
        block(200, stride=1, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), k_size_out=(1, 1), att_dict=se5),
    ])
    return get_network(StackedCellsNetworkBody, stem, head, cell_partials, cell_order, s_in, s_out)
def get_mixnet_m(s_in=Shape([3, 224, 224]), s_out=Shape([1000])) -> nn.Module:
    """Assemble the MixNet-M architecture as a fixed stack of cells.

    :param s_in: input shape (channels, height, width)
    :param s_out: output shape (number of classes)
    :return: the assembled network module
    """

    def se(c_mul: float) -> dict:
        # squeeze-excitation attention config; only the squeeze ratio differs
        return dict(att_cls='SqueezeExcitationChannelModule', use_c_substitute=True,
                    c_mul=c_mul, squeeze_act='swish', excite_act='sigmoid',
                    squeeze_bias=True, excite_bias=True, squeeze_bn=False)

    def block(features: int, **overrides) -> tuple:
        # one mobile inverted-bottleneck cell spec, sharing the common defaults
        return features, MobileInvertedConvLayer, defaults, overrides

    stem = get_stem_instance(MobileNetV2Stem, features=24, features1=24, act_fun='relu', act_fun1='relu')
    head = get_head_instance(FeatureMixClassificationHead, features=1536, act_fun='relu')
    defaults = dict(k_size=(3,), k_size_in=1, k_size_out=1, padding='same', dilation=1,
                    bn_affine=True, act_inplace=True, att_dict=None)
    se25, se5 = se(0.25), se(0.5)
    cell_partials, cell_order = get_passthrough_partials([
        block(32, stride=2, expansion=6, act_fun='relu', k_size=(3, 5, 7), k_size_in=(1, 1), k_size_out=(1, 1)),
        block(32, stride=1, expansion=3, act_fun='relu', k_size=(3,), k_size_in=(1, 1), k_size_out=(1, 1)),
        block(40, stride=2, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), att_dict=se5),
        block(40, stride=1, expansion=6, act_fun='swish', k_size=(3, 5), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(40, stride=1, expansion=6, act_fun='swish', k_size=(3, 5), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(40, stride=1, expansion=6, act_fun='swish', k_size=(3, 5), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(80, stride=2, expansion=6, act_fun='swish', k_size=(3, 5, 7), att_dict=se25),
        block(80, stride=1, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se25),
        block(80, stride=1, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se25),
        block(80, stride=1, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se25),
        block(120, stride=1, expansion=6, act_fun='swish', k_size=(3,), att_dict=se5),
        block(120, stride=1, expansion=3, act_fun='swish', k_size=(3, 5, 7, 9), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(120, stride=1, expansion=3, act_fun='swish', k_size=(3, 5, 7, 9), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(120, stride=1, expansion=3, act_fun='swish', k_size=(3, 5, 7, 9), k_size_in=(1, 1), k_size_out=(1, 1), att_dict=se5),
        block(200, stride=2, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), att_dict=se5),
        block(200, stride=1, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), k_size_out=(1, 1), att_dict=se5),
        block(200, stride=1, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), k_size_out=(1, 1), att_dict=se5),
        block(200, stride=1, expansion=6, act_fun='swish', k_size=(3, 5, 7, 9), k_size_out=(1, 1), att_dict=se5),
    ])
    return get_network(StackedCellsNetworkBody, stem, head, cell_partials, cell_order, s_in, s_out)
| 79.627451 | 167 | 0.691948 | 1,283 | 8,122 | 4.157443 | 0.112237 | 0.079678 | 0.03937 | 0.253656 | 0.841582 | 0.838583 | 0.838583 | 0.838583 | 0.838583 | 0.838583 | 0 | 0.069496 | 0.144299 | 8,122 | 101 | 168 | 80.415842 | 0.697986 | 0.065501 | 0 | 0.652174 | 0 | 0 | 0.047613 | 0.015827 | 0 | 0 | 0 | 0 | 0 | 1 | 0.028986 | false | 0.043478 | 0.101449 | 0 | 0.15942 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
37978c5098612d0eb7b5cd72d671dca53ea64f6a | 4,030 | py | Python | tests/test_path.py | nattvara/saas | 766b538ac90daa8f8eadce8a1fd43f83413610de | [
"MIT"
] | 2 | 2019-01-18T16:14:03.000Z | 2019-04-12T13:27:50.000Z | tests/test_path.py | nattvara/saas | 766b538ac90daa8f8eadce8a1fd43f83413610de | [
"MIT"
] | null | null | null | tests/test_path.py | nattvara/saas | 766b538ac90daa8f8eadce8a1fd43f83413610de | [
"MIT"
] | null | null | null | """Path test."""
from saas.mount.file import Path
import unittest
class TestPath(unittest.TestCase):
    """Tests for the mount path parser.

    A mount path has the form ``/<domain>/<captured_at>/<end>``, e.g.
    ``/example.com/2019-01-13H20:00/foo/bar/baz.png``.
    """

    def test_path_can_parse_parts_of_path(self):
        """A full path splits into domain, capture timestamp and end."""
        path = Path('/example.com/2019-01-13H20:00/foo/bar/baz.png')
        self.assertEqual('example.com', path.domain)
        self.assertEqual('2019-01-13H20:00', path.captured_at)
        self.assertEqual('/foo/bar/baz.png', path.end)

    def test_path_can_parse_end_correctly(self):
        """The end component is parsed with any trailing slash stripped."""
        cases = [
            ('/example.com/2019-01-13H20:00', '/'),
            ('/example.com/2019-01-13H20:00/', '/'),
            ('/example.com/2019-01-13H20:00/foo', '/foo'),
            ('/example.com/2019-01-13H20:00/foo/', '/foo'),
            ('/example.com/2019-01-13H20:00/foo/bar', '/foo/bar'),
            ('/example.com/2019-01-13H20:00/foo/bar/', '/foo/bar'),
            ('/example.com/2019-01-13H20:00/foo/bar/baz.png', '/foo/bar/baz.png'),
        ]
        for raw, expected in cases:
            with self.subTest(path=raw):
                self.assertEqual(expected, Path(raw).end)

    def test_check_for_domain(self):
        """includes_domain() is true as soon as a domain segment is present."""
        cases = [
            ('/example.com/2019-01-13H20:00', True),
            ('/example.com/', True),
            ('/example.com', True),
            ('/', False),
        ]
        for raw, expected in cases:
            with self.subTest(path=raw):
                self.assertEqual(expected, bool(Path(raw).includes_domain()))

    def test_check_for_captured_at(self):
        """includes_captured_at() requires the timestamp segment to be present."""
        cases = [
            ('/example.com/2019-01-13H20:00/foo/bar', True),
            ('/example.com/2019-01-13H20:00/foo', True),
            ('/example.com/2019-01-13H20:00/', True),
            ('/example.com/2019-01-13H20:00', True),
            ('/example.com/', False),
            ('/', False),
        ]
        for raw, expected in cases:
            with self.subTest(path=raw):
                self.assertEqual(expected, bool(Path(raw).includes_captured_at()))

    def test_check_for_end(self):
        """includes_end() requires a component after the capture timestamp."""
        cases = [
            ('/example.com/2019-01-13H20:00/foo/bar', True),
            ('/example.com/2019-01-13H20:00/foo', True),
            ('/example.com/2019-01-13H20:00/', False),
            ('/example.com/2019-01-13H20:00', False),
            ('/example.com/', False),
            # bugfix: the original asserted includes_captured_at() for '/'
            # here -- a copy-paste slip; this test is about includes_end().
            ('/', False),
        ]
        for raw, expected in cases:
            with self.subTest(path=raw):
                self.assertEqual(expected, bool(Path(raw).includes_end()))

    def test_end_can_be_treated_as_file(self):
        """end_as_file() yields the end without a trailing slash (root stays '/')."""
        cases = [
            ('/example.com/2019-01-13H20:00/foo/bar', '/foo/bar'),
            ('/example.com/2019-01-13H20:00/foo/bar/', '/foo/bar'),
            ('/example.com/2019-01-13H20:00/', '/'),
        ]
        for raw, expected in cases:
            with self.subTest(path=raw):
                self.assertEqual(expected, Path(raw).end_as_file())

    def test_end_can_be_treated_as_directory(self):
        """end_as_directory() yields the end with exactly one trailing slash."""
        cases = [
            ('/example.com/2019-01-13H20:00/foo/bar', '/foo/bar/'),
            ('/example.com/2019-01-13H20:00/foo/bar/', '/foo/bar/'),
            ('/example.com/2019-01-13H20:00/', '/'),
        ]
        for raw, expected in cases:
            with self.subTest(path=raw):
                self.assertEqual(expected, Path(raw).end_as_directory())
unittest.main()
| 33.583333 | 68 | 0.620099 | 542 | 4,030 | 4.46679 | 0.083026 | 0.102437 | 0.156134 | 0.193309 | 0.881454 | 0.841388 | 0.804626 | 0.762082 | 0.739777 | 0.739777 | 0 | 0.09031 | 0.208685 | 4,030 | 119 | 69 | 33.865546 | 0.668862 | 0.05732 | 0 | 0.706667 | 0 | 0 | 0.258726 | 0.209965 | 0 | 0 | 0 | 0 | 0.426667 | 1 | 0.093333 | false | 0 | 0.026667 | 0 | 0.133333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
37c7e2f207c6de8f6497b1608f2088ba4f4a200c | 55 | py | Python | theBroker/venv/Lib/site-packages/ttn/github_com/TheThingsNetwork/api/trace/__init__.py | emirgo/WeatherStation | f0f8c3464470991fc962d83cea20f3bcfd6a04b6 | [
"MIT"
] | 32 | 2017-11-01T16:03:48.000Z | 2021-11-16T12:35:34.000Z | theBroker/venv/Lib/site-packages/ttn/github_com/TheThingsNetwork/api/trace/__init__.py | emirgo/WeatherStation | f0f8c3464470991fc962d83cea20f3bcfd6a04b6 | [
"MIT"
] | 28 | 2017-11-20T09:45:59.000Z | 2021-12-14T09:31:24.000Z | theBroker/venv/Lib/site-packages/ttn/github_com/TheThingsNetwork/api/trace/__init__.py | emirgo/WeatherStation | f0f8c3464470991fc962d83cea20f3bcfd6a04b6 | [
"MIT"
] | 22 | 2017-11-03T10:21:50.000Z | 2021-04-08T05:20:51.000Z | from .trace_pb2_grpc import *
from .trace_pb2 import *
| 18.333333 | 29 | 0.781818 | 9 | 55 | 4.444444 | 0.555556 | 0.45 | 0.6 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.042553 | 0.145455 | 55 | 2 | 30 | 27.5 | 0.808511 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
80afbfa7f766c54297e85d7250f44a10bd04349d | 1,802 | py | Python | tests/test_commandline_train.py | ai-zahran/Montreal-Forced-Aligner | decbacfe86f81703022da4e95fd109eb94e7686d | [
"MIT"
] | null | null | null | tests/test_commandline_train.py | ai-zahran/Montreal-Forced-Aligner | decbacfe86f81703022da4e95fd109eb94e7686d | [
"MIT"
] | null | null | null | tests/test_commandline_train.py | ai-zahran/Montreal-Forced-Aligner | decbacfe86f81703022da4e95fd109eb94e7686d | [
"MIT"
] | null | null | null | import os
import pytest
from montreal_forced_aligner.command_line.mfa import parser
from montreal_forced_aligner.command_line.train_acoustic_model import run_train_acoustic_model
# @pytest.mark.skip(reason='Optimization')
def _train_and_check(corpus_dir, dict_path, generated_dir, temp_dir,
                     config_path, output_model_path):
    """Run ``mfa train`` through the CLI entry point and verify output.

    Shared driver for the train-and-align tests below (their bodies were
    previously duplicated verbatim except for the dictionary argument):
    removes any stale output model, builds the command line, parses it
    with the mfa argument parser, runs acoustic-model training, and
    asserts the trained model file was written.
    """
    if os.path.exists(output_model_path):
        os.remove(output_model_path)
    command = [
        "train",
        corpus_dir,
        dict_path,
        os.path.join(generated_dir, "basic_output"),
        "-t",
        temp_dir,
        "--config_path",
        config_path,
        "-q",
        "--clean",
        "--debug",
        "-o",
        output_model_path,
    ]
    args, unknown = parser.parse_known_args(command)
    run_train_acoustic_model(args, unknown)
    assert os.path.exists(output_model_path)


def test_train_and_align_basic(
    basic_corpus_dir,
    sick_dict_path,
    generated_dir,
    temp_dir,
    mono_train_config_path,
    textgrid_output_model_path,
):
    """Train a monophone model on the basic corpus with one dictionary."""
    _train_and_check(basic_corpus_dir, sick_dict_path, generated_dir,
                     temp_dir, mono_train_config_path,
                     textgrid_output_model_path)


@pytest.mark.skip(reason="Optimization")
def test_train_and_align_basic_speaker_dict(
    basic_corpus_dir,
    speaker_dictionary_path,
    generated_dir,
    temp_dir,
    mono_train_config_path,
    textgrid_output_model_path,
):
    """Train a monophone model using a per-speaker dictionary mapping."""
    _train_and_check(basic_corpus_dir, speaker_dictionary_path, generated_dir,
                     temp_dir, mono_train_config_path,
                     textgrid_output_model_path)
| 26.115942 | 94 | 0.673696 | 221 | 1,802 | 5.036199 | 0.230769 | 0.125786 | 0.17071 | 0.206649 | 0.918239 | 0.918239 | 0.783468 | 0.783468 | 0.783468 | 0.783468 | 0 | 0 | 0.23141 | 1,802 | 68 | 95 | 26.5 | 0.80361 | 0.022198 | 0 | 0.852459 | 0 | 0 | 0.063636 | 0 | 0 | 0 | 0 | 0 | 0.032787 | 1 | 0.032787 | false | 0 | 0.065574 | 0 | 0.098361 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
80e60f9fe7ac33a37653f06bcb1b29d0d6d670a9 | 12,037 | py | Python | pymtl3_net/ocnlib/ifcs/packets.py | cornell-brg/ocn-posh | 7f8bfd800627364cfc37dc5d6a36333ee2e48c99 | [
"BSD-3-Clause"
] | 3 | 2019-06-07T13:27:06.000Z | 2019-07-16T19:00:23.000Z | pymtl3_net/ocnlib/ifcs/packets.py | cornell-brg/ocn-posh | 7f8bfd800627364cfc37dc5d6a36333ee2e48c99 | [
"BSD-3-Clause"
] | 12 | 2019-07-23T02:29:31.000Z | 2019-07-25T11:07:00.000Z | pymtl3_net/ocnlib/ifcs/packets.py | cornell-brg/posh-ocn | 7f8bfd800627364cfc37dc5d6a36333ee2e48c99 | [
"BSD-3-Clause"
] | null | null | null | """
==========================================================================
packets.py
==========================================================================
Collection of packets definition.
Convention: The fields/constructor arguments should appear in the order
of [ <id related>, opaque_nbits, vc, payload_nbits ]
Author : Yanghui Ou, Shunning Jiang
Date : Oct 26, 2019
"""
from pymtl3 import *
#=========================================================================
# Generic packet
#=========================================================================
def mk_generic_pkt( nrouters=4, opaque_nbits=8, vc=2, payload_nbits=16,
                    prefix="GenericPacket" ):
  """Create a generic packet bitstruct type.

  The vc_id field (and its slot in the __str__ output) is present only
  when more than one virtual channel is configured (vc > 1).
  """
  id_t  = mk_bits( clog2( nrouters ) )
  opq_t = mk_bits( opaque_nbits )
  pld_t = mk_bits( payload_nbits )
  type_name = f"{prefix}_{nrouters}_{vc}_{opaque_nbits}_{payload_nbits}"

  # Build the field dict in order; vc_id goes between opaque and payload.
  fields = {
    'src':    id_t,
    'dst':    id_t,
    'opaque': opq_t,
  }
  if vc > 1:
    fields['vc_id'] = mk_bits( clog2( vc ) )
    def str_func( s ):
      return f"{s.src}>{s.dst}:{s.opaque}:{s.vc_id}:{s.payload}"
  else:
    def str_func( s ):
      return f"{s.src}>{s.dst}:{s.opaque}:{s.payload}"
  fields['payload'] = pld_t

  return mk_bitstruct( type_name, fields,
                       namespace = { '__str__': str_func } )
#=========================================================================
# xbar packet
#=========================================================================
def mk_xbar_pkt( num_inports=2, num_outports=2, opaque_nbits=8,
                 payload_nbits=32 ):
  """Create a crossbar packet bitstruct type (no virtual channels)."""

  def port_type( nports ):
    # A single-port side still needs a 1-bit field.
    return Bits1 if nports == 1 else mk_bits( clog2( nports ) )

  type_name = f'XbarPacket{num_inports}x{num_outports}_{opaque_nbits}_{payload_nbits}'

  def str_func( s ):
    return f'{s.src}>{s.dst}:{s.opaque}:{s.payload}'

  return mk_bitstruct( type_name, {
      'src'     : port_type( num_inports ),
      'dst'     : port_type( num_outports ),
      'opaque'  : mk_bits( opaque_nbits ),
      'payload' : mk_bits( payload_nbits ),
    },
    namespace = { '__str__' : str_func },
  )
#=========================================================================
# ring packet
#=========================================================================
def mk_ring_pkt( nrouters=4, opaque_nbits=8, vc=2, payload_nbits=32 ):
  """Create a ring packet type: a generic packet named "RingPacket"."""
  return mk_generic_pkt( nrouters, opaque_nbits, vc, payload_nbits, "RingPacket" )
#=========================================================================
# mesh packet
#=========================================================================
def mk_mesh_pkt( ncols=2, nrows=2,
                 opaque_nbits=8, vc=1, payload_nbits=32 ):
  """Create a 2D-mesh packet type with (x, y) source/destination fields.

  vc_id is only included when more than one virtual channel is used.
  """
  assert ncols > 0 and nrows > 0
  # Single-row/column meshes still get a 1-bit coordinate field.
  x_t = Bits1 if ncols == 1 else mk_bits( clog2( ncols ) )
  y_t = Bits1 if nrows == 1 else mk_bits( clog2( nrows ) )
  type_name = f"MeshPacket_{ncols}x{nrows}_{vc}_{opaque_nbits}_{payload_nbits}"

  fields = {
    'src_x':  x_t,
    'src_y':  y_t,
    'dst_x':  x_t,
    'dst_y':  y_t,
    'opaque': mk_bits( opaque_nbits ),
  }
  if vc > 1:
    fields['vc_id'] = mk_bits( clog2( vc ) )
    def str_func( s ):
      return f"({s.src_x},{s.src_y})>({s.dst_x},{s.dst_y}):{s.opaque}:{s.vc_id}:{s.payload}"
  else:
    def str_func( s ):
      return f"({s.src_x},{s.src_y})>({s.dst_x},{s.dst_y}):{s.opaque}:{s.payload}"
  fields['payload'] = mk_bits( payload_nbits )

  return mk_bitstruct( type_name, fields,
                       namespace = { '__str__': str_func } )
#=========================================================================
# cmesh packet
#=========================================================================
def mk_cmesh_pkt( ncols=2, nrows=2,
                  inports=8, outports=8,
                  opaque_nbits=8, vc=1, payload_nbits=32 ):
  """Create a concentrated-mesh packet type.

  Besides the (x, y) router coordinates, dst_ter selects a terminal at
  the destination router (presumably outports minus the 4 mesh
  directions -- confirm against the router implementation).
  """
  x_t = Bits1 if ncols == 1 else mk_bits( clog2( ncols ) )
  y_t = Bits1 if nrows == 1 else mk_bits( clog2( nrows ) )
  ter_t = mk_bits( clog2( outports-4 ) ) if outports > 5 else Bits1
  type_name = f"CMeshPacket_{ncols}x{nrows}_{inports}x{outports}_{opaque_nbits}_{vc}_{payload_nbits}"

  fields = {
    'src_x':   x_t,
    'src_y':   y_t,
    'dst_x':   x_t,
    'dst_y':   y_t,
    'dst_ter': ter_t,
    'opaque':  mk_bits( opaque_nbits ),
  }
  if vc > 1:
    fields['vc_id'] = mk_bits( clog2( vc ) )
  fields['payload'] = mk_bits( payload_nbits )

  return mk_bitstruct( type_name, fields )
#=========================================================================
# Butterfly packet
#=========================================================================
def mk_bfly_pkt( k_ary=2, n_fly=2, opaque_nbits=8, vc=0, payload_nbits=32 ):
  """Create a k-ary n-fly butterfly packet type."""
  src_t = mk_bits( clog2( k_ary ** n_fly ) )
  assert k_ary > 1
  # Destination is encoded as n_fly digits of clog2(k_ary) bits each.
  dst_t = mk_bits( clog2( k_ary ) * n_fly )
  type_name = f"BflyPacket_{k_ary}_{n_fly}_{vc}_{opaque_nbits}_{payload_nbits}"

  fields = {
    'src':    src_t,
    'dst':    dst_t,
    'opaque': mk_bits( opaque_nbits ),
  }
  if vc > 1:
    fields['vc_id'] = mk_bits( clog2( vc ) )
  fields['payload'] = mk_bits( payload_nbits )

  return mk_bitstruct( type_name, fields )
#=========================================================================
# ring packet with timestamp
#=========================================================================
def mk_ring_pkt_timestamp( nrouters=4, opaque_nbits=8, vc=2, payload_nbits=32, max_time=10 ):
  """Create a ring packet type that also carries an injection timestamp.

  The timestamp field is wide enough for values in [0, max_time].
  """
  id_t   = mk_bits( clog2( nrouters ) )
  time_t = mk_bits( clog2( max_time + 1 ) )
  type_name = f"RingPacketTimestamp_{nrouters}_{opaque_nbits}_{vc}_{payload_nbits}_{max_time}"

  fields = {
    'src':    id_t,
    'dst':    id_t,
    'opaque': mk_bits( opaque_nbits ),
  }
  if vc > 1:
    fields['vc_id'] = mk_bits( clog2( vc ) )
    def str_func( s ):
      return f"{s.src}>{s.dst}:{s.opaque}:{s.vc_id}:{s.payload}:{s.timestamp}"
  else:
    def str_func( s ):
      return f"{s.src}>{s.dst}:{s.opaque}:{s.payload}:{s.timestamp}"
  fields['payload']   = mk_bits( payload_nbits )
  fields['timestamp'] = time_t

  return mk_bitstruct( type_name, fields,
                       namespace = { '__str__': str_func } )
#=========================================================================
# mesh packet with timestamp
#=========================================================================
def mk_mesh_pkt_timestamp( ncols=2, nrows=2,
                           opaque_nbits=8, vc=1, payload_nbits=32,
                           max_time=10 ):
  """Create a 2D-mesh packet type carrying an injection timestamp.

  Mirrors mk_mesh_pkt but appends a timestamp field wide enough for
  values in [0, max_time].
  """
  # Fix: guard the single-row/column case exactly like mk_mesh_pkt does;
  # clog2(1) == 0 would otherwise produce a zero-width coordinate field.
  XType = mk_bits( clog2( ncols ) ) if ncols != 1 else Bits1
  YType = mk_bits( clog2( nrows ) ) if nrows != 1 else Bits1
  OpqType = mk_bits( opaque_nbits )
  PayloadType = mk_bits( payload_nbits )
  TimestampType = mk_bits( clog2( max_time + 1 ) )
  new_name = f"MeshPacketTimestamp_{ncols}x{nrows}_{opaque_nbits}"\
             f"_{vc}_{payload_nbits}_{max_time}"
  if vc > 1:
    VcIdType = mk_bits( clog2( vc ) )
    def str_func( s ):
      return f"({s.src_x},{s.src_y})>({s.dst_x},{s.dst_y}):{s.opaque}:{s.vc_id}:{s.payload}:{s.timestamp}"
    return mk_bitstruct( new_name, {
        'src_x': XType,
        'src_y': YType,
        'dst_x': XType,
        'dst_y': YType,
        'opaque': OpqType,
        'vc_id': VcIdType,
        'payload': PayloadType,
        'timestamp': TimestampType,
      },
      namespace = { '__str__': str_func },
    )
  else:
    def str_func( s ):
      return f"({s.src_x},{s.src_y})>({s.dst_x},{s.dst_y}):{s.opaque}:{s.payload}:{s.timestamp}"
    return mk_bitstruct( new_name, {
        'src_x': XType,
        'src_y': YType,
        'dst_x': XType,
        'dst_y': YType,
        'opaque': OpqType,
        'payload': PayloadType,
        'timestamp': TimestampType,
      },
      namespace = { '__str__': str_func },
    )
#=========================================================================
# cmesh packet with timestamp
#=========================================================================
def mk_cmesh_pkt_timestamp( ncols=2, nrows=2, inports=8, outports=8,
                            opaque_nbits=8, vc=1, payload_nbits=32, max_time=10 ):
  """Create a concentrated-mesh packet type carrying an injection timestamp.

  Mirrors mk_cmesh_pkt but appends a timestamp field wide enough for
  values in [0, max_time].
  """
  # Fix: guard the single-row/column case exactly like mk_cmesh_pkt does;
  # clog2(1) == 0 would otherwise produce a zero-width coordinate field.
  XType = mk_bits( clog2( ncols ) ) if ncols != 1 else Bits1
  YType = mk_bits( clog2( nrows ) ) if nrows != 1 else Bits1
  TType = mk_bits(clog2( outports - 4 )) if outports > 5 else mk_bits(1)
  OpqType = mk_bits( opaque_nbits )
  PayloadType = mk_bits( payload_nbits )
  TimestampType = mk_bits( clog2( max_time + 1 ) )
  # Fix copy-paste defect: the generated type used to be named
  # "MeshPacketTimestamp..." even though this is the CMesh variant.
  new_name = f"CMeshPacketTimestamp_{ncols}x{nrows}_{inports}x{outports}"\
             f"_{vc}_{opaque_nbits}_{payload_nbits}_{max_time}"
  if vc > 1:
    VcIdType = mk_bits( clog2( vc ) )
    return mk_bitstruct( new_name, {
        'src_x': XType,
        'src_y': YType,
        'dst_x': XType,
        'dst_y': YType,
        'dst_ter': TType,
        'opaque': OpqType,
        'vc_id': VcIdType,
        'payload': PayloadType,
        'timestamp': TimestampType,
      })
  else:
    return mk_bitstruct( new_name, {
        'src_x': XType,
        'src_y': YType,
        'dst_x': XType,
        'dst_y': YType,
        'dst_ter': TType,
        'opaque': OpqType,
        'payload': PayloadType,
        'timestamp': TimestampType,
      })
#=========================================================================
# Butterfly packet with timestamp
#=========================================================================
def mk_bfly_pkt_timestamp( k_ary=2, n_fly=2,
                           opaque_nbits=8, vc=0, payload_nbits=32,
                           max_time=10 ):
  """Create a k-ary n-fly butterfly packet type carrying a timestamp.

  Mirrors mk_bfly_pkt but appends a timestamp field wide enough for
  values in [0, max_time].

  NOTE(review): unlike the ring/mesh timestamp variants, the generated
  type name does not include max_time; kept as-is so existing generated
  names do not change.
  """
  # (Removed a large block of commented-out alternative type
  # computations that had accumulated here.)
  IdType = mk_bits( clog2( k_ary ** n_fly ) )
  # Destination is encoded as n_fly digits of clog2(k_ary) bits each.
  DstType = mk_bits( clog2( k_ary ) * n_fly )
  OpqType = mk_bits( opaque_nbits )
  PayloadType = mk_bits( payload_nbits )
  TimestampType = mk_bits( clog2( max_time + 1 ) )
  new_name = f"BflyPacketTimestamp_{k_ary}_{n_fly}_{opaque_nbits}_{vc}_{payload_nbits}"
  if vc > 1:
    VcIdType = mk_bits( clog2( vc ) )
    return mk_bitstruct( new_name, {
        'src': IdType,
        'dst': DstType,
        'opaque': OpqType,
        'vc_id': VcIdType,
        'payload': PayloadType,
        'timestamp': TimestampType,
      })
  else:
    return mk_bitstruct( new_name, {
        'src': IdType,
        'dst': DstType,
        'opaque': OpqType,
        'payload': PayloadType,
        'timestamp': TimestampType,
      })
| 30.706633 | 106 | 0.49838 | 1,338 | 12,037 | 4.193573 | 0.084454 | 0.056674 | 0.066655 | 0.060595 | 0.841561 | 0.811264 | 0.779184 | 0.76653 | 0.745678 | 0.736767 | 0 | 0.016619 | 0.245161 | 12,037 | 391 | 107 | 30.785166 | 0.600925 | 0.198887 | 0 | 0.748175 | 0 | 0.025547 | 0.195559 | 0.126655 | 0 | 0 | 0 | 0 | 0.007299 | 1 | 0.069343 | false | 0 | 0.00365 | 0.036496 | 0.171533 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
03c3a71ac36add29a481962d18de2a01d46b30c6 | 171 | py | Python | PyDSlog/csv/__init__.py | fbussv/PyDSlog | 7a2d1a0a615ac261eca6d9e9613e7edbe7869217 | [
"MIT"
] | null | null | null | PyDSlog/csv/__init__.py | fbussv/PyDSlog | 7a2d1a0a615ac261eca6d9e9613e7edbe7869217 | [
"MIT"
] | null | null | null | PyDSlog/csv/__init__.py | fbussv/PyDSlog | 7a2d1a0a615ac261eca6d9e9613e7edbe7869217 | [
"MIT"
] | null | null | null | from .IO5640Logger import IO5640_csv_saver
from .MLS160ALogger import MLS160A_csv_saver
from .MQTTLogger import MQTT_csv_saver
from .ArduinoLogger import Arduino_csv_saver | 42.75 | 44 | 0.888889 | 24 | 171 | 6 | 0.5 | 0.222222 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.089744 | 0.087719 | 171 | 4 | 45 | 42.75 | 0.833333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
03db13a5cee0ad714f43dcaaf62b9ae3b2afb82c | 14,536 | py | Python | variants/migrations/0001_initial.py | fossabot/mendelmd | 5cafebf5336b8854fe71414b3559428e3e401617 | [
"BSD-3-Clause"
] | 33 | 2016-07-22T21:39:09.000Z | 2021-06-24T02:57:02.000Z | variants/migrations/0001_initial.py | fossabot/mendelmd | 5cafebf5336b8854fe71414b3559428e3e401617 | [
"BSD-3-Clause"
] | 41 | 2017-06-20T03:10:33.000Z | 2021-12-24T23:54:41.000Z | variants/migrations/0001_initial.py | fossabot/mendelmd | 5cafebf5336b8854fe71414b3559428e3e401617 | [
"BSD-3-Clause"
] | 8 | 2017-06-14T21:07:47.000Z | 2021-01-12T17:59:49.000Z | # Generated by Django 2.0.1 on 2018-02-12 19:47
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('individuals', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Variant',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('index', models.TextField(db_index=True)),
('pos_index', models.TextField(db_index=True)),
('chr', models.TextField(db_index=True, verbose_name='Chr')),
('pos', models.IntegerField(db_index=True)),
('variant_id', models.TextField(db_index=True, verbose_name='ID')),
('ref', models.TextField(blank=True, db_index=True, null=True)),
('alt', models.TextField(blank=True, db_index=True, null=True)),
('qual', models.FloatField(db_index=True)),
('filter', models.TextField(db_index=True)),
('info', models.TextField(blank=True, null=True)),
('format', models.TextField(blank=True, db_index=True, null=True)),
('genotype_col', models.TextField(blank=True, db_index=True, null=True)),
('genotype', models.TextField(db_index=True)),
('read_depth', models.IntegerField()),
('gene', models.TextField(blank=True, db_index=True, null=True)),
('mutation_type', models.TextField(db_index=True, null=True)),
('vartype', models.TextField(db_index=True, null=True)),
('genomes1k_maf', models.FloatField(blank=True, db_index=True, null=True, verbose_name='1000 Genomes Frequency')),
('dbsnp_maf', models.FloatField(blank=True, db_index=True, null=True, verbose_name='dbSNP Frequency')),
('esp_maf', models.FloatField(blank=True, db_index=True, null=True, verbose_name='ESP6500 Frequency')),
('dbsnp_build', models.IntegerField(db_index=True, null=True)),
('sift', models.FloatField(blank=True, db_index=True, null=True)),
('sift_pred', models.TextField(blank=True, db_index=True, null=True)),
('polyphen2', models.FloatField(blank=True, db_index=True, null=True)),
('polyphen2_pred', models.TextField(blank=True, db_index=True, null=True)),
('condel', models.FloatField(blank=True, db_index=True, null=True)),
('condel_pred', models.TextField(blank=True, db_index=True, null=True)),
('dann', models.FloatField(blank=True, db_index=True, null=True)),
('cadd', models.FloatField(blank=True, db_index=True, null=True)),
('hi_index_str', models.TextField(blank=True, db_index=True, null=True)),
('hi_index', models.FloatField(blank=True, db_index=True, null=True)),
('hi_index_perc', models.FloatField(blank=True, db_index=True, null=True)),
('is_at_omim', models.BooleanField(db_index=True, default=False)),
('is_at_hgmd', models.BooleanField(db_index=True, default=False)),
('hgmd_entries', models.TextField(blank=True, db_index=True, null=True)),
('snpeff_effect', models.TextField(blank=True, db_index=True, null=True)),
('snpeff_impact', models.TextField(blank=True, db_index=True, null=True)),
('snpeff_gene_name', models.TextField(blank=True, db_index=True, null=True)),
('vep_allele', models.TextField(blank=True, db_index=True, null=True)),
('vep_gene', models.TextField(blank=True, db_index=True, null=True)),
('vep_feature', models.TextField(blank=True, db_index=True, null=True)),
('vep_feature_type', models.TextField(blank=True, db_index=True, null=True)),
('vep_consequence', models.TextField(blank=True, db_index=True, null=True)),
('vep_cdna_position', models.TextField(blank=True, db_index=True, null=True)),
('vep_cds_position', models.TextField(blank=True, db_index=True, null=True)),
('vep_protein_position', models.TextField(blank=True, db_index=True, null=True)),
('vep_amino_acids', models.TextField(blank=True, db_index=True, null=True)),
('vep_codons', models.TextField(blank=True, db_index=True, null=True)),
('vep_existing_variation', models.TextField(blank=True, db_index=True, null=True)),
('vep_distance', models.TextField(blank=True, db_index=True, null=True)),
('vep_strand', models.TextField(blank=True, db_index=True, null=True)),
('vep_symbol', models.TextField(blank=True, db_index=True, null=True)),
('vep_symbol_source', models.TextField(blank=True, db_index=True, null=True)),
('vep_sift', models.TextField(blank=True, db_index=True, null=True)),
('vep_polyphen', models.TextField(blank=True, db_index=True, null=True)),
('vep_condel', models.TextField(blank=True, db_index=True, null=True)),
('ensembl_clin_HGMD', models.BooleanField(db_index=True, default=False)),
('clinvar_CLNSRC', models.TextField(blank=True, db_index=True, null=True)),
('SIFT_score', models.TextField(blank=True, db_index=True, null=True)),
('SIFT_converted_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('Uniprot_acc_Polyphen2', models.TextField(blank=True, db_index=True, null=True)),
('Uniprot_id_Polyphen2', models.TextField(blank=True, db_index=True, null=True)),
('Uniprot_aapos_Polyphen2', models.TextField(blank=True, db_index=True, null=True)),
('Polyphen2_HDIV_score', models.TextField(blank=True, db_index=True, null=True)),
('Polyphen2_HDIV_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('Polyphen2_HDIV_pred', models.TextField(blank=True, db_index=True, null=True)),
('Polyphen2_HVAR_score', models.TextField(blank=True, db_index=True, null=True)),
('Polyphen2_HVAR_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('Polyphen2_HVAR_pred', models.TextField(blank=True, db_index=True, null=True)),
('LRT_score', models.TextField(blank=True, db_index=True, null=True)),
('LRT_converted_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('LRT_pred', models.TextField(blank=True, db_index=True, null=True)),
('LRT_Omega', models.TextField(blank=True, db_index=True, null=True)),
('MutationTaster_score', models.TextField(blank=True, db_index=True, null=True)),
('MutationTaster_converted_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('MutationTaster_pred', models.TextField(blank=True, db_index=True, null=True)),
('MutationTaster_model', models.TextField(blank=True, db_index=True, null=True)),
('MutationTaster_AAE', models.TextField(blank=True, db_index=True, null=True)),
('MutationAssessor_UniprotID', models.TextField(blank=True, db_index=True, null=True)),
('MutationAssessor_variant', models.TextField(blank=True, db_index=True, null=True)),
('MutationAssessor_score', models.TextField(blank=True, db_index=True, null=True)),
('MutationAssessor_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('MutationAssessor_pred', models.TextField(blank=True, db_index=True, null=True)),
('FATHMM_score', models.TextField(blank=True, db_index=True, null=True)),
('FATHMM_converted_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('FATHMM_pred', models.TextField(blank=True, db_index=True, null=True)),
('PROVEAN_score', models.TextField(blank=True, db_index=True, null=True)),
('PROVEAN_converted_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('PROVEAN_pred', models.TextField(blank=True, db_index=True, null=True)),
('Transcript_id_VEST3', models.TextField(blank=True, db_index=True, null=True)),
('Transcript_var_VEST3', models.TextField(blank=True, db_index=True, null=True)),
('VEST3_score', models.TextField(blank=True, db_index=True, null=True)),
('VEST3_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('MetaSVM_score', models.TextField(blank=True, db_index=True, null=True)),
('MetaSVM_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('MetaSVM_pred', models.TextField(blank=True, db_index=True, null=True)),
('MetaLR_score', models.TextField(blank=True, db_index=True, null=True)),
('MetaLR_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('MetaLR_pred', models.TextField(blank=True, db_index=True, null=True)),
('Reliability_index', models.TextField(blank=True, db_index=True, null=True)),
('CADD_raw', models.TextField(blank=True, db_index=True, null=True)),
('CADD_raw_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('CADD_phred', models.TextField(blank=True, db_index=True, null=True)),
('DANN_score', models.TextField(blank=True, db_index=True, null=True)),
('DANN_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('fathmm_MKL_coding_score', models.TextField(blank=True, db_index=True, null=True)),
('fathmm_MKL_coding_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('fathmm_MKL_coding_pred', models.TextField(blank=True, db_index=True, null=True)),
('fathmm_MKL_coding_group', models.TextField(blank=True, db_index=True, null=True)),
('Eigen_raw', models.TextField(blank=True, db_index=True, null=True)),
('Eigen_phred', models.TextField(blank=True, db_index=True, null=True)),
('Eigen_raw_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('Eigen_PC_raw', models.TextField(blank=True, db_index=True, null=True)),
('Eigen_PC_raw_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('GenoCanyon_score', models.TextField(blank=True, db_index=True, null=True)),
('GenoCanyon_score_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('integrated_fitCons_score', models.TextField(blank=True, db_index=True, null=True)),
('integrated_fitCons_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('integrated_confidence_value', models.TextField(blank=True, db_index=True, null=True)),
('GM12878_fitCons_score', models.TextField(blank=True, db_index=True, null=True)),
('GM12878_fitCons_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('GM12878_confidence_value', models.TextField(blank=True, db_index=True, null=True)),
('H1_hESC_fitCons_score', models.TextField(blank=True, db_index=True, null=True)),
('H1_hESC_fitCons_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('H1_hESC_confidence_value', models.TextField(blank=True, db_index=True, null=True)),
('HUVEC_fitCons_score', models.TextField(blank=True, db_index=True, null=True)),
('HUVEC_fitCons_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('HUVEC_confidence_value', models.TextField(blank=True, db_index=True, null=True)),
('GERP_NR', models.TextField(blank=True, db_index=True, null=True)),
('GERP_RS', models.TextField(blank=True, db_index=True, null=True)),
('GERP_RS_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('phyloP100way_vertebrate', models.TextField(blank=True, db_index=True, null=True)),
('phyloP100way_vertebrate_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('phyloP20way_mammalian', models.TextField(blank=True, db_index=True, null=True)),
('phyloP20way_mammalian_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('phastCons100way_vertebrate', models.TextField(blank=True, db_index=True, null=True)),
('phastCons100way_vertebrate_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('phastCons20way_mammalian', models.TextField(blank=True, db_index=True, null=True)),
('phastCons20way_mammalian_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('SiPhy_29way_pi', models.TextField(blank=True, db_index=True, null=True)),
('SiPhy_29way_logOdds', models.TextField(blank=True, db_index=True, null=True)),
('SiPhy_29way_logOdds_rankscore', models.TextField(blank=True, db_index=True, null=True)),
('clinvar_rs', models.TextField(blank=True, db_index=True, null=True)),
('clinvar_clnsig', models.TextField(blank=True, db_index=True, null=True)),
('clinvar_trait', models.TextField(blank=True, db_index=True, null=True)),
('clinvar_golden_stars', models.TextField(blank=True, db_index=True, null=True)),
('mcap_score', models.FloatField(blank=True, db_index=True, null=True)),
('mcap_rankscore', models.FloatField(blank=True, db_index=True, null=True)),
('mcap_pred', models.TextField(blank=True, db_index=True, null=True)),
('revel_score', models.TextField(blank=True, db_index=True, null=True)),
('individual', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='individuals.Individual')),
],
),
]
| 83.54023 | 130 | 0.63587 | 1,732 | 14,536 | 5.12933 | 0.100462 | 0.116614 | 0.183251 | 0.231315 | 0.898131 | 0.886988 | 0.87742 | 0.856821 | 0.842976 | 0.793224 | 0 | 0.007638 | 0.216359 | 14,536 | 173 | 131 | 84.023121 | 0.772276 | 0.003096 | 0 | 0 | 1 | 0 | 0.169439 | 0.072676 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.012048 | 0 | 0.036145 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
03f7eb0b4c77c688aed208f8dbf696e80bd23b99 | 102 | py | Python | smlibppm/api/__init__.py | justnat3/smlibppm | 6e9affb4c0f07602c2e2149a90ee6ebb6cac2124 | [
"MIT"
] | 2 | 2022-01-14T01:58:00.000Z | 2022-01-26T04:31:12.000Z | smlibppm/api/__init__.py | justnat3/smlibppm | 6e9affb4c0f07602c2e2149a90ee6ebb6cac2124 | [
"MIT"
] | 4 | 2022-01-14T01:17:27.000Z | 2022-02-04T03:31:58.000Z | smlibppm/api/__init__.py | justnat3/smlibppm | 6e9affb4c0f07602c2e2149a90ee6ebb6cac2124 | [
"MIT"
] | 2 | 2022-01-16T23:14:11.000Z | 2022-01-17T00:13:56.000Z | from smlibppm.core.ppm import (
PPM
)
from smlibppm.core.utils import (
generate_rand_color
)
| 14.571429 | 33 | 0.72549 | 14 | 102 | 5.142857 | 0.642857 | 0.333333 | 0.444444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.196078 | 102 | 6 | 34 | 17 | 0.878049 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.333333 | 0 | 0.333333 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
20a6c1efb8f53901e5711e13d0657214d6abdf97 | 21,311 | py | Python | tests/test_relationship.py | cameron/datahog | 815178ae576bc4b4e1994ca9fcdc0c1f854bfccf | [
"BSD-3-Clause"
] | 4 | 2015-09-09T23:05:39.000Z | 2016-10-20T15:24:58.000Z | tests/test_relationship.py | cameron/datahog | 815178ae576bc4b4e1994ca9fcdc0c1f854bfccf | [
"BSD-3-Clause"
] | null | null | null | tests/test_relationship.py | cameron/datahog | 815178ae576bc4b4e1994ca9fcdc0c1f854bfccf | [
"BSD-3-Clause"
] | null | null | null | # vim: fileencoding=utf8:et:sw=4:ts=8:sts=4
import os
import sys
import unittest
import datahog
from datahog import error
import psycopg2
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
import base
from pgmock import *
class RelationshipTests(base.TestCase):
    def setUp(self):
        """Register the node and relationship contexts used by every test.

        Context 1 is a root node, context 2 an int-storage node under it,
        and context 3 a relationship from context-1 nodes to context-2
        nodes.
        """
        super(RelationshipTests, self).setUp()
        datahog.set_context(1, datahog.NODE)
        datahog.set_context(2, datahog.NODE,
                {'base_ctx': 1, 'storage': datahog.storage.INT})
        datahog.set_context(3, datahog.RELATIONSHIP, {
            'base_ctx': 1, 'rel_ctx': 2})
    def test_create(self):
        # Successful create inserts one row per direction inside a
        # two-phase commit: first the forward row (position counted by
        # base_id), then the reverse row (position counted by rel_id) on
        # a second cursor. Both inserts are guarded by an exists-check
        # on the corresponding node.
        add_fetch_result([(1,)])
        add_fetch_result([(1,)])

        self.assertEqual(
            datahog.relationship.create(self.p, 3, 123, 456),
            True)

        self.assertEqual(eventlog, [
            TPC_BEGIN,
            GET_CURSOR,
            EXECUTE("""
insert into relationship (base_id, rel_id, ctx, forward, pos, flags)
select %s, %s, %s, %s, (
    select count(*)
    from relationship
    where
        time_removed is null
        and base_id=%s
        and ctx=%s
        and forward=%s
), %s
where exists (
    select 1
    from node
    where
        time_removed is null
        and id=%s
        and ctx=%s
)
returning 1
""", (123, 456, 3, True, 123, 3, True, 0, 123, 1)),
            ROWCOUNT,
            TPC_PREPARE,
            RESET,
            GET_CURSOR,
            EXECUTE("""
insert into relationship (base_id, rel_id, ctx, forward, pos, flags)
select %s, %s, %s, %s, (
    select count(*)
    from relationship
    where
        time_removed is null
        and rel_id=%s
        and ctx=%s
        and forward=%s
), %s
where exists (
    select 1
    from node
    where
        time_removed is null
        and id=%s
        and ctx=%s
)
returning 1
""", (123, 456, 3, False, 456, 3, False, 0, 456, 2)),
            ROWCOUNT,
            COMMIT,
            TPC_COMMIT])
    def test_create_failure_noobject_forward(self):
        # The forward insert matches no node row (empty fetch result),
        # so create raises NoObject and the two-phase transaction is
        # rolled back before the reverse insert is attempted.
        add_fetch_result([])

        self.assertRaises(error.NoObject,
            datahog.relationship.create, self.p, 3, 123, 456)

        self.assertEqual(eventlog, [
            TPC_BEGIN,
            GET_CURSOR,
            EXECUTE("""
insert into relationship (base_id, rel_id, ctx, forward, pos, flags)
select %s, %s, %s, %s, (
    select count(*)
    from relationship
    where
        time_removed is null
        and base_id=%s
        and ctx=%s
        and forward=%s
), %s
where exists (
    select 1
    from node
    where
        time_removed is null
        and id=%s
        and ctx=%s
)
returning 1
""", (123, 456, 3, True, 123, 3, True, 0, 123, 1)),
            ROWCOUNT,
            TPC_ROLLBACK])
def test_create_failure_noobject_reverse(self):
add_fetch_result([(1,)])
add_fetch_result([])
self.assertRaises(error.NoObject,
datahog.relationship.create, self.p, 3, 123, 456)
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
insert into relationship (base_id, rel_id, ctx, forward, pos, flags)
select %s, %s, %s, %s, (
select count(*)
from relationship
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
), %s
where exists (
select 1
from node
where
time_removed is null
and id=%s
and ctx=%s
)
returning 1
""", (123, 456, 3, True, 123, 3, True, 0, 123, 1)),
ROWCOUNT,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
insert into relationship (base_id, rel_id, ctx, forward, pos, flags)
select %s, %s, %s, %s, (
select count(*)
from relationship
where
time_removed is null
and rel_id=%s
and ctx=%s
and forward=%s
), %s
where exists (
select 1
from node
where
time_removed is null
and id=%s
and ctx=%s
)
returning 1
""", (123, 456, 3, False, 456, 3, False, 0, 456, 2)),
ROWCOUNT,
ROLLBACK,
TPC_ROLLBACK])
def test_create_failure_duplicate(self):
query_fail(psycopg2.IntegrityError)
self.assertEqual(
datahog.relationship.create(self.p, 3, 123, 456),
False)
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE_FAILURE("""
insert into relationship (base_id, rel_id, ctx, forward, pos, flags)
select %s, %s, %s, %s, (
select count(*)
from relationship
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
), %s
where exists (
select 1
from node
where
time_removed is null
and id=%s
and ctx=%s
)
returning 1
""", (123, 456, 3, True, 123, 3, True, 0, 123, 1)),
TPC_ROLLBACK])
def test_create_with_positions(self):
add_fetch_result([(1,)])
add_fetch_result([(1,)])
self.assertEqual(
datahog.relationship.create(self.p, 3, 123, 456, 4, 5),
True)
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
with eligible as (
select 1
from node
where
time_removed is null
and id=%s
and ctx=%s
), bump as (
update relationship
set pos=pos + 1
where
exists (select 1 from eligible)
and time_removed is null
and forward=%s
and base_id=%s
and ctx=%s
and pos >= %s
)
insert into relationship (base_id, rel_id, ctx, forward, pos, flags)
select %s, %s, %s, %s, %s, %s
where exists (select 1 from eligible)
returning 1
""", (123, 1, True, 123, 3, 4, 123, 456, 3, True, 4, 0)),
ROWCOUNT,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
with eligible as (
select 1
from node
where
time_removed is null
and id=%s
and ctx=%s
), bump as (
update relationship
set pos=pos + 1
where
exists (select 1 from eligible)
and time_removed is null
and forward=%s
and rel_id=%s
and ctx=%s
and pos >= %s
)
insert into relationship (base_id, rel_id, ctx, forward, pos, flags)
select %s, %s, %s, %s, %s, %s
where exists (select 1 from eligible)
returning 1
""", (456, 2, False, 456, 3, 5, 123, 456, 3, False, 5, 0)),
ROWCOUNT,
COMMIT,
TPC_COMMIT])
def test_list_forwards(self):
add_fetch_result([(456, 0, 0), (457, 0, 1), (458, 0, 2), (459, 0, 3)])
self.assertEqual(
datahog.relationship.list(self.p, 123, 3),
([
{'ctx': 3, 'base_id': 123, 'rel_id': 456, 'flags': set([])},
{'ctx': 3, 'base_id': 123, 'rel_id': 457, 'flags': set([])},
{'ctx': 3, 'base_id': 123, 'rel_id': 458, 'flags': set([])},
{'ctx': 3, 'base_id': 123, 'rel_id': 459, 'flags': set([])},
], 4))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select rel_id, flags, pos
from relationship
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and pos >= %s
order by pos asc
limit %s
""", (123, 3, True, 0, 100)),
FETCH_ALL,
COMMIT])
def test_list_reverse(self):
add_fetch_result([(123, 0, 0), (124, 0, 1), (125, 0, 2), (126, 0, 3)])
self.assertEqual(
datahog.relationship.list(self.p, 456, 3, False),
([
{'ctx': 3, 'base_id': 123, 'rel_id': 456, 'flags': set([])},
{'ctx': 3, 'base_id': 124, 'rel_id': 456, 'flags': set([])},
{'ctx': 3, 'base_id': 125, 'rel_id': 456, 'flags': set([])},
{'ctx': 3, 'base_id': 126, 'rel_id': 456, 'flags': set([])},
], 4))
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select base_id, flags, pos
from relationship
where
time_removed is null
and rel_id=%s
and ctx=%s
and forward=%s
and pos >= %s
order by pos asc
limit %s
""", (456, 3, False, 0, 100)),
FETCH_ALL,
COMMIT])
def test_get_success(self):
add_fetch_result([(456, 0, 7)])
self.assertEqual(
datahog.relationship.get(self.p, 3, 123, 456),
{'ctx': 3, 'base_id': 123, 'rel_id': 456, 'flags': set([])})
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select rel_id, flags, pos
from relationship
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and pos >= %s
and rel_id=%s
order by pos asc
limit %s
""", (123, 3, True, 0, 456, 1)),
FETCH_ALL,
COMMIT])
def test_get_failure(self):
add_fetch_result([])
self.assertEqual(
datahog.relationship.get(self.p, 3, 123, 456),
None)
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
select rel_id, flags, pos
from relationship
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and pos >= %s
and rel_id=%s
order by pos asc
limit %s
""", (123, 3, True, 0, 456, 1)),
FETCH_ALL,
COMMIT])
def test_add_flags(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(5,)])
add_fetch_result([(5,)])
self.assertEqual(
datahog.relationship.set_flags(self.p, 123, 456, 3, [1, 3], []),
set([1, 3]))
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=flags | %s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (5, True, 456, 3, 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=flags | %s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (5, False, 456, 3, 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_add_flags_no_rel(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([])
self.assertEqual(
datahog.relationship.set_flags(self.p, 123, 456, 3, [1, 3], []),
None)
def test_clear_flags(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(2,)])
add_fetch_result([(2,)])
self.assertEqual(
datahog.relationship.set_flags(self.p, 123, 456, 3, [], [1, 3]),
set([2]))
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=flags & ~%s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (5, True, 456, 3, 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=flags & ~%s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (5, False, 456, 3, 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_clear_flags_no_rel(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([])
self.assertEqual(
datahog.relationship.set_flags(self.p, 123, 456, 3, [], [1, 3]),
None)
def test_set_flags_add(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(5,)])
add_fetch_result([(5,)])
self.assertEqual(
datahog.relationship.set_flags(self.p, 123, 456, 3, [1, 3], []),
set([1, 3]))
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=flags | %s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (5, True, 456, 3, 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=flags | %s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (5, False, 456, 3, 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_set_flags_clear(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(4,)])
add_fetch_result([(4,)])
self.assertEqual(
datahog.relationship.set_flags(self.p, 123, 456, 3, [], [1, 2]),
set([3]))
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=flags & ~%s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (3, True, 456, 3, 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=flags & ~%s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (3, False, 456, 3, 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_set_flags_both(self):
datahog.set_flag(1, 3)
datahog.set_flag(2, 3)
datahog.set_flag(3, 3)
add_fetch_result([(5,)])
add_fetch_result([(5,)])
self.assertEqual(
datahog.relationship.set_flags(self.p, 123, 456, 3, [1, 3], [2]),
set([1, 3]))
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=(flags & ~%s) | %s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (2, 5, True, 456, 3, 123)),
FETCH_ALL,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
update relationship
set flags=(flags & ~%s) | %s
where time_removed is null and forward=%s and rel_id=%s and ctx=%s and base_id=%s
returning flags
""", (2, 5, False, 456, 3, 123)),
FETCH_ALL,
COMMIT,
TPC_COMMIT])
def test_shift(self):
add_fetch_result([(True,)])
self.assertEqual(
datahog.relationship.shift(self.p, 123, 456, 3, True, 7),
True)
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
with oldpos as (
select pos
from relationship
where
time_removed is null
and forward=%s
and base_id=%s
and ctx=%s
and rel_id=%s
), bump as (
update relationship
set pos=pos + (case
when (select pos from oldpos) < pos
then -1
else 1
end)
where
exists (select 1 from oldpos)
and time_removed is null
and forward=%s
and base_id=%s
and ctx=%s
and pos between symmetric (select pos from oldpos) and %s
returning 1
), move as (
update relationship
set pos=%s
where
time_removed is null
and forward=%s
and base_id=%s
and ctx=%s
and rel_id=%s
returning 1
)
select exists (select 1 from move)
""", (True, 123, 3, 456, True, 123, 3, 7, 7, True, 123, 3, 456)),
FETCH_ONE,
COMMIT])
def test_shift_failure(self):
add_fetch_result([(False,)])
self.assertEqual(
datahog.relationship.shift(self.p, 123, 456, 3, True, 7),
False)
self.assertEqual(eventlog, [
GET_CURSOR,
EXECUTE("""
with oldpos as (
select pos
from relationship
where
time_removed is null
and forward=%s
and base_id=%s
and ctx=%s
and rel_id=%s
), bump as (
update relationship
set pos=pos + (case
when (select pos from oldpos) < pos
then -1
else 1
end)
where
exists (select 1 from oldpos)
and time_removed is null
and forward=%s
and base_id=%s
and ctx=%s
and pos between symmetric (select pos from oldpos) and %s
returning 1
), move as (
update relationship
set pos=%s
where
time_removed is null
and forward=%s
and base_id=%s
and ctx=%s
and rel_id=%s
returning 1
)
select exists (select 1 from move)
""", (True, 123, 3, 456, True, 123, 3, 7, 7, True, 123, 3, 456)),
FETCH_ONE,
COMMIT])
def test_remove(self):
add_fetch_result([(1,)])
add_fetch_result([(1,)])
self.assertEqual(
datahog.relationship.remove(self.p, 123, 456, 3),
True)
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
with removal as (
update relationship
set time_removed=now()
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and rel_id=%s
returning pos
), bump as (
update relationship
set pos = pos - 1
where
exists (select 1 from removal)
and time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and pos > (select pos from removal)
)
select 1 from removal
""", (123, 3, True, 456, 123, 3, True)),
ROWCOUNT,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
with removal as (
update relationship
set time_removed=now()
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and rel_id=%s
returning pos
), bump as (
update relationship
set pos = pos - 1
where
exists (select 1 from removal)
and time_removed is null
and rel_id=%s
and ctx=%s
and forward=%s
and pos > (select pos from removal)
)
select 1 from removal
""", (123, 3, False, 456, 456, 3, False)),
ROWCOUNT,
COMMIT,
TPC_COMMIT])
def test_remove_failure_forward(self):
add_fetch_result([])
self.assertEqual(
datahog.relationship.remove(self.p, 123, 456, 3),
False)
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
with removal as (
update relationship
set time_removed=now()
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and rel_id=%s
returning pos
), bump as (
update relationship
set pos = pos - 1
where
exists (select 1 from removal)
and time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and pos > (select pos from removal)
)
select 1 from removal
""", (123, 3, True, 456, 123, 3, True)),
ROWCOUNT,
TPC_ROLLBACK])
def test_remove_failure_reverse(self):
add_fetch_result([(1,)])
add_fetch_result([])
self.assertEqual(
datahog.relationship.remove(self.p, 123, 456, 3),
False)
self.assertEqual(eventlog, [
TPC_BEGIN,
GET_CURSOR,
EXECUTE("""
with removal as (
update relationship
set time_removed=now()
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and rel_id=%s
returning pos
), bump as (
update relationship
set pos = pos - 1
where
exists (select 1 from removal)
and time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and pos > (select pos from removal)
)
select 1 from removal
""", (123, 3, True, 456, 123, 3, True)),
ROWCOUNT,
TPC_PREPARE,
RESET,
GET_CURSOR,
EXECUTE("""
with removal as (
update relationship
set time_removed=now()
where
time_removed is null
and base_id=%s
and ctx=%s
and forward=%s
and rel_id=%s
returning pos
), bump as (
update relationship
set pos = pos - 1
where
exists (select 1 from removal)
and time_removed is null
and rel_id=%s
and ctx=%s
and forward=%s
and pos > (select pos from removal)
)
select 1 from removal
""", (123, 3, False, 456, 456, 3, False)),
ROWCOUNT,
ROLLBACK,
TPC_ROLLBACK])
# TODO
# - undirected relationships

# allow running this test module directly
if __name__ == '__main__':
    unittest.main()
| 24.751452 | 81 | 0.536296 | 2,770 | 21,311 | 3.984838 | 0.049819 | 0.042761 | 0.054176 | 0.070846 | 0.922269 | 0.909676 | 0.891466 | 0.887027 | 0.88295 | 0.859576 | 0 | 0.053474 | 0.348881 | 21,311 | 860 | 82 | 24.780233 | 0.742001 | 0.003425 | 0 | 0.891165 | 0 | 0 | 0.455119 | 0 | 0 | 0 | 0 | 0.001163 | 0.051216 | 1 | 0.028169 | false | 0 | 0.010243 | 0 | 0.039693 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
20b934bfbcf119e542782a7932aff095eedbd1fd | 9,756 | py | Python | backend/python-flask-server/swagger_server/test/test_default_controller.py | ttraulsen/project-manager | 9fbcd8f6151a60c33ab28e21c01d77cf5f591cdd | [
"MIT"
] | null | null | null | backend/python-flask-server/swagger_server/test/test_default_controller.py | ttraulsen/project-manager | 9fbcd8f6151a60c33ab28e21c01d77cf5f591cdd | [
"MIT"
] | null | null | null | backend/python-flask-server/swagger_server/test/test_default_controller.py | ttraulsen/project-manager | 9fbcd8f6151a60c33ab28e21c01d77cf5f591cdd | [
"MIT"
] | null | null | null | # coding: utf-8
from __future__ import absolute_import
from swagger_server.models.comment import Comment
from swagger_server.models.contact import Contact
from swagger_server.models.message import Message
from swagger_server.models.project import Project
from . import BaseTestCase
from six import BytesIO
from flask import json
class TestDefaultController(BaseTestCase):
    """DefaultController integration test stubs.

    Every endpoint test is the same shape: issue one HTTP request via the
    Flask test client (optionally with a JSON-encoded model body) and
    assert a 200 response, so the request/assert boilerplate lives in a
    single private helper.
    """

    def _request(self, url, method, body=None):
        """Send *method* to *url*, JSON-encoding *body* when given,
        and assert that the endpoint answers with HTTP 200."""
        extra = {}
        if body is not None:
            extra = dict(data=json.dumps(body),
                         content_type='application/json')
        response = self.client.open(url, method=method, **extra)
        self.assert200(response,
                       "Response body is : " + response.data.decode('utf-8'))

    # --- /projects collection ---

    def test_projects_delete(self):
        """You shouldn't delete the entire List"""
        self._request('/project-tracker/projects', 'DELETE', Project())

    def test_projects_get(self):
        """List all projects"""
        self._request('/project-tracker/projects', 'GET')

    def test_projects_patch(self):
        """You shouldn't put the entire List"""
        self._request('/project-tracker/projects', 'PATCH', Project())

    def test_projects_post(self):
        """Create a new project"""
        self._request('/project-tracker/projects', 'POST', Project())

    def test_projects_put(self):
        """You shouldn't put the entire List"""
        self._request('/project-tracker/projects', 'PUT', Project())

    # --- /projects/{id}/comments ---

    def test_projects_id_comments_delete(self):
        """You can't delete the entire List"""
        self._request('/project-tracker/projects/{id}/comments'.format(id=56),
                      'DELETE', Comment())

    def test_projects_id_comments_get(self):
        """List all comments on this project"""
        self._request('/project-tracker/projects/{id}/comments'.format(id=56),
                      'GET')

    def test_projects_id_comments_patch(self):
        """You can't put the entire List"""
        self._request('/project-tracker/projects/{id}/comments'.format(id=56),
                      'PATCH', Comment())

    def test_projects_id_comments_post(self):
        """Create a new comment"""
        self._request('/project-tracker/projects/{id}/comments'.format(id=56),
                      'POST', Comment())

    def test_projects_id_comments_put(self):
        """You can't put the entire List"""
        self._request('/project-tracker/projects/{id}/comments'.format(id=56),
                      'PUT', Comment())

    # --- /projects/{id}/contacts ---

    def test_projects_id_contacts_delete(self):
        """You can't delete the entire List"""
        self._request('/project-tracker/projects/{id}/contacts'.format(id=56),
                      'DELETE', Contact())

    def test_projects_id_contacts_get(self):
        """List all contacts associated with this project"""
        self._request('/project-tracker/projects/{id}/contacts'.format(id=56),
                      'GET')

    def test_projects_id_contacts_patch(self):
        """You can't put the entire List"""
        self._request('/project-tracker/projects/{id}/contacts'.format(id=56),
                      'PATCH', Contact())

    def test_projects_id_contacts_post(self):
        """Add a new contact"""
        self._request('/project-tracker/projects/{id}/contacts'.format(id=56),
                      'POST', Contact())

    def test_projects_id_contacts_put(self):
        """You can't put the entire List"""
        self._request('/project-tracker/projects/{id}/contacts'.format(id=56),
                      'PUT', Contact())

    # --- /projects/{id} ---

    def test_projects_id_delete(self):
        """Delete a project"""
        self._request('/project-tracker/projects/{id}'.format(id=3.4),
                      'DELETE')

    def test_projects_id_get(self):
        """get specific project by id"""
        self._request('/project-tracker/projects/{id}'.format(id=56),
                      'GET')

    def test_projects_id_post(self):
        """update an existing project"""
        self._request('/project-tracker/projects/{id}'.format(id=3.4),
                      'POST', Project())

    def test_projects_id_put(self):
        """update an existing project"""
        self._request('/project-tracker/projects/{id}'.format(id=3.4),
                      'PUT', Project())
# allow running this test module directly
if __name__ == '__main__':
    import unittest
    unittest.main()
| 38.561265 | 92 | 0.538848 | 995 | 9,756 | 5.146734 | 0.081407 | 0.082015 | 0.055653 | 0.055653 | 0.883421 | 0.883421 | 0.855497 | 0.760203 | 0.751611 | 0.741066 | 0 | 0.01664 | 0.353219 | 9,756 | 252 | 93 | 38.714286 | 0.794929 | 0.132226 | 0 | 0.746154 | 0 | 0 | 0.178145 | 0.080686 | 0 | 0 | 0 | 0 | 0.146154 | 1 | 0.146154 | false | 0 | 0.069231 | 0 | 0.223077 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
20d25e44831762725ac236f11c60d81f00f2afde | 5,688 | py | Python | src/genie/libs/parser/iosxe/tests/ShowMplsLdpNeighborDetail/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 204 | 2018-06-27T00:55:27.000Z | 2022-03-06T21:12:18.000Z | src/genie/libs/parser/iosxe/tests/ShowMplsLdpNeighborDetail/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 468 | 2018-06-19T00:33:18.000Z | 2022-03-31T23:23:35.000Z | src/genie/libs/parser/iosxe/tests/ShowMplsLdpNeighborDetail/cli/equal/golden_output_expected.py | balmasea/genieparser | d1e71a96dfb081e0a8591707b9d4872decd5d9d3 | [
"Apache-2.0"
] | 309 | 2019-01-16T20:21:07.000Z | 2022-03-30T12:56:41.000Z | expected_output = {
"vrf": {
"default": {
"peers": {
"10.169.197.252": {
"label_space_id": {
0: {
"local_ldp_ident": "10.169.197.254:0",
"tcp_connection": "10.169.197.252.646 - 10.169.197.254.44315",
"password": "not required, none, in use",
"state": "oper",
"msg_sent": 9981,
"msg_rcvd": 10004,
"downstream": True,
"last_tib_rev_sent": 4103,
"uptime": "3d21h",
"ldp_discovery_sources": {
"interface": {
"GigabitEthernet0/0/0": {
"ip_address": {
"10.169.197.93": {
"holdtime_ms": 15000,
"hello_interval_ms": 5000,
}
}
}
}
},
"address_bound": [
"10.169.197.252",
"192.168.36.49",
"10.120.202.49",
"192.168.36.57",
"10.169.197.101",
"10.169.197.93",
"10.69.111.2",
"10.16.190.254",
],
"peer_holdtime_ms": 180000,
"ka_interval_ms": 60000,
"peer_state": "estab",
"nsr": "Not Ready",
"capabilities": {
"sent": {
"ICCP": {
"type": "0x0405",
"maj_ver": 1,
"min_ver": 0,
},
"dynamic_anouncement": "0x0506",
"mldp_point_to_multipoint": "0x0508",
"mldp_multipoint_to_multipoint": "0x0509",
"typed_wildcard": "0x050B",
},
"received": {
"ICCP": {
"type": "0x0405",
"maj_ver": 1,
"min_ver": 0,
},
"dynamic_anouncement": "0x0506",
"mldp_point_to_multipoint": "0x0508",
"mldp_multipoint_to_multipoint": "0x0509",
"typed_wildcard": "0x050B",
},
},
}
}
},
"10.169.197.253": {
"label_space_id": {
0: {
"local_ldp_ident": "10.169.197.254:0",
"tcp_connection": "10.169.197.253.646 - 10.169.197.254.34904",
"password": "not required, none, in use",
"state": "oper",
"msg_sent": 9966,
"msg_rcvd": 9153,
"downstream": True,
"last_tib_rev_sent": 4103,
"uptime": "3d21h",
"ldp_discovery_sources": {
"interface": {
"GigabitEthernet0/0/2": {
"ip_address": {
"10.169.197.97": {
"holdtime_ms": 15000,
"hello_interval_ms": 5000,
}
}
}
}
},
"address_bound": ["10.120.202.57", "10.169.197.97"],
"peer_holdtime_ms": 180000,
"ka_interval_ms": 60000,
"peer_state": "estab",
"nsr": "Not Ready",
"capabilities": {
"sent": {
"ICCP": {
"type": "0x0405",
"maj_ver": 1,
"min_ver": 0,
},
"dynamic_anouncement": "0x0506",
"mldp_point_to_multipoint": "0x0508",
"mldp_multipoint_to_multipoint": "0x0509",
"typed_wildcard": "0x050B",
}
},
}
}
},
}
}
}
}
| 47.798319 | 90 | 0.240682 | 299 | 5,688 | 4.327759 | 0.341137 | 0.054096 | 0.086553 | 0.034003 | 0.839258 | 0.791345 | 0.791345 | 0.791345 | 0.791345 | 0.791345 | 0 | 0.202324 | 0.667194 | 5,688 | 118 | 91 | 48.20339 | 0.481247 | 0 | 0 | 0.525424 | 0 | 0 | 0.239627 | 0.035338 | 0 | 0 | 0.015823 | 0 | 0 | 1 | 0 | false | 0.016949 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
20dcc5862a284c059e2055e80cb9d2f92cecac16 | 46,778 | py | Python | nighres/segmentation/conditional_shape.py | marcobarilari/nighres | e503bb96a6a73f73020c5d9d7b540bc5f17699a8 | [
"Apache-2.0"
] | 2 | 2020-08-05T18:05:38.000Z | 2022-03-28T12:22:14.000Z | nighres/segmentation/conditional_shape.py | marcobarilari/nighres | e503bb96a6a73f73020c5d9d7b540bc5f17699a8 | [
"Apache-2.0"
] | 23 | 2017-07-17T12:53:22.000Z | 2017-07-24T21:31:16.000Z | nighres/segmentation/conditional_shape.py | marcobarilari/nighres | e503bb96a6a73f73020c5d9d7b540bc5f17699a8 | [
"Apache-2.0"
] | 8 | 2017-10-31T13:57:06.000Z | 2021-03-11T16:17:44.000Z | import numpy
import nibabel
import os
import sys
import json
import nighresjava
from ..io import load_volume, save_volume
from ..utils import _output_dir_4saving, _fname_4saving, \
_check_topology_lut_dir, _check_available_memory
def conditional_shape(target_images, structures, contrasts, background=1,
shape_atlas_probas=None, shape_atlas_labels=None,
intensity_atlas_hist=None,
skeleton_atlas_probas=None, skeleton_atlas_labels=None,
map_to_atlas=None, map_to_target=None,
atlas_file=None,
max_iterations=80, max_difference=0.1, ngb_size=4,
intensity_prior=1.0,
save_data=False, overwrite=False, output_dir=None,
file_name=None):
""" Conditioanl Shape Parcellation
Estimates subcortical structures based on a multi-atlas approach on shape
Parameters
----------
target_images: [niimg]
Input images to perform the parcellation from
structures: int
Number of structures to parcellate
contrasts: int
Number of image intensity contrasts
background: int
Number of background tissue classes (default is 1)
shape_atlas_probas: niimg
Pre-computed shape atlas from the shape levelsets (replacing them)
shape_atlas_labels: niimg
Pre-computed shape atlas from the shape levelsets (replacing them)
intensity_atlas_hist: niimg
Pre-computed intensity atlas from the contrast images (replacing them)
skeleton_atlas_probas: niimg
Pre-computed skeleton atlas from the shape levelsets (replacing them)
skeleton_atlas_labels: niimg
Pre-computed skeleton atlas from the shape levelsets (replacing them)
map_to_atlas: niimg
Coordinate mapping from the target to the atlas (opt)
map_to_target: niimg
Coordinate mapping from the atlas to the target (opt)
atlas_file: json
File with atlas labels and metadata (opt)
max_iterations: int
Maximum number of diffusion iterations to perform
max_difference: float
Maximum difference between diffusion steps
ngb_size: int
Number of neighbors to consider in the diffusion (default is 4)
intensity_prior: float
Importance scaling factor for the intensities in [0,1] (default is 1.0)
save_data: bool
Save output data to file (default is False)
overwrite: bool
Overwrite existing results (default is False)
output_dir: str, optional
Path to desired output directory, will be created if it doesn't exist
file_name: str, optional
Desired base name for output files with file extension
(suffixes will be added)
Returns
----------
dict
Dictionary collecting outputs under the following keys
(suffix of output files in brackets)
* max_spatial_proba (niimg): Maximum spatial probability map (_cspmax-sproba)
* max_spatial_label (niimg): Maximum spatial probability labels (_cspmax-slabel)
* max_combined_proba (niimg): Maximum spatial and intensity combined probability map (_cspmax-cproba)
* max_combined_label (niimg): Maximum spatial and intensity combined probability labels (_cspmax-clabel)
* max_proba (niimg): Maximum probability map (_cspmax-proba)
* max_label (niimg): Maximum probability labels (_cspmax-label)
* neighbors (niimg): Local neighborhood maps (_cspmax-ngb)
Notes
----------
Original Java module by Pierre-Louis Bazin.
"""
print('\nConditional Shape Parcellation')
# check topology_lut_dir and set default if not given
topology_lut_dir = _check_topology_lut_dir(None)
# make sure that saving related parameters are correct
if save_data:
output_dir = _output_dir_4saving(output_dir, target_images[0])
spatial_proba_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=target_images[0],
suffix='cspmax-sproba', ))
spatial_label_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=target_images[0],
suffix='cspmax-slabel'))
combined_proba_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=target_images[0],
suffix='cspmax-cproba', ))
combined_label_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=target_images[0],
suffix='cspmax-clabel'))
proba_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=target_images[0],
suffix='cspmax-proba', ))
label_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=target_images[0],
suffix='cspmax-label'))
neighbor_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=target_images[0],
suffix='cspmax-ngb'))
if overwrite is False \
and os.path.isfile(spatial_proba_file) \
and os.path.isfile(spatial_label_file) \
and os.path.isfile(combined_proba_file) \
and os.path.isfile(combined_label_file) \
and os.path.isfile(proba_file) \
and os.path.isfile(label_file) \
and os.path.isfile(neighbor_file):
print("skip computation (use existing results)")
output = {'max_spatial_proba': spatial_proba_file,
'max_spatial_label': spatial_label_file,
'max_combined_proba': combined_proba_file,
'max_combined_label': combined_label_file,
'max_proba': proba_file,
'max_label': label_file,
'neighbors': neighbor_file}
return output
# start virtual machine, if not already running
try:
mem = _check_available_memory()
nighresjava.initVM(initialheap=mem['init'], maxheap=mem['max'])
except ValueError:
pass
# create instance
cspmax = nighresjava.ConditionalShapeSegmentation()
cspmax.setNumberOfSubjectsObjectsBgAndContrasts(1,structures,background,contrasts)
# set parameters
cspmax.setOptions(True, False, False, False, True)
cspmax.setDiffusionParameters(max_iterations, max_difference)
cspmax.setIntensityImportancePrior(intensity_prior)
# load atlas metadata, if given (after setting up the numbers above!!)
if atlas_file is not None:
f = open(atlas_file)
metadata = json.load(f)
f.close()
# structures = metadata['MASSP Labels']
contrastList = numpy.zeros(structures*contrasts, dtype=int)
for st in range(structures):
#print('Label '+str(st+1)+": "+str(metadata[metadata['Label '+str(st+1)][1]]))
for c in metadata[metadata['Label '+str(st+1)][1]]:
contrastList[st*contrasts+c] = 1
cspmax.setContrastList(nighresjava.JArray('int')(
(contrastList.flatten('F')).astype(int).tolist()))
# load target image for parameters
print("load: "+str(target_images[0]))
img = load_volume(target_images[0])
data = img.get_data()
trg_affine = img.get_affine()
trg_header = img.get_header()
trg_resolution = [x.item() for x in trg_header.get_zooms()]
trg_dimensions = data.shape
cspmax.setTargetDimensions(trg_dimensions[0], trg_dimensions[1], trg_dimensions[2])
cspmax.setTargetResolutions(trg_resolution[0], trg_resolution[1], trg_resolution[2])
# target image 1
cspmax.setTargetImageAt(0, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
# if further contrast are specified, input them
for contrast in range(1,contrasts):
print("load: "+str(target_images[contrast]))
data = load_volume(target_images[contrast]).get_data()
cspmax.setTargetImageAt(contrast, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
# load the shape and intensity atlases
print("load: "+str(os.path.join(output_dir,intensity_atlas_hist)))
hist = load_volume(os.path.join(output_dir,intensity_atlas_hist)).get_data()
cspmax.setConditionalHistogram(nighresjava.JArray('float')(
(hist.flatten('F')).astype(float)))
print("load: "+str(os.path.join(output_dir,shape_atlas_probas)))
# load a first image for dim, res
img = load_volume(os.path.join(output_dir,shape_atlas_probas))
pdata = img.get_data()
header = img.get_header()
affine = img.get_affine()
resolution = [x.item() for x in header.get_zooms()]
dimensions = pdata.shape
cspmax.setAtlasDimensions(dimensions[0], dimensions[1], dimensions[2])
cspmax.setAtlasResolutions(resolution[0], resolution[1], resolution[2])
print("load: "+str(os.path.join(output_dir,shape_atlas_labels)))
ldata = load_volume(os.path.join(output_dir,shape_atlas_labels)).get_data()
if map_to_target is not None:
print("map atlas to subject")
print("load: "+str(map_to_target))
mdata = load_volume(map_to_target).get_data()
cspmax.setMappingToTarget(nighresjava.JArray('float')(
(mdata.flatten('F')).astype(float)))
cspmax.setShapeAtlasProbasAndLabels(nighresjava.JArray('float')(
(pdata.flatten('F')).astype(float)),
nighresjava.JArray('int')(
(ldata.flatten('F')).astype(int).tolist()))
print("load: "+str(os.path.join(output_dir,skeleton_atlas_probas)))
pdata = load_volume(os.path.join(output_dir,skeleton_atlas_probas)).get_data()
print("load: "+str(os.path.join(output_dir,skeleton_atlas_labels)))
ldata = load_volume(os.path.join(output_dir,skeleton_atlas_labels)).get_data()
cspmax.setSkeletonAtlasProbasAndLabels(nighresjava.JArray('float')(
(pdata.flatten('F')).astype(float)),
nighresjava.JArray('int')(
(ldata.flatten('F')).astype(int).tolist()))
# execute
try:
cspmax.estimateTarget()
#cspmax.strictSimilarityDiffusion(ngb_size)
cspmax.fastSimilarityDiffusion(ngb_size)
#cspmax.fastJointSimilarityDiffusion(ngb_size)
#cspmax.fastCombinedSimilarityDiffusion(ngb_size)
#cspmax.globalSmoothing(ngb_size)
cspmax.collapseToJointMaps()
cspmax.precomputeStoppingStatistics(3.0)
cspmax.topologyBoundaryDefinition("wcs", topology_lut_dir)
#cspmax.conditionalVolumeCertaintyThreshold(3.0)
#cspmax.conditionalPrecomputedVolumeGrowth(3.0)
cspmax.conditionalPrecomputedDirectVolumeGrowth(3.0)
cspmax.collapseSpatialPriorMaps()
#cspmax.collapseConditionalMaps()
#cspmax.collapseToJointMaps()
#cspmax.topologyObjectDefinition("26/6", topology_lut_dir)
#cspmax.conditionalCollapsedVolumeGrowth(3.0)
except:
# if the Java module fails, reraise the error it throws
print("\n The underlying Java code did not execute cleanly: ")
print(sys.exc_info()[0])
raise
return
# reshape output to what nibabel likes
dimensions = (dimensions[0],dimensions[1],dimensions[2],cspmax.getBestDimension())
dims3D = (dimensions[0],dimensions[1],dimensions[2])
dims_ngb = (dimensions[0],dimensions[1],dimensions[2],ngb_size)
dims3Dtrg = (trg_dimensions[0],trg_dimensions[1],trg_dimensions[2])
dims3D = dims3Dtrg
dims_ngb = (trg_dimensions[0],trg_dimensions[1],trg_dimensions[2],ngb_size)
dims_extra = (trg_dimensions[0],trg_dimensions[1],trg_dimensions[2],4)
intens_dims = (structures+background,structures+background,contrasts)
intens_hist_dims = ((structures+background)*(structures+background),cspmax.getNumberOfBins()+6,contrasts)
spatial_proba_data = numpy.reshape(numpy.array(cspmax.getBestSpatialProbabilityMaps(1),
dtype=numpy.float32), dims3Dtrg, 'F')
spatial_label_data = numpy.reshape(numpy.array(cspmax.getBestSpatialProbabilityLabels(1),
dtype=numpy.int32), dims3Dtrg, 'F')
# combined_proba_data = numpy.reshape(numpy.array(cspmax.getBestProbabilityMaps(1),
# dtype=numpy.float32), dims3Dtrg, 'F')
# combined_label_data = numpy.reshape(numpy.array(cspmax.getBestProbabilityLabels(1),
# dtype=numpy.int32), dims3Dtrg, 'F')
combined_proba_data = numpy.reshape(numpy.array(cspmax.getJointProbabilityMaps(4),
dtype=numpy.float32), dims_extra, 'F')
combined_label_data = numpy.reshape(numpy.array(cspmax.getJointProbabilityLabels(4),
dtype=numpy.int32), dims_extra, 'F')
proba_data = numpy.reshape(numpy.array(cspmax.getFinalProba(),
dtype=numpy.float32), dims3Dtrg, 'F')
label_data = numpy.reshape(numpy.array(cspmax.getFinalLabel(),
dtype=numpy.int32), dims3Dtrg, 'F')
neighbor_data = numpy.reshape(numpy.array(cspmax.getNeighborhoodMaps(ngb_size),
dtype=numpy.float32), dims_ngb, 'F')
# adapt header max for each image so that correct max is displayed
# and create nifiti objects
header['cal_max'] = numpy.nanmax(spatial_proba_data)
spatial_proba = nibabel.Nifti1Image(spatial_proba_data, trg_affine, trg_header)
header['cal_max'] = numpy.nanmax(spatial_label_data)
spatial_label = nibabel.Nifti1Image(spatial_label_data, trg_affine, trg_header)
header['cal_max'] = numpy.nanmax(combined_proba_data)
combined_proba = nibabel.Nifti1Image(combined_proba_data, trg_affine, trg_header)
header['cal_max'] = numpy.nanmax(combined_label_data)
combined_label = nibabel.Nifti1Image(combined_label_data, trg_affine, trg_header)
trg_header['cal_max'] = numpy.nanmax(proba_data)
proba = nibabel.Nifti1Image(proba_data, trg_affine, trg_header)
trg_header['cal_max'] = numpy.nanmax(label_data)
label = nibabel.Nifti1Image(label_data, trg_affine, trg_header)
header['cal_min'] = numpy.nanmin(neighbor_data)
header['cal_max'] = numpy.nanmax(neighbor_data)
neighbors = nibabel.Nifti1Image(neighbor_data, trg_affine, trg_header)
if save_data:
save_volume(spatial_proba_file, spatial_proba)
save_volume(spatial_label_file, spatial_label)
save_volume(combined_proba_file, combined_proba)
save_volume(combined_label_file, combined_label)
save_volume(proba_file, proba)
save_volume(label_file, label)
save_volume(neighbor_file, neighbors)
output= {'max_spatial_proba': spatial_proba_file, 'max_spatial_label': spatial_label_file,
'max_combined_proba': combined_proba_file, 'max_combined_label': combined_label_file,
'max_proba': proba_file, 'max_label': label_file, 'neighbors': neighbor_file}
return output
else:
output= {'max_spatial_proba': spatial_proba, 'max_spatial_label': spatial_label,
'max_combined_proba': combined_proba, 'max_combined_label': combined_label,
'max_proba': proba, 'max_label': label, 'neighbors': neighbors}
return output
def conditional_shape_atlasing(subjects, structures, contrasts,
levelset_images=None, skeleton_images=None,
contrast_images=None, background=1, smoothing=1.0,
save_data=False, overwrite=False, output_dir=None,
file_name=None):
""" Conditioanl Shape Parcellation Atlasing
Builds a multi-atlas prior for conditional shape parcellation
Parameters
----------
subjects: int
Number of atlas subjects
structures: int
Number of structures to parcellate
contrasts: int
Number of image intensity contrasts
levelset_images: [niimg]
Atlas shape levelsets indexed by (subjects,structures)
skeleton_images: [niimg]
Atlas shape skeletons indexed by (subjects,structures)
contrast_images: [niimg]
Atlas images to use in the parcellation, indexed by (subjects, contrasts)
background: int
Number of separate tissue classes for the background (default is 1)
smoothing: float
Standard deviation in number of bins used in histogram smoothing
(default is 1)
save_data: bool
Save output data to file (default is False)
overwrite: bool
Overwrite existing results (default is False)
output_dir: str, optional
Path to desired output directory, will be created if it doesn't exist
file_name: str, optional
Desired base name for output files with file extension
(suffixes will be added)
Returns
----------
dict
Dictionary collecting outputs under the following keys
(suffix of output files in brackets)
* max_spatial_proba (niimg): Maximum spatial probability map (_cspmax-sproba)
* max_spatial_label (niimg): Maximum spatial probability labels (_cspmax-slabel)
* cond_hist (niimg): Conditional intensity histograms (_cspmax-chist)
* max_skeleton_proba (niimg): Maximum skeleton probability map (_cspmax-kproba)
* max_skeleton_label (niimg): Maximum skeleton probability labels (_cspmax-klabel)
Notes
----------
Original Java module by Pierre-Louis Bazin.
"""
print('\nConditional Shape Atlasing')
# make sure that saving related parameters are correct
if save_data:
output_dir = _output_dir_4saving(output_dir, contrast_images[0][0])
spatial_proba_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-sproba', ))
spatial_label_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-slabel'))
condhist_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-chist'))
skeleton_proba_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-kproba', ))
skeleton_label_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-klabel'))
if overwrite is False \
and os.path.isfile(spatial_proba_file) \
and os.path.isfile(spatial_label_file) \
and os.path.isfile(condhist_file) \
and os.path.isfile(skeleton_proba_file) \
and os.path.isfile(skeleton_label_file):
print("skip computation (use existing results)")
output = {'max_spatial_proba': spatial_proba_file,
'max_spatial_label': spatial_label_file,
'cond_hist': condhist_file,
'max_skeleton_proba': skeleton_proba_file,
'max_skeleton_label': skeleton_label_file}
return output
# start virtual machine, if not already running
try:
mem = _check_available_memory()
nighresjava.initVM(initialheap=mem['init'], maxheap=mem['max'])
except ValueError:
pass
# create instance
cspmax = nighresjava.ConditionalShapeSegmentation()
# set parameters
cspmax.setNumberOfSubjectsObjectsBgAndContrasts(subjects,structures,background,contrasts)
cspmax.setOptions(True, False, False, False, True)
cspmax.setHistogramSmoothing(smoothing)
# load target image for parameters
# load a first image for dim, res
img = load_volume(contrast_images[0][0])
data = img.get_data()
header = img.get_header()
affine = img.get_affine()
trg_resolution = [x.item() for x in header.get_zooms()]
trg_dimensions = data.shape
cspmax.setTargetDimensions(trg_dimensions[0], trg_dimensions[1], trg_dimensions[2])
cspmax.setTargetResolutions(trg_resolution[0], trg_resolution[1], trg_resolution[2])
resolution = trg_resolution
dimensions = trg_dimensions
cspmax.setAtlasDimensions(dimensions[0], dimensions[1], dimensions[2])
cspmax.setAtlasResolutions(resolution[0], resolution[1], resolution[2])
# load the atlas structures and contrasts, if needed
for sub in range(subjects):
for struct in range(structures):
print("load: "+str(levelset_images[sub][struct]))
data = load_volume(levelset_images[sub][struct]).get_data()
cspmax.setLevelsetImageAt(sub, struct, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
for contrast in range(contrasts):
print("load: "+str(contrast_images[sub][contrast]))
data = load_volume(contrast_images[sub][contrast]).get_data()
cspmax.setContrastImageAt(sub, contrast, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
# execute first step
scale = 1.0
try:
scale = cspmax.computeAtlasPriors()
except:
# if the Java module fails, reraise the error it throws
print("\n The underlying Java code did not execute cleanly: ")
print(sys.exc_info()[0])
raise
return
# clean up and go to second step
levelset_images = None
contrast_images = None
for sub in range(subjects):
for struct in range(structures):
print("load: "+str(skeleton_images[sub][struct]))
data = load_volume(skeleton_images[sub][struct]).get_data()
cspmax.setSkeletonImageAt(sub, struct, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
try:
cspmax.computeSkeletonPriors(scale)
except:
# if the Java module fails, reraise the error it throws
print("\n The underlying Java code did not execute cleanly: ")
print(sys.exc_info()[0])
raise
return
skeleton_images = None
# reshape output to what nibabel likes
dimensions = (dimensions[0],dimensions[1],dimensions[2],cspmax.getBestDimension())
dimskel = (dimensions[0],dimensions[1],dimensions[2],int(cspmax.getBestDimension()/4))
dims3Dtrg = (trg_dimensions[0],trg_dimensions[1],trg_dimensions[2])
intens_dims = (structures+background,structures+background,contrasts)
intens_hist_dims = ((structures+background)*(structures+background),cspmax.getNumberOfBins()+6,contrasts)
spatial_proba_data = numpy.reshape(numpy.array(cspmax.getBestSpatialProbabilityMaps(dimensions[3]),
dtype=numpy.float32), dimensions, 'F')
spatial_label_data = numpy.reshape(numpy.array(cspmax.getBestSpatialProbabilityLabels(dimensions[3]),
dtype=numpy.int32), dimensions, 'F')
intens_hist_data = numpy.reshape(numpy.array(cspmax.getConditionalHistogram(),
dtype=numpy.float32), intens_hist_dims, 'F')
skeleton_proba_data = numpy.reshape(numpy.array(cspmax.getBestSkeletonProbabilityMaps(dimskel[3]),
dtype=numpy.float32), dimskel, 'F')
skeleton_label_data = numpy.reshape(numpy.array(cspmax.getBestSkeletonProbabilityLabels(dimskel[3]),
dtype=numpy.int32), dimskel, 'F')
# adapt header max for each image so that correct max is displayed
# and create nifiti objects
header['cal_max'] = numpy.nanmax(spatial_proba_data)
spatial_proba = nibabel.Nifti1Image(spatial_proba_data, affine, header)
header['cal_max'] = numpy.nanmax(spatial_label_data)
spatial_label = nibabel.Nifti1Image(spatial_label_data, affine, header)
chist = nibabel.Nifti1Image(intens_hist_data, None, None)
header['cal_max'] = numpy.nanmax(skeleton_proba_data)
skeleton_proba = nibabel.Nifti1Image(skeleton_proba_data, affine, header)
header['cal_max'] = numpy.nanmax(skeleton_label_data)
skeleton_label = nibabel.Nifti1Image(skeleton_label_data, affine, header)
if save_data:
save_volume(spatial_proba_file, spatial_proba)
save_volume(spatial_label_file, spatial_label)
save_volume(condhist_file, chist)
save_volume(skeleton_proba_file, skeleton_proba)
save_volume(skeleton_label_file, skeleton_label)
output= {'max_spatial_proba': spatial_proba_file,
'max_spatial_label': spatial_label_file,
'cond_hist': condhist_file,
'max_skeleton_proba': skeleton_proba_file,
'max_skeleton_label': skeleton_label_file}
return output
else:
output= {'max_spatial_proba': spatial_proba,
'max_spatial_label': spatial_label,
'cond_hist': chist,
'max_skeleton_proba': skeleton_proba,
'max_skeleton_label': skeleton_label}
return output
def conditional_shape_updating(subjects, structures, contrasts,
levelset_images=None, skeleton_images=None,
contrast_images=None,
atlas_weight=10.0, update_weight=10.0,
shape_atlas_probas=None, shape_atlas_labels=None,
intensity_atlas_hist=None,
skeleton_atlas_probas=None, skeleton_atlas_labels=None,
save_data=False, overwrite=False, output_dir=None,
file_name=None):
""" Conditioanl Shape Parcellation Atlas Updating
Updates a multi-atlas prior for conditional shape parcellation
Parameters
----------
subjects: int
Number of atlas subjects
structures: int
Number of structures to parcellate
contrasts: int
Number of image intensity contrasts
levelset_images: [niimg]
Atlas shape levelsets indexed by (subjects,structures)
skeleton_images: [niimg]
Atlas shape skeletons indexed by (subjects,structures)
contrast_images: [niimg]
Atlas images to use in the parcellation, indexed by (subjects, contrasts)
atlas_weight: float
Weighting factor for the current atlas (e.g. number of subjects already included)
update_weight: float
Weighting factor for the new update (e.g. number of subjects to be added)
shape_atlas_probas: niimg
Pre-computed shape atlas from the shape levelsets (replacing them)
shape_atlas_labels: niimg
Pre-computed shape atlas from the shape levelsets (replacing them)
intensity_atlas_hist: niimg
Pre-computed intensity atlas from the contrast images (replacing them)
skeleton_atlas_probas: niimg
Pre-computed skeleton atlas from the shape levelsets (replacing them)
skeleton_atlas_labels: niimg
Pre-computed skeleton atlas from the shape levelsets (replacing them)
save_data: bool
Save output data to file (default is False)
overwrite: bool
Overwrite existing results (default is False)
output_dir: str, optional
Path to desired output directory, will be created if it doesn't exist
file_name: str, optional
Desired base name for output files with file extension
(suffixes will be added)
Returns
----------
dict
Dictionary collecting outputs under the following keys
(suffix of output files in brackets)
* max_spatial_proba (niimg): Maximum spatial probability map (_cspmax-sproba)
* max_spatial_label (niimg): Maximum spatial probability labels (_cspmax-slabel)
* cond_hist (niimg): Conditional intensity histograms (_cspmax-chist)
* max_skeleton_proba (niimg): Maximum skeleton probability map (_cspmax-kproba)
* max_skeleton_label (niimg): Maximum skeleton probability labels (_cspmax-klabel)
Notes
----------
Original Java module by Pierre-Louis Bazin.
"""
print('\nConditional Shape Updating')
# make sure that saving related parameters are correct
if save_data:
output_dir = _output_dir_4saving(output_dir, contrast_images[0][0])
spatial_proba_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-sproba', ))
spatial_label_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-slabel'))
condhist_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-chist'))
skeleton_proba_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-kproba', ))
skeleton_label_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=contrast_images[0][0],
suffix='cspmax-klabel'))
if overwrite is False \
and os.path.isfile(spatial_proba_file) \
and os.path.isfile(spatial_label_file) \
and os.path.isfile(condhist_file) \
and os.path.isfile(skeleton_proba_file) \
and os.path.isfile(skeleton_label_file):
print("skip computation (use existing results)")
output = {'max_spatial_proba': spatial_proba_file,
'max_spatial_label': spatial_label_file,
'cond_hist': condhist_file,
'max_skeleton_proba': skeleton_proba_file,
'max_skeleton_label': skeleton_label_file}
return output
# start virtual machine, if not already running
try:
mem = _check_available_memory()
nighresjava.initVM(initialheap=mem['init'], maxheap=mem['max'])
except ValueError:
pass
# create instance
cspmax = nighresjava.ConditionalShapeSegmentation()
# set parameters
cspmax.setNumberOfSubjectsObjectsBgAndContrasts(subjects,structures,3,contrasts)
cspmax.setOptions(True, False, False, False, True)
# load target image for parameters
# load a first image for dim, res
img = load_volume(contrast_images[0][0])
data = img.get_data()
header = img.get_header()
affine = img.get_affine()
trg_resolution = [x.item() for x in header.get_zooms()]
trg_dimensions = data.shape
cspmax.setTargetDimensions(trg_dimensions[0], trg_dimensions[1], trg_dimensions[2])
cspmax.setTargetResolutions(trg_resolution[0], trg_resolution[1], trg_resolution[2])
resolution = trg_resolution
dimensions = trg_dimensions
cspmax.setAtlasDimensions(dimensions[0], dimensions[1], dimensions[2])
cspmax.setAtlasResolutions(resolution[0], resolution[1], resolution[2])
# load the shape and intensity atlases
print("load: "+str(os.path.join(output_dir,intensity_atlas_hist)))
hist = load_volume(os.path.join(output_dir,intensity_atlas_hist)).get_data()
cspmax.setConditionalHistogram(nighresjava.JArray('float')(
(hist.flatten('F')).astype(float)))
print("load: "+str(os.path.join(output_dir,shape_atlas_probas)))
pdata = load_volume(os.path.join(output_dir,shape_atlas_probas)).get_data()
print("load: "+str(os.path.join(output_dir,shape_atlas_labels)))
ldata = load_volume(os.path.join(output_dir,shape_atlas_labels)).get_data()
cspmax.setShapeAtlasProbasAndLabels(nighresjava.JArray('float')(
(pdata.flatten('F')).astype(float)),
nighresjava.JArray('int')(
(ldata.flatten('F')).astype(int).tolist()))
print("load: "+str(os.path.join(output_dir,skeleton_atlas_probas)))
pdata = load_volume(os.path.join(output_dir,skeleton_atlas_probas)).get_data()
print("load: "+str(os.path.join(output_dir,skeleton_atlas_labels)))
ldata = load_volume(os.path.join(output_dir,skeleton_atlas_labels)).get_data()
cspmax.setSkeletonAtlasProbasAndLabels(nighresjava.JArray('float')(
(pdata.flatten('F')).astype(float)),
nighresjava.JArray('int')(
(ldata.flatten('F')).astype(int).tolist()))
# load the atlas structures and contrasts, if needed
for sub in range(subjects):
for struct in range(structures):
print("load: "+str(levelset_images[sub][struct]))
data = load_volume(levelset_images[sub][struct]).get_data()
cspmax.setLevelsetImageAt(sub, struct, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
for contrast in range(contrasts):
print("load: "+str(contrast_images[sub][contrast]))
data = load_volume(contrast_images[sub][contrast]).get_data()
cspmax.setContrastImageAt(sub, contrast, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
# execute first step
scale = 1.0
try:
scale = cspmax.updateAtlasPriors(atlas_weight, update_weight)
except:
# if the Java module fails, reraise the error it throws
print("\n The underlying Java code did not execute cleanly: ")
print(sys.exc_info()[0])
raise
return
# clean up and go to second step
levelset_images = None
contrast_images = None
for sub in range(subjects):
for struct in range(structures):
print("load: "+str(skeleton_images[sub][struct]))
data = load_volume(skeleton_images[sub][struct]).get_data()
cspmax.setSkeletonImageAt(sub, struct, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
try:
cspmax.updateSkeletonPriors(scale, atlas_weight, update_weight)
except:
# if the Java module fails, reraise the error it throws
print("\n The underlying Java code did not execute cleanly: ")
print(sys.exc_info()[0])
raise
return
skeleton_images = None
# reshape output to what nibabel likes
dimensions = (dimensions[0],dimensions[1],dimensions[2],cspmax.getBestDimension())
dimskel = (dimensions[0],dimensions[1],dimensions[2],int(cspmax.getBestDimension()/4))
dims3Dtrg = (trg_dimensions[0],trg_dimensions[1],trg_dimensions[2])
intens_dims = (structures+1,structures+1,contrasts)
intens_hist_dims = ((structures+1)*(structures+1),cspmax.getNumberOfBins()+6,contrasts)
spatial_proba_data = numpy.reshape(numpy.array(cspmax.getBestSpatialProbabilityMaps(dimensions[3]),
dtype=numpy.float32), dimensions, 'F')
spatial_label_data = numpy.reshape(numpy.array(cspmax.getBestSpatialProbabilityLabels(dimensions[3]),
dtype=numpy.int32), dimensions, 'F')
intens_hist_data = numpy.reshape(numpy.array(cspmax.getConditionalHistogram(),
dtype=numpy.float32), intens_hist_dims, 'F')
skeleton_proba_data = numpy.reshape(numpy.array(cspmax.getBestSkeletonProbabilityMaps(dimskel[3]),
dtype=numpy.float32), dimskel, 'F')
skeleton_label_data = numpy.reshape(numpy.array(cspmax.getBestSkeletonProbabilityLabels(dimskel[3]),
dtype=numpy.int32), dimskel, 'F')
# adapt header max for each image so that correct max is displayed
# and create nifiti objects
header['cal_max'] = numpy.nanmax(spatial_proba_data)
spatial_proba = nibabel.Nifti1Image(spatial_proba_data, affine, header)
header['cal_max'] = numpy.nanmax(spatial_label_data)
spatial_label = nibabel.Nifti1Image(spatial_label_data, affine, header)
chist = nibabel.Nifti1Image(intens_hist_data, None, None)
header['cal_max'] = numpy.nanmax(skeleton_proba_data)
skeleton_proba = nibabel.Nifti1Image(skeleton_proba_data, affine, header)
header['cal_max'] = numpy.nanmax(skeleton_label_data)
skeleton_label = nibabel.Nifti1Image(skeleton_label_data, affine, header)
if save_data:
save_volume(spatial_proba_file, spatial_proba)
save_volume(spatial_label_file, spatial_label)
save_volume(condhist_file, chist)
save_volume(skeleton_proba_file, skeleton_proba)
save_volume(skeleton_label_file, skeleton_label)
output= {'max_spatial_proba': spatial_proba_file,
'max_spatial_label': spatial_label_file,
'cond_hist': condhist_file,
'max_skeleton_proba': skeleton_proba_file,
'max_skeleton_label': skeleton_label_file}
return output
else:
output= {'max_spatial_proba': spatial_proba,
'max_spatial_label': spatial_label,
'cond_hist': chist,
'max_skeleton_proba': skeleton_proba,
'max_skeleton_label': skeleton_label}
return output
def conditional_shape_map_intensities(structures, contrasts, targets,
contrast_images=None, target_images=None,
shape_atlas_probas=None, shape_atlas_labels=None,
intensity_atlas_hist=None,
skeleton_atlas_probas=None, skeleton_atlas_labels=None,
save_data=False, overwrite=False, output_dir=None,
file_name=None):
""" Conditioanl Shape Parcellation Intensity Mapping
Maps intensity priors between contrasts for conditional shape parcellation
Parameters
----------
structures: int
Number of structures to parcellate
contrasts: int
Number of atlas image intensity contrasts
targets: int
Number of target image intensity contrasts
contrast_images: [niimg]
Average atlas images (per atlas contrast)
target_images: [niimg]
Average target images (per target contrast)
shape_atlas_probas: niimg
Pre-computed shape atlas from the shape levelsets (replacing them)
shape_atlas_labels: niimg
Pre-computed shape atlas from the shape levelsets (replacing them)
intensity_atlas_hist: niimg
Pre-computed intensity atlas from the contrast images (replacing them)
skeleton_atlas_probas: niimg
Pre-computed skeleton atlas from the shape levelsets (replacing them)
skeleton_atlas_labels: niimg
Pre-computed skeleton atlas from the shape levelsets (replacing them)
save_data: bool
Save output data to file (default is False)
overwrite: bool
Overwrite existing results (default is False)
output_dir: str, optional
Path to desired output directory, will be created if it doesn't exist
file_name: str, optional
Desired base name for output files with file extension
(suffixes will be added)
Returns
----------
dict
Dictionary collecting outputs under the following keys
(suffix of output files in brackets)
* cond_hist (niimg): Conditional intensity histograms (_cspmax-chist)
Notes
----------
Original Java module by Pierre-Louis Bazin.
"""
print('\nConditional Shape Intensity Mapping')
# make sure that saving related parameters are correct
if save_data:
output_dir = _output_dir_4saving(output_dir, target_images[0])
condhist_file = os.path.join(output_dir,
_fname_4saving(module=__name__,file_name=file_name,
rootfile=target_images[0],
suffix='cspmax-chist'))
if overwrite is False \
and os.path.isfile(condhist_file):
print("skip computation (use existing results)")
output = {'cond_hist': condhist_file}
return output
# start virtual machine, if not already running
try:
mem = _check_available_memory()
nighresjava.initVM(initialheap=mem['init'], maxheap=mem['max'])
except ValueError:
pass
# create instance
cspmax = nighresjava.ConditionalShapeSegmentation()
# set parameters
cspmax.setNumberOfSubjectsObjectsBgAndContrasts(1,structures,1,contrasts)
cspmax.setOptions(True, False, False, False, True)
cspmax.setNumberOfTargetContrasts(targets)
# load target image for parameters
# load a first image for dim, res
img = load_volume(contrast_images[0])
data = img.get_data()
header = img.get_header()
affine = img.get_affine()
trg_resolution = [x.item() for x in header.get_zooms()]
trg_dimensions = data.shape
cspmax.setTargetDimensions(trg_dimensions[0], trg_dimensions[1], trg_dimensions[2])
cspmax.setTargetResolutions(trg_resolution[0], trg_resolution[1], trg_resolution[2])
resolution = trg_resolution
dimensions = trg_dimensions
cspmax.setAtlasDimensions(dimensions[0], dimensions[1], dimensions[2])
cspmax.setAtlasResolutions(resolution[0], resolution[1], resolution[2])
# load the shape and intensity atlases
print("load: "+str(os.path.join(output_dir,intensity_atlas_hist)))
hist = load_volume(os.path.join(output_dir,intensity_atlas_hist)).get_data()
cspmax.setConditionalHistogram(nighresjava.JArray('float')(
(hist.flatten('F')).astype(float)))
print("load: "+str(os.path.join(output_dir,shape_atlas_probas)))
pdata = load_volume(os.path.join(output_dir,shape_atlas_probas)).get_data()
print("load: "+str(os.path.join(output_dir,shape_atlas_labels)))
ldata = load_volume(os.path.join(output_dir,shape_atlas_labels)).get_data()
cspmax.setShapeAtlasProbasAndLabels(nighresjava.JArray('float')(
(pdata.flatten('F')).astype(float)),
nighresjava.JArray('int')(
(ldata.flatten('F')).astype(int).tolist()))
print("load: "+str(os.path.join(output_dir,skeleton_atlas_probas)))
pdata = load_volume(os.path.join(output_dir,skeleton_atlas_probas)).get_data()
print("load: "+str(os.path.join(output_dir,skeleton_atlas_labels)))
ldata = load_volume(os.path.join(output_dir,skeleton_atlas_labels)).get_data()
cspmax.setSkeletonAtlasProbasAndLabels(nighresjava.JArray('float')(
(pdata.flatten('F')).astype(float)),
nighresjava.JArray('int')(
(ldata.flatten('F')).astype(int).tolist()))
# load the atlas and target images
for contrast in range(contrasts):
print("load: "+str(contrast_images[contrast]))
data = load_volume(contrast_images[contrast]).get_data()
cspmax.setAvgAtlasImageAt(contrast, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
for target in range(targets):
print("load: "+str(target_images[target]))
data = load_volume(target_images[target]).get_data()
cspmax.setAvgTargetImageAt(target, nighresjava.JArray('float')(
(data.flatten('F')).astype(float)))
# execute the transfer
try:
cspmax.mapAtlasTargetIntensityPriors()
except:
# if the Java module fails, reraise the error it throws
print("\n The underlying Java code did not execute cleanly: ")
print(sys.exc_info()[0])
raise
return
# reshape output to what nibabel likes
intens_hist_dims = ((structures+1)*(structures+1),cspmax.getNumberOfBins()+6,targets)
intens_hist_data = numpy.reshape(numpy.array(cspmax.getTargetConditionalHistogram(),
dtype=numpy.float32), intens_hist_dims, 'F')
# adapt header max for each image so that correct max is displayed
# and create nifiti objects
chist = nibabel.Nifti1Image(intens_hist_data, None, None)
if save_data:
save_volume(condhist_file, chist)
output= {'cond_hist': condhist_file}
return output
else:
output= {'cond_hist': chist}
return output
| 43.758653 | 112 | 0.638334 | 5,182 | 46,778 | 5.540332 | 0.07198 | 0.02163 | 0.016719 | 0.02675 | 0.846256 | 0.823824 | 0.808847 | 0.797039 | 0.783246 | 0.766353 | 0 | 0.008974 | 0.268695 | 46,778 | 1,068 | 113 | 43.799625 | 0.830279 | 0.235709 | 0 | 0.75 | 0 | 0 | 0.060467 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.006711 | false | 0.006711 | 0.015101 | 0 | 0.052013 | 0.078859 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
45cba07b0d9840947fcdc46e255270926b98e403 | 34,425 | py | Python | easygl/structures/vectors.py | overdev/easygl-0.1.0-alpha1 | c3ab70c272db670cbe4e79a99371d21da466d8a0 | [
"MIT"
] | null | null | null | easygl/structures/vectors.py | overdev/easygl-0.1.0-alpha1 | c3ab70c272db670cbe4e79a99371d21da466d8a0 | [
"MIT"
] | null | null | null | easygl/structures/vectors.py | overdev/easygl-0.1.0-alpha1 | c3ab70c272db670cbe4e79a99371d21da466d8a0 | [
"MIT"
] | null | null | null | # !/usr/bin/python
# -*- coding: utf-8 -*-
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
#
# The MIT License (MIT)
#
# Copyright (c) 2017 Jorge A. Gomes (jorgegomes83 at hotmail dot com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
import math
import operator
import struct
from collections import namedtuple as nt
from typing import Union, Sequence, Iterable, Container

from easygl.arrays.datatypes import DType
# Public API for `from easygl.structures.vectors import *`.
# FrozenVec2/FrozenVec3 are defined in this module as well; they were
# previously missing from the export list.
__all__ = [
    'Vec2',
    'Vec3',
    'Vec4',
    'FrozenVec2',
    'FrozenVec3',
    'FrozenVec4',
]

# Valid multi-component swizzle / alias attribute names accepted by each
# vector's __setattr__. Bare component letters (x, y, z, w) are handled
# separately through __slots__.
V2 = "xy yx u v uv vu".split()
V3 = ("xy xz yx yz zx zy xyz xzy yxz yzx zxy zyx "
      "r g b "
      "rg rb gr gb br bg rgb rbg grb gbr brg bgr").split()
V4 = ("xy xz xw yx yz yw zx zy zw wx wy wz "
      "xyz xyw xzy xzw xwy xwz yxz yxw yzx yzw ywx ywz zxy zxw zyx zyw zwx zwy wxy wxz wyx wyz wzx wzy "
      "xyzw xywz xzyw xzwy xwyz xwzy yxzw yxwz yzxw yzwx ywxz ywzx "
      "zxyw zxwy zyxw zywx zwxy zwyx wxyz wxzy wyxz wyzx wzxy wzyx "
      "r g b a "
      "rg rb ra gr gb ga br bg ba ar ag ab "
      "rgb rga rbg rba rag rab grb gra gbr gba gar gab brg bra bgr bga bar bag arg arb agr agb abr abg "
      "rgba rgab rbga rbag ragb rabg grba grab gbra gbar garb gabr "
      "brga brag bgra bgar barg bagr argb arbg agrb agbr abrg abgr").split()
def getargs(l, *args):
    # type: (list, ...) -> None
    """Flatten numbers and arbitrarily nested iterables from *args* into *l*."""
    for item in args:  # type: Union[int, float, Iterable]
        if isinstance(item, (int, float)):
            l.append(item)
            continue
        # Not a scalar: recurse into the iterable, preserving order.
        getargs(l, *item)
class Arithvector(Iterable, Sequence):
def __len__(self):
return 0
def __abs__(self):
return self.__class__(*(abs(v) for v in self))
def __neg__(self):
return self.__class__(*(-v for v in self))
def __add__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(v + other for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(self[i] + other[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __radd__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(other + v for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(other[i] + self[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __iadd__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
for i in range(len(self)):
self[i] += other
return self
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
for i in range(n):
self[i] += other[i]
return self
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __sub__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(v - other for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(self[i] - other[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __rsub__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(other - v for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(other[i] - self[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __isub__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
for i in range(len(self)):
self[i] -= other
return self
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
for i in range(n):
self[i] -= other[i]
return self
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __mul__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(v * other for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(self[i] * other[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __rmul__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(other * v for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(other[i] * self[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __imul__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
for i in range(len(self)):
self[i] *= other
return self
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
for i in range(n):
self[i] *= other[i]
return self
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __truediv__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(v / other for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(self[i] / other[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __rtruediv__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(other / v for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(other[i] / self[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __itruediv__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
for i in range(len(self)):
self[i] /= other
return self
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
for i in range(n):
self[i] /= other[i]
return self
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __mod__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(v % other for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(self[i] % other[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __rmod__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
return self.__class__(*(other % v for v in self))
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
return self.__class__(*(other[i] % self[i] for i in range(n)))
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __imod__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> Arithvector
if isinstance(other, (int, float)):
for i in range(len(self)):
self[i] %= other
return self
else:
try:
n = len(self)
if n != len(other):
return NotImplemented
for i in range(n):
self[i] %= other[i]
return self
except (TypeError, ValueError, IndexError, KeyError):
return NotImplemented
def __eq__(self, other):
# type: (Union[int, float, list, tuple, Arithvector]) -> bool
try:
n = len(self)
return all(self[i] == other[i] for i in range(n)) and len(other) == n
except (TypeError, KeyError, IndexError):
return False
class Vec2(Arithvector):
    """Mutable 2D float vector with swizzle access via x/y (or u/v aliases)."""

    __slots__ = 'x', 'y'

    @classmethod
    def zero(cls):
        # type: () -> Vec2
        """Return the null vector (0, 0)."""
        return cls(0., 0.)

    @classmethod
    def one(cls):
        # type: () -> Vec2
        """Return the vector (1, 1)."""
        return cls(1., 1.)

    @classmethod
    def horz(cls, x):
        # type: (float) -> Vec2
        """Return a horizontal vector (x, 0)."""
        return cls(x, 0.)

    @classmethod
    def vert(cls, y):
        # type: (float) -> Vec2
        """Return a vertical vector (0, y)."""
        return cls(0., y)

    # region - - -- ----==<[ COMMON ]>==---- -- - -

    def __init__(self, *args):
        """Build from exactly 2 scalars; nested iterables are flattened."""
        values = []
        getargs(values, *args)
        if len(values) != len(self):
            raise ValueError("Too much or to few values: expected 2, got {}.".format(len(values)))
        self.x = float(values[0])
        self.y = float(values[1])

    def __len__(self):
        return 2

    def __getitem__(self, key):
        # type: (int) -> float
        return (self.x, self.y).__getitem__(key)

    def __setitem__(self, key, value):
        # type: (int, float) -> None
        # float() coercion keeps components homogeneous; previously indexed
        # writes stored the raw value, unlike __init__ and Vec4.__setitem__.
        super(Vec2, self).__setattr__({0: 'x', 1: 'y'}[key], float(value))

    def __iter__(self):
        return (self.x, self.y).__iter__()

    def __getattr__(self, name):
        """Swizzle read: e.g. ``v.yx`` -> Vec2(y, x); u/v alias x/y."""
        if name[0] in 'xy':
            swz = 'xy'
        elif name[0] in 'uv':
            swz = 'uv'
        else:
            raise AttributeError("Vec2 has no '{}' attribute.".format(name))
        if len(name) == 1:
            attr = {'x': 'x', 'y': 'y', 'u': 'x', 'v': 'y'}
            return getattr(self, attr[name])
        elif len(name) not in (2, 3, 4):
            raise AttributeError("Attribute swizzling is too long ({}).".format(len(name)))
        else:
            # Result type depends on how many components were requested.
            v = {2: Vec2, 3: Vec3, 4: Vec4}[len(name)]
            i = [self.x, self.y]
            try:
                return v(*(i[swz.index(ch)] for ch in name))
            except ValueError:
                raise AttributeError("Vec2 '{}' swizzled with invalid attribute(s).".format(name))

    def __setattr__(self, name, value):
        # type: (str, Union[tuple, list, Container, Iterable, Sequence, Vec2]) -> None
        """Swizzle write: e.g. ``v.yx = (a, b)`` assigns y=a, x=b."""
        if name in V2:
            attr = {'x': 'x', 'y': 'y', 'u': 'x', 'v': 'y'}
            n = len(name)
            if n > 1:
                try:
                    if n != len(value):
                        raise ValueError("Attribute needs {} float values, not {}.".format(n, len(value)))
                except TypeError:
                    raise ValueError("Attribute needs {} float values, not 1.".format(n))
                for i, ch in enumerate(name):
                    super(Vec2, self).__setattr__(attr[ch], float(value[i]))
            else:
                super(Vec2, self).__setattr__(attr[name], float(value))
        elif name in self.__slots__:
            super(Vec2, self).__setattr__(name, float(value))
        else:
            raise AttributeError("Vec2 object has no '{}' attribute.".format(name))

    def __str__(self):
        return "({}, {})".format(self.x, self.y)

    def __repr__(self):
        return "Vec2{}".format(str(self))

    # endregion

    # region - - -- ----==<[ OTHER ]>==---- -- - -

    def hypot(self):
        # type: () -> float
        """Return the squared magnitude x**2 + y**2 (was mis-annotated as None)."""
        return self.x ** 2 + self.y ** 2

    def dot(self, other):
        # type: (Vec2) -> float
        """Return the dot product with *other*."""
        return ((self.x * other.x) +
                (self.y * other.y))

    def cross(self, other):
        # type: (Vec2) -> float
        """Return the scalar 2D cross product (z of the implied 3D cross)."""
        return self.x * other.y - self.y * other.x

    def length(self):
        # type: () -> float
        """Return the Euclidean magnitude."""
        return math.sqrt(self.hypot())

    def normalize(self):
        # type: () -> Vec2
        """Scale this vector to unit length in place; zero stays zero."""
        magnitude = self.length()
        if magnitude != 0.:
            self.x /= magnitude
            self.y /= magnitude
        else:
            self.x = self.y = 0.
        return self

    def normalized(self):
        # type: () -> Vec2
        """Return a unit-length copy; the zero vector yields (0, 0)."""
        magnitude = self.length()
        if magnitude != 0.:
            return Vec2(
                self.x / magnitude,
                self.y / magnitude,
            )
        return Vec2(0., 0.)

    # endregion

    @staticmethod
    def bytesize(as_double=False):
        """Return the packed byte size of one Vec2 (float or double layout)."""
        if as_double:
            return struct.calcsize(DType.double_v2.format)
        return struct.calcsize(DType.float_v2.format)

    @staticmethod
    def pack_values(*values, as_double=False):
        """Pack loose component values using the Vec2 struct format."""
        fmt = DType.double_v2.format if as_double else DType.float_v2.format
        return struct.pack(fmt, *values)

    def pack(self, as_double=False):
        """Pack this vector's components into bytes."""
        fmt = DType.double_v2.format if as_double else DType.float_v2.format
        return struct.pack(fmt, self.x, self.y)

    def unpack(self, buffer, as_double=False):
        """Load components from *buffer* and return self."""
        fmt = DType.double_v2.format if as_double else DType.float_v2.format
        self.x, self.y = struct.unpack(fmt, buffer)
        return self

    def pack_into(self, buffer, offset, as_double=False):
        """Pack components into writable *buffer* at *offset*."""
        fmt = DType.double_v2.format if as_double else DType.float_v2.format
        struct.pack_into(fmt, buffer, offset, self.x, self.y)

    @staticmethod
    def pack_values_into(*values, buffer, offset, as_double=False):
        """Pack loose values into *buffer* at *offset* with the Vec2 format."""
        fmt = DType.double_v2.format if as_double else DType.float_v2.format
        struct.pack_into(fmt, buffer, offset, *values)

    def unpack_from(self, buffer, offset, as_double=False):
        """Load components from *buffer* at *offset* and return self."""
        fmt = DType.double_v2.format if as_double else DType.float_v2.format
        self.x, self.y = struct.unpack_from(fmt, buffer, offset)
        return self
class Vec3(Arithvector):
    """Mutable 3D float vector with swizzle access via x/y/z (or r/g/b aliases)."""

    __slots__ = 'x', 'y', 'z'

    # region - - -- ----==<[ COMMON ]>==---- -- - -

    def __init__(self, *args):
        """Build from exactly 3 scalars; nested iterables are flattened."""
        values = []
        getargs(values, *args)
        if len(values) != len(self):
            raise ValueError("Too much or to few values: expected 3, got {}.".format(len(values)))
        self.x = float(values[0])
        self.y = float(values[1])
        self.z = float(values[2])

    def __len__(self):
        return 3

    def __getitem__(self, key):
        # type: (int) -> float
        return (self.x, self.y, self.z).__getitem__(key)

    def __setitem__(self, key, value):
        # type: (int, float) -> None
        # float() coercion keeps components homogeneous; previously indexed
        # writes stored the raw value, unlike __init__ and Vec4.__setitem__.
        super(Vec3, self).__setattr__({0: 'x', 1: 'y', 2: 'z'}[key], float(value))

    def __iter__(self):
        return (self.x, self.y, self.z).__iter__()

    def __str__(self):
        return "({}, {}, {})".format(self.x, self.y, self.z)

    def __repr__(self):
        return "Vec3{}".format(str(self))

    def __getattr__(self, name):
        """Swizzle read: e.g. ``v.zyx``; r/g/b alias x/y/z."""
        if name[0] in 'xyz':
            swz = 'xyz'
        elif name[0] in 'rgb':
            swz = 'rgb'
        else:
            raise AttributeError("Vec3 has no '{}' attribute.".format(name))
        if len(name) == 1:
            attr = {'x': 'x', 'y': 'y', 'z': 'z', 'r': 'x', 'g': 'y', 'b': 'z'}
            return getattr(self, attr[name])
        elif len(name) not in (2, 3, 4):
            raise AttributeError("Attribute swizzling is too long ({}).".format(len(name)))
        else:
            # Result type depends on how many components were requested.
            v = {2: Vec2, 3: Vec3, 4: Vec4}[len(name)]
            i = [self.x, self.y, self.z]
            try:
                return v(*(i[swz.index(ch)] for ch in name))
            except ValueError:
                raise AttributeError("Vec3 '{}' swizzled with invalid attribute(s).".format(name))

    def __setattr__(self, name, value):
        # type: (str, Union[tuple, list, Container, Iterable, Sequence, Vec3]) -> None
        """Swizzle write: e.g. ``v.zyx = (a, b, c)`` assigns z=a, y=b, x=c."""
        if name in V3:
            attr = {'x': 'x', 'y': 'y', 'z': 'z', 'r': 'x', 'g': 'y', 'b': 'z'}
            n = len(name)
            if n > 1:
                try:
                    if n != len(value):
                        raise ValueError("Attribute needs {} float values, not {}.".format(n, len(value)))
                except TypeError:
                    raise ValueError("Attribute needs {} float values, not 1.".format(n))
                for i, ch in enumerate(name):
                    super(Vec3, self).__setattr__(attr[ch], float(value[i]))
            else:
                super(Vec3, self).__setattr__(attr[name], float(value))
        elif name in self.__slots__:
            super(Vec3, self).__setattr__(name, float(value))
        else:
            raise AttributeError("Vec3 object has no '{}' attribute.".format(name))

    # endregion

    # region - - -- ----==<[ OTHER ]>==---- -- - -

    def hypot(self):
        # type: () -> float
        """Return the squared magnitude (was mis-annotated as None)."""
        return self.x ** 2 + self.y ** 2 + self.z ** 2

    def dot(self, other):
        # type: (Union[Vec3, Vec4]) -> float
        """Return the dot product with *other* (xyz components only)."""
        return ((self.x * other.x) +
                (self.y * other.y) +
                (self.z * other.z))

    def cross(self, other):
        # type: (Union[Vec3, Vec4]) -> Vec3
        """Return the cross product with *other*.

        Fixed: the previous component formula was transposed
        (e.g. ``x*oz - z*oy`` for the x component), so ``v.cross(v)`` was
        not the zero vector. This is the standard right-handed formula.
        """
        return Vec3(self.y * other.z - self.z * other.y,
                    self.z * other.x - self.x * other.z,
                    self.x * other.y - self.y * other.x)

    def length(self):
        # type: () -> float
        """Return the Euclidean magnitude."""
        return math.sqrt(self.hypot())

    def normalize(self):
        # type: () -> Vec3
        """Scale this vector to unit length in place; zero stays zero."""
        magnitude = self.length()
        if magnitude != 0.:
            self.x /= magnitude
            self.y /= magnitude
            self.z /= magnitude
        else:
            self.x = self.y = self.z = 0.
        return self

    def normalized(self):
        # type: () -> Vec3
        """Return a unit-length copy; the zero vector yields (0, 0, 0)."""
        magnitude = self.length()
        if magnitude != 0.:
            return Vec3(
                self.x / magnitude,
                self.y / magnitude,
                self.z / magnitude
            )
        return Vec3(0., 0., 0.)

    # endregion

    @staticmethod
    def bytesize(as_double=False):
        """Return the packed byte size of one Vec3.

        Fixed: previously queried the *_v2 formats (copy-paste from Vec2),
        under-reporting the size by one component.
        """
        if as_double:
            return struct.calcsize(DType.double_v3.format)
        return struct.calcsize(DType.float_v3.format)

    @staticmethod
    def pack_values(*values, as_double=False):
        """Pack loose component values using the Vec3 struct format."""
        fmt = DType.double_v3.format if as_double else DType.float_v3.format
        return struct.pack(fmt, *values)

    def pack(self, as_double=False):
        """Pack this vector's components into bytes."""
        fmt = DType.double_v3.format if as_double else DType.float_v3.format
        return struct.pack(fmt, self.x, self.y, self.z)

    def unpack(self, buffer, as_double=False):
        """Load components from *buffer* and return self."""
        fmt = DType.double_v3.format if as_double else DType.float_v3.format
        self.x, self.y, self.z = struct.unpack(fmt, buffer)
        return self

    def pack_into(self, buffer, offset, as_double=False):
        """Pack components into writable *buffer* at *offset*."""
        fmt = DType.double_v3.format if as_double else DType.float_v3.format
        struct.pack_into(fmt, buffer, offset, self.x, self.y, self.z)

    @staticmethod
    def pack_values_into(*values, buffer, offset, as_double=False):
        """Pack loose values into *buffer* at *offset* with the Vec3 format."""
        fmt = DType.double_v3.format if as_double else DType.float_v3.format
        struct.pack_into(fmt, buffer, offset, *values)

    def unpack_from(self, buffer, offset, as_double=False):
        """Load components from *buffer* at *offset* and return self.

        Fixed: now returns self like Vec2.unpack_from (previously returned None).
        """
        fmt = DType.double_v3.format if as_double else DType.float_v3.format
        self.x, self.y, self.z = struct.unpack_from(fmt, buffer, offset)
        return self
class Vec4(Arithvector):
    """Mutable 4D float vector (x/y/z/w, aliases r/g/b/a).

    The w component is treated as a homogeneous coordinate: hypot/dot/cross
    operate on xyz only and normalize forces w to 1 — presumably intentional
    for homogeneous 3D math (TODO confirm with callers).
    """

    __slots__ = 'x', 'y', 'z', 'w'

    # region - - -- ----==<[ COMMON ]>==---- -- - -

    def __init__(self, *args):
        """Build from exactly 4 scalars; nested iterables are flattened."""
        values = []
        getargs(values, *args)
        if len(values) != len(self):
            raise ValueError("Too much or to few values: expected 4, got {}.".format(len(values)))
        self.x = float(values[0])
        self.y = float(values[1])
        self.z = float(values[2])
        self.w = float(values[3])

    def __len__(self):
        return 4

    def __getitem__(self, key):
        # type: (int) -> float
        return (self.x, self.y, self.z, self.w).__getitem__(key)

    def __setitem__(self, key, value):
        # type: (int, float) -> None
        super(Vec4, self).__setattr__({0: 'x', 1: 'y', 2: 'z', 3: 'w'}[key], float(value))

    def __iter__(self):
        return (self.x, self.y, self.z, self.w).__iter__()

    def __str__(self):
        return "({}, {}, {}, {})".format(self.x, self.y, self.z, self.w)

    def __repr__(self):
        return "Vec4{}".format(str(self))

    def __getattr__(self, name):
        """Swizzle read: e.g. ``v.wzyx``; r/g/b/a alias x/y/z/w."""
        if name[0] in 'xyzw':
            swz = 'xyzw'
        elif name[0] in 'rgba':
            swz = 'rgba'
        else:
            raise AttributeError("Vec4 has no '{}' attribute.".format(name))
        if len(name) == 1:
            attr = {'x': 'x', 'y': 'y', 'z': 'z', 'w': 'w', 'r': 'x', 'g': 'y', 'b': 'z', 'a': 'w'}
            return getattr(self, attr[name])
        elif len(name) not in (2, 3, 4):
            raise AttributeError("Attribute swizzling is too long ({}).".format(len(name)))
        else:
            # Result type depends on how many components were requested.
            v = {2: Vec2, 3: Vec3, 4: Vec4}[len(name)]
            i = [self.x, self.y, self.z, self.w]
            try:
                return v(*(i[swz.index(ch)] for ch in name))
            except ValueError:
                raise AttributeError("Vec4 '{}' swizzled with invalid attribute(s).".format(name))

    def __setattr__(self, name, value):
        # type: (str, Union[tuple, list, Container, Iterable, Sequence, Vec4]) -> None
        """Swizzle write: e.g. ``v.wzyx = (a, b, c, d)``."""
        if name in V4:
            attr = {'x': 'x', 'y': 'y', 'z': 'z', 'w': 'w', 'r': 'x', 'g': 'y', 'b': 'z', 'a': 'w'}
            n = len(name)
            if n > 1:
                try:
                    if n != len(value):
                        raise ValueError("Attribute needs {} float values, not {}.".format(n, len(value)))
                except TypeError:
                    raise ValueError("Attribute needs {} float values, not 1.".format(n))
                for i, ch in enumerate(name):
                    super(Vec4, self).__setattr__(attr[ch], float(value[i]))
            else:
                super(Vec4, self).__setattr__(attr[name], float(value))
        elif name in self.__slots__:
            super(Vec4, self).__setattr__(name, float(value))
        else:
            raise AttributeError("Vec4 object has no '{}' attribute.".format(name))

    # endregion

    # region - - -- ----==<[ OTHER ]>==---- -- - -

    def hypot(self):
        # type: () -> float
        """Return the squared magnitude of the xyz part (w is excluded)."""
        return self.x ** 2 + self.y ** 2 + self.z ** 2

    def dot(self, other):
        # type: (Union[Vec3, Vec4]) -> float
        """Return the 3D dot product (w is excluded)."""
        return ((self.x * other.x) +
                (self.y * other.y) +
                (self.z * other.z))

    def cross(self, other):
        # type: (Union[Vec3, Vec4]) -> Vec4
        """Return the 3D cross product with w set to 1.

        Fixed: the previous component formula was transposed, so
        ``v.cross(v)`` was not the zero vector. Standard right-handed formula.
        """
        return Vec4(self.y * other.z - self.z * other.y,
                    self.z * other.x - self.x * other.z,
                    self.x * other.y - self.y * other.x,
                    1.)

    def length(self):
        # type: () -> float
        """Return the Euclidean magnitude of the xyz part."""
        return math.sqrt(self.hypot())

    def normalize(self):
        # type: () -> Vec4
        """Normalize xyz in place; w is always reset to 1."""
        magnitude = self.length()
        if magnitude != 0.:
            self.x /= magnitude
            self.y /= magnitude
            self.z /= magnitude
        else:
            self.x = self.y = self.z = 0.
        self.w = 1.
        return self

    def normalized(self):
        # type: () -> Vec4
        """Return a normalized copy (w forced to 1)."""
        magnitude = self.length()
        if magnitude != 0.:
            return Vec4(
                self.x / magnitude,
                self.y / magnitude,
                self.z / magnitude,
                1.
            )
        return Vec4(0., 0., 0., 1.)

    # endregion

    @staticmethod
    def bytesize(as_double=False):
        """Return the packed byte size of one Vec4.

        Fixed: previously queried the *_v2 formats (copy-paste from Vec2),
        under-reporting the size by two components.
        """
        if as_double:
            return struct.calcsize(DType.double_v4.format)
        return struct.calcsize(DType.float_v4.format)

    @staticmethod
    def pack_values(*values, as_double=False):
        """Pack loose component values using the Vec4 struct format."""
        fmt = DType.double_v4.format if as_double else DType.float_v4.format
        return struct.pack(fmt, *values)

    def pack(self, as_double=False):
        """Pack this vector's components into bytes."""
        fmt = DType.double_v4.format if as_double else DType.float_v4.format
        return struct.pack(fmt, self.x, self.y, self.z, self.w)

    def unpack(self, buffer, as_double=False):
        """Load components from *buffer* and return self."""
        fmt = DType.double_v4.format if as_double else DType.float_v4.format
        self.x, self.y, self.z, self.w = struct.unpack(fmt, buffer)
        return self

    def pack_into(self, buffer, offset, as_double=False):
        """Pack components into writable *buffer* at *offset*."""
        fmt = DType.double_v4.format if as_double else DType.float_v4.format
        struct.pack_into(fmt, buffer, offset, self.x, self.y, self.z, self.w)

    @staticmethod
    def pack_values_into(*values, buffer, offset, as_double=False):
        """Pack loose values into *buffer* at *offset* with the Vec4 format."""
        fmt = DType.double_v4.format if as_double else DType.float_v4.format
        struct.pack_into(fmt, buffer, offset, *values)

    def unpack_from(self, buffer, offset, as_double=False):
        """Load components from *buffer* at *offset* and return self.

        Fixed: now returns self like Vec2.unpack_from (previously returned None).
        """
        fmt = DType.double_v4.format if as_double else DType.float_v4.format
        self.x, self.y, self.z, self.w = struct.unpack_from(fmt, buffer, offset)
        return self
class FrozenVec4(nt('FrozenVec4', 'x y z w'), Arithvector):
    """Immutable (namedtuple-based) 4-component vector with swizzle reads.

    In-place arithmetic operators fall back to the out-of-place ones and
    return a new instance, since tuples cannot be mutated.
    """

    def __getattr__(self, name):
        """Swizzle read: e.g. ``v.wzyx``; r/g/b/a alias x/y/z/w."""
        if name[0] in 'xyzw':
            swz = 'xyzw'
        elif name[0] in 'rgba':
            swz = 'rgba'
        else:
            raise AttributeError("Vec4 has no '{}' attribute.".format(name))
        if len(name) == 1:
            attr = {'x': 'x', 'y': 'y', 'z': 'z', 'w': 'w', 'r': 'x', 'g': 'y', 'b': 'z', 'a': 'w'}
            return getattr(self, attr[name])
        elif len(name) not in (2, 3, 4):
            raise AttributeError("Attribute swizzling is too long ({}).".format(len(name)))
        else:
            # 4-component results stay frozen; shorter ones use the mutable types.
            v = {2: Vec2, 3: Vec3, 4: FrozenVec4}[len(name)]
            try:
                return v(*(self[swz.index(ch)] for ch in name))
            except ValueError:
                raise AttributeError("Vec4 '{}' swizzled with invalid attribute(s).".format(name))

    # region - - -- ----==<[ OTHER ]>==---- -- - -

    def hypot(self):
        # type: () -> float
        """Return the squared magnitude of the xyz part (w is excluded)."""
        return self.x ** 2 + self.y ** 2 + self.z ** 2

    def dot(self, other):
        # type: (Union[Vec3, Vec4, FrozenVec4]) -> float
        """Return the 3D dot product (w is excluded)."""
        return ((self.x * other.x) +
                (self.y * other.y) +
                (self.z * other.z))

    def cross(self, other):
        # type: (Union[Vec3, Vec4]) -> FrozenVec4
        """Return the 3D cross product with w set to 1.

        Fixed: the previous component formula was transposed, so
        ``v.cross(v)`` was not the zero vector. Standard right-handed formula.
        """
        return FrozenVec4(self.y * other.z - self.z * other.y,
                          self.z * other.x - self.x * other.z,
                          self.x * other.y - self.y * other.x,
                          1.)

    def length(self):
        # type: () -> float
        """Return the Euclidean magnitude of the xyz part."""
        return math.sqrt(self.hypot())

    def normalized(self):
        # type: () -> FrozenVec4
        """Return a normalized copy (w forced to 1)."""
        magnitude = self.length()
        if magnitude != 0.:
            return FrozenVec4(
                self.x / magnitude,
                self.y / magnitude,
                self.z / magnitude,
                1.
            )
        return FrozenVec4(0., 0., 0., 1.)

    # endregion
class FrozenVec3(nt('FrozenVec3', 'x y z'), Arithvector):
    """Immutable (namedtuple-based) 3-component vector with swizzle reads."""

    def __getattr__(self, name):
        """Swizzle read: e.g. ``v.zyx``; r/g/b alias x/y/z."""
        if name[0] in 'xyz':
            swz = 'xyz'
        elif name[0] in 'rgb':
            swz = 'rgb'
        else:
            raise AttributeError("FrozenVec3 has no '{}' attribute.".format(name))
        if len(name) == 1:
            attr = {'x': 'x', 'y': 'y', 'z': 'z', 'r': 'x', 'g': 'y', 'b': 'z'}
            return getattr(self, attr[name])
        elif len(name) not in (2, 3, 4):
            raise AttributeError("Attribute swizzling is too long ({}).".format(len(name)))
        else:
            v = {2: Vec2, 3: FrozenVec3, 4: FrozenVec4}[len(name)]
            i = [self.x, self.y, self.z]
            try:
                return v(*(i[swz.index(ch)] for ch in name))
            except ValueError:
                raise AttributeError("FrozenVec3 '{}' swizzled with invalid attribute(s).".format(name))

    # region - - -- ----==<[ OTHER ]>==---- -- - -

    def hypot(self):
        # type: () -> float
        """Return the squared magnitude (was mis-annotated as None)."""
        return self.x ** 2 + self.y ** 2 + self.z ** 2

    def dot(self, other):
        # type: (Union[Vec3, FrozenVec3, Vec4, FrozenVec4]) -> float
        """Return the dot product with *other*."""
        return ((self.x * other.x) +
                (self.y * other.y) +
                (self.z * other.z))

    def cross(self, other):
        # type: (Union[Vec3, FrozenVec3, Vec4, FrozenVec4]) -> FrozenVec4
        """Return the 3D cross product as a FrozenVec4 with w set to 1.

        Fixed: the previous component formula was transposed, so
        ``v.cross(v)`` was not the zero vector. Standard right-handed formula.
        """
        return FrozenVec4(self.y * other.z - self.z * other.y,
                          self.z * other.x - self.x * other.z,
                          self.x * other.y - self.y * other.x,
                          1.)

    def length(self):
        # type: () -> float
        """Return the Euclidean magnitude."""
        return math.sqrt(self.hypot())

    def normalized(self):
        # type: () -> FrozenVec3
        """Return a unit-length copy; the zero vector yields (0, 0, 0)."""
        magnitude = self.length()
        if magnitude != 0.:
            return FrozenVec3(
                self.x / magnitude,
                self.y / magnitude,
                self.z / magnitude,
            )
        return FrozenVec3(0., 0., 0.)

    # endregion
class FrozenVec2(nt('FrozenVec2', 'x y'), Arithvector):
    """Immutable (namedtuple-based) 2-component vector with swizzle reads.

    Fixed: the namedtuple was declared with fields 'x y z' — three fields for
    a 2D vector — which made every two-argument construction (including
    ``normalized()``'s own ``FrozenVec2(...)`` calls) raise TypeError.
    """

    def __getattr__(self, name):
        """Swizzle read: e.g. ``v.yx``; u/v alias x/y."""
        if name[0] in 'xy':
            swz = 'xy'
        elif name[0] in 'uv':
            swz = 'uv'
        else:
            raise AttributeError("FrozenVec2 has no '{}' attribute.".format(name))
        if len(name) == 1:
            attr = {'x': 'x', 'y': 'y', 'u': 'x', 'v': 'y'}
            return getattr(self, attr[name])
        elif len(name) not in (2, 3, 4):
            raise AttributeError("Attribute swizzling is too long ({}).".format(len(name)))
        else:
            v = {2: FrozenVec2, 3: FrozenVec3, 4: FrozenVec4}[len(name)]
            i = [self.x, self.y]
            try:
                return v(*(i[swz.index(ch)] for ch in name))
            except ValueError:
                raise AttributeError("FrozenVec2 '{}' swizzled with invalid attribute(s).".format(name))

    # region - - -- ----==<[ OTHER ]>==---- -- - -

    def hypot(self):
        # type: () -> float
        """Return the squared magnitude x**2 + y**2."""
        return self.x ** 2 + self.y ** 2

    def dot(self, other):
        # type: (Vec2) -> float
        """Return the dot product with *other*."""
        return ((self.x * other.x) +
                (self.y * other.y))

    def cross(self, other):
        # type: (Vec2) -> float
        """Return the scalar 2D cross product (z of the implied 3D cross)."""
        return self.x * other.y - self.y * other.x

    def length(self):
        # type: () -> float
        """Return the Euclidean magnitude."""
        return math.sqrt(self.hypot())

    def normalized(self):
        # type: () -> FrozenVec2
        """Return a unit-length copy; the zero vector yields (0, 0)."""
        magnitude = self.length()
        if magnitude != 0.:
            return FrozenVec2(
                self.x / magnitude,
                self.y / magnitude,
            )
        return FrozenVec2(0., 0.)

    # endregion
| 33.26087 | 106 | 0.513145 | 4,099 | 34,425 | 4.183703 | 0.093437 | 0.01866 | 0.012595 | 0.017494 | 0.845647 | 0.835967 | 0.828445 | 0.811243 | 0.798589 | 0.774739 | 0 | 0.014286 | 0.349339 | 34,425 | 1,034 | 107 | 33.293037 | 0.751328 | 0.124793 | 0 | 0.753623 | 0 | 0.002635 | 0.069661 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.142292 | false | 0 | 0.006588 | 0.060606 | 0.361001 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
afdce320b934b8ae45aa52a5944dc1f9a6f22be7 | 148 | py | Python | customers/admin.py | omides248/cinema-django | 92748a95109fe23e64411558a15cfc98fa4f3c1c | [
"MIT"
] | null | null | null | customers/admin.py | omides248/cinema-django | 92748a95109fe23e64411558a15cfc98fa4f3c1c | [
"MIT"
] | null | null | null | customers/admin.py | omides248/cinema-django | 92748a95109fe23e64411558a15cfc98fa4f3c1c | [
"MIT"
] | null | null | null | from django.contrib import admin
from customers.models import Order
from .models import User
admin.site.register(User)
admin.site.register(Order)
| 18.5 | 34 | 0.817568 | 22 | 148 | 5.5 | 0.5 | 0.198347 | 0.214876 | 0.347107 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.108108 | 148 | 7 | 35 | 21.142857 | 0.916667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.6 | 0 | 0.6 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
afe0a0d071cc354cde0fb46bacd99ba6dd82b5cf | 2,555 | py | Python | torpido/wavelet/wavelets/bior6_8.py | AP-Atul/Torpido | a646b4d6de7f2e2c96de4c64ce3113f53e3931c2 | [
"Unlicense"
] | 21 | 2020-12-23T07:13:10.000Z | 2022-01-12T10:32:22.000Z | wavelet/wavelets/bior6_8.py | AP-Atul/wavelets-ext | 00ced22462c369584ebd32f9b5f357f092de0142 | [
"MIT"
] | 2 | 2020-12-30T10:45:42.000Z | 2021-09-25T09:52:00.000Z | wavelet/wavelets/bior6_8.py | AP-Atul/wavelets-ext | 00ced22462c369584ebd32f9b5f357f092de0142 | [
"MIT"
] | 1 | 2021-02-06T21:39:41.000Z | 2021-02-06T21:39:41.000Z | """ Biorthogonal 6.8 wavelet """
class Biorthogonal68:
    """
    Properties
    ----------
    near symmetric, not orthogonal, biorthogonal
    All values are from http://wavelets.pybytes.com/wavelet/bior6.8/
    """
    # Pure data container: four 18-tap filter banks for the bior6.8 wavelet.
    # Shorter effective filters are zero-padded to the common length of 18.
    __name__ = "Biorthogonal Wavelet 6.8"
    __motherWaveletLength__ = 18  # length of the mother wavelet
    __transformWaveletLength__ = 2  # minimum wavelength of input signal

    # decomposition filter
    # low-pass (analysis scaling coefficients; symmetric about the center tap)
    decompositionLowFilter = [
        0.0,
        0.0019088317364812906,
        -0.0019142861290887667,
        -0.016990639867602342,
        0.01193456527972926,
        0.04973290349094079,
        -0.07726317316720414,
        -0.09405920349573646,
        0.4207962846098268,
        0.8259229974584023,
        0.4207962846098268,
        -0.09405920349573646,
        -0.07726317316720414,
        0.04973290349094079,
        0.01193456527972926,
        -0.016990639867602342,
        -0.0019142861290887667,
        0.0019088317364812906,
    ]

    # high-pass (analysis wavelet coefficients; zero-padded at both ends)
    decompositionHighFilter = [
        0.0,
        0.0,
        0.0,
        0.014426282505624435,
        -0.014467504896790148,
        -0.07872200106262882,
        0.04036797903033992,
        0.41784910915027457,
        -0.7589077294536541,
        0.41784910915027457,
        0.04036797903033992,
        -0.07872200106262882,
        -0.014467504896790148,
        0.014426282505624435,
        0.0,
        0.0,
        0.0,
        0.0,
    ]

    # reconstruction filters
    # low pass (synthesis scaling; sign-alternated mirror of the analysis high-pass)
    reconstructionLowFilter = [
        0.0,
        0.0,
        0.0,
        0.014426282505624435,
        0.014467504896790148,
        -0.07872200106262882,
        -0.04036797903033992,
        0.41784910915027457,
        0.7589077294536541,
        0.41784910915027457,
        -0.04036797903033992,
        -0.07872200106262882,
        0.014467504896790148,
        0.014426282505624435,
        0.0,
        0.0,
        0.0,
        0.0,
    ]

    # high-pass (synthesis wavelet; sign-alternated mirror of the analysis low-pass)
    reconstructionHighFilter = [
        0.0,
        -0.0019088317364812906,
        -0.0019142861290887667,
        0.016990639867602342,
        0.01193456527972926,
        -0.04973290349094079,
        -0.07726317316720414,
        0.09405920349573646,
        0.4207962846098268,
        -0.8259229974584023,
        0.4207962846098268,
        0.09405920349573646,
        -0.07726317316720414,
        -0.04973290349094079,
        0.01193456527972926,
        0.016990639867602342,
        -0.0019142861290887667,
        -0.0019088317364812906,
    ]
| 24.333333 | 72 | 0.585519 | 202 | 2,555 | 7.346535 | 0.29703 | 0.040431 | 0.048518 | 0.048518 | 0.714286 | 0.714286 | 0.714286 | 0.714286 | 0.714286 | 0.714286 | 0 | 0.619075 | 0.322896 | 2,555 | 104 | 73 | 24.567308 | 0.238728 | 0.119765 | 0 | 0.809524 | 0 | 0 | 0.01087 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0.095238 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
aff38a96bd4a0f4e23c108d441c6df8c12586ecb | 12,001 | py | Python | pymyorm/connection.py | oldjun/PyMyORM | ac49910f21d3f3d3d4b3d75a0f998526963f0a2a | [
"MIT"
] | 1 | 2021-12-01T23:47:24.000Z | 2021-12-01T23:47:24.000Z | pymyorm/connection.py | oldjun/PyMyORM | ac49910f21d3f3d3d4b3d75a0f998526963f0a2a | [
"MIT"
] | null | null | null | pymyorm/connection.py | oldjun/PyMyORM | ac49910f21d3f3d3d4b3d75a0f998526963f0a2a | [
"MIT"
] | 2 | 2022-01-03T15:03:37.000Z | 2022-02-16T09:00:58.000Z | import pymysql
import time
import logging
from pymysql import cursors
from pymyorm.batch import Batch
class Connection(object):
def __init__(self, host, port, user, password, database, charset='utf8') -> None:
    """Store the MySQL connection settings; the socket is opened lazily by open()."""
    self.__config = dict(host=host, port=port, user=user,
                         password=password, database=database, charset=charset)
    self.__conn = None
    self.__debug = False
    self.__autocommit = True
    self.__ping = 3600  # seconds between liveness probes in ping()
    self.__last_ping_time = int(time.time())
def __del__(self):
    # Best-effort cleanup: release the socket when the object is collected.
    self.close()
def open(self, debug=False):
    """(Re)connect to MySQL using the stored config.

    Any existing connection is closed first. Rows are returned as dicts
    (DictCursor). Connection failures are logged (when debug) and re-raised.
    """
    self.__debug = debug
    self.close()
    try:
        if self.__debug:
            logging.info(str(self.__config))
        self.__autocommit = True
        self.__conn = pymysql.connect(**self.__config, cursorclass=cursors.DictCursor)
        if self.__debug:
            logging.info('mysql connect success')
    except Exception:
        if self.__debug:
            logging.error('mysql connect error')
        # Bare `raise` instead of `raise e`: re-raises the active exception
        # without re-binding it, preserving the original traceback cleanly.
        raise
def close(self):
    """Close the underlying connection if one is open (safe to call repeatedly)."""
    if self.__conn is None:
        return
    if self.__debug:
        logging.info('mysql connection closed')
    self.__conn.close()
    self.__conn = None
def set_ping(self, seconds):
    # Interval (in seconds) after which ping() actually probes the server.
    self.__ping = seconds
def ping(self):
    """Probe the server if more than the configured interval has elapsed.

    Probe errors are logged (debug mode) and otherwise ignored; the
    timestamp is refreshed after each attempted probe so the interval
    restarts. (Reconstructed control flow — indentation was lost in the
    source dump; the timestamp update is assumed to belong to the
    interval-elapsed branch, otherwise the probe could never fire.)
    """
    now = int(time.time())
    if now - self.__last_ping_time <= self.__ping:
        return
    try:
        if self.__debug:
            logging.info('conn ping')
        self.__conn.ping()
    except Exception as e:
        if self.__debug:
            logging.error(str(e))
    self.__last_ping_time = int(time.time())
def fetchone(self, sql):
    """Execute *sql* and return the first row as a dict, or None if no rows.

    On OperationalError the connection is reopened and None is returned.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        # `with` closes the cursor even if execute() raises (the original
        # leaked the cursor on any exception between cursor() and close()).
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            return cursor.fetchone()
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
        # NOTE(review): the error is swallowed and the caller sees "no row";
        # kept for backward compatibility.
        return None
def fetchall(self, sql):
    """Execute *sql* and return all rows as a tuple of dicts.

    On OperationalError the connection is reopened and None is returned.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        # `with` closes the cursor even if execute() raises (the original
        # leaked the cursor on any exception between cursor() and close()).
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            return cursor.fetchall()
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
        # NOTE(review): the error is swallowed; kept for backward compatibility.
        return None
def batch(self, sql):
    # Execute *sql* and hand the live cursor to a Batch wrapper for
    # incremental fetching. NOTE: the cursor is deliberately NOT closed
    # here — ownership transfers to the Batch object, which is expected
    # to consume and release it.
    try:
        if self.__debug:
            logging.info(f"batch sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        cursor = self.__conn.cursor()
        cursor.execute(sql)
        return Batch(cursor)
    except pymysql.OperationalError as e:
        # Connection dropped: log, reconnect, and implicitly return None.
        logging.error(str(e))
        self.open(self.__debug)
def insert(self, sql):
    """Execute an INSERT statement and return the cursor's lastrowid.

    Lazily reconnects when no connection exists.  On OperationalError the
    connection is re-opened and None is returned (original contract).
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            return cursor.lastrowid
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def insert_batch(self, sql, data):
    """Execute a parameterised INSERT for every row in *data* (executemany).

    :param sql: INSERT statement with placeholders.
    :param data: sequence of parameter tuples, one per row.
    :return: lastrowid after the batch (None on OperationalError).
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.executemany(sql, data)
            return cursor.lastrowid
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def execute(self, sql):
    """Execute *sql* and return the number of affected rows.

    Lazily reconnects when no connection exists.  On OperationalError the
    connection is re-opened and None is returned (original contract).
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            return cursor.execute(sql)
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def count(self, sql):
    """Run an aggregate query (e.g. SELECT COUNT(*)) and return its value.

    Returns the (last) column value of the first result row, or 0 when
    the query yields no row.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            row = cursor.fetchone()
        total = 0
        # BUGFIX: guard against an empty result set -- the old code raised
        # AttributeError on row.values() when fetchone() returned None.
        if row:
            for value in row.values():
                total = value
        return total
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def sum(self, sql):
    """Run a SELECT SUM(...) style query and return the aggregate value.

    Returns the (last) column value of the first result row, or 0 when
    the query yields no row.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            row = cursor.fetchone()
        total = 0
        # BUGFIX: guard against an empty result set (old code crashed on
        # None.values()).
        if row:
            for value in row.values():
                total = value
        return total
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def min(self, sql):
    """Run a SELECT MIN(...) style query and return the aggregate value.

    Returns the (last) column value of the first result row, or 0 when
    the query yields no row.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            row = cursor.fetchone()
        total = 0
        # BUGFIX: guard against an empty result set (old code crashed on
        # None.values()).
        if row:
            for value in row.values():
                total = value
        return total
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def max(self, sql):
    """Run a SELECT MAX(...) style query and return the aggregate value.

    Returns the (last) column value of the first result row, or 0 when
    the query yields no row.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            row = cursor.fetchone()
        total = 0
        # BUGFIX: guard against an empty result set (old code crashed on
        # None.values()).
        if row:
            for value in row.values():
                total = value
        return total
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def average(self, sql):
    """Run a SELECT AVG(...) style query and return the aggregate value.

    Returns the (last) column value of the first result row, or 0 when
    the query yields no row.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            row = cursor.fetchone()
        total = 0
        # BUGFIX: guard against an empty result set (old code crashed on
        # None.values()).
        if row:
            for value in row.values():
                total = value
        return total
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def exists(self, sql):
    """Run a counting query and report whether exactly one row matched.

    NOTE(review): this preserves the original ``total == 1`` test, so a
    COUNT(*) of 2+ matching rows yields False -- confirm that is the
    intended semantics before relying on it for existence checks.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            row = cursor.fetchone()
        total = 0
        # BUGFIX: guard against an empty result set (old code crashed on
        # None.values()).
        if row:
            for value in row.values():
                total = value
        return total == 1
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def column(self, sql):
    """Execute *sql* and return all rows.

    NOTE(review): despite the name, this returns full dict rows exactly
    like fetchall(), not a single column of values -- confirm callers
    expect that.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            return cursor.fetchall()
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def scalar(self, sql):
    """Execute *sql* and return the first row.

    NOTE(review): despite the name, this returns the whole first row as a
    dict (like fetchone()), not a single scalar value -- confirm callers
    expect that.
    """
    try:
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        self.__conn.autocommit(self.__autocommit)
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
            return cursor.fetchone()
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def begin(self):
    """Start an explicit transaction.

    Disables autocommit for subsequent statements until commit() or
    rollback() is called.  Lazily reconnects when no connection exists.
    """
    try:
        sql = "begin"
        if self.__debug:
            logging.info(f"sql: {sql}")
        if self.__conn is None:
            self.open(self.__debug)
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
        self.__autocommit = False
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def rollback(self):
    """Roll back the current transaction and restore autocommit.

    NOTE(review): unlike begin(), this assumes an open connection
    (``self.__conn`` not None) -- calling it without one raises
    AttributeError, as in the original code.
    """
    try:
        sql = "rollback"
        if self.__debug:
            logging.info(f"sql: {sql}")
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
        self.__autocommit = True
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def commit(self):
    """Commit the current transaction and restore autocommit.

    NOTE(review): unlike begin(), this assumes an open connection
    (``self.__conn`` not None) -- calling it without one raises
    AttributeError, as in the original code.
    """
    try:
        sql = "commit"
        if self.__debug:
            logging.info(f"sql: {sql}")
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
        self.__autocommit = True
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def savepoint(self, identifier):
    """Create a transaction savepoint named *identifier*.

    SECURITY NOTE(review): *identifier* is interpolated directly into the
    SQL string -- never pass untrusted input (SQL-injection risk).
    """
    try:
        sql = f"savepoint {identifier}"
        if self.__debug:
            logging.info(f"sql: {sql}")
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def rollback_savepoint(self, identifier):
    """Roll the transaction back to the savepoint named *identifier*.

    SECURITY NOTE(review): *identifier* is interpolated directly into the
    SQL string -- never pass untrusted input (SQL-injection risk).
    """
    try:
        sql = f"rollback to savepoint {identifier}"
        if self.__debug:
            logging.info(f"sql: {sql}")
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
def release_savepoint(self, identifier):
    """Release (discard) the savepoint named *identifier*.

    SECURITY NOTE(review): *identifier* is interpolated directly into the
    SQL string -- never pass untrusted input (SQL-injection risk).
    """
    try:
        sql = f"release savepoint {identifier}"
        if self.__debug:
            logging.info(f"sql: {sql}")
        # BUGFIX: context-managed cursor avoids a cursor leak on errors.
        with self.__conn.cursor() as cursor:
            cursor.execute(sql)
    except pymysql.OperationalError as e:
        logging.error(str(e))
        self.open(self.__debug)
| 32.174263 | 116 | 0.516624 | 1,263 | 12,001 | 4.655582 | 0.074426 | 0.097959 | 0.079592 | 0.10119 | 0.843707 | 0.838435 | 0.805442 | 0.796259 | 0.796259 | 0.776701 | 0 | 0.001627 | 0.385301 | 12,001 | 372 | 117 | 32.260753 | 0.795445 | 0 | 0 | 0.819767 | 0 | 0 | 0.032247 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.075581 | false | 0.005814 | 0.014535 | 0 | 0.133721 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
aff8d702abe33d85bdbe97ed9521897f29414b61 | 949 | py | Python | test/unit_tests/test_general.py | aasw0ng/thornode-telegram-bot | 5f73b882381548f45fc9e690c6e4845def9600b7 | [
"MIT"
] | 15 | 2020-04-21T07:51:26.000Z | 2021-11-02T05:45:48.000Z | test/unit_tests/test_general.py | aasw0ng/thornode-telegram-bot | 5f73b882381548f45fc9e690c6e4845def9600b7 | [
"MIT"
] | 78 | 2020-04-13T23:01:16.000Z | 2021-05-09T11:46:25.000Z | test/unit_tests/test_general.py | aasw0ng/thornode-telegram-bot | 5f73b882381548f45fc9e690c6e4845def9600b7 | [
"MIT"
] | 5 | 2020-09-03T21:19:16.000Z | 2021-11-20T00:17:56.000Z | import unittest
from datetime import timedelta
from service.utils import format_to_days_and_hours
class UnitTests(unittest.TestCase):
    """Unit tests for service.utils.format_to_days_and_hours."""

    def test_days_formatter(self):
        """Each timedelta must render as the expected human-readable string."""
        cases = [
            (timedelta(days=-1), '< 1 hour'),
            (timedelta(days=0), '< 1 hour'),
            (timedelta(days=1 / 24), '1 hour'),
            (timedelta(days=2 / 24), '2 hours'),
            (timedelta(days=6 / 24), '6 hours'),
            (timedelta(days=1), '1 day'),
            (timedelta(days=69), '69 days'),
            (timedelta(days=1 + 12 / 24), '1 day 12 hours'),
            (timedelta(days=1 + 1 / 24), '1 day 1 hour'),
        ]
        for delta, expected in cases:
            self.assertEqual(format_to_days_and_hours(delta), expected)
| 49.947368 | 97 | 0.728135 | 143 | 949 | 4.538462 | 0.202797 | 0.123267 | 0.1849 | 0.231125 | 0.753467 | 0.72265 | 0.72265 | 0.72265 | 0.72265 | 0.64869 | 0 | 0.044554 | 0.148577 | 949 | 18 | 98 | 52.722222 | 0.758663 | 0 | 0 | 0 | 0 | 0 | 0.077977 | 0 | 0 | 0 | 0 | 0 | 0.642857 | 1 | 0.071429 | false | 0 | 0.214286 | 0 | 0.357143 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
b346201e5670319261c4cfc2064bce68de3ecc52 | 10,684 | py | Python | contract_api/testcases/unit_testcases/test_registry.py | Vivek205/snet-marketplace-service | 2feaaddf6471c8495d3a0cfccb22d68be3b74998 | [
"MIT"
] | null | null | null | contract_api/testcases/unit_testcases/test_registry.py | Vivek205/snet-marketplace-service | 2feaaddf6471c8495d3a0cfccb22d68be3b74998 | [
"MIT"
] | null | null | null | contract_api/testcases/unit_testcases/test_registry.py | Vivek205/snet-marketplace-service | 2feaaddf6471c8495d3a0cfccb22d68be3b74998 | [
"MIT"
] | null | null | null | from unittest import TestCase
from common.repository import Repository
from contract_api.config import NETWORK_ID, NETWORKS
from contract_api.registry import Registry
db = Repository(net_id=NETWORK_ID, NETWORKS=NETWORKS)
class TestRegistry(TestCase):
    """Integration tests for contract_api.registry.Registry.

    These tests talk to the MySQL database configured through
    common.repository.Repository (module-level ``db``): each test seeds the
    rows it needs, and tearDown()/clear_dependencies() remove them again.
    """

    def test_curation(self):
        """curate_service() must flip service.is_curated both on and off."""
        registry = Registry(obj_repo=db)
        insert_service_query = """INSERT INTO service (org_id,service_id,service_path,ipfs_hash,is_curated,service_email
        ,row_created,row_updated) VALUES ('snet','freecall',NULL,'QmQtm73kmKhv6mKTkn7qW3uMPtgK6c5Qytb11sCxY98s5j',0,
        NULL,'2019-08-23 07:00:31','2020-03-18 13:07:55');"""
        db.execute(insert_service_query)
        registry.curate_service('snet', 'freecall', True)
        service_details = db.execute("SELECT is_curated FROM service where service_id=%s and org_id=%s",
                                     ['freecall', 'snet'])
        if len(service_details) > 0:
            assert service_details[0]['is_curated'] == 1
        else:
            assert False
        registry.curate_service('snet', 'freecall', False)
        service_details = db.execute("SELECT is_curated FROM service where service_id=%s and org_id=%s",
                                     ['freecall', 'snet'])
        if len(service_details) > 0:
            assert service_details[0]['is_curated'] == 0
        else:
            assert False

    def tearDown(self):
        # Remove any service rows a test inserted.
        db.execute("DELETE FROM service WHERE 1")

    def test_get_service_data_by_org_id_and_service_id_with_media(self):
        """Full service payload is returned, including its media records."""
        registry = Registry(obj_repo=db)
        self.clear_dependencies()
        insert_organization_query = """INSERT INTO organization
        (row_id ,org_id, organization_name, owner_address, org_metadata_uri, org_email, org_assets_url, row_created, row_updated, description, assets_hash, contacts)
        VALUES(10,'snet', 'gene-annotation-service', 'owner_add', 'uri', 'email', '{"url":"google.com"}', '2021-01-08 05:48:26', '2021-01-08 05:48:26', 'description', '{}','{}');"""
        db.execute(insert_organization_query)
        insert_service_query = """INSERT INTO service
        (row_id ,org_id, service_id, service_path, ipfs_hash, is_curated, service_email, row_created, row_updated)
        VALUES(10,'snet', 'gene-annotation-service', 'service_path', 'QmdGjaVYPMSGpC1qT3LDALSNCCu7JPf7j51H1GQirvQJYf', 1, 'email', '2021-01-08 05:48:26', '2021-01-08 05:48:26');"""
        db.execute(insert_service_query)
        insert_metadata_query = """INSERT INTO service_metadata
        (row_id ,service_row_id, org_id, service_id, display_name, description, short_description, url, json, model_ipfs_hash, encoding, `type`, mpe_address, assets_url, assets_hash, service_rating, ranking, contributors, row_created, row_updated)
        VALUES(10,10, 'snet', 'gene-annotation-service', 'Annotation Service', 'Use this service to annotate a humane genome with uniform terms, Reactome pathway memberships, and BioGrid protein interactions.', 'short description', 'https://mozi-ai.github.io/annotation-service/', '{"name":"John", "age":31, "city":"New York"}', 'QmXqonxB9EvNBe11J8oCYXMQAtPKAb2x8CyFLmQpkvVaLf', 'proto', 'grpc', '0x8FB1dC8df86b388C7e00689d1eCb533A160B4D0C','{"hero_image": "https://test-s3-push"}', '{"hero_image": "QmVcE6fEDP764ibadXTjZHk251Lmt5xAxdc4P9mPA4kksk/hero_gene-annotation-2b.png"}','{"rating": 0.0, "total_users_rated": 0}', 1, '[{"name": "dummy dummy", "email_id": "dummy@dummy.io"}]', '2021-01-08 05:48:26', '2021-01-08 05:48:26')"""
        db.execute(insert_metadata_query)
        insert_service_media = """INSERT INTO service_media
        (row_id,org_id, service_id, url, `order`, file_type, asset_type, alt_text, created_on, updated_on, ipfs_url, service_row_id)
        VALUES(10,'snet', 'gene-annotation-service', 'https://test-s3-push', 5, 'text', 'hero_image','data is missing', '2021-01-08 13:31:50', '2021-01-08 13:31:50', 'Qmbb7tmKZX2TSxDKsK6DEAbp3tPgNUYP11CC93Cft7EkFb/hero_fbprophet_forecast1', 10);"""
        db.execute(insert_service_media)
        response = registry.get_service_data_by_org_id_and_service_id('snet', 'gene-annotation-service')
        assert response == {'service_row_id': 10,
                            'org_id': 'snet',
                            'service_id': 'gene-annotation-service',
                            'display_name': 'Annotation Service',
                            'description': 'Use this service to annotate a humane genome with uniform terms, Reactome pathway memberships, and BioGrid protein interactions.',
                            'short_description': 'short description',
                            'url': 'https://mozi-ai.github.io/annotation-service/',
                            'json': '{"name":"John", "age":31, "city":"New York"}',
                            'model_ipfs_hash': 'QmXqonxB9EvNBe11J8oCYXMQAtPKAb2x8CyFLmQpkvVaLf',
                            'encoding': 'proto', 'type': 'grpc', 'mpe_address': '0x8FB1dC8df86b388C7e00689d1eCb533A160B4D0C',
                            'service_rating': {'rating': 0.0, 'total_users_rated': 0},
                            'ranking': 1,
                            'contributors': [{'name': 'dummy dummy', 'email_id': 'dummy@dummy.io'}],
                            'service_path': 'service_path', 'ipfs_hash': 'QmdGjaVYPMSGpC1qT3LDALSNCCu7JPf7j51H1GQirvQJYf',
                            'is_curated': 1,
                            'service_email': 'email',
                            'organization_name': 'gene-annotation-service',
                            'owner_address': 'owner_add',
                            'org_metadata_uri': 'uri',
                            'org_email': 'email',
                            'org_assets_url': {'url': 'google.com'},
                            'org_description': 'description',
                            'contacts': {},
                            'is_available': 0,
                            'groups': [],
                            'tags': [],
                            'media': [{'row_id': 10, 'url': 'https://test-s3-push', 'file_type': 'text', 'order': 5, 'alt_text': 'data is missing', "asset_type": "hero_image"}]
                            }

    def test_get_service_data_by_org_id_and_service_id_without_media(self):
        """Same payload as above, but with an empty media list when no
        service_media rows exist."""
        registry = Registry(obj_repo=db)
        self.clear_dependencies()
        insert_organization_query = """INSERT INTO organization
        (row_id ,org_id, organization_name, owner_address, org_metadata_uri, org_email, org_assets_url, row_created, row_updated, description, assets_hash, contacts)
        VALUES(10,'snet', 'gene-annotation-service', 'owner_add', 'uri', 'email', '{"url":"google.com"}', '2021-01-08 05:48:26', '2021-01-08 05:48:26', 'description', '{}','{}');"""
        db.execute(insert_organization_query)
        insert_service_query = """INSERT INTO service
        (row_id ,org_id, service_id, service_path, ipfs_hash, is_curated, service_email, row_created, row_updated)
        VALUES(10,'snet', 'gene-annotation-service', 'service_path', 'QmdGjaVYPMSGpC1qT3LDALSNCCu7JPf7j51H1GQirvQJYf', 1, 'email', '2021-01-08 05:48:26', '2021-01-08 05:48:26');"""
        db.execute(insert_service_query)
        insert_metadata_query = """INSERT INTO service_metadata
        (row_id ,service_row_id, org_id, service_id, display_name, description, short_description, url, json, model_ipfs_hash, encoding, `type`, mpe_address, assets_url, assets_hash, service_rating, ranking, contributors, row_created, row_updated)
        VALUES(10,10, 'snet', 'gene-annotation-service', 'Annotation Service', 'Use this service to annotate a humane genome with uniform terms, Reactome pathway memberships, and BioGrid protein interactions.', 'short description', 'https://mozi-ai.github.io/annotation-service/', '{"name":"John", "age":31, "city":"New York"}', 'QmXqonxB9EvNBe11J8oCYXMQAtPKAb2x8CyFLmQpkvVaLf', 'proto', 'grpc', '0x8FB1dC8df86b388C7e00689d1eCb533A160B4D0C','{"hero_image": "https://test-s3-push"}', '{"hero_image": "QmVcE6fEDP764ibadXTjZHk251Lmt5xAxdc4P9mPA4kksk/hero_gene-annotation-2b.png"}','{"rating": 0.0, "total_users_rated": 0}', 1, '[{"name": "dummy dummy", "email_id": "dummy@dummy.io"}]', '2021-01-08 05:48:26', '2021-01-08 05:48:26')"""
        db.execute(insert_metadata_query)
        response = registry.get_service_data_by_org_id_and_service_id('snet', 'gene-annotation-service')
        assert response == {'service_row_id': 10,
                            'org_id': 'snet',
                            'service_id': 'gene-annotation-service',
                            'display_name': 'Annotation Service',
                            'description': 'Use this service to annotate a humane genome with uniform terms, Reactome pathway memberships, and BioGrid protein interactions.',
                            'short_description': 'short description',
                            'url': 'https://mozi-ai.github.io/annotation-service/',
                            'json': '{"name":"John", "age":31, "city":"New York"}',
                            'model_ipfs_hash': 'QmXqonxB9EvNBe11J8oCYXMQAtPKAb2x8CyFLmQpkvVaLf',
                            'encoding': 'proto', 'type': 'grpc', 'mpe_address': '0x8FB1dC8df86b388C7e00689d1eCb533A160B4D0C',
                            'service_rating': {'rating': 0.0, 'total_users_rated': 0},
                            'ranking': 1,
                            'contributors': [{'name': 'dummy dummy', 'email_id': 'dummy@dummy.io'}],
                            'service_path': 'service_path', 'ipfs_hash': 'QmdGjaVYPMSGpC1qT3LDALSNCCu7JPf7j51H1GQirvQJYf',
                            'is_curated': 1,
                            'service_email': 'email',
                            'organization_name': 'gene-annotation-service',
                            'owner_address': 'owner_add',
                            'org_metadata_uri': 'uri',
                            'org_email': 'email',
                            'org_assets_url': {'url': 'google.com'},
                            'org_description': 'description',
                            'contacts': {},
                            'is_available': 0,
                            'groups': [],
                            'tags': [],
                            'media': []
                            }

    def clear_dependencies(self):
        """Purge every table this test class writes to.

        BUGFIX: the original issued "DELETE FROM service" twice; the
        duplicate statement was removed (same end state, one fewer query).
        """
        db.execute("DELETE FROM service WHERE 1")
        db.execute("DELETE FROM organization WHERE 1")
        db.execute("DELETE FROM service_metadata WHERE 1")
        db.execute("DELETE FROM service_media WHERE 1")
| 72.680272 | 731 | 0.603051 | 1,149 | 10,684 | 5.369017 | 0.144473 | 0.05787 | 0.018155 | 0.019452 | 0.876317 | 0.861242 | 0.837899 | 0.830605 | 0.824121 | 0.824121 | 0 | 0.062571 | 0.262542 | 10,684 | 146 | 732 | 73.178082 | 0.720396 | 0 | 0 | 0.769841 | 0 | 0.119048 | 0.590416 | 0.130195 | 0 | 0 | 0.015724 | 0 | 0.047619 | 1 | 0.039683 | false | 0 | 0.031746 | 0 | 0.079365 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2fa57a1e46365a03c01fc80bf716b211eb738c45 | 155 | py | Python | MLpkgMA/__init__.py | DahitoMA/Coding_challenge | e080e61a26f09f9b80420b9483b09ca08ee166f9 | [
"MIT"
] | null | null | null | MLpkgMA/__init__.py | DahitoMA/Coding_challenge | e080e61a26f09f9b80420b9483b09ca08ee166f9 | [
"MIT"
] | null | null | null | MLpkgMA/__init__.py | DahitoMA/Coding_challenge | e080e61a26f09f9b80420b9483b09ca08ee166f9 | [
"MIT"
] | null | null | null | from MLpkgMA import Interface
from MLpkgMA import ConcreteML_Class
from MLpkgMA import ComprehensibilityAnalysis
from MLpkgMA import FunctionalityAnalysis
| 31 | 45 | 0.896774 | 17 | 155 | 8.117647 | 0.470588 | 0.318841 | 0.492754 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.103226 | 155 | 4 | 46 | 38.75 | 0.992806 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
643a5815dc40b8ca2df86cb5a3a12db102673cc2 | 10,499 | py | Python | swagger_client/api/registration_api.py | chbndrhnns/ahoi-client | 8bd25f541c05af17c82904fa250272514b7971f2 | [
"MIT"
] | null | null | null | swagger_client/api/registration_api.py | chbndrhnns/ahoi-client | 8bd25f541c05af17c82904fa250272514b7971f2 | [
"MIT"
] | null | null | null | swagger_client/api/registration_api.py | chbndrhnns/ahoi-client | 8bd25f541c05af17c82904fa250272514b7971f2 | [
"MIT"
] | null | null | null | # coding: utf-8
"""
[AHOI cookbook](/ahoi/docs/cookbook/index.html) [Data Privacy](/sandboxmanager/#/privacy) [Terms of Service](/sandboxmanager/#/terms) [Imprint](https://sparkassen-hub.com/impressum/) © 2016‐2017 Starfinanz - Ein Unternehmen der Finanz Informatik # noqa: E501
OpenAPI spec version: 2.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from swagger_client.api_client import ApiClient
class RegistrationApi(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    Ref: https://github.com/swagger-api/swagger-codegen
    """
    # NOTE(review): this generated client uses ``async`` as a keyword-argument
    # name (e.g. ``async=params.get('async')`` below).  ``async`` is a reserved
    # word from Python 3.7 onward, which makes this module a SyntaxError there;
    # regenerating with a codegen version that emits ``async_req`` is needed to
    # run on modern Python.  Left untouched here because the generated
    # ``api_client`` expects this exact parameter name.

    def __init__(self, api_client=None):
        # Fall back to a default-configured ApiClient when none is injected.
        if api_client is None:
            api_client = ApiClient()
        self.api_client = api_client

    def get_json_web_key(self, **kwargs):  # noqa: E501
        """Request API jwk public key  # noqa: E501

        A valid API public key will be returned in JWK format to be used to encrypt registration data  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_json_web_key(async=True)
        >>> result = thread.get()

        :param async bool
        :return: JwkJwkPublicKey
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.get_json_web_key_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_json_web_key_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_json_web_key_with_http_info(self, **kwargs):  # noqa: E501
        """Request API jwk public key  # noqa: E501

        A valid API public key will be returned in JWK format to be used to encrypt registration data  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_json_web_key_with_http_info(async=True)
        >>> result = thread.get()

        :param async bool
        :return: JwkJwkPublicKey
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Reject any keyword argument this endpoint does not declare.
        all_params = []  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_json_web_key" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        return self.api_client.call_api(
            '/registration/jwk', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='JwkJwkPublicKey',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def get_registration_public_key(self, **kwargs):  # noqa: E501
        """Request API public key  # noqa: E501

        A valid API public key will be returned to be used to encrypt registration data  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_registration_public_key(async=True)
        >>> result = thread.get()

        :param async bool
        :return: RegistrationPublicKey
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.get_registration_public_key_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.get_registration_public_key_with_http_info(**kwargs)  # noqa: E501
            return data

    def get_registration_public_key_with_http_info(self, **kwargs):  # noqa: E501
        """Request API public key  # noqa: E501

        A valid API public key will be returned to be used to encrypt registration data  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.get_registration_public_key_with_http_info(async=True)
        >>> result = thread.get()

        :param async bool
        :return: RegistrationPublicKey
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Reject any keyword argument this endpoint does not declare.
        all_params = []  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_registration_public_key" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        return self.api_client.call_api(
            '/registration/keys', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='RegistrationPublicKey',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)

    def register(self, **kwargs):  # noqa: E501
        """Client registration  # noqa: E501

        Registers an authenticated and authorized specific client  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.register(async=True)
        >>> result = thread.get()

        :param async bool
        :return: RegistrationResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        kwargs['_return_http_data_only'] = True
        if kwargs.get('async'):
            return self.register_with_http_info(**kwargs)  # noqa: E501
        else:
            (data) = self.register_with_http_info(**kwargs)  # noqa: E501
            return data

    def register_with_http_info(self, **kwargs):  # noqa: E501
        """Client registration  # noqa: E501

        Registers an authenticated and authorized specific client  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async=True
        >>> thread = api.register_with_http_info(async=True)
        >>> result = thread.get()

        :param async bool
        :return: RegistrationResponse
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Reject any keyword argument this endpoint does not declare.
        all_params = []  # noqa: E501
        all_params.append('async')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method register" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['oauth2']  # noqa: E501

        return self.api_client.call_api(
            '/registration', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='RegistrationResponse',  # noqa: E501
            auth_settings=auth_settings,
            async=params.get('async'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
| 35.469595 | 277 | 0.607391 | 1,184 | 10,499 | 5.149493 | 0.14527 | 0.048548 | 0.027555 | 0.035427 | 0.88601 | 0.880433 | 0.880433 | 0.860915 | 0.858291 | 0.845662 | 0 | 0.01797 | 0.305648 | 10,499 | 295 | 278 | 35.589831 | 0.818381 | 0.044195 | 0 | 0.78 | 0 | 0 | 0.141717 | 0.040072 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.026667 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
643fa7e720e8db23342e04871c5eb665409bb2d8 | 45 | py | Python | i2c/py-arg.py | newdreamlj/rpi-sensor-node | c7ca84a13b2333b76e4eada39f952cb9bcf0d35a | [
"Apache-2.0"
] | null | null | null | i2c/py-arg.py | newdreamlj/rpi-sensor-node | c7ca84a13b2333b76e4eada39f952cb9bcf0d35a | [
"Apache-2.0"
] | null | null | null | i2c/py-arg.py | newdreamlj/rpi-sensor-node | c7ca84a13b2333b76e4eada39f952cb9bcf0d35a | [
"Apache-2.0"
] | null | null | null | import sys
# BUGFIX: the original Python-2 ``print`` statements are a SyntaxError on
# Python 3.  Parenthesised single-argument print() produces identical output
# on Python 2 and is valid on Python 3.
print(sys.argv)       # full argument vector, e.g. ['py-arg.py', ...]
print(sys.argv[0])    # script path only
| 9 | 17 | 0.755556 | 9 | 45 | 3.777778 | 0.555556 | 0.470588 | 0.705882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.026316 | 0.155556 | 45 | 4 | 18 | 11.25 | 0.868421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.333333 | null | null | 0.666667 | 1 | 1 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 8 |
ff930d48143a88c965761adee1544bf0a099e397 | 147 | py | Python | archive/nexus-api-v2/Mongo/URLs/Interfaces/__init__.py | cloud-hybrid/delta | 402b00ed5aaa32ccef628361e9635879b7ace44f | [
"BSD-3-Clause"
] | null | null | null | archive/nexus-api-v2/Mongo/URLs/Interfaces/__init__.py | cloud-hybrid/delta | 402b00ed5aaa32ccef628361e9635879b7ace44f | [
"BSD-3-Clause"
] | null | null | null | archive/nexus-api-v2/Mongo/URLs/Interfaces/__init__.py | cloud-hybrid/delta | 402b00ed5aaa32ccef628361e9635879b7ace44f | [
"BSD-3-Clause"
] | 1 | 2022-01-03T05:33:15.000Z | 2022-01-03T05:33:15.000Z | from Mongo import *
from Mongo.URLs import *
from Mongo.URLs.Imports import *
from API.ASGI.Authentication.Context import Interface as Authorizer
| 24.5 | 67 | 0.809524 | 21 | 147 | 5.666667 | 0.571429 | 0.226891 | 0.252101 | 0.319328 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.129252 | 147 | 5 | 68 | 29.4 | 0.929688 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
ff94f289c4dfbbecd2284a02ad5a0a1383f1e8dd | 116 | py | Python | src/react-material-backend/routes/checkstatus.py | JyotiSunkara/Energy-Monitoring-And-Control | efba4ac611e7054b78492ccf5e758a81621c8d6d | [
"MIT"
] | 1 | 2020-06-27T03:25:11.000Z | 2020-06-27T03:25:11.000Z | src/react-material-backend/routes/checkstatus.py | JyotiSunkara/Energy-Monitoring-And-Control | efba4ac611e7054b78492ccf5e758a81621c8d6d | [
"MIT"
] | null | null | null | src/react-material-backend/routes/checkstatus.py | JyotiSunkara/Energy-Monitoring-And-Control | efba4ac611e7054b78492ccf5e758a81621c8d6d | [
"MIT"
] | null | null | null | import os
os.system("sshpass -p 'mouni1995' ssh 'user@10.2.24.157' 'cd pybacnet;cd tools;python checkstatus.py' ")
| 29 | 104 | 0.724138 | 20 | 116 | 4.2 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.116505 | 0.112069 | 116 | 3 | 105 | 38.666667 | 0.699029 | 0 | 0 | 0 | 0 | 0.5 | 0.791304 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 0.5 | 0 | 0.5 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 7 |
ffa7c39eb6c463d744d2ae4302b5efaa9235c20d | 110,965 | py | Python | cgi/full_pwpw_gexp.py | jakelin212/jakelin212.github.io | 955ea621a9ed876fa85ab779065ec9430888f7ba | [
"MIT"
] | null | null | null | cgi/full_pwpw_gexp.py | jakelin212/jakelin212.github.io | 955ea621a9ed876fa85ab779065ec9430888f7ba | [
"MIT"
] | null | null | null | cgi/full_pwpw_gexp.py | jakelin212/jakelin212.github.io | 955ea621a9ed876fa85ab779065ec9430888f7ba | [
"MIT"
] | null | null | null | full_pwpw_gexp={'cancermap_cluster_27_vs_28': 1, 'cancermap_cluster_27_vs_29': 5, 'cancermap_cluster_7_vs_49': 6, 'cancermap_cluster_3_and_28': 3, 'cancermap_cluster_36_and_39': 4, 'annotated_class_na_vs_Erythroid': 2, 'annotated_class_NonCancer_and_Prolif_Myelo': 2, 'cancermap_cluster_3_and_29': 1, 'annotated_class_TCL_and_LP': 0, 'cancermap_cluster_36_and_37': 7, 'cancermap_cluster_18_and_19': 6, 'annotated_class_vs_cancermap_cluster_pre-B-ALL_vs_CLL': 7, 'annotated_class_vs_cancermap_cluster_Myeloid_vs_AML': 2, 'annotated_class_LP_and_Erythroid': 0, 'annotated_class_CLL': 9, 'cancermap_cluster_2_and_7': 3, 'cancermap_cluster_36_vs_37': 3, 'annotated_class_BCL_and_MM': 1, 'cancermap_cluster_30_vs_50': 3, 'cancermap_cluster_36_vs_39': 9, 'cancermap_cluster_36_vs_38': 9, 'cancermap_cluster_16_and_50': 4, 'cancermap_cluster_20_and_28': 3, 'cancermap_cluster_20_and_29': 2, 'annotated_class_vs_cancermap_cluster_T-ALL_vs_pre-B-ALL': 9, 'annotated_class_Cancer_Lymphoma_vs_Cancer_Myeloma': 6, 'cancermap_cluster_28_and_50': 4, 'cancermap_cluster_20_and_22': 6, 'cancermap_cluster_20_and_23': 0, 'annotated_class_T-ALL_vs_CLL': 0, 'cancermap_cluster_25_vs_31': 9, 'cancermap_cluster_25_vs_30': 3, 'cancermap_cluster_25_vs_33': 2, 'cancermap_cluster_25_vs_32': 0, 'cancermap_cluster_25_vs_35': 8, 'cancermap_cluster_25_vs_34': 3, 'cancermap_cluster_25_vs_37': 1, 'cancermap_cluster_25_vs_36': 3, 'cancermap_cluster_25_vs_39': 1, 'cancermap_cluster_25_vs_38': 4, 'annotated_class_vs_cancermap_cluster_AML_vs_MM': 3, 'cancermap_cluster_1_vs_50': 5, 'cancermap_cluster_2_and_27': 7, 'cancermap_cluster_2_and_26': 0, 'cancermap_cluster_2_and_25': 7, 'cancermap_cluster_2_and_24': 2, 'cancermap_cluster_23_vs_28': 3, 'cancermap_cluster_23_vs_29': 4, 'cancermap_cluster_31_vs_38': 2, 'cancermap_cluster_31_vs_39': 7, 'cancermap_cluster_31_vs_36': 8, 'cancermap_cluster_31_vs_37': 5, 'cancermap_cluster_31_vs_34': 3, 'cancermap_cluster_31_vs_35': 1, 
'cancermap_cluster_31_vs_32': 8, 'cancermap_cluster_31_vs_33': 3, 'cancermap_cluster_2_and_29': 3, 'cancermap_cluster_2_and_28': 5, 'annotated_class_BCL_and_Myeloid': 7, 'cancermap_cluster_19_and_50': 1, 'cancermap_cluster_23_and_45': 4, 'cancermap_cluster_23_and_44': 7, 'cancermap_cluster_23_and_47': 6, 'cancermap_cluster_23_and_46': 4, 'cancermap_cluster_23_and_41': 9, 'cancermap_cluster_23_and_40': 2, 'cancermap_cluster_23_and_43': 5, 'cancermap_cluster_23_and_42': 7, 'annotated_class_CellLine_and_Erythroid': 9, 'cancermap_cluster_23_and_49': 3, 'cancermap_cluster_23_and_48': 0, 'cancermap_cluster_12_vs_26': 9, 'cancermap_cluster_7_vs_18': 3, 'cancermap_cluster_7_vs_19': 1, 'cancermap_cluster_7_vs_14': 2, 'cancermap_cluster_7_vs_15': 3, 'cancermap_cluster_7_vs_16': 6, 'cancermap_cluster_7_vs_17': 1, 'cancermap_cluster_7_vs_10': 9, 'cancermap_cluster_7_vs_11': 1, 'cancermap_cluster_7_vs_12': 6, 'cancermap_cluster_7_vs_13': 4, 'cancermap_cluster_12_vs_27': 5, 'cancermap_cluster_11_vs_49': 6, 'cancermap_cluster_11_vs_48': 8, 'cancermap_cluster_BCL_and_CellLine': 8, 'cancermap_cluster_11_vs_40': 1, 'cancermap_cluster_11_vs_43': 8, 'cancermap_cluster_11_vs_42': 9, 'cancermap_cluster_11_vs_45': 0, 'cancermap_cluster_11_vs_44': 0, 'cancermap_cluster_11_vs_47': 8, 'cancermap_cluster_11_vs_46': 2, 'cancermap_cluster_36_and_48': 1, 'cancermap_cluster_26_vs_49': 0, 'cancermap_cluster_26_vs_48': 5, 'cancermap_cluster_26_vs_43': 4, 'cancermap_cluster_26_vs_42': 8, 'cancermap_cluster_26_vs_41': 7, 'cancermap_cluster_26_vs_40': 5, 'cancermap_cluster_26_vs_47': 0, 'cancermap_cluster_26_vs_46': 7, 'cancermap_cluster_26_vs_45': 3, 'cancermap_cluster_26_vs_44': 2, 'cancermap_cluster_35_vs_47': 2, 'cancermap_cluster_35_vs_46': 4, 'cancermap_cluster_35_vs_45': 6, 'cancermap_cluster_35_vs_44': 5, 'cancermap_cluster_35_vs_43': 8, 'cancermap_cluster_35_vs_42': 0, 'cancermap_cluster_35_vs_41': 7, 'cancermap_cluster_35_vs_40': 7, 'cancermap_cluster_35_vs_49': 9, 
'cancermap_cluster_35_vs_48': 7, 'cancermap_cluster_Myeloid_and_T-ALL': 8, 'cancermap_cluster_44_vs_50': 6, 'annotated_class_T-Lymphoid_and_CellLine': 4, 'cancermap_cluster_10_vs_38': 3, 'annotated_class_CellLine_Leukemia_vs_CellLine_Myeloma': 1, 'cancermap_cluster_25_and_38': 0, 'cancermap_cluster_25_and_39': 8, 'cancermap_cluster_14_vs_39': 3, 'cancermap_cluster_14_vs_38': 4, 'cancermap_cluster_22_and_48': 8, 'cancermap_cluster_22_and_49': 8, 'cancermap_cluster_25_and_32': 6, 'cancermap_cluster_35_and_50': 0, 'cancermap_cluster_25_and_30': 4, 'cancermap_cluster_36_and_44': 7, 'cancermap_cluster_25_and_36': 3, 'cancermap_cluster_25_and_37': 7, 'cancermap_cluster_25_and_34': 0, 'cancermap_cluster_25_and_35': 2, 'annotated_class_TCL_and_Erythroid': 7, 'cancermap_cluster_13_and_28': 5, 'cancermap_cluster_13_and_29': 2, 'cancermap_cluster_Cancer_Myeloma_and_CellLine_Myeloma': 1, 'cancermap_cluster_13_and_25': 9, 'cancermap_cluster_13_and_26': 2, 'cancermap_cluster_13_and_27': 5, 'cancermap_cluster_13_and_20': 4, 'cancermap_cluster_13_and_21': 0, 'cancermap_cluster_13_and_22': 7, 'cancermap_cluster_13_and_23': 2, 'cancermap_cluster_10_vs_32': 9, 'annotated_class_na_vs_T-ALL': 6, 'annotated_class_Myeloid_and_Erythroid': 4, 'annotated_class_Myeloid_vs_StemCell': 2, 'cancermap_cluster_10_vs_34': 2, 'cancermap_cluster_2_vs_24': 4, 'cancermap_cluster_2_vs_25': 2, 'cancermap_cluster_2_vs_26': 4, 'cancermap_cluster_2_vs_27': 1, 'cancermap_cluster_2_vs_20': 0, 'cancermap_cluster_2_vs_21': 4, 'cancermap_cluster_2_vs_22': 9, 'cancermap_cluster_2_vs_23': 6, 'annotated_class_T-Lymphoid_vs_CLL': 5, 'cancermap_cluster_2_vs_28': 6, 'cancermap_cluster_2_vs_29': 5, 'cancermap_cluster_41_and_45': 1, 'cancermap_cluster_41_and_44': 1, 'cancermap_cluster_29_and_41': 6, 'cancermap_cluster_29_and_40': 9, 'cancermap_cluster_29_and_47': 7, 'cancermap_cluster_29_and_46': 5, 'cancermap_cluster_29_and_45': 1, 'cancermap_cluster_41_and_42': 9, 'cancermap_cluster_17_vs_21': 6, 
'cancermap_cluster_17_vs_20': 8, 'cancermap_cluster_29_and_49': 1, 'cancermap_cluster_29_and_48': 0, 'cancermap_cluster_41_and_49': 1, 'cancermap_cluster_41_and_48': 2, 'cancermap_cluster_17_vs_27': 7, 'cancermap_cluster_17_vs_26': 2, 'cancermap_cluster_24_and_40': 6, 'cancermap_cluster_24_and_41': 4, 'cancermap_cluster_24_and_42': 0, 'cancermap_cluster_24_and_43': 0, 'cancermap_cluster_24_and_44': 8, 'cancermap_cluster_24_and_45': 9, 'cancermap_cluster_24_and_46': 9, 'cancermap_cluster_24_and_47': 7, 'cancermap_cluster_24_and_48': 2, 'cancermap_cluster_24_and_49': 4, 'annotated_class_CellLine_Lymphoma_and_Prolif_Myelo': 7, 'annotated_class_vs_cancermap_cluster_T-Lymphoid_vs_BCL': 6, 'cancermap_cluster_NonCancer_and_other': 6, 'annotated_class_Cancer_Leukemia': 8, 'cancermap_cluster_MM': 5, 'annotated_class_B-Lymphoid_vs_CLL': 5, 'annotated_class_BCL_and_T-ALL': 7, 'annotated_class_NonCancer_vs_acute_leukemias': 0, 'annotated_class_CellLine_and_Lymphoid': 6, 'annotated_class_B-Lymphoid': 0, 'cancermap_cluster_23_and_25': 8, 'annotated_class_BCL_vs_StemCell': 7, 'cancermap_cluster_12_and_30': 0, 'cancermap_cluster_14_and_18': 3, 'cancermap_cluster_14_and_19': 9, 'annotated_class_Cancer_Leukemia_and_other': 1, 'cancermap_cluster_12_and_31': 0, 'cancermap_cluster_14_and_15': 6, 'cancermap_cluster_14_and_16': 6, 'cancermap_cluster_14_and_17': 8, 'annotated_class_TCL_and_pre-B-ALL': 2, 'cancermap_cluster_12_and_32': 5, 'annotated_class_CellLine_Lymphoma_vs_other': 2, 'cancermap_cluster_4_and_8': 6, 'cancermap_cluster_4_and_9': 5, 'cancermap_cluster_12_and_34': 2, 'cancermap_cluster_4_and_6': 5, 'cancermap_cluster_4_and_7': 2, 'cancermap_cluster_4_and_5': 7, 'annotated_class_TCL_and_StemCell': 9, 'annotated_class_MM_vs_LP': 7, 'cancermap_cluster_12_and_36': 5, 'annotated_class_Cancer_Lymphoma_and_Prolif_Lympho': 3, 'annotated_class_vs_cancermap_cluster_CellLine_Lymphoma_vs_CellLine_Myeloma': 5, 'cancermap_cluster_12_and_37': 9, 'annotated_class_BCL_and_CML': 9, 
'annotated_class_AML_and_Lymphoid': 7, 'annotated_class_T-Lymphoid_vs_TCL': 2, 'cancermap_cluster_10_and_47': 3, 'cancermap_cluster_15_vs_28': 4, 'annotated_class_Cancer_Lymphoma_vs_other': 3, 'cancermap_cluster_27_vs_39': 1, 'cancermap_cluster_27_vs_38': 4, 'cancermap_cluster_27_vs_37': 3, 'cancermap_cluster_27_vs_36': 7, 'cancermap_cluster_27_vs_35': 6, 'cancermap_cluster_27_vs_34': 1, 'cancermap_cluster_27_vs_33': 1, 'cancermap_cluster_27_vs_32': 2, 'cancermap_cluster_27_vs_31': 3, 'cancermap_cluster_27_vs_30': 8, 'cancermap_cluster_18_and_25': 9, 'cancermap_cluster_18_and_24': 8, 'cancermap_cluster_18_and_27': 5, 'cancermap_cluster_18_and_26': 4, 'cancermap_cluster_18_and_21': 6, 'cancermap_cluster_18_and_20': 5, 'cancermap_cluster_18_and_23': 9, 'cancermap_cluster_18_and_22': 8, 'cancermap_cluster_10_and_43': 3, 'cancermap_cluster_18_and_29': 9, 'cancermap_cluster_18_and_28': 0, 'cancermap_cluster_10_and_42': 6, 'annotated_class_LP_and_CLL': 1, 'annotated_class_Lymphoid_vs_B-Lymphoid': 2, 'cancermap_cluster_10_and_41': 2, 'cancermap_cluster_1_and_26': 2, 'cancermap_cluster_10_and_40': 1, 'cancermap_cluster_10_vs_19': 5, 'annotated_class_vs_cancermap_cluster_na_vs_pre-B-ALL': 6, 'cancermap_cluster_10_vs_18': 7, 'annotated_class_StemCell_vs_CML': 1, 'cancermap_cluster_32_and_38': 4, 'cancermap_cluster_32_and_39': 9, 'cancermap_cluster_10_and_49': 3, 'cancermap_cluster_32_and_33': 6, 'cancermap_cluster_10_and_48': 7, 'cancermap_cluster_32_and_36': 5, 'cancermap_cluster_32_and_37': 3, 'cancermap_cluster_32_and_34': 5, 'cancermap_cluster_32_and_35': 2, 'annotated_class_vs_cancermap_cluster_NonCancer_vs_chronic_leukemias': 8, 'annotated_class_CellLine_Leukemia': 1, 'cancermap_cluster_chronic_leukemias': 0, 'annotated_class_T-ALL_vs_CML': 2, 'cancermap_cluster_Cancer_Lymphoma_vs_CellLine_Myeloma': 1, 'cancermap_cluster_21_and_34': 1, 'annotated_class_T-ALL_and_pre-B-ALL': 5, 'cancermap_cluster_1_and_47': 5, 'annotated_class_MM_vs_CLL': 8, 'annotated_class_MP_and_LP': 
9, 'cancermap_cluster_20_and_35': 1, 'cancermap_cluster_20_and_34': 5, 'cancermap_cluster_20_and_37': 1, 'cancermap_cluster_20_and_36': 9, 'cancermap_cluster_20_and_31': 0, 'cancermap_cluster_20_and_30': 2, 'cancermap_cluster_20_and_33': 7, 'cancermap_cluster_20_and_32': 6, 'cancermap_cluster_16_vs_19': 7, 'cancermap_cluster_16_vs_18': 3, 'cancermap_cluster_20_and_39': 8, 'cancermap_cluster_20_and_38': 0, 'cancermap_cluster_23_vs_33': 6, 'cancermap_cluster_23_vs_32': 9, 'cancermap_cluster_23_vs_31': 8, 'cancermap_cluster_23_vs_30': 2, 'cancermap_cluster_23_vs_37': 3, 'cancermap_cluster_23_vs_36': 4, 'cancermap_cluster_23_vs_35': 8, 'cancermap_cluster_Myeloid_and_AML': 1, 'cancermap_cluster_23_vs_39': 1, 'cancermap_cluster_23_vs_38': 4, 'cancermap_cluster_AML_and_CLL': 5, 'cancermap_cluster_1_and_27': 1, 'cancermap_cluster_19_and_40': 3, 'cancermap_cluster_19_and_41': 4, 'cancermap_cluster_19_and_42': 0, 'cancermap_cluster_19_and_43': 5, 'cancermap_cluster_19_and_44': 3, 'cancermap_cluster_19_and_45': 8, 'cancermap_cluster_19_and_46': 8, 'cancermap_cluster_19_and_47': 4, 'cancermap_cluster_19_and_48': 6, 'cancermap_cluster_19_and_49': 9, 'annotated_class_Cancer_Lymphoma_and_Cancer_Leukemia': 8, 'annotated_class_CellLine_Myeloma_vs_other': 2, 'annotated_class_vs_cancermap_cluster_MM_vs_CLL': 3, 'cancermap_cluster_12_vs_42': 8, 'cancermap_cluster_18_vs_31': 4, 'cancermap_cluster_12_vs_40': 4, 'cancermap_cluster_12_vs_41': 4, 'cancermap_cluster_37_and_48': 1, 'cancermap_cluster_37_and_49': 1, 'cancermap_cluster_12_vs_44': 7, 'cancermap_cluster_18_vs_30': 5, 'cancermap_cluster_37_and_44': 6, 'cancermap_cluster_37_and_45': 2, 'cancermap_cluster_37_and_46': 1, 'cancermap_cluster_28_and_29': 2, 'cancermap_cluster_37_and_40': 0, 'cancermap_cluster_37_and_41': 9, 'cancermap_cluster_37_and_42': 2, 'cancermap_cluster_37_and_43': 6, 'annotated_class_AML_vs_Erythroid': 5, 'cancermap_cluster_18_vs_32': 8, 'cancermap_cluster_16_vs_26': 8, 'cancermap_cluster_18_vs_35': 7, 
'annotated_class_Myeloid_vs_pre-B-ALL': 4, 'annotated_class_Myeloid_and_Lymphoid': 9, 'cancermap_cluster_18_vs_34': 5, 'annotated_class_Cancer_Myeloma_vs_Prolif_Lympho': 0, 'cancermap_cluster_18_vs_37': 4, 'cancermap_cluster_18_vs_36': 1, 'annotated_class_Myeloid_and_LP': 1, 'cancermap_cluster_Myeloid_vs_pre-B-ALL': 5, 'cancermap_cluster_18_vs_39': 3, 'cancermap_cluster_AML_vs_pre-B-ALL': 3, 'cancermap_cluster_18_vs_38': 0, 'annotated_class_vs_cancermap_cluster_T-Lymphoid_vs_CLL': 0, 'annotated_class_MM_and_CLL': 6, 'cancermap_cluster_4_vs_7': 6, 'cancermap_cluster_4_vs_6': 8, 'cancermap_cluster_4_vs_5': 6, 'cancermap_cluster_26_vs_39': 0, 'cancermap_cluster_26_vs_36': 7, 'cancermap_cluster_26_vs_37': 1, 'cancermap_cluster_4_vs_9': 7, 'cancermap_cluster_4_vs_8': 7, 'cancermap_cluster_26_vs_32': 1, 'cancermap_cluster_26_vs_33': 7, 'cancermap_cluster_26_vs_30': 9, 'cancermap_cluster_26_vs_31': 9, 'annotated_class_MP_vs_CML': 8, 'cancermap_cluster_Myeloid_vs_MM': 7, 'annotated_class_Erythroid_vs_CLL': 1, 'annotated_class_T-Lymphoid_and_na': 5, 'cancermap_cluster_44_vs_49': 1, 'cancermap_cluster_44_vs_48': 2, 'cancermap_cluster_35_and_48': 3, 'cancermap_cluster_35_and_49': 9, 'cancermap_cluster_35_and_46': 3, 'cancermap_cluster_35_and_47': 6, 'cancermap_cluster_35_and_44': 9, 'cancermap_cluster_35_and_45': 9, 'cancermap_cluster_44_vs_47': 4, 'cancermap_cluster_44_vs_46': 9, 'cancermap_cluster_44_vs_45': 9, 'cancermap_cluster_35_and_41': 3, 'cancermap_cluster_11_and_29': 6, 'annotated_class_CellLine_Myeloma_and_other': 1, 'annotated_class_na_and_Erythroid': 0, 'cancermap_cluster_13_and_15': 1, 'cancermap_cluster_28_and_31': 1, 'cancermap_cluster_13_and_17': 9, 'cancermap_cluster_13_and_16': 2, 'cancermap_cluster_13_and_19': 2, 'cancermap_cluster_13_and_18': 0, 'annotated_class_LP_and_pre-B-ALL': 2, 'annotated_class_vs_cancermap_cluster_Cancer_Leukemia_vs_Cancer_Myeloma': 2, 'cancermap_cluster_4_and_49': 5, 'annotated_class_vs_cancermap_cluster_Lymphoid_vs_pre-B-ALL': 0, 
'cancermap_cluster_16_vs_48': 6, 'annotated_class_TCL_and_AML': 3, 'cancermap_cluster_2_and_23': 0, 'cancermap_cluster_2_and_22': 1, 'cancermap_cluster_29_and_50': 5, 'cancermap_cluster_17_vs_18': 6, 'cancermap_cluster_2_and_21': 3, 'cancermap_cluster_2_and_20': 3, 'annotated_class_vs_cancermap_cluster_BCL_vs_T-ALL': 1, 'cancermap_cluster_40_and_46': 2, 'annotated_class_acute_leukemias_and_chronic_leukemias': 1, 'cancermap_cluster_40_and_47': 9, 'cancermap_cluster_41_and_50': 9, 'cancermap_cluster_40_and_44': 6, 'cancermap_cluster_Myeloid_vs_CLL': 3, 'cancermap_cluster_40_and_45': 8, 'cancermap_cluster_acute_leukemias': 5, 'annotated_class_Erythroid_vs_StemCell': 1, 'cancermap_cluster_37_and_50': 8, 'annotated_class_MM_and_StemCell': 6, 'annotated_class_TCL_and_CML': 2, 'cancermap_cluster_11_and_25': 1, 'cancermap_cluster_40_and_41': 7, 'annotated_class_LP_and_CML': 9, 'cancermap_cluster_45_vs_48': 7, 'cancermap_cluster_45_vs_49': 7, 'cancermap_cluster_1_and_25': 7, 'cancermap_cluster_45_vs_46': 4, 'cancermap_cluster_45_vs_47': 3, 'annotated_class_MM_vs_Lymphoid': 4, 'cancermap_cluster_14_and_21': 7, 'cancermap_cluster_14_and_20': 7, 'cancermap_cluster_21_and_49': 5, 'cancermap_cluster_21_and_48': 1, 'cancermap_cluster_14_and_25': 2, 'cancermap_cluster_14_and_24': 0, 'cancermap_cluster_14_and_27': 5, 'cancermap_cluster_14_and_26': 9, 'cancermap_cluster_21_and_43': 6, 'cancermap_cluster_21_and_42': 3, 'cancermap_cluster_21_and_41': 7, 'cancermap_cluster_21_and_40': 8, 'cancermap_cluster_21_and_47': 2, 'cancermap_cluster_21_and_46': 7, 'cancermap_cluster_21_and_45': 1, 'cancermap_cluster_21_and_44': 2, 'cancermap_cluster_12_vs_28': 3, 'cancermap_cluster_12_vs_29': 9, 'cancermap_cluster_22_vs_24': 2, 'annotated_class_CellLine_Lymphoma_and_CellLine_Myeloma': 8, 'cancermap_cluster_13_and_49': 6, 'cancermap_cluster_13_vs_50': 7, 'annotated_class_vs_cancermap_cluster_AML_vs_CellLine': 4, 'annotated_class_TCL_vs_StemCell': 7, 'cancermap_cluster_27_and_29': 7, 
'cancermap_cluster_27_and_28': 3, 'annotated_class_Prolif_Myelo_and_CellLine_Myeloma': 2, 'cancermap_cluster_AML_and_pre-B-ALL': 9, 'annotated_class_MP_vs_StemCell': 0, 'annotated_class_MP_and_MM': 6, 'annotated_class_CellLine_vs_StemCell': 7, 'annotated_class_vs_cancermap_cluster_TCL_vs_pre-B-ALL': 0, 'annotated_class_vs_cancermap_cluster_AML_vs_CLL': 5, 'annotated_class_MP_vs_B-Lymphoid': 7, 'annotated_class_MP': 1, 'annotated_class_Cancer_Myeloma_and_other': 8, 'annotated_class_Lymphoid_vs_pre-B-ALL': 0, 'annotated_class_vs_cancermap_cluster_CellLine_vs_CLL': 4, 'cancermap_cluster_16_and_36': 7, 'annotated_class_BCL_vs_AML': 4, 'cancermap_cluster_32_and_49': 5, 'cancermap_cluster_32_and_48': 1, 'cancermap_cluster_32_and_47': 9, 'cancermap_cluster_32_and_46': 6, 'cancermap_cluster_32_and_45': 2, 'cancermap_cluster_32_and_44': 8, 'cancermap_cluster_32_and_43': 0, 'cancermap_cluster_32_and_42': 8, 'cancermap_cluster_32_and_41': 3, 'cancermap_cluster_32_and_40': 6, 'annotated_class_BCL_vs_T-ALL': 8, 'cancermap_cluster_18_and_38': 0, 'cancermap_cluster_18_and_39': 3, 'cancermap_cluster_14_and_23': 1, 'annotated_class_MP_vs_LP': 3, 'cancermap_cluster_18_and_32': 4, 'cancermap_cluster_18_and_33': 6, 'cancermap_cluster_18_and_30': 8, 'cancermap_cluster_18_and_31': 8, 'cancermap_cluster_18_and_36': 4, 'cancermap_cluster_18_and_37': 7, 'cancermap_cluster_acute_leukemias_and_chronic_leukemias': 2, 'cancermap_cluster_18_and_35': 8, 'cancermap_cluster_16_vs_46': 0, 'annotated_class_AML_and_LP': 1, 'annotated_class_Myeloid': 8, 'cancermap_cluster_39_vs_43': 2, 'cancermap_cluster_6_vs_7': 4, 'cancermap_cluster_5_vs_31': 9, 'cancermap_cluster_6_vs_9': 2, 'cancermap_cluster_6_vs_8': 3, 'cancermap_cluster_5_vs_32': 4, 'cancermap_cluster_16_vs_24': 0, 'cancermap_cluster_16_vs_25': 3, 'cancermap_cluster_19': 8, 'cancermap_cluster_5_vs_33': 5, 'cancermap_cluster_16_vs_20': 0, 'cancermap_cluster_16_vs_21': 0, 'cancermap_cluster_16_vs_22': 4, 'cancermap_cluster_16_vs_23': 1, 
'cancermap_cluster_13': 6, 'cancermap_cluster_42_vs_49': 0, 'cancermap_cluster_11': 5, 'cancermap_cluster_10': 9, 'cancermap_cluster_17': 7, 'cancermap_cluster_16': 5, 'cancermap_cluster_15': 1, 'cancermap_cluster_42_vs_48': 9, 'cancermap_cluster_23_vs_46': 0, 'cancermap_cluster_23_vs_47': 9, 'cancermap_cluster_23_vs_44': 5, 'cancermap_cluster_23_vs_45': 5, 'cancermap_cluster_23_vs_42': 2, 'cancermap_cluster_39_vs_45': 4, 'cancermap_cluster_23_vs_40': 2, 'cancermap_cluster_23_vs_41': 7, 'annotated_class_AML_and_StemCell': 9, 'cancermap_cluster_39_vs_44': 4, 'cancermap_cluster_23_vs_48': 8, 'cancermap_cluster_23_vs_49': 5, 'cancermap_cluster_42_vs_45': 7, 'annotated_class_B-Lymphoid_and_StemCell': 5, 'cancermap_cluster_42_vs_44': 7, 'cancermap_cluster_39_vs_49': 3, 'cancermap_cluster_24': 4, 'cancermap_cluster_39_vs_48': 8, 'annotated_class_T-Lymphoid_and_T-ALL': 7, 'cancermap_cluster_23_vs_34': 9, 'annotated_class_vs_cancermap_cluster_Cancer_Lymphoma_vs_CellLine_Myeloma': 1, 'annotated_class_StemCell_vs_CLL': 7, 'cancermap_cluster_15_and_34': 1, 'cancermap_cluster_28_and_39': 8, 'annotated_class_vs_cancermap_cluster_NonCancer_vs_Cancer_Leukemia': 2, 'annotated_class_Lymphoid_and_CLL': 1, 'cancermap_cluster_28_and_38': 9, 'cancermap_cluster_6_vs_11': 6, 'cancermap_cluster_6_vs_10': 7, 'cancermap_cluster_6_vs_13': 3, 'cancermap_cluster_6_vs_12': 1, 'cancermap_cluster_6_vs_15': 5, 'cancermap_cluster_6_vs_14': 3, 'cancermap_cluster_6_vs_17': 2, 'cancermap_cluster_6_vs_16': 2, 'cancermap_cluster_6_vs_19': 5, 'cancermap_cluster_6_vs_18': 8, 'annotated_class_Cancer_Lymphoma_vs_Prolif_Lympho': 1, 'cancermap_cluster_20_and_21': 9, 'cancermap_cluster_20_and_26': 7, 'cancermap_cluster_20_and_27': 7, 'cancermap_cluster_20_and_24': 4, 'cancermap_cluster_20_and_25': 9, 'cancermap_cluster_28_and_35': 4, 'cancermap_cluster_13_and_46': 0, 'annotated_class_B-Lymphoid_and_LP': 2, 'cancermap_cluster_28_and_34': 2, 'cancermap_cluster_22': 2, 'cancermap_cluster_28_and_37': 7, 
'annotated_class_AML_and_Erythroid': 0, 'cancermap_cluster_36_and_49': 0, 'cancermap_cluster_28_and_36': 7, 'cancermap_cluster_36_and_43': 5, 'cancermap_cluster_36_and_42': 0, 'cancermap_cluster_36_and_41': 3, 'cancermap_cluster_36_and_40': 4, 'cancermap_cluster_36_and_47': 9, 'cancermap_cluster_36_and_46': 1, 'cancermap_cluster_36_and_45': 6, 'cancermap_cluster_28_and_30': 7, 'annotated_class_TCL_vs_pre-B-ALL': 3, 'annotated_class_na_and_B-Lymphoid': 6, 'cancermap_cluster_16_vs_47': 0, 'cancermap_cluster_13_and_47': 9, 'cancermap_cluster_MM_vs_pre-B-ALL': 7, 'cancermap_cluster_28_and_33': 1, 'annotated_class_T-ALL_vs_B-Lymphoid': 1, 'cancermap_cluster_21_and_27': 3, 'cancermap_cluster_23': 4, 'cancermap_cluster_28_and_32': 5, 'cancermap_cluster_26_vs_29': 6, 'cancermap_cluster_26_vs_28': 8, 'cancermap_cluster_23_and_29': 8, 'cancermap_cluster_23_and_28': 9, 'cancermap_cluster_23_and_27': 4, 'cancermap_cluster_23_and_26': 5, 'cancermap_cluster_26_vs_27': 1, 'cancermap_cluster_23_and_24': 1, 'annotated_class_Myeloid_and_StemCell': 1, 'cancermap_cluster_13_and_44': 1, 'cancermap_cluster_20': 0, 'cancermap_cluster_23_vs_24': 1, 'annotated_class_BCL_vs_Erythroid': 8, 'cancermap_cluster_10_and_29': 0, 'annotated_class_NonCancer_vs_chronic_leukemias': 0, 'annotated_class_Prolif_Lympho_and_CellLine_Myeloma': 0, 'cancermap_cluster_14_vs_17': 6, 'cancermap_cluster_14_vs_16': 5, 'cancermap_cluster_14_vs_15': 7, 'cancermap_cluster_4_vs_50': 4, 'cancermap_cluster_21': 1, 'cancermap_cluster_23_vs_25': 2, 'cancermap_cluster_25_and_50': 1, 'cancermap_cluster_14_vs_19': 5, 'cancermap_cluster_14_vs_18': 6, 'cancermap_cluster_17_and_50': 0, 'cancermap_cluster_10_and_46': 7, 'annotated_class_Prolif_Myelo': 5, 'cancermap_cluster_23_vs_26': 3, 'cancermap_cluster_20_and_49': 4, 'cancermap_cluster_10_and_45': 0, 'annotated_class_Prolif_Myelo_vs_CellLine_Myeloma': 9, 'cancermap_cluster_34_and_45': 5, 'cancermap_cluster_34_and_44': 9, 'cancermap_cluster_34_and_47': 9, 
'cancermap_cluster_34_and_46': 8, 'cancermap_cluster_34_and_41': 0, 'cancermap_cluster_34_and_40': 1, 'cancermap_cluster_34_and_43': 2, 'cancermap_cluster_34_and_42': 7, 'cancermap_cluster_34_and_49': 6, 'cancermap_cluster_34_and_48': 6, 'cancermap_cluster_4_and_38': 9, 'cancermap_cluster_4_and_39': 7, 'cancermap_cluster_4_and_32': 0, 'cancermap_cluster_4_and_33': 4, 'cancermap_cluster_4_and_30': 7, 'cancermap_cluster_4_and_31': 1, 'cancermap_cluster_4_and_36': 1, 'cancermap_cluster_4_and_37': 6, 'cancermap_cluster_4_and_34': 1, 'cancermap_cluster_4_and_35': 1, 'cancermap_cluster_10_vs_48': 6, 'cancermap_cluster_11_vs_41': 7, 'cancermap_cluster_10_vs_40': 9, 'cancermap_cluster_10_vs_41': 2, 'cancermap_cluster_10_vs_42': 3, 'cancermap_cluster_18_vs_33': 2, 'cancermap_cluster_10_vs_44': 6, 'cancermap_cluster_10_vs_45': 1, 'cancermap_cluster_10_vs_46': 4, 'cancermap_cluster_10_vs_47': 8, 'annotated_class_Erythroid_vs_CML': 4, 'cancermap_cluster_12_and_41': 7, 'cancermap_cluster_12_and_40': 1, 'cancermap_cluster_12_and_43': 4, 'cancermap_cluster_12_and_42': 6, 'cancermap_cluster_12_and_45': 2, 'cancermap_cluster_12_and_44': 8, 'cancermap_cluster_12_and_47': 5, 'cancermap_cluster_12_and_46': 0, 'cancermap_cluster_12_and_49': 5, 'cancermap_cluster_12_and_48': 2, 'cancermap_cluster_45_vs_50': 7, 'cancermap_cluster_CellLine_Myeloma': 6, 'annotated_class_vs_cancermap_cluster_TCL_vs_AML': 6, 'annotated_class_Lymphoid_and_B-Lymphoid': 0, 'cancermap_cluster_14_and_36': 5, 'cancermap_cluster_14_and_37': 0, 'cancermap_cluster_14_and_34': 8, 'cancermap_cluster_14_and_35': 8, 'cancermap_cluster_14_and_32': 9, 'cancermap_cluster_14_and_33': 6, 'cancermap_cluster_14_and_30': 6, 'cancermap_cluster_14_and_31': 4, 'cancermap_cluster_21_and_50': 9, 'cancermap_cluster_Myeloid_and_CLL': 4, 'cancermap_cluster_14_and_38': 0, 'cancermap_cluster_14_and_39': 5, 'annotated_class_vs_cancermap_cluster_CellLine_Leukemia_vs_Cancer_Myeloma': 8, 'cancermap_cluster_27_and_38': 3, 
'cancermap_cluster_27_and_39': 6, 'annotated_class_BCL_and_CellLine': 0, 'cancermap_cluster_CellLine_and_MM': 7, 'cancermap_cluster_27_and_30': 9, 'cancermap_cluster_27_and_31': 5, 'cancermap_cluster_27_and_32': 0, 'cancermap_cluster_27_and_33': 8, 'cancermap_cluster_27_and_34': 5, 'cancermap_cluster_27_and_35': 7, 'cancermap_cluster_27_and_36': 1, 'cancermap_cluster_27_and_37': 2, 'annotated_class_AML_vs_Lymphoid': 7, 'annotated_class_Myeloid_vs_CLL': 4, 'annotated_class_TCL_and_CLL': 1, 'annotated_class_CellLine': 4, 'cancermap_cluster_CellLine_and_CLL': 1, 'cancermap_cluster_12_and_50': 6, 'annotated_class_Cancer_Lymphoma_and_CellLine_Myeloma': 4, 'annotated_class_NonCancer_vs_Cancer_Lymphoma': 9, 'annotated_class_TCL_vs_Lymphoid': 1, 'annotated_class_BCL_and_B-Lymphoid': 2, 'cancermap_cluster_31_vs_50': 3, 'cancermap_cluster_other_and_chronic_leukemias': 5, 'cancermap_cluster_25_vs_45': 6, 'cancermap_cluster_5_vs_18': 9, 'cancermap_cluster_5_vs_19': 2, 'cancermap_cluster_5_vs_12': 0, 'cancermap_cluster_5_vs_13': 1, 'cancermap_cluster_5_vs_10': 7, 'cancermap_cluster_5_vs_11': 6, 'cancermap_cluster_5_vs_16': 0, 'cancermap_cluster_5_vs_17': 6, 'cancermap_cluster_5_vs_14': 2, 'cancermap_cluster_5_vs_15': 3, 'cancermap_cluster_AML_vs_T-ALL': 8, 'annotated_class_vs_cancermap_cluster_T-Lymphoid_vs_T-ALL': 5, 'cancermap_cluster_32_and_50': 0, 'cancermap_cluster_T-ALL_vs_MM': 8, 'cancermap_cluster_18_and_49': 2, 'cancermap_cluster_18_and_48': 8, 'cancermap_cluster_18_and_47': 8, 'cancermap_cluster_18_and_46': 1, 'cancermap_cluster_18_and_45': 6, 'cancermap_cluster_18_and_44': 7, 'cancermap_cluster_18_and_43': 7, 'cancermap_cluster_18_and_42': 7, 'cancermap_cluster_18_and_41': 2, 'cancermap_cluster_18_and_40': 4, 'cancermap_cluster_16_and_37': 5, 'cancermap_cluster_2_vs_5': 6, 'cancermap_cluster_2_vs_4': 4, 'annotated_class_Myeloid_vs_Lymphoid': 7, 'cancermap_cluster_2_vs_7': 9, 'cancermap_cluster_2_vs_6': 7, 'cancermap_cluster_2_vs_3': 7, 'cancermap_cluster_10_and_28': 
4, 'cancermap_cluster_23_vs_50': 6, 'cancermap_cluster_9_vs_18': 1, 'cancermap_cluster_9_vs_19': 8, 'cancermap_cluster_9_vs_16': 6, 'cancermap_cluster_9_vs_17': 6, 'cancermap_cluster_9_vs_14': 6, 'cancermap_cluster_9_vs_15': 2, 'cancermap_cluster_9_vs_12': 8, 'cancermap_cluster_9_vs_13': 7, 'cancermap_cluster_9_vs_10': 9, 'cancermap_cluster_9_vs_11': 7, 'cancermap_cluster_AML': 4, 'cancermap_cluster_3_vs_6': 1, 'cancermap_cluster_3_vs_7': 3, 'cancermap_cluster_3_vs_4': 8, 'cancermap_cluster_3_vs_5': 1, 'cancermap_cluster_2_vs_9': 1, 'cancermap_cluster_3_vs_8': 6, 'cancermap_cluster_3_vs_9': 6, 'cancermap_cluster_38_vs_39': 8, 'cancermap_cluster_2_vs_8': 9, 'cancermap_cluster_AML_and_MM': 6, 'cancermap_cluster_BCL_and_T-ALL': 6, 'annotated_class_CellLine_vs_LP': 3, 'cancermap_cluster_26': 5, 'cancermap_cluster_27': 3, 'cancermap_cluster_28_and_46': 0, 'cancermap_cluster_25': 6, 'cancermap_cluster_28_and_40': 1, 'cancermap_cluster_28_and_41': 6, 'cancermap_cluster_28_and_42': 6, 'cancermap_cluster_28_and_43': 6, 'cancermap_cluster_16_vs_33': 2, 'cancermap_cluster_16_vs_32': 1, 'cancermap_cluster_16_vs_31': 3, 'cancermap_cluster_16_vs_30': 2, 'cancermap_cluster_28_and_48': 4, 'cancermap_cluster_28_and_49': 1, 'cancermap_cluster_28': 9, 'cancermap_cluster_29': 9, 'annotated_class_CellLine_Lymphoma_and_Prolif_Lympho': 5, 'cancermap_cluster_38_and_39': 9, 'cancermap_cluster_11_vs_16': 6, 'cancermap_cluster_11_vs_17': 7, 'cancermap_cluster_32_vs_33': 8, 'cancermap_cluster_11_vs_15': 2, 'cancermap_cluster_32_vs_35': 9, 'cancermap_cluster_32_vs_34': 6, 'cancermap_cluster_32_vs_37': 7, 'cancermap_cluster_32_vs_36': 8, 'cancermap_cluster_32_vs_39': 2, 'cancermap_cluster_32_vs_38': 9, 'cancermap_cluster_11_vs_18': 9, 'cancermap_cluster_11_vs_19': 8, 'annotated_class_Prolif_Lympho': 4, 'cancermap_cluster_23_and_38': 3, 'cancermap_cluster_23_and_39': 5, 'cancermap_cluster_23_and_34': 6, 'cancermap_cluster_23_and_35': 5, 'cancermap_cluster_23_and_36': 9, 
'cancermap_cluster_23_and_37': 1, 'cancermap_cluster_23_and_30': 5, 'cancermap_cluster_23_and_31': 7, 'cancermap_cluster_23_and_32': 4, 'cancermap_cluster_23_and_33': 4, 'cancermap_cluster_30_and_49': 2, 'cancermap_cluster_30_and_48': 8, 'annotated_class_BCL_vs_CellLine': 8, 'cancermap_cluster_30_and_41': 1, 'cancermap_cluster_30_and_40': 8, 'cancermap_cluster_30_and_43': 2, 'cancermap_cluster_30_and_42': 9, 'cancermap_cluster_30_and_45': 5, 'cancermap_cluster_30_and_44': 3, 'cancermap_cluster_30_and_47': 7, 'cancermap_cluster_30_and_46': 9, 'cancermap_cluster_4_vs_44': 9, 'cancermap_cluster_4_vs_45': 4, 'cancermap_cluster_4_vs_46': 4, 'cancermap_cluster_4_vs_47': 6, 'cancermap_cluster_4_vs_40': 4, 'cancermap_cluster_4_vs_41': 8, 'cancermap_cluster_4_vs_42': 2, 'cancermap_cluster_Cancer_Myeloma_vs_CellLine_Myeloma': 8, 'cancermap_cluster_1_and_22': 1, 'cancermap_cluster_1_and_23': 9, 'cancermap_cluster_1_and_20': 6, 'cancermap_cluster_1_and_21': 2, 'cancermap_cluster_4_vs_48': 3, 'cancermap_cluster_4_vs_49': 2, 'cancermap_cluster_1_and_24': 4, 'cancermap_cluster_Cancer_Leukemia_vs_Cancer_Myeloma': 0, 'cancermap_cluster_17_and_46': 4, 'cancermap_cluster_22_vs_50': 6, 'cancermap_cluster_17_and_44': 3, 'cancermap_cluster_25_and_31': 9, 'cancermap_cluster_17_and_42': 3, 'cancermap_cluster_17_and_43': 0, 'cancermap_cluster_17_and_40': 2, 'cancermap_cluster_17_and_41': 4, 'cancermap_cluster_22_and_42': 5, 'cancermap_cluster_17_and_48': 0, 'cancermap_cluster_8_vs_50': 0, 'annotated_class_na_and_CML': 9, 'cancermap_cluster_1_and_3': 4, 'annotated_class_AML_vs_T-ALL': 6, 'cancermap_cluster_22_and_40': 7, 'cancermap_cluster_1_and_2': 1, 'cancermap_cluster_22_and_41': 6, 'cancermap_cluster_26_and_50': 5, 'annotated_class_CellLine_Leukemia_vs_other': 9, 'cancermap_cluster_NonCancer_vs_Cancer_Lymphoma': 4, 'cancermap_cluster_1_and_6': 8, 'annotated_class_vs_cancermap_cluster_CellLine_Lymphoma_vs_CellLine_Leukemia': 0, 'cancermap_cluster_4_and_29': 3, 
'cancermap_cluster_4_and_28': 3, 'cancermap_cluster_3_and_13': 8, 'cancermap_cluster_3_and_12': 3, 'cancermap_cluster_3_and_15': 0, 'cancermap_cluster_3_and_14': 3, 'cancermap_cluster_3_and_17': 6, 'cancermap_cluster_3_and_16': 0, 'cancermap_cluster_4_and_21': 2, 'cancermap_cluster_4_and_20': 4, 'cancermap_cluster_4_and_23': 6, 'cancermap_cluster_4_and_22': 9, 'cancermap_cluster_4_and_25': 2, 'cancermap_cluster_4_and_24': 0, 'cancermap_cluster_4_and_27': 6, 'cancermap_cluster_4_and_26': 2, 'annotated_class_vs_cancermap_cluster_T-Lymphoid_vs_CellLine': 8, 'cancermap_cluster_33_vs_34': 4, 'cancermap_cluster_33_vs_35': 9, 'cancermap_cluster_33_vs_36': 5, 'cancermap_cluster_33_vs_37': 3, 'cancermap_cluster_33_vs_38': 7, 'cancermap_cluster_33_vs_39': 6, 'cancermap_cluster_10_vs_50': 2, 'cancermap_cluster_17_vs_43': 6, 'cancermap_cluster_7_vs_31': 2, 'annotated_class_CellLine_Lymphoma_vs_Prolif_Lympho': 1, 'annotated_class_vs_cancermap_cluster_Erythroid_vs_CLL': 3, 'cancermap_cluster_Myeloid_vs_T-ALL': 9, 'cancermap_cluster_CellLine_vs_MM': 0, 'annotated_class_AML_vs_StemCell': 6, 'cancermap_cluster_18_vs_20': 3, 'cancermap_cluster_34_and_50': 3, 'annotated_class_vs_cancermap_cluster_na_vs_T-ALL': 6, 'cancermap_cluster_10_and_36': 2, 'cancermap_cluster_11_and_30': 2, 'annotated_class_Cancer_Lymphoma': 6, 'cancermap_cluster_CellLine_vs_pre-B-ALL': 6, 'cancermap_cluster_19_vs_27': 2, 'annotated_class_MM': 9, 'cancermap_cluster_11_and_34': 4, 'cancermap_cluster_13_and_24': 1, 'cancermap_cluster_11_and_36': 1, 'annotated_class_MM_and_CML': 6, 'cancermap_cluster_Cancer_Lymphoma_and_Cancer_Leukemia': 3, 'cancermap_cluster_22_vs_47': 9, 'cancermap_cluster_22_vs_46': 6, 'cancermap_cluster_CLL': 0, 'cancermap_cluster_22_vs_44': 7, 'cancermap_cluster_22_vs_43': 6, 'cancermap_cluster_22_vs_42': 0, 'cancermap_cluster_22_vs_41': 4, 'cancermap_cluster_22_vs_40': 4, 'cancermap_cluster_18_vs_28': 6, 'cancermap_cluster_18_vs_29': 5, 'cancermap_cluster_22_vs_49': 2, 
'cancermap_cluster_22_vs_48': 8, 'cancermap_cluster_1_vs_20': 7, 'cancermap_cluster_1_vs_26': 1, 'cancermap_cluster_16_vs_28': 5, 'cancermap_cluster_22_and_38': 2, 'annotated_class_NonCancer_and_chronic_leukemias': 3, 'cancermap_cluster_BCL_and_pre-B-ALL': 0, 'annotated_class_LP_vs_CLL': 2, 'cancermap_cluster_pre-B-ALL': 7, 'cancermap_cluster_21_vs_48': 0, 'cancermap_cluster_21_vs_49': 3, 'cancermap_cluster_21_vs_44': 3, 'cancermap_cluster_21_vs_45': 4, 'cancermap_cluster_21_vs_46': 1, 'cancermap_cluster_21_vs_47': 8, 'cancermap_cluster_21_vs_40': 9, 'cancermap_cluster_21_vs_41': 8, 'cancermap_cluster_21_vs_42': 2, 'cancermap_cluster_21_vs_43': 9, 'cancermap_cluster_5_vs_29': 2, 'cancermap_cluster_5_vs_28': 2, 'cancermap_cluster_5_vs_27': 1, 'cancermap_cluster_5_vs_26': 0, 'cancermap_cluster_5_vs_25': 0, 'cancermap_cluster_5_vs_24': 2, 'cancermap_cluster_5_vs_23': 5, 'cancermap_cluster_5_vs_22': 1, 'cancermap_cluster_5_vs_21': 2, 'cancermap_cluster_5_vs_20': 0, 'cancermap_cluster_7_and_37': 0, 'cancermap_cluster_19_vs_50': 4, 'cancermap_cluster_7_and_36': 1, 'cancermap_cluster_7_and_35': 7, 'cancermap_cluster_19_vs_26': 1, 'cancermap_cluster_7_and_34': 0, 'cancermap_cluster_7_and_33': 2, 'cancermap_cluster_18_and_50': 1, 'cancermap_cluster_7_and_32': 4, 'cancermap_cluster_7_and_31': 9, 'cancermap_cluster_7_and_30': 6, 'cancermap_cluster_10_and_39': 7, 'annotated_class_StemCell_and_CLL': 6, 'cancermap_cluster_1_vs_16': 5, 'cancermap_cluster_1_vs_17': 3, 'cancermap_cluster_1_vs_14': 5, 'cancermap_cluster_1_vs_15': 4, 'cancermap_cluster_1_vs_12': 3, 'cancermap_cluster_1_vs_13': 3, 'cancermap_cluster_1_vs_10': 5, 'cancermap_cluster_1_vs_11': 6, 'cancermap_cluster_1_vs_18': 1, 'cancermap_cluster_1_vs_19': 2, 'cancermap_cluster_9_vs_23': 7, 'cancermap_cluster_9_vs_22': 5, 'cancermap_cluster_9_vs_21': 2, 'cancermap_cluster_9_vs_20': 8, 'cancermap_cluster_9_vs_27': 6, 'cancermap_cluster_9_vs_26': 8, 'cancermap_cluster_9_vs_25': 6, 'cancermap_cluster_9_vs_24': 4, 
'cancermap_cluster_10_and_11': 7, 'cancermap_cluster_9_vs_29': 6, 'cancermap_cluster_9_vs_28': 2, 'cancermap_cluster_10_and_14': 4, 'cancermap_cluster_10_and_15': 7, 'cancermap_cluster_10_and_16': 3, 'cancermap_cluster_10_and_17': 7, 'cancermap_cluster_5_vs_8': 3, 'cancermap_cluster_5_vs_9': 0, 'cancermap_cluster_Cancer_Lymphoma_and_Cancer_Myeloma': 4, 'cancermap_cluster_10_vs_43': 4, 'annotated_class_BCL_vs_LP': 5, 'cancermap_cluster_5_vs_6': 1, 'cancermap_cluster_5_vs_7': 9, 'cancermap_cluster_29_and_43': 4, 'cancermap_cluster_29_and_42': 5, 'cancermap_cluster_NonCancer_and_CellLine_Leukemia': 9, 'cancermap_cluster_41_and_47': 4, 'cancermap_cluster_BCL_vs_Myeloid': 4, 'cancermap_cluster_41_and_46': 2, 'cancermap_cluster_39_vs_47': 2, 'annotated_class_NonCancer_and_Cancer_Leukemia': 6, 'annotated_class_AML_vs_MP': 8, 'cancermap_cluster_6_vs_37': 1, 'cancermap_cluster_6_vs_36': 1, 'cancermap_cluster_33': 0, 'cancermap_cluster_6_vs_34': 3, 'cancermap_cluster_6_vs_33': 5, 'cancermap_cluster_6_vs_32': 0, 'cancermap_cluster_6_vs_31': 2, 'cancermap_cluster_6_vs_30': 1, 'cancermap_cluster_39': 4, 'cancermap_cluster_38': 7, 'cancermap_cluster_20_and_46': 3, 'cancermap_cluster_29_and_44': 2, 'cancermap_cluster_20_and_40': 5, 'cancermap_cluster_20_and_41': 3, 'cancermap_cluster_6_vs_39': 1, 'cancermap_cluster_6_vs_38': 5, 'cancermap_cluster_7_and_28': 8, 'cancermap_cluster_7_and_29': 9, 'cancermap_cluster_6_and_10': 4, 'cancermap_cluster_6_and_11': 7, 'cancermap_cluster_6_and_16': 7, 'cancermap_cluster_6_and_17': 9, 'cancermap_cluster_6_and_14': 2, 'cancermap_cluster_6_and_15': 8, 'cancermap_cluster_7_and_20': 8, 'cancermap_cluster_7_and_21': 0, 'cancermap_cluster_7_and_22': 1, 'cancermap_cluster_7_and_23': 2, 'cancermap_cluster_7_and_24': 2, 'cancermap_cluster_7_and_25': 2, 'cancermap_cluster_7_and_26': 2, 'cancermap_cluster_7_and_27': 5, 'cancermap_cluster_MM_and_pre-B-ALL': 5, 'cancermap_cluster_17_vs_22': 4, 'cancermap_cluster_17_vs_25': 2, 'cancermap_cluster_17_vs_24': 
8, 'annotated_class_Cancer_Leukemia_vs_other': 2, 'cancermap_cluster_11_vs_32': 3, 'cancermap_cluster_11_vs_33': 0, 'cancermap_cluster_17_and_47': 8, 'annotated_class_vs_cancermap_cluster_Cancer_Leukemia_vs_CellLine_Myeloma': 5, 'annotated_class_B-Lymphoid_and_CLL': 2, 'annotated_class_Cancer_Myeloma_and_CellLine_Myeloma': 3, 'annotated_class_Lymphoid_and_CML': 0, 'cancermap_cluster_30_and_50': 0, 'cancermap_cluster_1_and_39': 2, 'cancermap_cluster_1_and_38': 1, 'cancermap_cluster_1_and_31': 6, 'cancermap_cluster_1_and_30': 5, 'cancermap_cluster_1_and_33': 6, 'cancermap_cluster_1_and_32': 3, 'cancermap_cluster_1_and_35': 1, 'cancermap_cluster_1_and_34': 4, 'cancermap_cluster_1_and_37': 2, 'cancermap_cluster_1_and_36': 3, 'cancermap_cluster_8_vs_40': 4, 'cancermap_cluster_8_vs_41': 9, 'cancermap_cluster_8_vs_42': 2, 'cancermap_cluster_8_vs_43': 2, 'cancermap_cluster_8_vs_44': 1, 'cancermap_cluster_8_vs_45': 9, 'cancermap_cluster_8_vs_46': 2, 'cancermap_cluster_8_vs_47': 3, 'cancermap_cluster_8_vs_48': 7, 'cancermap_cluster_8_vs_49': 2, 'cancermap_cluster_26_and_42': 2, 'cancermap_cluster_26_and_43': 2, 'cancermap_cluster_BCL_and_Myeloid': 1, 'cancermap_cluster_26_and_41': 7, 'cancermap_cluster_26_and_46': 3, 'cancermap_cluster_26_and_47': 0, 'cancermap_cluster_26_and_44': 5, 'cancermap_cluster_26_and_45': 1, 'cancermap_cluster_16_and_45': 5, 'cancermap_cluster_16_and_44': 2, 'cancermap_cluster_26_and_48': 1, 'cancermap_cluster_26_and_49': 5, 'cancermap_cluster_16_and_41': 0, 'cancermap_cluster_16_and_40': 8, 'cancermap_cluster_16_and_43': 5, 'cancermap_cluster_16_and_42': 1, 'cancermap_cluster_33_and_48': 9, 'cancermap_cluster_2_vs_50': 4, 'cancermap_cluster_12_vs_50': 3, 'cancermap_cluster_CellLine_vs_CLL': 0, 'cancermap_cluster_14_vs_37': 5, 'cancermap_cluster_3_and_24': 9, 'cancermap_cluster_3_and_25': 4, 'cancermap_cluster_3_and_26': 7, 'cancermap_cluster_3_and_27': 1, 'cancermap_cluster_4_and_18': 2, 'cancermap_cluster_3_and_21': 7, 
'cancermap_cluster_3_and_22': 6, 'cancermap_cluster_3_and_23': 6, 'cancermap_cluster_4_and_14': 7, 'cancermap_cluster_4_and_15': 3, 'cancermap_cluster_4_and_16': 4, 'cancermap_cluster_4_and_17': 5, 'cancermap_cluster_4_and_10': 8, 'cancermap_cluster_4_and_11': 4, 'cancermap_cluster_4_and_12': 2, 'cancermap_cluster_4_and_13': 2, 'annotated_class_Prolif_Myelo_and_Cancer_Myeloma': 3, 'cancermap_cluster_14_vs_36': 5, 'annotated_class_TCL_and_MP': 4, 'annotated_class_na_vs_AML': 2, 'annotated_class_T-Lymphoid_vs_StemCell': 2, 'annotated_class_na_vs_MP': 9, 'annotated_class_na_vs_MM': 4, 'annotated_class_vs_cancermap_cluster_T-Lymphoid_vs_pre-B-ALL': 1, 'cancermap_cluster_14_vs_31': 1, 'annotated_class_T-ALL_and_Lymphoid': 4, 'annotated_class_vs_cancermap_cluster_StemCell_vs_CLL': 5, 'annotated_class_vs_cancermap_cluster_B-Lymphoid_vs_pre-B-ALL': 1, 'cancermap_cluster_14_vs_30': 8, 'annotated_class_vs_cancermap_cluster_Lymphoid_vs_CLL': 7, 'annotated_class_TCL_vs_na': 1, 'annotated_class_na_vs_CML': 3, 'annotated_class_vs_cancermap_cluster_MP_vs_T-ALL': 6, 'cancermap_cluster_41_vs_46': 0, 'cancermap_cluster_41_vs_47': 6, 'cancermap_cluster_41_vs_44': 1, 'cancermap_cluster_41_vs_45': 7, 'cancermap_cluster_41_vs_42': 3, 'cancermap_cluster_41_vs_43': 5, 'cancermap_cluster_21_and_23': 4, 'cancermap_cluster_41_vs_48': 8, 'cancermap_cluster_41_vs_49': 8, 'cancermap_cluster_21_and_22': 8, 'cancermap_cluster_21_and_25': 4, 'annotated_class_NonCancer_vs_Cancer_Leukemia': 2, 'annotated_class_CellLine_Lymphoma_vs_CellLine_Leukemia': 4, 'cancermap_cluster_21_and_24': 4, 'cancermap_cluster_24_vs_30': 9, 'cancermap_cluster_24_vs_31': 0, 'cancermap_cluster_24_vs_32': 4, 'cancermap_cluster_24_vs_33': 4, 'cancermap_cluster_24_vs_34': 3, 'cancermap_cluster_24_vs_35': 1, 'cancermap_cluster_24_vs_36': 3, 'cancermap_cluster_24_vs_37': 1, 'cancermap_cluster_24_vs_38': 8, 'cancermap_cluster_24_vs_39': 3, 'cancermap_cluster_21_and_26': 2, 'cancermap_cluster_CellLine_Leukemia_vs_Cancer_Myeloma': 
8, 'annotated_class_TCL_vs_T-ALL': 3, 'annotated_class_na_vs_pre-B-ALL': 9, 'cancermap_cluster_23_and_50': 8, 'annotated_class_CellLine_and_StemCell': 6, 'annotated_class_vs_cancermap_cluster_Prolif_Myelo_vs_Cancer_Myeloma': 5, 'annotated_class_T-Lymphoid_vs_CellLine': 1, 'cancermap_cluster_21_vs_50': 7, 'cancermap_cluster_15_vs_26': 5, 'annotated_class_vs_cancermap_cluster_Prolif_Myelo_vs_CellLine_Myeloma': 2, 'cancermap_cluster_5_vs_30': 7, 'cancermap_cluster_39_vs_42': 3, 'cancermap_cluster_39_vs_41': 8, 'cancermap_cluster_39_vs_40': 2, 'cancermap_cluster_5_vs_34': 7, 'cancermap_cluster_39_vs_46': 0, 'cancermap_cluster_5_vs_36': 0, 'cancermap_cluster_5_vs_37': 9, 'cancermap_cluster_5_vs_38': 0, 'cancermap_cluster_5_vs_39': 4, 'cancermap_cluster_42_vs_47': 2, 'cancermap_cluster_42_vs_46': 4, 'cancermap_cluster_42_vs_43': 8, 'cancermap_cluster_19_vs_41': 9, 'cancermap_cluster_19_vs_40': 1, 'cancermap_cluster_19_vs_43': 9, 'cancermap_cluster_19_vs_42': 3, 'cancermap_cluster_19_vs_45': 3, 'cancermap_cluster_19_vs_44': 1, 'cancermap_cluster_19_vs_47': 4, 'cancermap_cluster_19_vs_46': 7, 'cancermap_cluster_19_vs_49': 9, 'cancermap_cluster_19_vs_48': 4, 'cancermap_cluster_1': 3, 'annotated_class_AML_and_pre-B-ALL': 5, 'annotated_class_vs_cancermap_cluster_BCL_vs_AML': 3, 'cancermap_cluster_11_and_48': 1, 'cancermap_cluster_11_and_49': 3, 'cancermap_cluster_33_and_50': 7, 'cancermap_cluster_11_and_40': 9, 'cancermap_cluster_11_and_41': 8, 'cancermap_cluster_11_and_42': 3, 'cancermap_cluster_11_and_43': 0, 'cancermap_cluster_11_and_44': 8, 'cancermap_cluster_11_and_45': 8, 'cancermap_cluster_11_and_46': 5, 'cancermap_cluster_11_and_47': 1, 'annotated_class_vs_cancermap_cluster_BCL_vs_CLL': 2, 'annotated_class_Cancer_Myeloma': 0, 'cancermap_cluster_25_vs_40': 2, 'cancermap_cluster_25_vs_41': 2, 'cancermap_cluster_25_vs_42': 3, 'cancermap_cluster_25_vs_43': 8, 'cancermap_cluster_25_vs_44': 4, 'cancermap_cluster_Myeloid_vs_CellLine': 0, 'cancermap_cluster_25_vs_46': 9, 
'cancermap_cluster_25_vs_47': 1, 'cancermap_cluster_25_vs_48': 4, 'cancermap_cluster_25_vs_49': 9, 'cancermap_cluster_8': 1, 'annotated_class_vs_cancermap_cluster_CellLine_vs_T-ALL': 1, 'cancermap_cluster_9_vs_34': 8, 'cancermap_cluster_9_vs_35': 2, 'cancermap_cluster_9_vs_36': 9, 'cancermap_cluster_9_vs_37': 3, 'cancermap_cluster_9_vs_30': 9, 'cancermap_cluster_9_vs_31': 1, 'cancermap_cluster_9_vs_32': 4, 'cancermap_cluster_9_vs_33': 8, 'cancermap_cluster_10_and_25': 5, 'cancermap_cluster_10_and_24': 2, 'cancermap_cluster_10_and_27': 3, 'cancermap_cluster_10_and_26': 5, 'cancermap_cluster_9_vs_38': 7, 'cancermap_cluster_9_vs_39': 3, 'cancermap_cluster_10_and_23': 0, 'cancermap_cluster_10_and_22': 7, 'cancermap_cluster_4_vs_43': 3, 'cancermap_cluster_1_and_9': 8, 'cancermap_cluster_1_and_8': 4, 'cancermap_cluster_47_vs_48': 6, 'cancermap_cluster_47_vs_49': 4, 'annotated_class_MP_and_T-ALL': 5, 'cancermap_cluster_1_and_7': 5, 'cancermap_cluster_10_and_21': 5, 'cancermap_cluster_1_and_5': 5, 'cancermap_cluster_1_and_4': 0, 'cancermap_cluster_46_vs_49': 2, 'annotated_class_T-Lymphoid_vs_LP': 0, 'cancermap_cluster_Cancer_Leukemia_and_CellLine_Leukemia': 5, 'annotated_class_vs_cancermap_cluster_na_vs_CellLine': 6, 'cancermap_cluster_1_vs_29': 7, 'cancermap_cluster_1_vs_28': 7, 'cancermap_cluster_1_vs_23': 1, 'cancermap_cluster_1_vs_22': 3, 'cancermap_cluster_1_vs_21': 2, 'cancermap_cluster_10_and_20': 9, 'cancermap_cluster_1_vs_27': 0, 'cancermap_cluster_16_vs_50': 6, 'cancermap_cluster_1_vs_25': 7, 'cancermap_cluster_1_vs_24': 4, 'cancermap_cluster_6_vs_28': 8, 'cancermap_cluster_6_vs_29': 1, 'cancermap_cluster_5_and_7': 3, 'cancermap_cluster_7_and_39': 5, 'cancermap_cluster_7_and_38': 8, 'cancermap_cluster_6_vs_20': 6, 'cancermap_cluster_6_vs_21': 6, 'cancermap_cluster_6_vs_22': 6, 'cancermap_cluster_6_vs_23': 5, 'cancermap_cluster_6_vs_24': 3, 'cancermap_cluster_6_vs_25': 4, 'cancermap_cluster_6_vs_26': 3, 'cancermap_cluster_6_vs_27': 4, 
'cancermap_cluster_11_and_24': 3, 'annotated_class_vs_cancermap_cluster_LP_vs_pre-B-ALL': 5, 'annotated_class_NonCancer_and_Cancer_Lymphoma': 4, 'cancermap_cluster_9_and_22': 3, 'cancermap_cluster_9_and_23': 8, 'cancermap_cluster_9_and_20': 3, 'cancermap_cluster_9_and_21': 3, 'cancermap_cluster_9_and_26': 1, 'cancermap_cluster_9_and_27': 7, 'cancermap_cluster_9_and_24': 3, 'cancermap_cluster_9_and_25': 4, 'cancermap_cluster_11_vs_30': 5, 'cancermap_cluster_11_vs_31': 6, 'cancermap_cluster_9_and_28': 0, 'cancermap_cluster_9_and_29': 0, 'cancermap_cluster_11_vs_34': 7, 'cancermap_cluster_11_vs_35': 1, 'cancermap_cluster_11_vs_36': 0, 'cancermap_cluster_11_vs_37': 7, 'annotated_class_BCL_vs_MM': 2, 'annotated_class_MP_vs_MM': 4, 'annotated_class_BCL_vs_MP': 7, 'cancermap_cluster_NonCancer_and_CellLine_Myeloma': 3, 'cancermap_cluster_CellLine_and_T-ALL': 6, 'annotated_class_NonCancer_vs_CellLine_Leukemia': 3, 'cancermap_cluster_24_and_34': 2, 'cancermap_cluster_8_vs_35': 7, 'cancermap_cluster_8_vs_34': 7, 'cancermap_cluster_8_vs_37': 6, 'cancermap_cluster_8_vs_36': 1, 'cancermap_cluster_8_vs_31': 1, 'cancermap_cluster_8_vs_30': 6, 'cancermap_cluster_8_vs_33': 1, 'cancermap_cluster_8_vs_32': 5, 'cancermap_cluster_8_vs_39': 9, 'cancermap_cluster_8_vs_38': 4, 'cancermap_cluster_26_and_37': 5, 'cancermap_cluster_26_and_36': 3, 'cancermap_cluster_26_and_35': 4, 'cancermap_cluster_26_and_34': 6, 'cancermap_cluster_26_and_33': 4, 'cancermap_cluster_26_and_32': 4, 'cancermap_cluster_26_and_31': 0, 'cancermap_cluster_26_and_30': 2, 'cancermap_cluster_26_and_39': 6, 'cancermap_cluster_26_and_38': 3, 'cancermap_cluster_24_and_31': 1, 'cancermap_cluster_24_and_30': 3, 'cancermap_cluster_24_and_33': 0, 'cancermap_cluster_24_and_32': 2, 'cancermap_cluster_24_and_35': 6, 'cancermap_cluster_MM_vs_CLL': 4, 'cancermap_cluster_24_and_37': 0, 'cancermap_cluster_24_and_36': 9, 'cancermap_cluster_Cancer_Leukemia_and_Cancer_Myeloma': 7, 'cancermap_cluster_24_and_38': 2, 
'cancermap_cluster_3_vs_21': 1, 'cancermap_cluster_3_vs_20': 8, 'cancermap_cluster_3_vs_23': 0, 'cancermap_cluster_3_vs_22': 6, 'cancermap_cluster_3_vs_25': 3, 'cancermap_cluster_3_vs_24': 1, 'cancermap_cluster_3_vs_27': 9, 'cancermap_cluster_3_vs_26': 8, 'cancermap_cluster_3_vs_29': 5, 'cancermap_cluster_3_vs_28': 0, 'annotated_class_Prolif_Lympho_and_other': 5, 'cancermap_cluster_14_vs_27': 7, 'cancermap_cluster_2_and_4': 6, 'cancermap_cluster_2_and_5': 5, 'cancermap_cluster_3_and_39': 8, 'cancermap_cluster_3_and_38': 5, 'annotated_class_B-Lymphoid_and_pre-B-ALL': 5, 'cancermap_cluster_2_and_3': 6, 'cancermap_cluster_3_and_33': 9, 'cancermap_cluster_3_and_32': 3, 'cancermap_cluster_3_and_31': 3, 'cancermap_cluster_3_and_30': 0, 'cancermap_cluster_3_and_37': 7, 'cancermap_cluster_3_and_36': 9, 'cancermap_cluster_3_and_35': 2, 'cancermap_cluster_3_and_34': 7, 'annotated_class_Cancer_Lymphoma_vs_CellLine_Leukemia': 9, 'cancermap_cluster_Cancer_Leukemia_vs_CellLine_Myeloma': 7, 'annotated_class_na_and_MM': 1, 'annotated_class_other_vs_chronic_leukemias': 2, 'annotated_class_NonCancer_and_other': 2, 'cancermap_cluster_34_and_38': 3, 'cancermap_cluster_34_and_39': 5, 'cancermap_cluster_34_and_35': 1, 'cancermap_cluster_34_and_36': 8, 'cancermap_cluster_34_and_37': 7, 'annotated_class_Lymphoid_vs_StemCell': 6, 'annotated_class_vs_cancermap_cluster_T-Lymphoid_vs_Myeloid': 8, 'annotated_class_AML_vs_CellLine': 3, 'cancermap_cluster_41_vs_50': 8, 'cancermap_cluster_6_and_7': 6, 'cancermap_cluster_6_and_8': 3, 'cancermap_cluster_6_and_9': 2, 'annotated_class_MM_vs_B-Lymphoid': 9, 'annotated_class_T-Lymphoid_vs_B-Lymphoid': 7, 'cancermap_cluster_24_vs_27': 0, 'cancermap_cluster_24_vs_26': 3, 'cancermap_cluster_24_vs_25': 2, 'cancermap_cluster_12_vs_47': 9, 'annotated_class_CellLine_Leukemia_vs_Prolif_Lympho': 9, 'cancermap_cluster_24_vs_29': 1, 'cancermap_cluster_24_vs_28': 9, 'cancermap_cluster_30_vs_39': 5, 'cancermap_cluster_30_vs_38': 3, 'cancermap_cluster_30_vs_37': 9, 
'cancermap_cluster_30_vs_36': 4, 'cancermap_cluster_30_vs_35': 1, 'cancermap_cluster_30_vs_34': 8, 'cancermap_cluster_30_vs_33': 8, 'cancermap_cluster_30_vs_32': 3, 'cancermap_cluster_30_vs_31': 5, 'cancermap_cluster_AML_and_CellLine': 3, 'annotated_class_CellLine_Leukemia_vs_Prolif_Myelo': 6, 'cancermap_cluster_14_vs_21': 0, 'cancermap_cluster_15_and_44': 6, 'cancermap_cluster_15_and_45': 3, 'cancermap_cluster_2_and_6': 7, 'cancermap_cluster_21_vs_26': 2, 'cancermap_cluster_21_vs_27': 2, 'cancermap_cluster_21_vs_24': 2, 'cancermap_cluster_21_vs_25': 5, 'cancermap_cluster_21_vs_22': 7, 'cancermap_cluster_21_vs_23': 3, 'annotated_class_acute_leukemias_vs_chronic_leukemias': 1, 'annotated_class_CellLine_Lymphoma_and_CellLine_Leukemia': 6, 'cancermap_cluster_21_vs_28': 8, 'cancermap_cluster_21_vs_29': 6, 'cancermap_cluster_5_vs_45': 3, 'cancermap_cluster_5_vs_44': 7, 'cancermap_cluster_5_vs_47': 9, 'cancermap_cluster_5_vs_46': 0, 'cancermap_cluster_5_vs_41': 8, 'cancermap_cluster_5_vs_40': 9, 'cancermap_cluster_5_vs_43': 4, 'cancermap_cluster_28_and_45': 7, 'annotated_class_vs_cancermap_cluster_MM_vs_pre-B-ALL': 7, 'annotated_class_vs_cancermap_cluster_na_vs_CLL': 7, 'cancermap_cluster_5_vs_49': 8, 'cancermap_cluster_5_vs_48': 6, 'cancermap_cluster_17_vs_50': 5, 'cancermap_cluster_28_and_47': 3, 'cancermap_cluster_13_and_43': 8, 'annotated_class_vs_cancermap_cluster_CellLine_Leukemia_vs_CellLine_Myeloma': 3, 'cancermap_cluster_7_and_11': 7, 'cancermap_cluster_13_and_40': 0, 'cancermap_cluster_7_and_10': 6, 'cancermap_cluster_13_and_41': 5, 'cancermap_cluster_7_and_13': 5, 'cancermap_cluster_2_and_8': 9, 'cancermap_cluster_7_and_12': 6, 'cancermap_cluster_2_and_9': 1, 'cancermap_cluster_33_and_40': 2, 'cancermap_cluster_33_and_41': 0, 'cancermap_cluster_33_and_42': 5, 'cancermap_cluster_33_and_43': 1, 'cancermap_cluster_33_and_44': 0, 'cancermap_cluster_33_and_45': 4, 'cancermap_cluster_33_and_46': 0, 'cancermap_cluster_33_and_47': 8, 'cancermap_cluster_5_vs_35': 8, 
'cancermap_cluster_33_and_49': 0, 'cancermap_cluster_13_and_45': 8, 'annotated_class_Prolif_Myelo_vs_Cancer_Myeloma': 5, 'cancermap_cluster_7_and_19': 6, 'cancermap_cluster_20_and_48': 6, 'cancermap_cluster_7_and_18': 8, 'cancermap_cluster_16_vs_35': 7, 'cancermap_cluster_25_vs_50': 0, 'cancermap_cluster_20_and_50': 6, 'cancermap_cluster_10_and_32': 9, 'cancermap_cluster_10_and_33': 1, 'cancermap_cluster_10_and_30': 2, 'cancermap_cluster_10_and_31': 8, 'cancermap_cluster_34_vs_37': 2, 'cancermap_cluster_34_vs_36': 5, 'cancermap_cluster_34_vs_35': 6, 'cancermap_cluster_10_and_35': 3, 'cancermap_cluster_12_vs_49': 8, 'cancermap_cluster_34_vs_39': 0, 'cancermap_cluster_34_vs_38': 5, 'cancermap_cluster_Myeloid_and_CellLine': 9, 'annotated_class_MM_vs_Erythroid': 8, 'annotated_class_AML_vs_CLL': 7, 'cancermap_cluster_47_vs_50': 4, 'cancermap_cluster_Cancer_Myeloma': 6, 'annotated_class_T-ALL_and_Erythroid': 9, 'cancermap_cluster_19_and_31': 3, 'cancermap_cluster_19_and_30': 1, 'cancermap_cluster_19_and_33': 3, 'cancermap_cluster_19_and_32': 4, 'cancermap_cluster_19_and_35': 7, 'cancermap_cluster_19_and_34': 2, 'cancermap_cluster_19_and_37': 3, 'cancermap_cluster_19_and_36': 5, 'cancermap_cluster_19_and_39': 9, 'cancermap_cluster_19_and_38': 0, 'annotated_class_T-Lymphoid_vs_Myeloid': 9, 'annotated_class_MM_and_pre-B-ALL': 9, 'cancermap_cluster_1_vs_38': 5, 'cancermap_cluster_1_vs_39': 0, 'annotated_class_T-ALL_and_LP': 6, 'cancermap_cluster_1_vs_34': 9, 'cancermap_cluster_40_and_50': 2, 'cancermap_cluster_1_vs_36': 4, 'cancermap_cluster_1_vs_37': 9, 'cancermap_cluster_1_vs_30': 9, 'cancermap_cluster_1_vs_31': 7, 'cancermap_cluster_1_vs_32': 0, 'cancermap_cluster_1_vs_33': 5, 'cancermap_cluster_6_and_34': 5, 'cancermap_cluster_6_and_35': 0, 'cancermap_cluster_6_and_36': 4, 'cancermap_cluster_6_and_37': 0, 'cancermap_cluster_6_and_30': 7, 'cancermap_cluster_6_and_31': 9, 'cancermap_cluster_6_and_32': 4, 'cancermap_cluster_6_and_33': 2, 'cancermap_cluster_12_vs_15': 3, 
'cancermap_cluster_Cancer_Lymphoma': 7, 'cancermap_cluster_12_vs_17': 0, 'cancermap_cluster_12_vs_16': 3, 'cancermap_cluster_6_and_38': 2, 'cancermap_cluster_6_vs_50': 3, 'cancermap_cluster_12_vs_13': 8, 'cancermap_cluster_46_and_50': 8, 'annotated_class_BCL': 4, 'cancermap_cluster_AML_and_T-ALL': 5, 'cancermap_cluster_11_vs_14': 3, 'cancermap_cluster_9_and_31': 5, 'cancermap_cluster_9_and_30': 4, 'cancermap_cluster_9_and_33': 7, 'cancermap_cluster_9_and_32': 7, 'cancermap_cluster_9_and_35': 3, 'cancermap_cluster_9_and_34': 6, 'cancermap_cluster_9_and_37': 4, 'cancermap_cluster_9_and_36': 8, 'cancermap_cluster_9_and_39': 7, 'cancermap_cluster_9_and_38': 7, 'cancermap_cluster_11_vs_25': 3, 'cancermap_cluster_11_vs_24': 6, 'cancermap_cluster_11_vs_23': 9, 'cancermap_cluster_11_vs_22': 8, 'cancermap_cluster_AML_vs_CLL': 5, 'cancermap_cluster_11_vs_20': 9, 'annotated_class_CellLine_Lymphoma_vs_Prolif_Myelo': 1, 'cancermap_cluster_11_and_16': 9, 'cancermap_cluster_36_and_50': 2, 'annotated_class_vs_cancermap_cluster_AML_vs_T-ALL': 5, 'annotated_class_na_and_T-ALL': 8, 'annotated_class_MM_and_Erythroid': 9, 'annotated_class_na_and_CLL': 3, 'annotated_class_T-Lymphoid_vs_MM': 5, 'cancermap_cluster_1_and_13': 9, 'cancermap_cluster_1_and_12': 1, 'cancermap_cluster_1_and_11': 2, 'cancermap_cluster_1_and_10': 7, 'cancermap_cluster_7_and_9': 2, 'cancermap_cluster_7_and_8': 3, 'cancermap_cluster_1_and_15': 4, 'cancermap_cluster_1_and_14': 0, 'cancermap_cluster_14_vs_44': 0, 'cancermap_cluster_1_and_18': 9, 'annotated_class_na_and_CellLine': 9, 'cancermap_cluster_8_and_9': 0, 'cancermap_cluster_8_vs_26': 6, 'cancermap_cluster_8_vs_27': 5, 'cancermap_cluster_8_vs_24': 5, 'cancermap_cluster_8_vs_25': 4, 'cancermap_cluster_8_vs_22': 8, 'cancermap_cluster_8_vs_23': 6, 'cancermap_cluster_8_vs_20': 2, 'cancermap_cluster_8_vs_21': 1, 'cancermap_cluster_Cancer_Leukemia_vs_CellLine_Leukemia': 3, 'cancermap_cluster_Cancer_Lymphoma_vs_Cancer_Myeloma': 5, 'cancermap_cluster_8_vs_28': 7, 
'cancermap_cluster_8_vs_29': 5, 'cancermap_cluster_16_and_27': 5, 'cancermap_cluster_16_and_26': 6, 'cancermap_cluster_16_and_25': 3, 'cancermap_cluster_26_and_27': 1, 'cancermap_cluster_16_and_23': 3, 'cancermap_cluster_16_and_22': 4, 'cancermap_cluster_16_and_21': 5, 'cancermap_cluster_16_and_20': 9, 'cancermap_cluster_14_vs_45': 2, 'cancermap_cluster_26_and_28': 8, 'cancermap_cluster_26_and_29': 3, 'cancermap_cluster_16_and_29': 4, 'cancermap_cluster_16_and_28': 6, 'cancermap_cluster_24_and_26': 5, 'cancermap_cluster_24_and_27': 6, 'cancermap_cluster_24_and_25': 9, 'annotated_class_vs_cancermap_cluster_other_vs_chronic_leukemias': 0, 'annotated_class_B-Lymphoid_and_Erythroid': 2, 'cancermap_cluster_24_and_28': 1, 'cancermap_cluster_24_and_29': 9, 'annotated_class_vs_cancermap_cluster_Cancer_Lymphoma_vs_Cancer_Myeloma': 5, 'annotated_class_T-ALL_and_MM': 2, 'cancermap_cluster_14_vs_46': 3, 'cancermap_cluster_3_vs_32': 7, 'cancermap_cluster_3_vs_33': 8, 'cancermap_cluster_3_vs_30': 2, 'cancermap_cluster_3_vs_31': 2, 'cancermap_cluster_3_vs_36': 8, 'cancermap_cluster_3_vs_37': 6, 'cancermap_cluster_3_vs_34': 5, 'cancermap_cluster_3_vs_35': 0, 'cancermap_cluster_3_vs_38': 1, 'cancermap_cluster_3_vs_39': 6, 'annotated_class_AML_and_CLL': 0, 'cancermap_cluster_3_and_48': 7, 'cancermap_cluster_3_and_49': 2, 'cancermap_cluster_3_and_46': 7, 'cancermap_cluster_3_and_47': 4, 'cancermap_cluster_3_and_44': 3, 'cancermap_cluster_3_and_45': 4, 'cancermap_cluster_3_and_42': 5, 'cancermap_cluster_3_and_43': 4, 'cancermap_cluster_3_and_40': 2, 'cancermap_cluster_3_and_41': 4, 'annotated_class_BCL_vs_Lymphoid': 7, 'cancermap_cluster_14_vs_40': 7, 'cancermap_cluster_NonCancer_vs_acute_leukemias': 4, 'cancermap_cluster_11_vs_13': 1, 'annotated_class_Prolif_Myelo_vs_Prolif_Lympho': 8, 'annotated_class_AML_and_CellLine': 7, 'cancermap_cluster_Myeloid_and_MM': 0, 'annotated_class_CellLine_Leukemia_and_Prolif_Myelo': 0, 'cancermap_cluster_14_vs_41': 7, 
'annotated_class_Cancer_Lymphoma_and_Prolif_Myelo': 3, 'annotated_class_MP_vs_pre-B-ALL': 8, 'annotated_class_other_and_chronic_leukemias': 0, 'cancermap_cluster_18_vs_19': 2, 'cancermap_cluster_41_and_43': 4, 'annotated_class_na_and_Lymphoid': 9, 'annotated_class_pre-B-ALL_and_CML': 4, 'annotated_class_BCL_and_pre-B-ALL': 7, 'annotated_class_T-ALL': 5, 'cancermap_cluster_14_vs_43': 3, 'annotated_class_CML': 9, 'cancermap_cluster_BCL_vs_AML': 8, 'annotated_class_CellLine_vs_CML': 9, 'cancermap_cluster_25_and_42': 5, 'annotated_class_T-Lymphoid_vs_Lymphoid': 3, 'annotated_class_vs_cancermap_cluster_na_vs_MM': 0, 'cancermap_cluster_21_vs_35': 4, 'cancermap_cluster_21_vs_34': 6, 'cancermap_cluster_21_vs_37': 4, 'cancermap_cluster_21_vs_36': 4, 'cancermap_cluster_21_vs_31': 3, 'cancermap_cluster_21_vs_30': 4, 'cancermap_cluster_21_vs_33': 0, 'cancermap_cluster_21_vs_32': 4, 'cancermap_cluster_23_vs_27': 6, 'cancermap_cluster_21_vs_39': 5, 'cancermap_cluster_21_vs_38': 8, 'annotated_class_Lymphoid_vs_CLL': 6, 'cancermap_cluster_5_vs_50': 7, 'annotated_class_Cancer_Leukemia_vs_CellLine_Leukemia': 9, 'annotated_class_AML_vs_CML': 3, 'annotated_class_T-Lymphoid_and_TCL': 2, 'annotated_class_vs_cancermap_cluster_BCL_vs_CellLine': 5, 'cancermap_cluster_31_and_40': 6, 'cancermap_cluster_27_vs_49': 8, 'annotated_class_na': 2, 'annotated_class_na_vs_LP': 6, 'cancermap_cluster_20_vs_34': 7, 'cancermap_cluster_20_vs_35': 4, 'cancermap_cluster_20_vs_36': 9, 'cancermap_cluster_20_vs_37': 2, 'cancermap_cluster_20_vs_30': 1, 'cancermap_cluster_20_vs_31': 0, 'cancermap_cluster_20_vs_32': 4, 'cancermap_cluster_20_vs_33': 9, 'annotated_class_CellLine_vs_T-ALL': 4, 'cancermap_cluster_BCL_vs_T-ALL': 4, 'cancermap_cluster_20_vs_38': 4, 'cancermap_cluster_20_vs_39': 5, 'annotated_class_vs_cancermap_cluster_TCL_vs_T-ALL': 7, 'annotated_class_TCL_vs_CML': 0, 'cancermap_cluster_27_vs_40': 8, 'cancermap_cluster_25_and_41': 3, 'cancermap_cluster_34_vs_46': 9, 'cancermap_cluster_34_vs_47': 4, 
'cancermap_cluster_34_vs_44': 9, 'cancermap_cluster_34_vs_45': 1, 'cancermap_cluster_34_vs_42': 0, 'cancermap_cluster_34_vs_43': 0, 'cancermap_cluster_34_vs_40': 4, 'cancermap_cluster_34_vs_41': 6, 'cancermap_cluster_16_vs_27': 8, 'cancermap_cluster_25_and_43': 8, 'cancermap_cluster_1_and_28': 0, 'cancermap_cluster_34_vs_48': 5, 'cancermap_cluster_34_vs_49': 5, 'cancermap_cluster_13_and_48': 9, 'cancermap_cluster_1_and_29': 2, 'cancermap_cluster_31_and_50': 9, 'cancermap_cluster_6_and_29': 5, 'cancermap_cluster_6_and_28': 6, 'annotated_class_Lymphoid_and_Erythroid': 6, 'annotated_class_na_vs_B-Lymphoid': 5, 'cancermap_cluster_6_and_23': 5, 'cancermap_cluster_6_and_22': 0, 'cancermap_cluster_6_and_21': 1, 'cancermap_cluster_6_and_20': 2, 'cancermap_cluster_6_and_27': 3, 'cancermap_cluster_6_and_26': 1, 'cancermap_cluster_6_and_25': 8, 'cancermap_cluster_6_and_24': 7, 'cancermap_cluster_19_and_26': 0, 'cancermap_cluster_19_and_27': 5, 'cancermap_cluster_2_and_50': 2, 'cancermap_cluster_19_and_25': 8, 'cancermap_cluster_19_and_22': 4, 'cancermap_cluster_19_and_23': 8, 'cancermap_cluster_19_and_20': 3, 'cancermap_cluster_19_and_21': 5, 'cancermap_cluster_19_and_28': 8, 'cancermap_cluster_19_and_29': 3, 'cancermap_cluster_43_vs_48': 9, 'cancermap_cluster_43_vs_49': 2, 'cancermap_cluster_40_and_48': 4, 'cancermap_cluster_40_and_49': 6, 'cancermap_cluster_43_vs_44': 1, 'cancermap_cluster_43_vs_45': 9, 'cancermap_cluster_43_vs_46': 0, 'cancermap_cluster_43_vs_47': 3, 'cancermap_cluster_40_and_42': 7, 'cancermap_cluster_40_and_43': 0, 'cancermap_cluster_CellLine_Leukemia': 7, 'cancermap_cluster_12_vs_20': 7, 'cancermap_cluster_12_vs_21': 1, 'cancermap_cluster_12_vs_22': 5, 'cancermap_cluster_12_vs_23': 6, 'cancermap_cluster_12_vs_24': 7, 'cancermap_cluster_12_vs_25': 3, 'cancermap_cluster_6_vs_48': 0, 'cancermap_cluster_6_vs_49': 6, 'cancermap_cluster_6_vs_46': 1, 'cancermap_cluster_6_vs_47': 8, 'cancermap_cluster_6_vs_44': 0, 'cancermap_cluster_6_vs_45': 7, 
'cancermap_cluster_6_vs_42': 3, 'cancermap_cluster_6_vs_43': 3, 'cancermap_cluster_6_vs_40': 6, 'cancermap_cluster_6_vs_41': 9, 'cancermap_cluster_46_and_47': 1, 'cancermap_cluster_46_and_48': 8, 'cancermap_cluster_46_and_49': 7, 'annotated_class_vs_cancermap_cluster_Cancer_Lymphoma_vs_Cancer_Leukemia': 3, 'cancermap_cluster_17_and_45': 3, 'annotated_class_Prolif_Myelo_and_Prolif_Lympho': 3, 'annotated_class_pre-B-ALL_and_CLL': 3, 'annotated_class_Cancer_Lymphoma_vs_CellLine_Lymphoma': 8, 'annotated_class_NonCancer_vs_Cancer_Myeloma': 6, 'annotated_class_TCL_vs_MP': 4, 'annotated_class_TCL_vs_MM': 7, 'annotated_class_na_vs_Lymphoid': 2, 'annotated_class_CellLine_Lymphoma_and_other': 5, 'cancermap_cluster_16_vs_17': 3, 'annotated_class_Cancer_Leukemia_and_Cancer_Myeloma': 3, 'annotated_class_other_and_acute_leukemias': 0, 'cancermap_cluster_17_and_49': 7, 'annotated_class_CellLine_Lymphoma_and_Cancer_Myeloma': 6, 'annotated_class_CellLine_vs_Lymphoid': 9, 'annotated_class_CLL_and_CML': 6, 'cancermap_cluster_16_and_34': 5, 'cancermap_cluster_16_and_35': 6, 'cancermap_cluster_8_vs_19': 3, 'cancermap_cluster_8_vs_18': 8, 'cancermap_cluster_16_and_30': 3, 'cancermap_cluster_16_and_31': 7, 'cancermap_cluster_16_and_32': 3, 'cancermap_cluster_16_and_33': 8, 'cancermap_cluster_8_vs_13': 7, 'cancermap_cluster_8_vs_12': 0, 'cancermap_cluster_8_vs_11': 0, 'cancermap_cluster_8_vs_10': 8, 'cancermap_cluster_8_vs_17': 8, 'cancermap_cluster_8_vs_16': 3, 'cancermap_cluster_8_vs_15': 8, 'cancermap_cluster_8_vs_14': 6, 'annotated_class_vs_cancermap_cluster_MP_vs_MM': 3, 'annotated_class_Myeloid_vs_Erythroid': 7, 'cancermap_cluster_37_vs_38': 6, 'cancermap_cluster_37_vs_39': 1, 'annotated_class_vs_cancermap_cluster_T-Lymphoid_vs_AML': 2, 'annotated_class_pre-B-ALL_vs_Erythroid': 6, 'cancermap_cluster_11_and_19': 5, 'annotated_class_chronic_leukemias': 0, 'cancermap_cluster_35_vs_36': 1, 'cancermap_cluster_35_vs_37': 5, 'cancermap_cluster_35_vs_38': 8, 'cancermap_cluster_35_vs_39': 6, 
'cancermap_cluster_29_and_36': 9, 'cancermap_cluster_29_and_37': 8, 'cancermap_cluster_29_and_34': 1, 'cancermap_cluster_29_and_35': 8, 'cancermap_cluster_29_and_32': 6, 'cancermap_cluster_29_and_33': 8, 'cancermap_cluster_29_and_30': 1, 'cancermap_cluster_29_and_31': 8, 'cancermap_cluster_3_and_50': 9, 'cancermap_cluster_29_and_38': 5, 'cancermap_cluster_29_and_39': 2, 'cancermap_cluster_22_and_39': 2, 'cancermap_cluster_pre-B-ALL_and_CLL': 3, 'cancermap_cluster_4_vs_30': 6, 'cancermap_cluster_22_and_33': 3, 'cancermap_cluster_22_and_32': 7, 'cancermap_cluster_22_and_31': 5, 'cancermap_cluster_22_and_30': 9, 'cancermap_cluster_22_and_37': 0, 'cancermap_cluster_22_and_36': 8, 'cancermap_cluster_22_and_35': 5, 'cancermap_cluster_22_and_34': 0, 'cancermap_cluster_12_vs_37': 3, 'cancermap_cluster_5_and_28': 1, 'cancermap_cluster_12_vs_36': 2, 'cancermap_cluster_4_vs_36': 1, 'cancermap_cluster_12_vs_35': 5, 'cancermap_cluster_4_vs_39': 4, 'cancermap_cluster_15_and_19': 0, 'cancermap_cluster_15_and_18': 0, 'cancermap_cluster_4_vs_38': 7, 'annotated_class_AML_vs_MM': 4, 'cancermap_cluster_13_vs_47': 4, 'cancermap_cluster_15_and_17': 5, 'cancermap_cluster_15_and_16': 9, 'cancermap_cluster_12_vs_32': 0, 'cancermap_cluster_27_and_49': 6, 'cancermap_cluster_27_and_48': 5, 'cancermap_cluster_12_vs_31': 8, 'cancermap_cluster_27_and_41': 5, 'cancermap_cluster_27_and_40': 6, 'cancermap_cluster_27_and_43': 4, 'cancermap_cluster_27_and_42': 2, 'cancermap_cluster_27_and_45': 6, 'cancermap_cluster_27_and_44': 1, 'cancermap_cluster_27_and_47': 2, 'cancermap_cluster_27_and_46': 3, 'cancermap_cluster_21_and_29': 5, 'cancermap_cluster_21_and_28': 5, 'cancermap_cluster_28_vs_38': 7, 'cancermap_cluster_28_vs_39': 5, 'cancermap_cluster_28_vs_34': 7, 'cancermap_cluster_28_vs_35': 7, 'cancermap_cluster_28_vs_36': 9, 'cancermap_cluster_28_vs_37': 7, 'cancermap_cluster_28_vs_30': 9, 'cancermap_cluster_28_vs_31': 8, 'cancermap_cluster_28_vs_32': 0, 'cancermap_cluster_28_vs_33': 8, 
'annotated_class_CellLine_and_CML': 0, 'annotated_class_Myeloid_vs_AML': 7, 'annotated_class_B-Lymphoid_vs_pre-B-ALL': 0, 'annotated_class_Lymphoid_vs_CML': 0, 'annotated_class_BCL_vs_TCL': 4, 'annotated_class_AML_and_CML': 3, 'annotated_class_TCL_vs_CLL': 7, 'cancermap_cluster_NonCancer_vs_Cancer_Myeloma': 7, 'cancermap_cluster_CellLine': 0, 'cancermap_cluster_37_vs_50': 6, 'annotated_class_Myeloid_and_pre-B-ALL': 5, 'cancermap_cluster_29_vs_49': 2, 'annotated_class_Myeloid_and_CellLine': 9, 'annotated_class_T-Lymphoid_and_BCL': 7, 'annotated_class_T-Lymphoid_and_StemCell': 4, 'cancermap_cluster_3_and_11': 4, 'cancermap_cluster_3_and_10': 5, 'annotated_class_MP_and_CML': 4, 'cancermap_cluster_11_and_17': 0, 'cancermap_cluster_3_and_19': 0, 'cancermap_cluster_11_and_15': 6, 'cancermap_cluster_11_and_14': 1, 'cancermap_cluster_11_and_13': 3, 'cancermap_cluster_11_and_12': 9, 'cancermap_cluster_3_and_18': 9, 'annotated_class_LP_and_StemCell': 7, 'cancermap_cluster_25_and_33': 0, 'cancermap_cluster_11_and_18': 8, 'annotated_class_na_vs_CLL': 3, 'annotated_class_AML_and_T-ALL': 9, 'annotated_class_CellLine_and_MP': 3, 'cancermap_cluster_20_vs_29': 2, 'cancermap_cluster_20_vs_28': 2, 'cancermap_cluster_20_vs_23': 1, 'cancermap_cluster_20_vs_22': 9, 'cancermap_cluster_20_vs_21': 0, 'cancermap_cluster_20_vs_27': 7, 'cancermap_cluster_20_vs_26': 4, 'cancermap_cluster_20_vs_25': 6, 'cancermap_cluster_20_vs_24': 6, 'annotated_class_Myeloid_and_MP': 2, 'cancermap_cluster_AML_vs_MM': 3, 'cancermap_cluster_10_and_50': 9, 'cancermap_cluster_34_vs_50': 8, 'annotated_class_B-Lymphoid_vs_Erythroid': 5, 'cancermap_cluster_5_vs_42': 0, 'annotated_class_T-Lymphoid_vs_BCL': 2, 'cancermap_cluster_other_vs_acute_leukemias': 1, 'annotated_class_na_vs_StemCell': 3, 'cancermap_cluster_31_and_42': 9, 'cancermap_cluster_31_and_43': 8, 'cancermap_cluster_27_vs_48': 4, 'cancermap_cluster_31_and_41': 7, 'cancermap_cluster_31_and_46': 9, 'cancermap_cluster_31_and_47': 4, 
'cancermap_cluster_31_and_44': 0, 'cancermap_cluster_31_and_45': 3, 'cancermap_cluster_27_vs_42': 1, 'cancermap_cluster_27_vs_43': 5, 'cancermap_cluster_31_and_48': 9, 'cancermap_cluster_31_and_49': 3, 'cancermap_cluster_27_vs_46': 2, 'cancermap_cluster_27_vs_47': 7, 'cancermap_cluster_27_vs_44': 5, 'cancermap_cluster_27_vs_45': 3, 'cancermap_cluster_38_and_41': 3, 'cancermap_cluster_38_and_40': 2, 'cancermap_cluster_38_and_43': 7, 'cancermap_cluster_38_and_42': 3, 'cancermap_cluster_38_and_45': 5, 'cancermap_cluster_38_and_44': 4, 'cancermap_cluster_38_and_47': 9, 'cancermap_cluster_38_and_46': 2, 'cancermap_cluster_38_and_49': 9, 'cancermap_cluster_38_and_48': 5, 'cancermap_cluster_6_and_50': 6, 'cancermap_cluster_2_and_41': 7, 'cancermap_cluster_2_and_40': 7, 'cancermap_cluster_2_and_43': 0, 'cancermap_cluster_2_and_42': 1, 'cancermap_cluster_2_and_45': 5, 'cancermap_cluster_2_and_44': 2, 'cancermap_cluster_2_and_47': 0, 'cancermap_cluster_2_and_46': 4, 'cancermap_cluster_2_and_49': 8, 'cancermap_cluster_2_and_48': 8, 'cancermap_cluster_Cancer_Leukemia': 3, 'cancermap_cluster_43_vs_50': 7, 'annotated_class_vs_cancermap_cluster_TCL_vs_Myeloid': 9, 'cancermap_cluster_Myeloid': 1, 'annotated_class_pre-B-ALL': 9, 'cancermap_cluster_12_vs_45': 4, 'cancermap_cluster_13_vs_43': 0, 'cancermap_cluster_13_vs_42': 9, 'cancermap_cluster_13_vs_41': 5, 'cancermap_cluster_13_vs_40': 0, 'cancermap_cluster_8_and_50': 9, 'cancermap_cluster_13_vs_46': 7, 'cancermap_cluster_13_vs_45': 3, 'cancermap_cluster_13_vs_44': 1, 'cancermap_cluster_13_vs_49': 7, 'cancermap_cluster_13_vs_48': 0, 'cancermap_cluster_12_vs_48': 2, 'cancermap_cluster_12_vs_39': 3, 'cancermap_cluster_12_vs_38': 8, 'cancermap_cluster_37_and_47': 6, 'annotated_class_CellLine_and_MM': 4, 'cancermap_cluster_32_vs_40': 8, 'cancermap_cluster_32_vs_41': 7, 'cancermap_cluster_32_vs_42': 9, 'cancermap_cluster_32_vs_43': 1, 'cancermap_cluster_32_vs_44': 2, 'cancermap_cluster_32_vs_45': 5, 'cancermap_cluster_32_vs_46': 0, 
'cancermap_cluster_32_vs_47': 1, 'cancermap_cluster_32_vs_48': 8, 'cancermap_cluster_32_vs_49': 4, 'cancermap_cluster_12_vs_46': 2, 'cancermap_cluster_12_and_33': 3, 'annotated_class_CellLine_and_T-ALL': 0, 'cancermap_cluster_NonCancer_vs_other': 2, 'annotated_class_T-ALL_vs_pre-B-ALL': 1, 'annotated_class_T-Lymphoid_vs_Erythroid': 1, 'annotated_class_vs_cancermap_cluster_Myeloid_vs_CLL': 2, 'annotated_class_MP_vs_CLL': 7, 'cancermap_cluster_9_and_19': 8, 'cancermap_cluster_9_and_18': 5, 'cancermap_cluster_9_and_13': 5, 'cancermap_cluster_9_and_12': 4, 'cancermap_cluster_9_and_11': 8, 'cancermap_cluster_9_and_10': 3, 'cancermap_cluster_9_and_17': 3, 'cancermap_cluster_9_and_16': 7, 'cancermap_cluster_9_and_15': 1, 'cancermap_cluster_9_and_14': 7, 'cancermap_cluster_12_and_35': 7, 'annotated_class_CellLine_Leukemia_and_Prolif_Lympho': 7, 'cancermap_cluster_27_vs_41': 7, 'annotated_class_TCL_and_na': 6, 'annotated_class_MP_and_B-Lymphoid': 0, 'annotated_class_Cancer_Leukemia_vs_CellLine_Lymphoma': 5, 'cancermap_cluster_48_and_50': 6, 'annotated_class_CellLine_vs_CLL': 5, 'cancermap_cluster_3_vs_18': 3, 'cancermap_cluster_3_vs_19': 0, 'annotated_class_pre-B-ALL_vs_CML': 1, 'cancermap_cluster_3_vs_10': 5, 'cancermap_cluster_3_vs_11': 7, 'cancermap_cluster_3_vs_12': 0, 'cancermap_cluster_3_vs_13': 8, 'cancermap_cluster_3_vs_14': 5, 'cancermap_cluster_3_vs_15': 3, 'cancermap_cluster_3_vs_16': 0, 'cancermap_cluster_3_vs_17': 4, 'cancermap_cluster_22_and_28': 3, 'cancermap_cluster_22_and_29': 0, 'cancermap_cluster_35_and_39': 1, 'cancermap_cluster_35_and_38': 2, 'annotated_class_MM_vs_CML': 4, 'annotated_class_T-Lymphoid_vs_pre-B-ALL': 1, 'cancermap_cluster_22_and_23': 6, 'cancermap_cluster_35_and_37': 8, 'cancermap_cluster_35_and_36': 8, 'cancermap_cluster_22_and_26': 4, 'cancermap_cluster_22_and_27': 9, 'cancermap_cluster_42_and_44': 0, 'cancermap_cluster_17_vs_29': 5, 'cancermap_cluster_42_and_45': 7, 'cancermap_cluster_24_and_39': 1, 'cancermap_cluster_42_and_46': 0, 
'annotated_class_MM_vs_StemCell': 9, 'cancermap_cluster_42_and_47': 2, 'annotated_class_vs_cancermap_cluster_Cancer_Lymphoma_vs_CellLine_Leukemia': 4, 'cancermap_cluster_NonCancer_and_Cancer_Leukemia': 3, 'cancermap_cluster_5_and_44': 5, 'cancermap_cluster_5_and_45': 4, 'cancermap_cluster_5_and_46': 2, 'cancermap_cluster_5_and_47': 9, 'cancermap_cluster_5_and_40': 9, 'cancermap_cluster_5_and_41': 6, 'cancermap_cluster_5_and_42': 0, 'cancermap_cluster_5_and_43': 9, 'cancermap_cluster_42_and_43': 2, 'cancermap_cluster_5_and_48': 7, 'cancermap_cluster_5_and_49': 0, 'cancermap_cluster_2_vs_42': 4, 'cancermap_cluster_2_vs_43': 8, 'cancermap_cluster_2_vs_40': 1, 'cancermap_cluster_2_vs_41': 1, 'cancermap_cluster_2_vs_46': 0, 'cancermap_cluster_2_vs_47': 5, 'cancermap_cluster_2_vs_44': 3, 'cancermap_cluster_2_vs_45': 6, 'cancermap_cluster_2_vs_48': 8, 'cancermap_cluster_2_vs_49': 1, 'annotated_class_MP_and_CLL': 8, 'cancermap_cluster_27_and_50': 3, 'annotated_class_NonCancer_and_Prolif_Lympho': 8, 'cancermap_cluster_39_and_50': 4, 'cancermap_cluster_21_and_38': 7, 'cancermap_cluster_21_and_39': 5, 'cancermap_cluster_21_and_36': 1, 'cancermap_cluster_21_and_37': 0, 'cancermap_cluster_28_vs_29': 4, 'cancermap_cluster_21_and_35': 1, 'cancermap_cluster_21_and_32': 3, 'cancermap_cluster_21_and_33': 8, 'cancermap_cluster_21_and_30': 1, 'cancermap_cluster_21_and_31': 2, 'cancermap_cluster_NonCancer_and_Cancer_Lymphoma': 4, 'annotated_class_MM_and_B-Lymphoid': 7, 'annotated_class_Prolif_Myelo_and_other': 1, 'annotated_class_CellLine_Leukemia_and_other': 4, 'annotated_class_Cancer_Leukemia_and_Prolif_Lympho': 3, 'annotated_class_Cancer_Leukemia_and_CellLine_Myeloma': 3, 'cancermap_cluster_NonCancer_vs_Cancer_Leukemia': 0, 'annotated_class_T-Lymphoid_and_CML': 7, 'cancermap_cluster_15_vs_18': 9, 'cancermap_cluster_15_vs_19': 9, 'cancermap_cluster_7_vs_8': 2, 'cancermap_cluster_7_vs_9': 8, 'annotated_class_Erythroid_and_CLL': 7, 'cancermap_cluster_15_vs_16': 8, 
'cancermap_cluster_15_vs_17': 2, 'cancermap_cluster_37_vs_49': 4, 'cancermap_cluster_37_vs_48': 9, 'cancermap_cluster_37_vs_45': 4, 'cancermap_cluster_37_vs_44': 8, 'cancermap_cluster_37_vs_47': 2, 'cancermap_cluster_37_vs_46': 9, 'cancermap_cluster_37_vs_41': 4, 'cancermap_cluster_37_vs_40': 5, 'cancermap_cluster_37_vs_43': 6, 'cancermap_cluster_37_vs_42': 8, 'cancermap_cluster_22_and_46': 8, 'annotated_class_LP': 9, 'cancermap_cluster_26_vs_38': 1, 'annotated_class_AML_vs_LP': 2, 'cancermap_cluster_other_vs_chronic_leukemias': 1, 'annotated_class_Myeloid_vs_T-ALL': 3, 'annotated_class_MP_and_Erythroid': 7, 'cancermap_cluster_49_and_50': 3, 'cancermap_cluster_26_vs_34': 2, 'cancermap_cluster_22_and_47': 3, 'cancermap_cluster_26_vs_35': 3, 'cancermap_cluster_10_and_44': 1, 'annotated_class_NonCancer_vs_Prolif_Lympho': 5, 'cancermap_cluster_T-ALL': 0, 'cancermap_cluster_12_and_27': 7, 'cancermap_cluster_22_and_44': 2, 'cancermap_cluster_15_vs_39': 8, 'annotated_class_other_vs_acute_leukemias': 2, 'annotated_class_CellLine_vs_B-Lymphoid': 4, 'cancermap_cluster_1_vs_4': 4, 'annotated_class_Cancer_Leukemia_vs_Cancer_Myeloma': 5, 'cancermap_cluster_1_vs_5': 7, 'cancermap_cluster_22_and_45': 6, 'cancermap_cluster_12_and_29': 4, 'cancermap_cluster_AML_vs_CellLine': 6, 'cancermap_cluster_12_and_28': 3, 'annotated_class_Myeloid_vs_B-Lymphoid': 7, 'cancermap_cluster_5_and_6': 4, 'annotated_class_T-Lymphoid_and_LP': 8, 'annotated_class_na_vs_CellLine': 8, 'cancermap_cluster_1_vs_2': 7, 'annotated_class_MP_vs_Lymphoid': 2, 'cancermap_cluster_Cancer_Lymphoma_and_CellLine_Leukemia': 3, 'cancermap_cluster_1_vs_3': 3, 'cancermap_cluster_20_vs_50': 4, 'cancermap_cluster_10_and_37': 6, 'cancermap_cluster_8_and_30': 9, 'cancermap_cluster_10_and_34': 9, 'cancermap_cluster_31_and_37': 1, 'cancermap_cluster_31_and_36': 5, 'cancermap_cluster_31_and_35': 3, 'cancermap_cluster_31_and_34': 9, 'cancermap_cluster_31_and_33': 0, 'cancermap_cluster_31_and_32': 9, 'cancermap_cluster_22_and_43': 
5, 'cancermap_cluster_27_vs_50': 3, 'cancermap_cluster_10_and_38': 9, 'cancermap_cluster_31_and_39': 2, 'cancermap_cluster_31_and_38': 2, 'cancermap_cluster_8_and_49': 2, 'cancermap_cluster_8_and_48': 5, 'cancermap_cluster_6_and_49': 0, 'cancermap_cluster_6_and_48': 5, 'cancermap_cluster_38_and_50': 4, 'cancermap_cluster_6_and_45': 2, 'cancermap_cluster_6_and_44': 9, 'cancermap_cluster_8_and_41': 8, 'cancermap_cluster_8_and_40': 2, 'cancermap_cluster_8_and_47': 9, 'cancermap_cluster_8_and_46': 5, 'cancermap_cluster_8_and_45': 1, 'cancermap_cluster_8_and_44': 0, 'cancermap_cluster_46_vs_50': 8, 'cancermap_cluster_43_and_48': 7, 'cancermap_cluster_43_and_47': 7, 'cancermap_cluster_43_and_46': 4, 'cancermap_cluster_43_and_45': 8, 'cancermap_cluster_43_and_44': 9, 'annotated_class_T-Lymphoid_and_pre-B-ALL': 3, 'cancermap_cluster_13_vs_36': 1, 'cancermap_cluster_13_vs_37': 6, 'cancermap_cluster_13_vs_34': 4, 'cancermap_cluster_13_vs_35': 5, 'cancermap_cluster_13_vs_32': 5, 'cancermap_cluster_13_vs_33': 5, 'cancermap_cluster_13_vs_30': 5, 'cancermap_cluster_13_vs_31': 5, 'cancermap_cluster_13_vs_38': 9, 'cancermap_cluster_13_vs_39': 9, 'cancermap_cluster_3_and_5': 2, 'cancermap_cluster_3_and_4': 5, 'cancermap_cluster_3_and_7': 5, 'cancermap_cluster_3_and_6': 6, 'cancermap_cluster_3_and_9': 5, 'cancermap_cluster_3_and_8': 9, 'cancermap_cluster_36_and_38': 7, 'cancermap_cluster_32_vs_50': 3, 'cancermap_cluster_7_vs_47': 0, 'cancermap_cluster_7_vs_46': 3, 'cancermap_cluster_7_vs_45': 0, 'cancermap_cluster_7_vs_44': 5, 'cancermap_cluster_7_vs_43': 5, 'cancermap_cluster_7_vs_42': 3, 'cancermap_cluster_7_vs_41': 7, 'cancermap_cluster_7_vs_40': 8, 'cancermap_cluster_40_vs_50': 8, 'cancermap_cluster_16_vs_49': 2, 'annotated_class_LP_vs_pre-B-ALL': 0, 'annotated_class_BCL_and_AML': 7, 'cancermap_cluster_BCL_and_MM': 6, 'annotated_class_CellLine_and_pre-B-ALL': 4, 'cancermap_cluster_25_and_49': 6, 'cancermap_cluster_16_and_18': 9, 'cancermap_cluster_16_and_19': 7, 
'cancermap_cluster_16_and_17': 3, 'cancermap_cluster_25_and_48': 9, 'annotated_class_Erythroid_and_StemCell': 1, 'cancermap_cluster_25_and_47': 7, 'cancermap_cluster_25_and_46': 4, 'cancermap_cluster_25_and_45': 7, 'cancermap_cluster_NonCancer': 7, 'cancermap_cluster_18_vs_22': 9, 'cancermap_cluster_7_and_14': 3, 'cancermap_cluster_25_and_44': 4, 'cancermap_cluster_18_vs_23': 3, 'cancermap_cluster_35_and_42': 7, 'cancermap_cluster_22_vs_45': 2, 'cancermap_cluster_35_and_43': 2, 'cancermap_cluster_18_vs_21': 7, 'cancermap_cluster_35_and_40': 7, 'cancermap_cluster_18_vs_26': 3, 'annotated_class_vs_cancermap_cluster_TCL_vs_CellLine': 1, 'cancermap_cluster_25_and_40': 2, 'cancermap_cluster_18_vs_27': 5, 'annotated_class_pre-B-ALL_and_StemCell': 4, 'cancermap_cluster_18_vs_24': 3, 'annotated_class_vs_cancermap_cluster_T-ALL_vs_CLL': 5, 'annotated_class_TCL_vs_Myeloid': 5, 'cancermap_cluster_18_vs_25': 6, 'annotated_class_CellLine_and_LP': 0, 'annotated_class_pre-B-ALL_vs_StemCell': 4, 'annotated_class_T-Lymphoid_and_B-Lymphoid': 6, 'cancermap_cluster_16_vs_45': 8, 'cancermap_cluster_1_and_48': 9, 'cancermap_cluster_1_and_49': 8, 'cancermap_cluster_12_and_26': 7, 'cancermap_cluster_NonCancer_and_Cancer_Myeloma': 6, 'annotated_class_MP_and_Lymphoid': 7, 'cancermap_cluster_1_and_40': 5, 'cancermap_cluster_1_and_41': 8, 'cancermap_cluster_1_and_42': 6, 'cancermap_cluster_1_and_43': 8, 'cancermap_cluster_1_and_44': 0, 'cancermap_cluster_1_and_45': 3, 'cancermap_cluster_1_and_46': 3, 'cancermap_cluster_48_and_49': 8, 'cancermap_cluster_48_vs_49': 7, 'cancermap_cluster_4_and_48': 1, 'cancermap_cluster_4_and_47': 4, 'cancermap_cluster_4_and_46': 4, 'cancermap_cluster_4_and_45': 6, 'cancermap_cluster_4_and_44': 5, 'cancermap_cluster_4_and_43': 2, 'cancermap_cluster_4_and_42': 3, 'cancermap_cluster_4_and_41': 1, 'cancermap_cluster_4_and_40': 7, 'annotated_class_Erythroid_and_CML': 2, 'cancermap_cluster_5_and_50': 2, 'annotated_class_CLL_vs_CML': 6, 'cancermap_cluster_33_and_35': 
0, 'annotated_class_NonCancer_and_CellLine_Myeloma': 1, 'cancermap_cluster_33_and_34': 1, 'annotated_class_Myeloid_vs_CML': 8, 'cancermap_cluster_1_vs_42': 9, 'cancermap_cluster_11_and_28': 8, 'annotated_class_vs_cancermap_cluster_CellLine_vs_pre-B-ALL': 0, 'annotated_class_CellLine_vs_Erythroid': 5, 'cancermap_cluster_44_and_45': 9, 'annotated_class_T-ALL_vs_MM': 4, 'cancermap_cluster_11_and_26': 9, 'cancermap_cluster_15_and_39': 6, 'cancermap_cluster_15_and_38': 9, 'cancermap_cluster_11_and_27': 3, 'cancermap_cluster_15_and_35': 9, 'cancermap_cluster_16_vs_40': 9, 'cancermap_cluster_15_and_37': 0, 'cancermap_cluster_15_and_36': 3, 'cancermap_cluster_15_and_31': 8, 'cancermap_cluster_15_and_30': 4, 'cancermap_cluster_15_and_33': 1, 'cancermap_cluster_15_and_32': 4, 'cancermap_cluster_18_vs_40': 4, 'cancermap_cluster_18_vs_41': 3, 'cancermap_cluster_18_vs_42': 0, 'cancermap_cluster_18_vs_43': 9, 'cancermap_cluster_18_vs_44': 1, 'cancermap_cluster_18_vs_45': 2, 'cancermap_cluster_18_vs_46': 2, 'cancermap_cluster_18_vs_47': 9, 'cancermap_cluster_18_vs_48': 9, 'cancermap_cluster_18_vs_49': 1, 'cancermap_cluster_2_and_32': 3, 'cancermap_cluster_48_vs_50': 6, 'cancermap_cluster_2_and_33': 0, 'cancermap_cluster_28_vs_50': 1, 'cancermap_cluster_16_vs_41': 6, 'annotated_class_Cancer_Lymphoma_vs_Cancer_Leukemia': 3, 'annotated_class_BCL_and_LP': 2, 'cancermap_cluster_CellLine_Leukemia_and_CellLine_Myeloma': 2, 'cancermap_cluster_2_and_38': 6, 'cancermap_cluster_2_and_39': 1, 'cancermap_cluster_NonCancer_and_chronic_leukemias': 4, 'annotated_class_vs_cancermap_cluster_NonCancer_vs_acute_leukemias': 4, 'annotated_class_Cancer_Leukemia_vs_Prolif_Lympho': 2, 'annotated_class_T-Lymphoid_and_MP': 6, 'cancermap_cluster_31_vs_46': 4, 'annotated_class_T-Lymphoid_and_MM': 1, 'cancermap_cluster_8_vs_9': 4, 'annotated_class_Myeloid_and_B-Lymphoid': 4, 'annotated_class_T-ALL_vs_Lymphoid': 6, 'annotated_class_T-Lymphoid_and_Myeloid': 2, 'cancermap_cluster_39_and_48': 6, 
'cancermap_cluster_39_and_49': 3, 'cancermap_cluster_39_and_42': 5, 'cancermap_cluster_39_and_43': 7, 'cancermap_cluster_39_and_40': 4, 'cancermap_cluster_39_and_41': 6, 'cancermap_cluster_39_and_46': 9, 'cancermap_cluster_39_and_47': 8, 'cancermap_cluster_39_and_44': 5, 'cancermap_cluster_39_and_45': 4, 'annotated_class_vs_cancermap_cluster_MP_vs_CLL': 5, 'cancermap_cluster_12_vs_19': 7, 'annotated_class_Myeloid_vs_CellLine': 5, 'cancermap_cluster_12_vs_18': 0, 'cancermap_cluster_11_and_39': 1, 'cancermap_cluster_11_and_38': 2, 'cancermap_cluster_16_vs_34': 4, 'cancermap_cluster_11_and_31': 8, 'cancermap_cluster_22_and_24': 2, 'cancermap_cluster_11_and_33': 0, 'cancermap_cluster_11_and_32': 4, 'cancermap_cluster_11_and_35': 2, 'cancermap_cluster_44_and_50': 1, 'cancermap_cluster_11_and_37': 5, 'cancermap_cluster_22_and_25': 8, 'cancermap_cluster_12_vs_14': 7, 'annotated_class_T-Lymphoid': 5, 'annotated_class_Cancer_Leukemia_and_CellLine_Leukemia': 3, 'annotated_class_MM_vs_pre-B-ALL': 5, 'cancermap_cluster_6_and_39': 4, 'cancermap_cluster_14_and_43': 7, 'cancermap_cluster_14_and_42': 0, 'cancermap_cluster_14_and_41': 0, 'cancermap_cluster_14_and_40': 0, 'cancermap_cluster_14_and_47': 8, 'cancermap_cluster_14_and_46': 6, 'cancermap_cluster_14_and_45': 5, 'cancermap_cluster_14_and_44': 9, 'cancermap_cluster_14_and_49': 5, 'cancermap_cluster_14_and_48': 0, 'annotated_class_Cancer_Lymphoma_and_other': 1, 'cancermap_cluster_22_vs_38': 6, 'annotated_class_TCL_and_MM': 5, 'cancermap_cluster_20_vs_49': 0, 'cancermap_cluster_20_vs_48': 0, 'cancermap_cluster_20_vs_45': 9, 'cancermap_cluster_20_vs_44': 3, 'cancermap_cluster_20_vs_47': 1, 'cancermap_cluster_20_vs_46': 4, 'cancermap_cluster_20_vs_41': 7, 'cancermap_cluster_20_vs_40': 1, 'cancermap_cluster_20_vs_43': 0, 'cancermap_cluster_20_vs_42': 7, 'cancermap_cluster_19_vs_30': 1, 'cancermap_cluster_19_vs_31': 1, 'cancermap_cluster_19_vs_32': 7, 'cancermap_cluster_19_vs_33': 4, 'cancermap_cluster_19_vs_34': 6, 
'cancermap_cluster_19_vs_35': 2, 'cancermap_cluster_19_vs_36': 6, 'cancermap_cluster_19_vs_37': 0, 'cancermap_cluster_19_vs_38': 4, 'cancermap_cluster_19_vs_39': 9, 'annotated_class_vs_cancermap_cluster_T-Lymphoid_vs_MM': 2, 'annotated_class_CellLine_vs_pre-B-ALL': 8, 'cancermap_cluster_16_vs_39': 8, 'cancermap_cluster_7': 8, 'cancermap_cluster_6': 3, 'cancermap_cluster_5': 3, 'cancermap_cluster_4': 8, 'cancermap_cluster_3': 1, 'cancermap_cluster_2': 8, 'cancermap_cluster_8_and_38': 8, 'cancermap_cluster_8_and_39': 3, 'cancermap_cluster_8_and_36': 6, 'cancermap_cluster_8_and_37': 8, 'cancermap_cluster_8_and_34': 6, 'cancermap_cluster_8_and_35': 6, 'cancermap_cluster_8_and_32': 0, 'cancermap_cluster_8_and_33': 9, 'cancermap_cluster_9': 6, 'cancermap_cluster_8_and_31': 5, 'annotated_class_TCL_and_Lymphoid': 3, 'annotated_class_na_and_LP': 6, 'cancermap_cluster_46_vs_47': 8, 'cancermap_cluster_43_and_50': 7, 'cancermap_cluster_46_vs_48': 1, 'cancermap_cluster_16_vs_38': 5, 'cancermap_cluster_17_vs_19': 0, 'cancermap_cluster_13_vs_25': 3, 'cancermap_cluster_13_vs_24': 0, 'cancermap_cluster_13_vs_27': 5, 'cancermap_cluster_13_vs_26': 9, 'cancermap_cluster_13_vs_21': 6, 'cancermap_cluster_13_vs_20': 3, 'cancermap_cluster_13_vs_23': 6, 'cancermap_cluster_13_vs_22': 2, 'cancermap_cluster_13_vs_29': 3, 'cancermap_cluster_13_vs_28': 8, 'annotated_class_T-ALL_vs_Erythroid': 3, 'cancermap_cluster_11_vs_29': 4, 'cancermap_cluster_11_vs_28': 5, 'cancermap_cluster_7_vs_50': 2, 'cancermap_cluster_11_vs_27': 1, 'cancermap_cluster_11_vs_26': 6, 'cancermap_cluster_40_vs_47': 1, 'cancermap_cluster_40_vs_46': 5, 'cancermap_cluster_40_vs_45': 4, 'cancermap_cluster_40_vs_44': 7, 'cancermap_cluster_40_vs_43': 2, 'cancermap_cluster_40_vs_42': 1, 'cancermap_cluster_40_vs_41': 2, 'cancermap_cluster_17_vs_36': 1, 'cancermap_cluster_40_vs_49': 3, 'cancermap_cluster_40_vs_48': 1, 'annotated_class_BCL_and_Erythroid': 1, 'cancermap_cluster_15_vs_20': 4, 
'cancermap_cluster_acute_leukemias_vs_chronic_leukemias': 0, 'annotated_class_BCL_and_Lymphoid': 3, 'annotated_class_Myeloid_and_MM': 0, 'annotated_class_BCL_vs_pre-B-ALL': 6, 'cancermap_cluster_39_vs_50': 2, 'cancermap_cluster_17_vs_28': 8, 'cancermap_cluster_T-ALL_vs_CLL': 6, 'cancermap_cluster_42_vs_50': 5, 'annotated_class_Myeloid_and_na': 6, 'cancermap_cluster_BCL_and_CLL': 7, 'cancermap_cluster_38_vs_42': 8, 'cancermap_cluster_38_vs_43': 7, 'cancermap_cluster_38_vs_40': 8, 'cancermap_cluster_38_vs_41': 6, 'cancermap_cluster_38_vs_46': 6, 'cancermap_cluster_38_vs_47': 2, 'cancermap_cluster_38_vs_44': 1, 'cancermap_cluster_38_vs_45': 2, 'annotated_class_Erythroid': 1, 'cancermap_cluster_38_vs_48': 1, 'cancermap_cluster_38_vs_49': 0, 'cancermap_cluster_11_vs_21': 4, 'cancermap_cluster_19_and_24': 8, 'cancermap_cluster_17_and_33': 3, 'cancermap_cluster_17_and_32': 8, 'cancermap_cluster_17_and_31': 2, 'cancermap_cluster_17_and_30': 8, 'cancermap_cluster_17_and_37': 8, 'cancermap_cluster_17_and_36': 4, 'cancermap_cluster_17_and_35': 9, 'cancermap_cluster_17_and_34': 7, 'cancermap_cluster_17_and_39': 7, 'cancermap_cluster_17_and_38': 7, 'cancermap_cluster_Cancer_Leukemia_and_CellLine_Myeloma': 8, 'annotated_class_vs_cancermap_cluster_Myeloid_vs_pre-B-ALL': 8, 'cancermap_cluster_1_and_50': 3, 'annotated_class_BCL_and_TCL': 4, 'cancermap_cluster_4_vs_31': 7, 'cancermap_cluster_T-ALL_vs_pre-B-ALL': 4, 'cancermap_cluster_4_vs_33': 0, 'cancermap_cluster_4_vs_32': 0, 'cancermap_cluster_4_vs_35': 0, 'cancermap_cluster_4_vs_34': 6, 'cancermap_cluster_4_vs_37': 1, 'cancermap_cluster_5_and_29': 9, 'cancermap_cluster_5_and_26': 3, 'cancermap_cluster_5_and_27': 5, 'cancermap_cluster_5_and_24': 1, 'cancermap_cluster_5_and_25': 5, 'cancermap_cluster_5_and_22': 5, 'cancermap_cluster_5_and_23': 8, 'cancermap_cluster_5_and_20': 6, 'cancermap_cluster_5_and_21': 0, 'cancermap_cluster_29_vs_44': 9, 'cancermap_cluster_29_vs_45': 2, 'cancermap_cluster_29_vs_46': 2, 
'cancermap_cluster_29_vs_47': 9, 'cancermap_cluster_29_vs_40': 2, 'cancermap_cluster_29_vs_41': 2, 'cancermap_cluster_29_vs_42': 0, 'cancermap_cluster_29_vs_43': 2, 'cancermap_cluster_29_vs_48': 9, 'cancermap_cluster_Cancer_Lymphoma_vs_CellLine_Leukemia': 1, 'annotated_class_CellLine_Lymphoma_vs_Cancer_Myeloma': 3, 'annotated_class_CellLine_Lymphoma': 9, 'cancermap_cluster_16_vs_37': 9, 'cancermap_cluster_15_and_22': 8, 'cancermap_cluster_15_and_23': 5, 'cancermap_cluster_15_and_20': 7, 'cancermap_cluster_15_and_21': 6, 'cancermap_cluster_15_and_26': 4, 'cancermap_cluster_15_and_27': 8, 'cancermap_cluster_15_and_24': 2, 'cancermap_cluster_15_and_25': 9, 'cancermap_cluster_11_vs_38': 8, 'cancermap_cluster_15_and_28': 7, 'cancermap_cluster_15_and_29': 4, 'annotated_class_TCL_and_B-Lymphoid': 8, 'cancermap_cluster_18_vs_50': 1, 'cancermap_cluster_16_vs_36': 8, 'cancermap_cluster_28_vs_45': 9, 'cancermap_cluster_28_vs_44': 7, 'cancermap_cluster_28_vs_47': 4, 'cancermap_cluster_28_vs_46': 1, 'cancermap_cluster_28_vs_41': 4, 'cancermap_cluster_28_vs_40': 0, 'cancermap_cluster_28_vs_43': 4, 'cancermap_cluster_28_vs_42': 7, 'cancermap_cluster_28_vs_49': 5, 'cancermap_cluster_28_vs_48': 8, 'cancermap_cluster_10_vs_28': 2, 'cancermap_cluster_10_vs_29': 4, 'cancermap_cluster_Cancer_Lymphoma_vs_Cancer_Leukemia': 7, 'cancermap_cluster_10_vs_22': 5, 'cancermap_cluster_10_vs_23': 0, 'cancermap_cluster_10_vs_20': 7, 'cancermap_cluster_10_vs_21': 1, 'cancermap_cluster_10_vs_26': 7, 'cancermap_cluster_10_vs_27': 9, 'cancermap_cluster_10_vs_24': 5, 'cancermap_cluster_10_vs_25': 8, 'annotated_class_vs_cancermap_cluster_NonCancer_vs_Cancer_Myeloma': 9, 'cancermap_cluster_12_and_23': 7, 'cancermap_cluster_12_and_22': 1, 'cancermap_cluster_12_and_21': 9, 'cancermap_cluster_12_and_20': 7, 'cancermap_cluster_15_vs_38': 9, 'cancermap_cluster_other': 5, 'cancermap_cluster_12_and_25': 2, 'cancermap_cluster_12_and_24': 7, 'cancermap_cluster_15_vs_34': 8, 'cancermap_cluster_15_vs_35': 8, 
'cancermap_cluster_15_vs_36': 5, 'cancermap_cluster_15_vs_37': 9, 'cancermap_cluster_15_vs_30': 0, 'cancermap_cluster_15_vs_31': 0, 'cancermap_cluster_15_vs_32': 8, 'cancermap_cluster_15_vs_33': 7, 'annotated_class_MP_vs_T-ALL': 4, 'annotated_class_vs_cancermap_cluster_Myeloid_vs_T-ALL': 2, 'annotated_class_Cancer_Lymphoma_and_CellLine_Lymphoma': 7, 'annotated_class_MM_and_Lymphoid': 2, 'cancermap_cluster_11_and_50': 6, 'cancermap_cluster_11_vs_39': 7, 'annotated_class_Cancer_Lymphoma_vs_Prolif_Myelo': 8, 'annotated_class_T-ALL_vs_StemCell': 9, 'annotated_class_T-ALL_vs_LP': 9, 'cancermap_cluster_1_and_17': 2, 'annotated_class_vs_cancermap_cluster_B-Lymphoid_vs_CLL': 0, 'cancermap_cluster_1_and_16': 1, 'cancermap_cluster_44_and_46': 3, 'cancermap_cluster_44_and_47': 3, 'cancermap_cluster_33_and_37': 7, 'cancermap_cluster_33_and_36': 4, 'cancermap_cluster_33_and_39': 6, 'cancermap_cluster_33_and_38': 6, 'cancermap_cluster_44_and_48': 5, 'cancermap_cluster_44_and_49': 5, 'cancermap_cluster_11_and_22': 2, 'cancermap_cluster_11_and_23': 6, 'cancermap_cluster_11_and_20': 2, 'cancermap_cluster_11_and_21': 6, 'annotated_class_na_and_MP': 3, 'cancermap_cluster_1_and_19': 2, 'cancermap_cluster_22_vs_32': 0, 'cancermap_cluster_22_vs_33': 4, 'cancermap_cluster_22_vs_30': 1, 'cancermap_cluster_22_vs_31': 4, 'cancermap_cluster_22_vs_36': 8, 'cancermap_cluster_22_vs_37': 7, 'cancermap_cluster_22_vs_34': 7, 'cancermap_cluster_22_vs_35': 2, 'cancermap_cluster_NonCancer_vs_CellLine_Myeloma': 0, 'cancermap_cluster_22_vs_39': 0, 'cancermap_cluster_14_and_50': 5, 'annotated_class_Cancer_Leukemia_vs_Prolif_Myelo': 4, 'annotated_class_TCL_vs_AML': 7, 'cancermap_cluster_13_vs_14': 7, 'cancermap_cluster_13_vs_15': 7, 'cancermap_cluster_13_vs_16': 9, 'cancermap_cluster_13_vs_17': 1, 'annotated_class_NonCancer_and_CellLine_Leukemia': 1, 'cancermap_cluster_Myeloid_vs_AML': 8, 'cancermap_cluster_T-ALL_and_CLL': 2, 'cancermap_cluster_19_vs_25': 7, 'cancermap_cluster_19_vs_24': 5, 
'cancermap_cluster_19_vs_23': 4, 'cancermap_cluster_19_vs_22': 6, 'cancermap_cluster_19_vs_21': 4, 'cancermap_cluster_19_vs_20': 0, 'cancermap_cluster_19_vs_29': 0, 'cancermap_cluster_19_vs_28': 3, 'cancermap_cluster_36_vs_48': 2, 'cancermap_cluster_36_vs_49': 8, 'cancermap_cluster_36_vs_44': 6, 'cancermap_cluster_36_vs_45': 6, 'cancermap_cluster_36_vs_46': 7, 'cancermap_cluster_36_vs_47': 1, 'cancermap_cluster_36_vs_40': 0, 'cancermap_cluster_36_vs_41': 4, 'cancermap_cluster_36_vs_42': 0, 'cancermap_cluster_36_vs_43': 9, 'cancermap_cluster_13_vs_18': 4, 'cancermap_cluster_13_vs_19': 2, 'cancermap_cluster_8_and_29': 6, 'cancermap_cluster_8_and_28': 7, 'cancermap_cluster_8_and_25': 3, 'cancermap_cluster_8_and_24': 9, 'cancermap_cluster_8_and_27': 9, 'cancermap_cluster_8_and_26': 2, 'cancermap_cluster_8_and_21': 7, 'cancermap_cluster_8_and_20': 6, 'cancermap_cluster_8_and_23': 7, 'cancermap_cluster_8_and_22': 7, 'cancermap_cluster_14_and_22': 7, 'cancermap_cluster_12_vs_43': 0, 'cancermap_cluster_pre-B-ALL_vs_CLL': 6, 'annotated_class_BCL_vs_Myeloid': 6, 'cancermap_cluster_14_and_29': 6, 'cancermap_cluster_16_and_24': 7, 'cancermap_cluster_14_and_28': 4, 'cancermap_cluster_Myeloid_and_pre-B-ALL': 5, 'cancermap_cluster_10_and_18': 9, 'cancermap_cluster_1_vs_8': 5, 'cancermap_cluster_10_and_19': 1, 'cancermap_cluster_7_vs_25': 5, 'cancermap_cluster_7_vs_24': 2, 'cancermap_cluster_7_vs_27': 3, 'cancermap_cluster_7_vs_26': 5, 'cancermap_cluster_7_vs_21': 5, 'cancermap_cluster_7_vs_20': 2, 'cancermap_cluster_7_vs_23': 6, 'cancermap_cluster_7_vs_22': 1, 'annotated_class_other': 6, 'cancermap_cluster_7_vs_29': 0, 'cancermap_cluster_7_vs_28': 9, 'cancermap_cluster_5_and_9': 9, 'cancermap_cluster_5_and_8': 6, 'annotated_class_MP_and_StemCell': 7, 'cancermap_cluster_1_vs_9': 6, 'cancermap_cluster_2_and_18': 3, 'cancermap_cluster_2_and_19': 3, 'cancermap_cluster_2_and_16': 5, 'cancermap_cluster_2_and_17': 8, 'cancermap_cluster_2_and_14': 4, 'cancermap_cluster_2_and_15': 3, 
'cancermap_cluster_2_and_12': 4, 'cancermap_cluster_2_and_13': 0, 'cancermap_cluster_2_and_10': 7, 'cancermap_cluster_2_and_11': 5, 'annotated_class_vs_cancermap_cluster_na_vs_AML': 1, 'cancermap_cluster_10_and_12': 3, 'cancermap_cluster_10_and_13': 6, 'annotated_class_AML_and_B-Lymphoid': 1, 'cancermap_cluster_25_and_27': 9, 'annotated_class_na_and_StemCell': 2, 'cancermap_cluster_25_and_26': 8, 'cancermap_cluster_9_and_40': 1, 'cancermap_cluster_9_and_41': 3, 'cancermap_cluster_9_and_42': 9, 'cancermap_cluster_9_and_43': 1, 'cancermap_cluster_9_and_44': 4, 'cancermap_cluster_9_and_45': 2, 'cancermap_cluster_9_and_46': 9, 'cancermap_cluster_9_and_47': 8, 'cancermap_cluster_9_and_48': 0, 'cancermap_cluster_9_and_49': 1, 'annotated_class_BCL_and_StemCell': 9, 'cancermap_cluster_38_vs_50': 6, 'annotated_class_BCL_vs_B-Lymphoid': 7, 'cancermap_cluster_17_and_20': 6, 'cancermap_cluster_17_and_21': 5, 'cancermap_cluster_17_and_22': 6, 'cancermap_cluster_17_and_23': 0, 'cancermap_cluster_17_and_24': 6, 'cancermap_cluster_17_and_25': 1, 'cancermap_cluster_17_and_26': 9, 'cancermap_cluster_17_and_27': 9, 'cancermap_cluster_17_and_28': 2, 'cancermap_cluster_17_and_29': 0, 'annotated_class_CellLine_Leukemia_and_Cancer_Myeloma': 5, 'annotated_class_Prolif_Myelo_vs_other': 7, 'cancermap_cluster_3_vs_43': 7, 'cancermap_cluster_3_vs_42': 3, 'cancermap_cluster_3_vs_41': 3, 'cancermap_cluster_3_vs_40': 5, 'cancermap_cluster_3_vs_47': 8, 'cancermap_cluster_3_vs_46': 5, 'cancermap_cluster_3_vs_45': 3, 'cancermap_cluster_3_vs_44': 8, 'cancermap_cluster_1_vs_6': 9, 'cancermap_cluster_3_vs_49': 9, 'cancermap_cluster_3_vs_48': 3, 'cancermap_cluster_4_vs_22': 8, 'cancermap_cluster_4_vs_23': 1, 'cancermap_cluster_4_vs_20': 9, 'cancermap_cluster_4_vs_21': 8, 'cancermap_cluster_5_and_39': 7, 'cancermap_cluster_4_vs_27': 5, 'cancermap_cluster_4_vs_24': 7, 'cancermap_cluster_4_vs_25': 3, 'cancermap_cluster_5_and_35': 8, 'cancermap_cluster_5_and_34': 1, 'cancermap_cluster_5_and_37': 4, 
'cancermap_cluster_4_vs_29': 0, 'cancermap_cluster_5_and_31': 6, 'cancermap_cluster_5_and_30': 7, 'cancermap_cluster_5_and_33': 1, 'cancermap_cluster_5_and_32': 4, 'cancermap_cluster_1_vs_7': 4, 'cancermap_cluster_22_and_50': 0, 'annotated_class_AML_vs_pre-B-ALL': 3, 'cancermap_cluster_13_and_50': 4, 'annotated_class_T-Lymphoid_vs_CML': 6, 'annotated_class_T-ALL_and_CML': 2, 'annotated_class_Cancer_Myeloma_vs_other': 1, 'cancermap_cluster_MM_and_CLL': 3, 'annotated_class_TCL': 7, 'cancermap_cluster_15_and_50': 5, 'annotated_class_MP_and_pre-B-ALL': 5, 'annotated_class_Cancer_Myeloma_vs_CellLine_Myeloma': 0, 'annotated_class_BCL_and_MP': 4, 'cancermap_cluster_other_and_acute_leukemias': 1, 'annotated_class_vs_cancermap_cluster_LP_vs_CLL': 0, 'annotated_class_Cancer_Leukemia_vs_CellLine_Myeloma': 6, 'annotated_class_T-Lymphoid_and_Lymphoid': 4, 'cancermap_cluster_2_vs_19': 6, 'cancermap_cluster_2_vs_18': 1, 'cancermap_cluster_2_vs_15': 1, 'cancermap_cluster_2_vs_14': 6, 'cancermap_cluster_2_vs_17': 9, 'cancermap_cluster_2_vs_16': 5, 'cancermap_cluster_2_vs_11': 1, 'cancermap_cluster_2_vs_10': 9, 'cancermap_cluster_2_vs_13': 0, 'cancermap_cluster_2_vs_12': 9, 'cancermap_cluster_10_vs_39': 4, 'cancermap_cluster_44': 7, 'cancermap_cluster_33_vs_50': 1, 'cancermap_cluster_45': 9, 'cancermap_cluster_10_vs_31': 2, 'cancermap_cluster_10_vs_30': 7, 'cancermap_cluster_10_vs_33': 6, 'cancermap_cluster_29_vs_50': 5, 'cancermap_cluster_10_vs_35': 6, 'cancermap_cluster_46': 4, 'cancermap_cluster_10_vs_37': 9, 'cancermap_cluster_10_vs_36': 2, 'cancermap_cluster_14_vs_35': 4, 'cancermap_cluster_47': 3, 'cancermap_cluster_40': 0, 'cancermap_cluster_41': 3, 'cancermap_cluster_15_vs_23': 3, 'cancermap_cluster_15_vs_22': 7, 'cancermap_cluster_15_vs_21': 8, 'cancermap_cluster_28_and_44': 3, 'cancermap_cluster_15_vs_27': 5, 'cancermap_cluster_42': 8, 'cancermap_cluster_15_vs_25': 1, 'cancermap_cluster_15_vs_24': 8, 'cancermap_cluster_12_and_38': 9, 'cancermap_cluster_12_and_39': 5, 
'cancermap_cluster_15_vs_29': 6, 'cancermap_cluster_43': 4, 'cancermap_cluster_BCL_vs_CLL': 7, 'annotated_class_T-Lymphoid_vs_na': 8, 'cancermap_cluster_48': 8, 'annotated_class_Lymphoid': 5, 'cancermap_cluster_49': 9, 'annotated_class_pre-B-ALL_vs_CLL': 6, 'cancermap_cluster_BCL_vs_CellLine': 4, 'cancermap_cluster_BCL_vs_pre-B-ALL': 1, 'annotated_class_NonCancer_and_CellLine_Lymphoma': 6, 'cancermap_cluster_31': 6, 'cancermap_cluster_17_vs_49': 2, 'cancermap_cluster_30': 7, 'cancermap_cluster_17_vs_48': 7, 'cancermap_cluster_6_vs_35': 9, 'annotated_class_B-Lymphoid_vs_CML': 0, 'cancermap_cluster_17_vs_47': 0, 'annotated_class_CellLine_Lymphoma_vs_CellLine_Myeloma': 3, 'cancermap_cluster_32': 8, 'annotated_class_vs_cancermap_cluster_BCL_vs_MM': 6, 'cancermap_cluster_17_vs_46': 8, 'cancermap_cluster_35': 6, 'annotated_class_Lymphoid_and_LP': 1, 'cancermap_cluster_BCL_vs_MM': 3, 'cancermap_cluster_17_vs_45': 6, 'cancermap_cluster_34': 9, 'cancermap_cluster_22_vs_23': 8, 'cancermap_cluster_17_vs_44': 3, 'cancermap_cluster_22_vs_25': 1, 'cancermap_cluster_37': 0, 'cancermap_cluster_22_vs_27': 7, 'cancermap_cluster_22_vs_26': 3, 'cancermap_cluster_22_vs_29': 0, 'cancermap_cluster_22_vs_28': 6, 'cancermap_cluster_36': 3, 'cancermap_cluster_17_vs_42': 7, 'cancermap_cluster_20_and_44': 4, 'annotated_class_T-Lymphoid_and_Erythroid': 7, 'cancermap_cluster_17_vs_41': 3, 'cancermap_cluster_20_and_45': 5, 'cancermap_cluster_17_vs_40': 5, 'cancermap_cluster_16_vs_44': 5, 'annotated_class_AML_vs_B-Lymphoid': 5, 'annotated_class_AML': 7, 'cancermap_cluster_20_and_47': 0, 'cancermap_cluster_14_vs_34': 2, 'cancermap_cluster_16_vs_42': 3, 'cancermap_cluster_16_vs_43': 8, 'cancermap_cluster_20_and_42': 8, 'cancermap_cluster_20_and_43': 7, 'annotated_class_Cancer_Lymphoma_and_CellLine_Leukemia': 5, 'cancermap_cluster_6_and_12': 5, 'annotated_class_na_and_AML': 1, 'cancermap_cluster_6_and_13': 3, 'annotated_class_Cancer_Myeloma_and_Prolif_Lympho': 2, 'cancermap_cluster_36_vs_50': 1, 
'annotated_class_NonCancer': 8, 'annotated_class_vs_cancermap_cluster_CellLine_vs_MM': 9, 'annotated_class_vs_cancermap_cluster_BCL_vs_Myeloid': 1, 'cancermap_cluster_42_and_50': 4, 'cancermap_cluster_8_and_10': 4, 'cancermap_cluster_8_and_11': 7, 'cancermap_cluster_8_and_12': 0, 'cancermap_cluster_8_and_13': 1, 'cancermap_cluster_8_and_14': 1, 'cancermap_cluster_8_and_15': 3, 'cancermap_cluster_8_and_16': 8, 'cancermap_cluster_8_and_17': 1, 'cancermap_cluster_8_and_18': 9, 'cancermap_cluster_8_and_19': 5, 'annotated_class_vs_cancermap_cluster_NonCancer_vs_CellLine_Leukemia': 5, 'cancermap_cluster_9_vs_41': 8, 'cancermap_cluster_9_vs_40': 7, 'cancermap_cluster_9_vs_43': 4, 'cancermap_cluster_9_vs_42': 4, 'cancermap_cluster_9_vs_45': 5, 'cancermap_cluster_9_vs_44': 0, 'cancermap_cluster_9_vs_47': 4, 'cancermap_cluster_9_vs_46': 2, 'cancermap_cluster_9_vs_49': 4, 'cancermap_cluster_9_vs_48': 4, 'cancermap_cluster_6_and_18': 7, 'annotated_class_vs_cancermap_cluster_NonCancer_vs_CellLine_Myeloma': 2, 'cancermap_cluster_6_and_19': 2, 'annotated_class_Myeloid_and_AML': 2, 'cancermap_cluster_10_vs_49': 8, 'annotated_class_na_and_pre-B-ALL': 3, 'annotated_class_TCL_and_CellLine': 2, 'cancermap_cluster_7_vs_36': 9, 'cancermap_cluster_7_vs_37': 5, 'cancermap_cluster_7_vs_34': 0, 'cancermap_cluster_7_vs_35': 6, 'cancermap_cluster_7_vs_32': 2, 'cancermap_cluster_7_vs_33': 9, 'cancermap_cluster_7_vs_30': 3, 'cancermap_cluster_50': 4, 'cancermap_cluster_37_and_39': 9, 'cancermap_cluster_37_and_38': 5, 'cancermap_cluster_7_vs_38': 8, 'cancermap_cluster_7_vs_39': 1, 'cancermap_cluster_4_and_50': 3, 'annotated_class_BCL_vs_CML': 0, 'annotated_class_Myeloid_and_CML': 2, 'cancermap_cluster_11_vs_12': 5, 'annotated_class_CellLine_and_B-Lymphoid': 3, 'annotated_class_Myeloid_vs_LP': 9, 'cancermap_cluster_10_vs_11': 5, 'annotated_class_Myeloid_and_T-ALL': 9, 'cancermap_cluster_T-ALL_and_MM': 3, 'cancermap_cluster_10_vs_17': 9, 'cancermap_cluster_Cancer_Lymphoma_and_CellLine_Myeloma': 1, 
'annotated_class_acute_leukemias': 7, 'cancermap_cluster_10_vs_16': 5, 'cancermap_cluster_7_and_48': 4, 'cancermap_cluster_7_and_49': 6, 'cancermap_cluster_9_and_50': 2, 'cancermap_cluster_7_and_42': 2, 'cancermap_cluster_7_and_43': 7, 'cancermap_cluster_7_and_40': 4, 'cancermap_cluster_7_and_41': 6, 'cancermap_cluster_7_and_46': 2, 'cancermap_cluster_7_and_47': 2, 'cancermap_cluster_7_and_44': 4, 'cancermap_cluster_7_and_45': 2, 'annotated_class_vs_cancermap_cluster_MP_vs_pre-B-ALL': 2, 'cancermap_cluster_17_vs_32': 5, 'cancermap_cluster_17_vs_33': 2, 'cancermap_cluster_17_vs_30': 2, 'annotated_class_Cancer_Leukemia_and_Prolif_Myelo': 8, 'cancermap_cluster_17_vs_31': 8, 'cancermap_cluster_NonCancer_vs_CellLine_Leukemia': 2, 'cancermap_cluster_17_vs_37': 6, 'cancermap_cluster_17_vs_34': 9, 'cancermap_cluster_16_and_38': 6, 'cancermap_cluster_17_vs_35': 6, 'cancermap_cluster_17_and_19': 1, 'cancermap_cluster_17_and_18': 4, 'annotated_class_Lymphoid_and_pre-B-ALL': 1, 'annotated_class_StemCell_and_CML': 3, 'annotated_class_vs_cancermap_cluster_BCL_vs_pre-B-ALL': 0, 'annotated_class_Lymphoid_vs_Erythroid': 8, 'annotated_class_NonCancer_vs_CellLine_Myeloma': 6, 'annotated_class_vs_cancermap_cluster_acute_leukemias_vs_chronic_leukemias': 4, 'cancermap_cluster_3_vs_50': 5, 'cancermap_cluster_16_and_39': 2, 'cancermap_cluster_4_vs_17': 4, 'cancermap_cluster_4_vs_16': 5, 'cancermap_cluster_4_vs_15': 1, 'cancermap_cluster_4_vs_14': 7, 'cancermap_cluster_4_vs_13': 8, 'cancermap_cluster_4_vs_12': 2, 'cancermap_cluster_4_vs_11': 2, 'cancermap_cluster_4_vs_10': 6, 'cancermap_cluster_4_vs_19': 7, 'cancermap_cluster_4_vs_18': 2, 'cancermap_cluster_CellLine_Leukemia_vs_CellLine_Myeloma': 8, 'cancermap_cluster_14_vs_50': 7, 'cancermap_cluster_7_vs_48': 7, 'cancermap_cluster_47_and_49': 6, 'cancermap_cluster_47_and_48': 4, 'annotated_class_BCL_and_CLL': 5, 'cancermap_cluster_49_vs_50': 8, 'cancermap_cluster_30_and_38': 2, 'cancermap_cluster_30_and_39': 0, 
'cancermap_cluster_15_and_46': 1, 'cancermap_cluster_15_and_47': 5, 'cancermap_cluster_15_and_40': 1, 'cancermap_cluster_15_and_41': 6, 'cancermap_cluster_15_and_42': 7, 'cancermap_cluster_15_and_43': 7, 'cancermap_cluster_13_and_42': 3, 'cancermap_cluster_30_and_31': 3, 'cancermap_cluster_30_and_32': 9, 'cancermap_cluster_30_and_33': 0, 'cancermap_cluster_30_and_34': 3, 'cancermap_cluster_30_and_35': 4, 'cancermap_cluster_30_and_36': 8, 'cancermap_cluster_30_and_37': 6, 'cancermap_cluster_7_and_15': 3, 'annotated_class_vs_cancermap_cluster_T-ALL_vs_MM': 6, 'cancermap_cluster_45_and_49': 5, 'cancermap_cluster_45_and_48': 0, 'cancermap_cluster_17_vs_23': 4, 'cancermap_cluster_45_and_47': 2, 'cancermap_cluster_45_and_46': 8, 'cancermap_cluster_33_vs_41': 3, 'cancermap_cluster_33_vs_40': 4, 'cancermap_cluster_33_vs_43': 0, 'cancermap_cluster_33_vs_42': 8, 'cancermap_cluster_33_vs_45': 5, 'cancermap_cluster_33_vs_44': 3, 'cancermap_cluster_33_vs_47': 6, 'cancermap_cluster_33_vs_46': 1, 'cancermap_cluster_33_vs_49': 9, 'cancermap_cluster_33_vs_48': 5, 'annotated_class_NonCancer_and_acute_leukemias': 2, 'cancermap_cluster_7_and_17': 2, 'cancermap_cluster_NonCancer_and_acute_leukemias': 2, 'cancermap_cluster_15_vs_50': 8, 'annotated_class_vs_cancermap_cluster_AML_vs_pre-B-ALL': 1, 'annotated_class_Cancer_Lymphoma_and_Cancer_Myeloma': 2, 'cancermap_cluster_13_and_14': 2, 'annotated_class_T-ALL_and_CLL': 0, 'cancermap_cluster_7_and_16': 7, 'annotated_class_TCL_and_T-ALL': 3, 'annotated_class_BCL_vs_na': 5, 'cancermap_cluster_15_and_48': 6, 'annotated_class_vs_cancermap_cluster_Cancer_Leukemia_vs_CellLine_Leukemia': 1, 'cancermap_cluster_13_and_32': 4, 'cancermap_cluster_15_and_49': 3, 'cancermap_cluster_18_and_34': 1, 'annotated_class_NonCancer_vs_other': 2, 'annotated_class_vs_cancermap_cluster_Myeloid_vs_CellLine': 9, 'cancermap_cluster_24_vs_50': 7, 'annotated_class_CellLine_Leukemia_and_CellLine_Myeloma': 8, 'annotated_class_T-Lymphoid_vs_T-ALL': 0, 
'cancermap_cluster_1_vs_35': 4, 'annotated_class_vs_cancermap_cluster_Myeloid_vs_MM': 0, 'cancermap_cluster_30_vs_48': 1, 'cancermap_cluster_30_vs_49': 6, 'cancermap_cluster_42_and_48': 5, 'cancermap_cluster_42_and_49': 3, 'cancermap_cluster_30_vs_42': 0, 'cancermap_cluster_30_vs_43': 4, 'cancermap_cluster_30_vs_40': 7, 'cancermap_cluster_30_vs_41': 2, 'cancermap_cluster_30_vs_46': 9, 'cancermap_cluster_30_vs_47': 9, 'cancermap_cluster_30_vs_44': 4, 'cancermap_cluster_30_vs_45': 0, 'cancermap_cluster_BCL': 8, 'annotated_class_AML_and_MM': 1, 'annotated_class_T-ALL_and_B-Lymphoid': 7, 'cancermap_cluster_9_vs_50': 5, 'annotated_class_AML_and_MP': 1, 'cancermap_cluster_12_vs_34': 0, 'annotated_class_T-ALL_and_StemCell': 5, 'annotated_class_vs_cancermap_cluster_Cancer_Myeloma_vs_CellLine_Myeloma': 9, 'annotated_class_Cancer_Lymphoma_vs_CellLine_Myeloma': 9, 'annotated_class_NonCancer_vs_CellLine_Lymphoma': 1, 'cancermap_cluster_8_and_43': 7, 'cancermap_cluster_12_vs_33': 8, 'cancermap_cluster_8_and_42': 4, 'cancermap_cluster_BCL_and_AML': 6, 'annotated_class_T-Lymphoid_and_AML': 1, 'cancermap_cluster_6_and_47': 3, 'cancermap_cluster_6_and_46': 7, 'cancermap_cluster_25_vs_26': 9, 'cancermap_cluster_25_vs_27': 2, 'annotated_class_Cancer_Leukemia_and_CellLine_Lymphoma': 1, 'cancermap_cluster_6_and_41': 8, 'cancermap_cluster_25_vs_28': 1, 'cancermap_cluster_25_vs_29': 9, 'cancermap_cluster_6_and_40': 7, 'cancermap_cluster_1_vs_49': 1, 'cancermap_cluster_1_vs_48': 9, 'cancermap_cluster_6_and_43': 1, 'cancermap_cluster_1_vs_41': 8, 'cancermap_cluster_1_vs_40': 9, 'cancermap_cluster_1_vs_43': 9, 'cancermap_cluster_6_and_42': 0, 'cancermap_cluster_1_vs_45': 7, 'cancermap_cluster_1_vs_44': 6, 'cancermap_cluster_1_vs_47': 6, 'cancermap_cluster_1_vs_46': 1, 'cancermap_cluster_2_and_30': 2, 'cancermap_cluster_2_and_31': 7, 'cancermap_cluster_31_vs_49': 0, 'cancermap_cluster_31_vs_48': 1, 'cancermap_cluster_2_and_34': 1, 'cancermap_cluster_2_and_35': 5, 
'cancermap_cluster_2_and_36': 5, 'cancermap_cluster_2_and_37': 2, 'cancermap_cluster_31_vs_43': 1, 'cancermap_cluster_31_vs_42': 4, 'cancermap_cluster_31_vs_41': 6, 'cancermap_cluster_31_vs_40': 2, 'cancermap_cluster_31_vs_47': 0, 'cancermap_cluster_43_and_49': 6, 'cancermap_cluster_31_vs_45': 0, 'cancermap_cluster_31_vs_44': 9, 'cancermap_cluster_18': 4, 'cancermap_cluster_14_vs_33': 8, 'annotated_class_vs_cancermap_cluster_NonCancer_vs_Cancer_Lymphoma': 6, 'annotated_class_CellLine_vs_MP': 8, 'cancermap_cluster_12_vs_30': 9, 'annotated_class_TCL_vs_CellLine': 4, 'cancermap_cluster_26_vs_50': 5, 'cancermap_cluster_12': 7, 'annotated_class_T-Lymphoid_vs_AML': 8, 'annotated_class_CellLine_vs_MM': 6, 'annotated_class_BCL_and_na': 8, 'cancermap_cluster_7_and_50': 9, 'cancermap_cluster_26_and_40': 7, 'cancermap_cluster_16_vs_29': 0, 'cancermap_cluster_16_and_49': 3, 'annotated_class_NonCancer_vs_Prolif_Myelo': 9, 'cancermap_cluster_14': 5, 'cancermap_cluster_16_and_48': 8, 'cancermap_cluster_11_vs_50': 4, 'annotated_class_TCL_vs_Erythroid': 6, 'cancermap_cluster_16_and_47': 3, 'cancermap_cluster_23_vs_43': 5, 'cancermap_cluster_16_and_46': 6, 'annotated_class_Myeloid_vs_MM': 0, 'cancermap_cluster_35_vs_50': 7, 'annotated_class_BCL_vs_CLL': 9, 'annotated_class_Myeloid_and_CLL': 2, 'annotated_class_Myeloid_vs_MP': 9, 'cancermap_cluster_5_and_17': 1, 'cancermap_cluster_5_and_16': 1, 'cancermap_cluster_5_and_15': 4, 'cancermap_cluster_5_and_14': 5, 'cancermap_cluster_5_and_13': 9, 'cancermap_cluster_5_and_12': 6, 'cancermap_cluster_5_and_11': 6, 'cancermap_cluster_5_and_10': 1, 'cancermap_cluster_14_vs_32': 3, 'cancermap_cluster_25_and_29': 5, 'cancermap_cluster_25_and_28': 0, 'cancermap_cluster_5_and_19': 3, 'cancermap_cluster_5_and_18': 9, 'annotated_class_MP_vs_Erythroid': 2, 'cancermap_cluster_14_vs_47': 4, 'annotated_class_CellLine_Myeloma': 5, 'cancermap_cluster_14_vs_28': 7, 'cancermap_cluster_14_vs_29': 3, 'cancermap_cluster_14_vs_26': 4, 
'cancermap_cluster_4_vs_26': 5, 'cancermap_cluster_14_vs_24': 7, 'cancermap_cluster_14_vs_25': 9, 'cancermap_cluster_14_vs_22': 8, 'cancermap_cluster_14_vs_23': 1, 'cancermap_cluster_14_vs_20': 9, 'cancermap_cluster_5_and_38': 3, 'annotated_class_vs_cancermap_cluster_TCL_vs_CLL': 2, 'annotated_class_vs_cancermap_cluster_CellLine_Lymphoma_vs_Cancer_Myeloma': 4, 'cancermap_cluster_14_vs_42': 1, 'cancermap_cluster_NonCancer_vs_chronic_leukemias': 2, 'cancermap_cluster_47_and_50': 8, 'annotated_class_B-Lymphoid_vs_StemCell': 8, 'annotated_class_MM_and_LP': 3, 'cancermap_cluster_13_and_39': 3, 'cancermap_cluster_13_and_38': 9, 'cancermap_cluster_13_and_37': 1, 'cancermap_cluster_13_and_36': 5, 'cancermap_cluster_13_and_35': 3, 'cancermap_cluster_13_and_34': 1, 'cancermap_cluster_13_and_33': 5, 'cancermap_cluster_4_vs_28': 9, 'cancermap_cluster_13_and_31': 9, 'cancermap_cluster_13_and_30': 2, 'cancermap_cluster_CellLine_vs_T-ALL': 5, 'cancermap_cluster_5_and_36': 0, 'annotated_class_Lymphoid_and_StemCell': 0, 'cancermap_cluster_14_vs_48': 5, 'annotated_class_TCL_vs_B-Lymphoid': 3, 'cancermap_cluster_14_vs_49': 2, 'cancermap_cluster_2_vs_33': 4, 'cancermap_cluster_2_vs_32': 0, 'cancermap_cluster_2_vs_31': 8, 'cancermap_cluster_2_vs_30': 2, 'cancermap_cluster_2_vs_37': 5, 'cancermap_cluster_2_vs_36': 1, 'cancermap_cluster_2_vs_35': 5, 'cancermap_cluster_2_vs_34': 5, 'cancermap_cluster_24_vs_45': 5, 'cancermap_cluster_2_vs_39': 8, 'cancermap_cluster_2_vs_38': 6, 'cancermap_cluster_45_and_50': 6, 'cancermap_cluster_10_vs_13': 0, 'cancermap_cluster_10_vs_12': 6, 'cancermap_cluster_17_vs_38': 5, 'cancermap_cluster_17_vs_39': 0, 'cancermap_cluster_29_vs_39': 2, 'cancermap_cluster_29_vs_38': 3, 'cancermap_cluster_10_vs_15': 9, 'cancermap_cluster_10_vs_14': 0, 'cancermap_cluster_29_vs_35': 9, 'cancermap_cluster_29_vs_34': 9, 'cancermap_cluster_29_vs_37': 5, 'cancermap_cluster_29_vs_36': 0, 'cancermap_cluster_29_vs_31': 5, 'cancermap_cluster_29_vs_30': 2, 
'cancermap_cluster_29_vs_33': 5, 'cancermap_cluster_29_vs_32': 3, 'annotated_class_T-Lymphoid_vs_MP': 9, 'cancermap_cluster_24_and_50': 3, 'cancermap_cluster_15_vs_45': 6, 'cancermap_cluster_15_vs_44': 8, 'cancermap_cluster_15_vs_47': 1, 'cancermap_cluster_15_vs_46': 2, 'cancermap_cluster_15_vs_41': 3, 'cancermap_cluster_15_vs_40': 8, 'cancermap_cluster_15_vs_43': 8, 'cancermap_cluster_15_vs_42': 5, 'cancermap_cluster_15_vs_49': 9, 'cancermap_cluster_15_vs_48': 9, 'annotated_class_vs_cancermap_cluster_other_vs_acute_leukemias': 9, 'cancermap_cluster_12_and_18': 9, 'cancermap_cluster_12_and_19': 9, 'cancermap_cluster_12_and_13': 3, 'cancermap_cluster_12_and_16': 3, 'cancermap_cluster_12_and_17': 6, 'cancermap_cluster_12_and_14': 2, 'cancermap_cluster_12_and_15': 5, 'annotated_class_Myeloid_vs_na': 5, 'annotated_class_NonCancer_and_Cancer_Myeloma': 4, 'annotated_class_StemCell': 5, 'cancermap_cluster_T-ALL_and_pre-B-ALL': 4, 'cancermap_cluster_CellLine_and_pre-B-ALL': 7, 'annotated_class_T-Lymphoid_and_CLL': 9, 'annotated_class_vs_cancermap_cluster_TCL_vs_MM': 6, 'annotated_class_pre-B-ALL_and_Erythroid': 7, 'annotated_class_B-Lymphoid_and_CML': 3, 'annotated_class_CellLine_Leukemia_vs_Cancer_Myeloma': 5, 'annotated_class_CellLine_and_CLL': 1, 'cancermap_cluster_3_and_20': 3, 'cancermap_cluster_4_and_19': 4, 'cancermap_cluster_24_vs_41': 2, 'cancermap_cluster_24_vs_40': 6, 'cancermap_cluster_24_vs_43': 1, 'cancermap_cluster_24_vs_42': 7, 'cancermap_cluster_CellLine_Leukemia_and_Cancer_Myeloma': 3, 'cancermap_cluster_24_vs_44': 6, 'cancermap_cluster_24_vs_47': 2, 'cancermap_cluster_24_vs_46': 8, 'cancermap_cluster_24_vs_49': 5, 'cancermap_cluster_24_vs_48': 7, 'annotated_class_TCL_and_Myeloid': 5}
# Script entry point: run main() only when this module is executed directly,
# never as a side effect of being imported.
if __name__ == "__main__":
    main()
| 22,193 | 110,925 | 0.828225 | 19,386 | 110,965 | 4.077221 | 0.004797 | 0.546754 | 0.059362 | 0.027201 | 0.935464 | 0.183449 | 0.088347 | 0.041232 | 0.011918 | 0.002024 | 0 | 0.115935 | 0.056486 | 110,965 | 4 | 110,926 | 27,741.25 | 0.639015 | 0 | 0 | 0 | 0 | 0 | 0.80219 | 0.791502 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
92752a7e1d56b6fe69a69c8a1016099394580801 | 8,777 | py | Python | export_readiness/migrations/0037_auto_20190321_0728.py | kaedroho/dit-directory-cms | 67c15eeed19e7b3583f1fce1969230ddf83b6813 | [
"MIT"
] | 6 | 2018-03-20T11:19:07.000Z | 2021-10-05T07:53:11.000Z | export_readiness/migrations/0037_auto_20190321_0728.py | kaedroho/dit-directory-cms | 67c15eeed19e7b3583f1fce1969230ddf83b6813 | [
"MIT"
] | 802 | 2018-02-05T14:16:13.000Z | 2022-02-10T10:59:21.000Z | export_readiness/migrations/0037_auto_20190321_0728.py | kaedroho/dit-directory-cms | 67c15eeed19e7b3583f1fce1969230ddf83b6813 | [
"MIT"
] | 6 | 2019-01-22T13:19:37.000Z | 2019-07-01T10:35:26.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-03-21 07:28
from __future__ import unicode_literals
import core.model_fields
import core.validators
from django.db import migrations, models
import django.db.models.deletion
def _country_guide_alter(name, field):
    """Build an AlterField operation against the countryguidepage model."""
    return migrations.AlterField(
        model_name='countryguidepage',
        name=name,
        field=field,
    )


def _accordion_operations():
    """AlterField operations for accordion slots 1-6.

    Every slot re-declares the same three fields (icon, teaser, title) with
    identical options, so the 18 operations are generated in a loop instead
    of being spelled out individually.
    """
    ops = []
    for slot in range(1, 7):
        ops.append(_country_guide_alter(
            'accordion_%d_icon' % slot,
            models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image', verbose_name='Industry Icon'),
        ))
        ops.append(_country_guide_alter(
            'accordion_%d_teaser' % slot,
            models.TextField(blank=True, verbose_name='Industry teaser'),
        ))
        ops.append(_country_guide_alter(
            'accordion_%d_title' % slot,
            models.CharField(blank=True, max_length=255, verbose_name='Industry title'),
        ))
    return ops


def _fact_sheet_column_operations():
    """AlterField operations for the two fact-sheet columns.

    Each column re-declares a body, teaser and title field whose verbose
    names differ only in the column label.
    """
    ops = []
    for column, label in ((1, 'Tax and customs'), (2, 'Protecting your business')):
        ops.append(_country_guide_alter(
            'fact_sheet_column_%d_body' % column,
            core.model_fields.MarkdownField(blank=True, help_text='Use H4 (####) for each sub category heading. Maximum five sub categories. Aim for 50 words each.', validators=[core.validators.slug_hyperlinks], verbose_name="Detailed text for '%s'" % label),
        ))
        ops.append(_country_guide_alter(
            'fact_sheet_column_%d_teaser' % column,
            models.CharField(blank=True, max_length=255, verbose_name="Summary for '%s'" % label),
        ))
        ops.append(_country_guide_alter(
            'fact_sheet_column_%d_title' % column,
            models.CharField(blank=True, max_length=255, verbose_name="Title for '%s'" % label),
        ))
    return ops


class Migration(migrations.Migration):
    """Re-declare CountryGuidePage field definitions.

    Every operation is an AlterField on countryguidepage; the visible changes
    are to field options such as verbose_name and help_text.
    """

    dependencies = [
        ('export_readiness', '0036_topiclandingpage_teaser'),
    ]

    # Regular field groups are generated above; the remaining one-off fields
    # are listed explicitly, in the same order as the original migration.
    operations = _accordion_operations() + _fact_sheet_column_operations() + [
        _country_guide_alter('fact_sheet_teaser', models.CharField(blank=True, max_length=255, verbose_name="Summary for 'Doing business in' section")),
        _country_guide_alter('fact_sheet_title', models.CharField(blank=True, max_length=255, verbose_name="Title for 'Doing business in' section")),
        _country_guide_alter('heading', models.CharField(help_text='Only enter the country name', max_length=255, verbose_name='Country name')),
        _country_guide_alter('heading_teaser', models.TextField(blank=True, verbose_name='Introduction')),
        _country_guide_alter('help_market_guide_cta_link', models.CharField(blank=True, max_length=255, verbose_name='GOV.UK country guide URL')),
        _country_guide_alter('section_one_body', core.model_fields.MarkdownField(help_text='Use H3 (###) markdown for the 3 subheadings', null=True, validators=[core.validators.slug_hyperlinks], verbose_name='3 unique selling points markdown')),
        _country_guide_alter('section_one_image', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='wagtailimages.Image', verbose_name='Image for unique selling points')),
        _country_guide_alter('section_two_heading', models.CharField(max_length=255, verbose_name='High potential industries for UK businesses')),
        _country_guide_alter('section_two_teaser', models.TextField(verbose_name='Summary of the industry opportunities')),
    ]
| 47.701087 | 279 | 0.642475 | 912 | 8,777 | 5.966009 | 0.137061 | 0.121301 | 0.151627 | 0.175887 | 0.879618 | 0.871163 | 0.855357 | 0.834773 | 0.76769 | 0.76769 | 0 | 0.015083 | 0.244617 | 8,777 | 183 | 280 | 47.961749 | 0.805581 | 0.007861 | 0 | 0.664773 | 1 | 0 | 0.265135 | 0.023435 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.028409 | 0 | 0.045455 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
9288287432a98f7119298875dc25a72b52113154 | 1,231 | py | Python | binary/hex.py | deadlock-delegate/binary | a643787be0070c35273057c4a176fe0f1588f1c5 | [
"MIT"
] | null | null | null | binary/hex.py | deadlock-delegate/binary | a643787be0070c35273057c4a176fe0f1588f1c5 | [
"MIT"
] | 2 | 2019-01-28T10:22:14.000Z | 2019-01-28T10:40:45.000Z | binary/hex.py | deadlock-delegate/binary | a643787be0070c35273057c4a176fe0f1588f1c5 | [
"MIT"
] | 2 | 2018-06-27T16:51:33.000Z | 2019-01-28T10:13:53.000Z | from binascii import unhexlify
from struct import pack, unpack_from
def read_low(data, offset=0, nibble=None):
    """Read a signed short ("low" reader) from a byte buffer.

    Args:
        data (bytes): buffer to read from
        offset (int, optional): byte offset into ``data`` at which to unpack
        nibble (str, optional): extra struct format character appended to the
            base ``'h'`` format; when given, the value decoded by that extra
            character is returned instead of the short itself (this preserves
            the original behaviour of the ``nibble`` path)

    Returns:
        int: the unpacked value
    """
    fmt = 'h{}'.format(nibble) if nibble else 'h'
    values = unpack_from(fmt, data, offset)
    # Bug fix: the original indexed [1] unconditionally, which raised
    # IndexError for the default (nibble=None) case where 'h' unpacks a
    # single-element tuple. Only index [1] when an extra format char exists.
    return values[1] if nibble else values[0]
def read_high(data, offset=0, nibble=None):
    """Read an unsigned short ("high" reader) from a byte buffer.

    Args:
        data (bytes): buffer to read from
        offset (int, optional): byte offset into ``data`` at which to unpack
        nibble (str, optional): extra struct format character appended to the
            base ``'H'`` format; when given, the value decoded by that extra
            character is returned instead of the short itself (this preserves
            the original behaviour of the ``nibble`` path)

    Returns:
        int: the unpacked value
    """
    fmt = 'H{}'.format(nibble) if nibble else 'H'
    values = unpack_from(fmt, data, offset)
    # Bug fix: the original indexed [1] unconditionally, which raised
    # IndexError for the default (nibble=None) case where 'H' unpacks a
    # single-element tuple. Only index [1] when an extra format char exists.
    return values[1] if nibble else values[0]
def write_low(data):
    """Serialize an integer as a single unsigned byte.

    Args:
        data (int): value in the range 0-255

    Returns:
        bytes: a one-byte bytes object containing ``data``
    """
    packed = pack('<B', data)
    return packed
def write_high(data):
    """Decode hexadecimal digits into their raw byte representation.

    Args:
        data (str | bytes): even-length sequence of hex digits

    Returns:
        bytes: the decoded bytes
    """
    raw = unhexlify(data)
    return raw
| 20.864407 | 49 | 0.61251 | 159 | 1,231 | 4.698113 | 0.27673 | 0.053548 | 0.053548 | 0.074967 | 0.843373 | 0.843373 | 0.819277 | 0.819277 | 0.819277 | 0.763052 | 0 | 0.00454 | 0.284322 | 1,231 | 58 | 50 | 21.224138 | 0.84336 | 0.510154 | 0 | 0.166667 | 0 | 0 | 0.02079 | 0 | 0 | 0 | 0 | 0.034483 | 0 | 1 | 0.333333 | false | 0 | 0.166667 | 0 | 0.833333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 9 |
92b4573ede281c81ac34739f6e4e642ed3d93533 | 42,078 | py | Python | sdk/python/pulumi_gcp/datacatalog/tag.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 121 | 2018-06-18T19:16:42.000Z | 2022-03-31T06:06:48.000Z | sdk/python/pulumi_gcp/datacatalog/tag.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 492 | 2018-06-22T19:41:03.000Z | 2022-03-31T15:33:53.000Z | sdk/python/pulumi_gcp/datacatalog/tag.py | sisisin/pulumi-gcp | af6681d70ea457843409110c1324817fe55f68ad | [
"ECL-2.0",
"Apache-2.0"
] | 43 | 2018-06-19T01:43:13.000Z | 2022-03-23T22:43:37.000Z | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['TagArgs', 'Tag']
@pulumi.input_type
class TagArgs:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) — see the file
    # header. Fix issues in the upstream provider schema, not by hand-editing
    # this class.
    def __init__(__self__, *,
                 fields: pulumi.Input[Sequence[pulumi.Input['TagFieldArgs']]],
                 template: pulumi.Input[str],
                 column: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a Tag resource.
        :param pulumi.Input[Sequence[pulumi.Input['TagFieldArgs']]] fields: This maps the ID of a tag field to the value of and additional information about that field.
               Valid field IDs are defined by the tag's template. A tag must have at least 1 field and at most 500 fields.
               Structure is documented below.
        :param pulumi.Input[str] template: The resource name of the tag template that this tag uses. Example:
               projects/{project_id}/locations/{location}/tagTemplates/{tagTemplateId}
               This field cannot be modified after creation.
        :param pulumi.Input[str] column: Resources like Entry can have schemas associated with them. This scope allows users to attach tags to an
               individual column based on that schema.
               For attaching a tag to a nested column, use `.` to separate the column names. Example:
               `outer_column.inner_column`
        :param pulumi.Input[str] parent: The name of the parent this tag is attached to. This can be the name of an entry or an entry group. If an entry group, the tag will be attached to
               all entries in that group.
        """
        pulumi.set(__self__, "fields", fields)
        pulumi.set(__self__, "template", template)
        # Optional inputs are only registered when they were actually supplied.
        if column is not None:
            pulumi.set(__self__, "column", column)
        if parent is not None:
            pulumi.set(__self__, "parent", parent)

    @property
    @pulumi.getter
    def fields(self) -> pulumi.Input[Sequence[pulumi.Input['TagFieldArgs']]]:
        """
        This maps the ID of a tag field to the value of and additional information about that field.
        Valid field IDs are defined by the tag's template. A tag must have at least 1 field and at most 500 fields.
        Structure is documented below.
        """
        return pulumi.get(self, "fields")

    @fields.setter
    def fields(self, value: pulumi.Input[Sequence[pulumi.Input['TagFieldArgs']]]):
        pulumi.set(self, "fields", value)

    @property
    @pulumi.getter
    def template(self) -> pulumi.Input[str]:
        """
        The resource name of the tag template that this tag uses. Example:
        projects/{project_id}/locations/{location}/tagTemplates/{tagTemplateId}
        This field cannot be modified after creation.
        """
        return pulumi.get(self, "template")

    @template.setter
    def template(self, value: pulumi.Input[str]):
        pulumi.set(self, "template", value)

    @property
    @pulumi.getter
    def column(self) -> Optional[pulumi.Input[str]]:
        """
        Resources like Entry can have schemas associated with them. This scope allows users to attach tags to an
        individual column based on that schema.
        For attaching a tag to a nested column, use `.` to separate the column names. Example:
        `outer_column.inner_column`
        """
        return pulumi.get(self, "column")

    @column.setter
    def column(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "column", value)

    @property
    @pulumi.getter
    def parent(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the parent this tag is attached to. This can be the name of an entry or an entry group. If an entry group, the tag will be attached to
        all entries in that group.
        """
        return pulumi.get(self, "parent")

    @parent.setter
    def parent(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "parent", value)
@pulumi.input_type
class _TagState:
    # NOTE: generated by the Pulumi Terraform Bridge (tfgen) — see the file
    # header. Unlike TagArgs, every property here is optional because this
    # state object is used for looking up / filtering existing Tag resources.
    def __init__(__self__, *,
                 column: Optional[pulumi.Input[str]] = None,
                 fields: Optional[pulumi.Input[Sequence[pulumi.Input['TagFieldArgs']]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 parent: Optional[pulumi.Input[str]] = None,
                 template: Optional[pulumi.Input[str]] = None,
                 template_displayname: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering Tag resources.
        :param pulumi.Input[str] column: Resources like Entry can have schemas associated with them. This scope allows users to attach tags to an
               individual column based on that schema.
               For attaching a tag to a nested column, use `.` to separate the column names. Example:
               `outer_column.inner_column`
        :param pulumi.Input[Sequence[pulumi.Input['TagFieldArgs']]] fields: This maps the ID of a tag field to the value of and additional information about that field.
               Valid field IDs are defined by the tag's template. A tag must have at least 1 field and at most 500 fields.
               Structure is documented below.
        :param pulumi.Input[str] name: The resource name of the tag in URL format. Example:
               projects/{project_id}/locations/{location}/entrygroups/{entryGroupId}/entries/{entryId}/tags/{tag_id} or
               projects/{project_id}/locations/{location}/entrygroups/{entryGroupId}/tags/{tag_id} where tag_id is a system-generated
               identifier. Note that this Tag may not actually be stored in the location in this name.
        :param pulumi.Input[str] parent: The name of the parent this tag is attached to. This can be the name of an entry or an entry group. If an entry group, the tag will be attached to
               all entries in that group.
        :param pulumi.Input[str] template: The resource name of the tag template that this tag uses. Example:
               projects/{project_id}/locations/{location}/tagTemplates/{tagTemplateId}
               This field cannot be modified after creation.
        :param pulumi.Input[str] template_displayname: The display name of the tag template.
        """
        # Only register the inputs that were actually supplied.
        if column is not None:
            pulumi.set(__self__, "column", column)
        if fields is not None:
            pulumi.set(__self__, "fields", fields)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if parent is not None:
            pulumi.set(__self__, "parent", parent)
        if template is not None:
            pulumi.set(__self__, "template", template)
        if template_displayname is not None:
            pulumi.set(__self__, "template_displayname", template_displayname)

    @property
    @pulumi.getter
    def column(self) -> Optional[pulumi.Input[str]]:
        """
        Resources like Entry can have schemas associated with them. This scope allows users to attach tags to an
        individual column based on that schema.
        For attaching a tag to a nested column, use `.` to separate the column names. Example:
        `outer_column.inner_column`
        """
        return pulumi.get(self, "column")

    @column.setter
    def column(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "column", value)

    @property
    @pulumi.getter
    def fields(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['TagFieldArgs']]]]:
        """
        This maps the ID of a tag field to the value of and additional information about that field.
        Valid field IDs are defined by the tag's template. A tag must have at least 1 field and at most 500 fields.
        Structure is documented below.
        """
        return pulumi.get(self, "fields")

    @fields.setter
    def fields(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['TagFieldArgs']]]]):
        pulumi.set(self, "fields", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        The resource name of the tag in URL format. Example:
        projects/{project_id}/locations/{location}/entrygroups/{entryGroupId}/entries/{entryId}/tags/{tag_id} or
        projects/{project_id}/locations/{location}/entrygroups/{entryGroupId}/tags/{tag_id} where tag_id is a system-generated
        identifier. Note that this Tag may not actually be stored in the location in this name.
        """
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def parent(self) -> Optional[pulumi.Input[str]]:
        """
        The name of the parent this tag is attached to. This can be the name of an entry or an entry group. If an entry group, the tag will be attached to
        all entries in that group.
        """
        return pulumi.get(self, "parent")

    @parent.setter
    def parent(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "parent", value)

    @property
    @pulumi.getter
    def template(self) -> Optional[pulumi.Input[str]]:
        """
        The resource name of the tag template that this tag uses. Example:
        projects/{project_id}/locations/{location}/tagTemplates/{tagTemplateId}
        This field cannot be modified after creation.
        """
        return pulumi.get(self, "template")

    @template.setter
    def template(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "template", value)

    @property
    @pulumi.getter(name="templateDisplayname")
    def template_displayname(self) -> Optional[pulumi.Input[str]]:
        """
        The display name of the tag template.
        """
        return pulumi.get(self, "template_displayname")

    @template_displayname.setter
    def template_displayname(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "template_displayname", value)
class Tag(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 column: Optional[pulumi.Input[str]] = None,
                 fields: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TagFieldArgs']]]]] = None,
                 parent: Optional[pulumi.Input[str]] = None,
                 template: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Create a Tag resource from individually named properties.

        Tags are used to attach custom metadata to Data Catalog resources and
        conform to the specifications within their tag template.  See
        [Data Catalog IAM](https://cloud.google.com/data-catalog/docs/concepts/iam)
        for information on the permissions needed to create or view tags.

        To get more information about Tag, see:

        * [API documentation](https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.tags)
        * How-to Guides
            * [Official Documentation](https://cloud.google.com/data-catalog/docs)

        ## Import

        Tag can be imported using any of these accepted formats

        ```sh
         $ pulumi import gcp:datacatalog/tag:Tag default {{name}}
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] column: Resources like Entry can have schemas associated with them. This scope allows users to attach tags to an
               individual column based on that schema.
               For attaching a tag to a nested column, use `.` to separate the column names. Example:
               `outer_column.inner_column`
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TagFieldArgs']]]] fields: This maps the ID of a tag field to the value of and additional information about that field.
               Valid field IDs are defined by the tag's template. A tag must have at least 1 field and at most 500 fields.
               Structure is documented below.
        :param pulumi.Input[str] parent: The name of the parent this tag is attached to. This can be the name of an entry or an entry group. If an entry group, the tag will be attached to
               all entries in that group.
        :param pulumi.Input[str] template: The resource name of the tag template that this tag uses. Example:
               projects/{project_id}/locations/{location}/tagTemplates/{tagTemplateId}
               This field cannot be modified after creation.
        """
        ...
@overload
def __init__(__self__,
             resource_name: str,
             args: TagArgs,
             opts: Optional[pulumi.ResourceOptions] = None):
    """
    Create a Tag resource from a ``TagArgs`` bag of properties.

    Tags are used to attach custom metadata to Data Catalog resources and
    conform to the specifications within their tag template.  See
    [Data Catalog IAM](https://cloud.google.com/data-catalog/docs/concepts/iam)
    for information on the permissions needed to create or view tags, and the
    [API documentation](https://cloud.google.com/data-catalog/docs/reference/rest/v1/projects.locations.entryGroups.tags)
    for more detail.

    ## Import

    Tag can be imported using any of these accepted formats

    ```sh
     $ pulumi import gcp:datacatalog/tag:Tag default {{name}}
    ```

    :param str resource_name: The name of the resource.
    :param TagArgs args: The arguments to use to populate this resource's properties.
    :param pulumi.ResourceOptions opts: Options for the resource.
    """
    ...
def __init__(__self__, resource_name: str, *args, **kwargs):
    # Dispatch to whichever overload the caller used: a single TagArgs bag,
    # or fully expanded keyword properties.  _utilities sorts this out.
    resource_args, opts = _utilities.get_resource_args_opts(TagArgs, pulumi.ResourceOptions, *args, **kwargs)
    if resource_args is None:
        __self__._internal_init(resource_name, *args, **kwargs)
    else:
        __self__._internal_init(resource_name, opts, **resource_args.__dict__)
def _internal_init(__self__,
                   resource_name: str,
                   opts: Optional[pulumi.ResourceOptions] = None,
                   column: Optional[pulumi.Input[str]] = None,
                   fields: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TagFieldArgs']]]]] = None,
                   parent: Optional[pulumi.Input[str]] = None,
                   template: Optional[pulumi.Input[str]] = None,
                   __props__=None):
    """Shared implementation behind both ``__init__`` overloads."""
    opts = pulumi.ResourceOptions() if opts is None else opts
    if not isinstance(opts, pulumi.ResourceOptions):
        raise TypeError('Expected resource options to be a ResourceOptions instance')
    if opts.version is None:
        opts.version = _utilities.get_version()
    if opts.id is None:
        # Creating a new resource: build the input property bag.  When
        # opts.id is set we are rehydrating an existing resource, and the
        # caller-supplied __props__ is forwarded untouched.
        if __props__ is not None:
            raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
        __props__ = TagArgs.__new__(TagArgs)

        # Validate required inputs up front (skipped for URN-based lookups).
        if fields is None and not opts.urn:
            raise TypeError("Missing required property 'fields'")
        if template is None and not opts.urn:
            raise TypeError("Missing required property 'template'")
        __props__.__dict__["column"] = column
        __props__.__dict__["fields"] = fields
        __props__.__dict__["parent"] = parent
        __props__.__dict__["template"] = template
        # Output-only properties start out unknown.
        __props__.__dict__["name"] = None
        __props__.__dict__["template_displayname"] = None
    super(Tag, __self__).__init__(
        'gcp:datacatalog/tag:Tag',
        resource_name,
        __props__,
        opts)
@staticmethod
def get(resource_name: str,
        id: pulumi.Input[str],
        opts: Optional[pulumi.ResourceOptions] = None,
        column: Optional[pulumi.Input[str]] = None,
        fields: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TagFieldArgs']]]]] = None,
        name: Optional[pulumi.Input[str]] = None,
        parent: Optional[pulumi.Input[str]] = None,
        template: Optional[pulumi.Input[str]] = None,
        template_displayname: Optional[pulumi.Input[str]] = None) -> 'Tag':
    """
    Get an existing Tag resource's state with the given name, id, and optional extra
    properties used to qualify the lookup.

    :param str resource_name: The unique name of the resulting resource.
    :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
    :param pulumi.ResourceOptions opts: Options for the resource.
    :param pulumi.Input[str] column: Resources like Entry can have schemas associated with them. This scope allows users to attach tags to an
           individual column based on that schema.  For attaching a tag to a nested column, use `.` to
           separate the column names. Example: `outer_column.inner_column`
    :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['TagFieldArgs']]]] fields: This maps the ID of a tag field to the value of and additional information about that field.
           Valid field IDs are defined by the tag's template. A tag must have at least 1 field and at most 500 fields.
    :param pulumi.Input[str] name: The resource name of the tag in URL format; tag_id is a system-generated identifier.
    :param pulumi.Input[str] parent: The name of the parent this tag is attached to (an entry or an entry group).
    :param pulumi.Input[str] template: The resource name of the tag template that this tag uses.
           This field cannot be modified after creation.
    :param pulumi.Input[str] template_displayname: The display name of the tag template.
    """
    opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

    # Populate the state bag from the qualifying arguments.
    __props__ = _TagState.__new__(_TagState)
    __props__.__dict__.update({
        "column": column,
        "fields": fields,
        "name": name,
        "parent": parent,
        "template": template,
        "template_displayname": template_displayname,
    })
    return Tag(resource_name, opts=opts, __props__=__props__)
# Resolved (output) properties of a live Tag resource.  These mirror the
# inputs above but are read back from the provider; `name` and
# `template_displayname` are populated by the service (they are initialized
# to None when the resource is created).
@property
@pulumi.getter
def column(self) -> pulumi.Output[Optional[str]]:
    """
    Resources like Entry can have schemas associated with them. This scope allows users to attach tags to an
    individual column based on that schema.
    For attaching a tag to a nested column, use `.` to separate the column names. Example:
    `outer_column.inner_column`
    """
    return pulumi.get(self, "column")

@property
@pulumi.getter
def fields(self) -> pulumi.Output[Sequence['outputs.TagField']]:
    """
    This maps the ID of a tag field to the value of and additional information about that field.
    Valid field IDs are defined by the tag's template. A tag must have at least 1 field and at most 500 fields.
    Structure is documented below.
    """
    return pulumi.get(self, "fields")

@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
    """
    The resource name of the tag in URL format. Example:
    projects/{project_id}/locations/{location}/entrygroups/{entryGroupId}/entries/{entryId}/tags/{tag_id} or
    projects/{project_id}/locations/{location}/entrygroups/{entryGroupId}/tags/{tag_id} where tag_id is a system-generated
    identifier. Note that this Tag may not actually be stored in the location in this name.
    """
    return pulumi.get(self, "name")

@property
@pulumi.getter
def parent(self) -> pulumi.Output[Optional[str]]:
    """
    The name of the parent this tag is attached to. This can be the name of an entry or an entry group. If an entry group, the tag will be attached to
    all entries in that group.
    """
    return pulumi.get(self, "parent")

@property
@pulumi.getter
def template(self) -> pulumi.Output[str]:
    """
    The resource name of the tag template that this tag uses. Example:
    projects/{project_id}/locations/{location}/tagTemplates/{tagTemplateId}
    This field cannot be modified after creation.
    """
    return pulumi.get(self, "template")

@property
@pulumi.getter(name="templateDisplayname")
def template_displayname(self) -> pulumi.Output[str]:
    """
    The display name of the tag template.
    """
    return pulumi.get(self, "template_displayname")
| 44.1532 | 187 | 0.558653 | 4,139 | 42,078 | 5.521865 | 0.071756 | 0.064319 | 0.032466 | 0.030803 | 0.923868 | 0.908729 | 0.892234 | 0.879064 | 0.876832 | 0.869744 | 0 | 0.001433 | 0.353082 | 42,078 | 952 | 188 | 44.19958 | 0.838176 | 0.649413 | 0 | 0.606635 | 1 | 0 | 0.085453 | 0.002448 | 0 | 0 | 0 | 0 | 0 | 1 | 0.156398 | false | 0.004739 | 0.033175 | 0 | 0.28436 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
2bd714ea3d3389ae2bd5ba767fca6522f6261b7c | 8,274 | py | Python | ext/ANTsPyNet/antspynet/architectures/create_convolutional_autoencoder_model.py | tsmonteiro/fmri_proc | ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1 | [
"MIT"
] | 2 | 2021-11-16T10:00:33.000Z | 2021-12-13T02:57:40.000Z | ext/ANTsPyNet/antspynet/architectures/create_convolutional_autoencoder_model.py | tsmonteiro/fmri_proc | ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1 | [
"MIT"
] | null | null | null | ext/ANTsPyNet/antspynet/architectures/create_convolutional_autoencoder_model.py | tsmonteiro/fmri_proc | ee740cfa3c3a7ef8e1ee1ebd3b286a66712e0ec1 | [
"MIT"
] | 1 | 2021-12-13T02:57:27.000Z | 2021-12-13T02:57:27.000Z |
import math

import numpy as np

from keras.layers import Input, Conv2D, Conv2DTranspose, Conv3D, Conv3DTranspose, Dense, Flatten, Reshape
from keras.models import Model
def create_convolutional_autoencoder_model_2d(input_image_size,
                                              number_of_filters_per_layer=(32, 64, 128, 10),
                                              convolution_kernel_size=(5, 5),
                                              deconvolution_kernel_size=(5, 5)
                                             ):
    """
    Create a 2-D symmetric convolutional autoencoder model.

    Builds a mirrored encoder/decoder pair from the per-layer filter counts;
    the final entry of ``number_of_filters_per_layer`` is the size of the
    latent (encoded) representation.  Ported from the Keras python
    implementation at https://github.com/XifengGuo/DEC-keras

    Arguments
    ---------
    input_image_size : tuple
        Shape of the 2-D input image, e.g. ``(128, 128, 3)``.

    number_of_filters_per_layer : tuple
        Filters per encoding layer; the last element is the latent size.

    convolution_kernel_size : tuple or scalar
        Kernel size for the encoding convolutions.

    deconvolution_kernel_size : tuple or scalar
        Kernel size for the decoding (transposed) convolutions.

    Returns
    -------
    list
        ``[autoencoder_model, encoder_model]`` as Keras models.

    Example
    -------
    >>> autoencoder, encoder = create_convolutional_autoencoder_model_2d((128, 128, 3))
    >>> autoencoder.summary()
    >>> encoder.summary()
    """

    activation = 'relu'
    strides = (2, 2)

    encoding_layer_count = len(number_of_filters_per_layer) - 1
    # Each strided layer halves the spatial extent.
    downsampling_factor = 2 ** encoding_layer_count

    # When the image size does not divide evenly by the total downsampling
    # factor, the boundary layers switch to 'valid' padding (paired with a
    # shrunken kernel below).
    boundary_padding = 'same' if input_image_size[0] % downsampling_factor == 0 else 'valid'

    inputs = Input(shape=input_image_size)

    encoder = inputs
    for layer_index in range(encoding_layer_count):
        is_last = layer_index == encoding_layer_count - 1
        encoder = Conv2D(
            filters=number_of_filters_per_layer[layer_index],
            kernel_size=(tuple(np.array(convolution_kernel_size) - 2)
                         if is_last else convolution_kernel_size),
            strides=strides,
            activation=activation,
            padding=(boundary_padding if is_last else 'same'))(encoder)

    encoder = Flatten()(encoder)
    encoder = Dense(units=number_of_filters_per_layer[-1])(encoder)

    penultimate_number_of_filters = \
        number_of_filters_per_layer[encoding_layer_count - 1]

    downsampled_size = (math.floor(input_image_size[0] / downsampling_factor),
                        math.floor(input_image_size[1] / downsampling_factor))
    dense_units = (penultimate_number_of_filters *
                   downsampled_size[0] * downsampled_size[1])

    # Decoder: expand the latent vector back to the downsampled feature map,
    # then mirror the encoder with transposed convolutions.
    autoencoder = Dense(units=dense_units, activation=activation)(encoder)
    autoencoder = Reshape(target_shape=(*downsampled_size, penultimate_number_of_filters))(autoencoder)

    for layer_index in range(encoding_layer_count, 1, -1):
        is_first = layer_index == encoding_layer_count
        # NOTE: non-first decoding layers intentionally reuse
        # convolution_kernel_size, matching the original implementation.
        autoencoder = Conv2DTranspose(
            filters=number_of_filters_per_layer[layer_index - 2],
            kernel_size=(tuple(np.array(deconvolution_kernel_size) - 2)
                         if is_first else convolution_kernel_size),
            strides=strides,
            activation=activation,
            padding=(boundary_padding if is_first else 'same'))(autoencoder)

    # Final transposed convolution restores the input channel count.
    autoencoder = Conv2DTranspose(input_image_size[-1],
                                  kernel_size=deconvolution_kernel_size,
                                  strides=strides,
                                  padding='same')(autoencoder)

    return [Model(inputs=inputs, outputs=autoencoder),
            Model(inputs=inputs, outputs=encoder)]
def create_convolutional_autoencoder_model_3d(input_image_size,
                                              number_of_filters_per_layer=(32, 64, 128, 10),
                                              convolution_kernel_size=(5, 5, 5),
                                              deconvolution_kernel_size=(5, 5, 5)
                                             ):
    """
    Function for creating a 3-D symmetric convolutional autoencoder model.

    Builds an autoencoder based on the specified array definining the
    number of units in the encoding branch.  Ported from the Keras python
    implementation here:

            https://github.com/XifengGuo/DEC-keras

    Arguments
    ---------
    input_image_size : tuple
        A tuple defining the shape of the 3-D input image

    number_of_units_per_layer : tuple
        A tuple defining the number of units in the encoding branch.

    convolution_kernel_size : tuple or scalar
        Kernel size for convolution

    deconvolution_kernel_size : tuple or scalar
        Kernel size for deconvolution

    Returns
    -------
    Keras models
        A convolutional encoder and autoencoder Keras model.

    Example
    -------
    >>> autoencoder, encoder = create_convolutional_autoencoder_model_3d((128, 128, 128, 3))
    >>> autoencoder.summary()
    >>> encoder.summary()
    """

    activation = 'relu'
    strides = (2, 2, 2)

    number_of_encoding_layers = len(number_of_filters_per_layer) - 1

    # Each strided layer halves the spatial extent, so the total
    # downsampling over the encoding branch is 2 ** number_of_encoding_layers.
    factor = 2 ** number_of_encoding_layers

    # If the first image dimension does not divide evenly by the factor,
    # the boundary layers use 'valid' padding (with a shrunken kernel).
    # NOTE(review): only input_image_size[0] is checked -- presumably the
    # input is expected to be cubic; confirm for anisotropic volumes.
    padding = 'valid'
    if input_image_size[0] % factor == 0:
        padding = 'same'

    inputs = Input(shape = input_image_size)

    encoder = inputs

    for i in range(number_of_encoding_layers):
        local_padding = 'same'
        kernel_size = convolution_kernel_size
        if i == (number_of_encoding_layers - 1):
            local_padding = padding
            kernel_size = tuple(np.array(convolution_kernel_size) - 2)

        # NOTE(review): Conv3D must be importable from keras.layers -- the
        # original import line at the top of this file only pulled in the
        # 2-D layers; confirm it includes Conv3D/Conv3DTranspose.
        encoder = Conv3D(filters=number_of_filters_per_layer[i],
                         kernel_size=kernel_size,
                         strides=strides,
                         activation=activation,
                         padding=local_padding)(encoder)

    encoder = Flatten()(encoder)
    encoder = Dense(units=number_of_filters_per_layer[-1])(encoder)

    autoencoder = encoder

    penultimate_number_of_filters = \
        number_of_filters_per_layer[number_of_encoding_layers-1]

    input_image_size_factored = ((math.floor(input_image_size[0] / factor)),
                                 (math.floor(input_image_size[1] / factor)),
                                 (math.floor(input_image_size[2] / factor)))

    number_of_units_for_encoder_output = (penultimate_number_of_filters *
        input_image_size_factored[0] * input_image_size_factored[1] *
        input_image_size_factored[2])

    # Decoder: expand the latent vector back to the downsampled feature map,
    # then mirror the encoder with transposed convolutions.
    autoencoder = Dense(units=number_of_units_for_encoder_output,
                        activation=activation)(autoencoder)
    autoencoder = Reshape(target_shape=(*input_image_size_factored, penultimate_number_of_filters))(autoencoder)

    for i in range(number_of_encoding_layers, 1, -1):
        local_padding = 'same'
        # NOTE: non-first decoding layers default to convolution_kernel_size
        # (not the deconvolution size) -- this mirrors the 2-D variant.
        kernel_size = convolution_kernel_size
        if i == number_of_encoding_layers:
            local_padding = padding
            kernel_size = tuple(np.array(deconvolution_kernel_size) - 2)

        autoencoder = Conv3DTranspose(filters=number_of_filters_per_layer[i-2],
                                      kernel_size=kernel_size,
                                      strides=strides,
                                      activation=activation,
                                      padding=local_padding)(autoencoder)

    # Final transposed convolution restores the input channel count.
    autoencoder = Conv3DTranspose(input_image_size[-1],
                                  kernel_size=deconvolution_kernel_size,
                                  strides=strides,
                                  padding='same')(autoencoder)

    autoencoder_model = Model(inputs=inputs, outputs=autoencoder)
    encoder_model = Model(inputs=inputs, outputs=encoder)

    return([autoencoder_model, encoder_model])
| 36.289474 | 112 | 0.618443 | 893 | 8,274 | 5.410974 | 0.113102 | 0.069536 | 0.069536 | 0.063742 | 0.959023 | 0.937086 | 0.923841 | 0.913286 | 0.913286 | 0.913286 | 0 | 0.017795 | 0.307227 | 8,274 | 227 | 113 | 36.449339 | 0.825192 | 0.208122 | 0 | 0.793103 | 0 | 0 | 0.007891 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.017241 | false | 0 | 0.034483 | 0 | 0.051724 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
a69a703809a1229f1379ced4ccd94bc767aa6e02 | 2,215 | py | Python | x12genapp/x12/template.py | dixonwhitmire/x12genapp | d00b19504de34a2c686704165b04e871d3a1be9b | [
"Apache-2.0"
] | 3 | 2020-12-08T04:28:56.000Z | 2021-01-14T07:44:14.000Z | x12genapp/x12/template.py | dixonwhitmire/x12genapp | d00b19504de34a2c686704165b04e871d3a1be9b | [
"Apache-2.0"
] | null | null | null | x12genapp/x12/template.py | dixonwhitmire/x12genapp | d00b19504de34a2c686704165b04e871d3a1be9b | [
"Apache-2.0"
] | null | null | null | from typing import Dict
def get_271_existing_member(demographics: Dict) -> str:
"""
Returns an Eligibility/271 response where a member has current insurance coverage.
:param demographics: Demographic fields included within the x12 response.
"""
x12 = f"""ISA*00* *00* *ZZ*890069730 *ZZ*154663145 *200929*1705*|*00501*000000001*0*T*:~
GS*HS*890069730*154663145*20200929*1705*0001*X*005010X279A1~
ST*271*4321*005010X279A1~
BHT*0022*11*10001234*20060501*1319~
HL*1**20*1~
NM1*PR*2*ABC COMPANY*****PI*842610001~
HL*2*1*21*1~
NM1*1P*2*BONE AND JOINT CLINIC*****SV*2000035~
HL*3*2*22*0~
TRN*2*{demographics['trace_number']}*9877281234~
NM1*IL*1*{demographics['last_name']}*{demographics['first_name']}*{demographics['middle_name']}***{demographics['identification_code_type']}*{demographics['identification_code']}~
DMG*D8*{demographics['birth_date']}*{demographics['gender']}~
DTP*346*D8*20060101~
EB*1**30**GOLD 123 PLAN~
EB*L~
EB*1**1>33>35>47>86>88>98>AL>MH>UC~
EB*B**1>33>35>47>86>88>98>AL>MH>UC*HM*GOLD 123 PLAN*27*10*****Y~
EB*B**1>33>35>47>86>88>98>AL>MH>UC*HM*GOLD 123 PLAN*27*30*****N~
LS*2120~
NM1*P3*1*JONES*MARCUS****SV*0202034~
LE*2120~
SE*20*4321~""".replace('\n', '')
return x12
def get_271_member_not_found(demographics: Dict) -> str:
    """
    Returns an Eligibility/271 response where a member is not found.

    :param demographics: Demographic fields included within the x12 response.
    :return: x12 transaction
    """
    # Same envelope (ISA/GS) and information source/receiver loops as the
    # "existing member" response, but instead of EB (benefit) segments the
    # response carries an AAA segment, which reports that the request could
    # not be validated (member not found).  Newlines are stripped so the
    # result is a single contiguous x12 string.
    x12 = f"""ISA*00* *00* *ZZ*890069730 *ZZ*154663145 *200929*1705*|*00501*000000001*0*T*:~
GS*HS*890069730*154663145*20200929*1705*0001*X*005010X279A1~
ST*271*4321*005010X279A1~
BHT*0022*11*10001234*20060501*1319~
HL*1**20*1~
NM1*PR*2*ABC COMPANY*****PI*842610001~
HL*2*1*21*1~
NM1*1P*2*BONE AND JOINT CLINIC*****SV*2000035~
HL*3*2*22*0~
TRN*2*{demographics['trace_number']}*9877281234~
NM1*IL*1*{demographics['last_name']}*{demographics['first_name']}*{demographics['middle_name']}***{demographics['identification_code_type']}*{demographics['identification_code']}~
DMG*D8*{demographics['birth_date']}*{demographics['gender']}~
AAA*Y**75*C~
SE*12*4321~""".replace('\n', '')
    return x12
| 39.553571 | 179 | 0.692551 | 348 | 2,215 | 4.341954 | 0.382184 | 0.063534 | 0.079418 | 0.013898 | 0.854401 | 0.826605 | 0.826605 | 0.826605 | 0.826605 | 0.74454 | 0 | 0.251266 | 0.108804 | 2,215 | 55 | 180 | 40.272727 | 0.514184 | 0.14447 | 0 | 0.634146 | 0 | 0.170732 | 0.865478 | 0.615343 | 0 | 0 | 0 | 0 | 0 | 1 | 0.04878 | false | 0 | 0.02439 | 0 | 0.121951 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
a6bd157333dfad7571e8f1b2fefb4f17f30c2922 | 120 | py | Python | oth-chain/gui/__init__.py | McSido/oth-chain | 16da2c06ce1188a825e3f3b236cd2c2fb5d59a68 | [
"MIT"
] | null | null | null | oth-chain/gui/__init__.py | McSido/oth-chain | 16da2c06ce1188a825e3f3b236cd2c2fb5d59a68 | [
"MIT"
] | 10 | 2018-04-05T09:46:31.000Z | 2018-08-29T13:58:32.000Z | oth-chain/gui/__init__.py | McSido/oth-chain | 16da2c06ce1188a825e3f3b236cd2c2fb5d59a68 | [
"MIT"
] | null | null | null | from .GUI import gui_loop
from .dns_gui import gui_loop as dns_gui_loop
from .ddos_gui import gui_loop as ddos_gui_loop
| 30 | 47 | 0.841667 | 25 | 120 | 3.68 | 0.28 | 0.380435 | 0.391304 | 0.521739 | 0.391304 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.133333 | 120 | 3 | 48 | 40 | 0.884615 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
471b28e9a2d9067ad355b2e137c01b540bc90b3b | 88 | py | Python | Actividad2/Codigo/Catalan.py | maps16/FComputacional1 | eb4a5b5ea9542023a5f928cc1f15d3f25f7ea0d0 | [
"MIT"
] | null | null | null | Actividad2/Codigo/Catalan.py | maps16/FComputacional1 | eb4a5b5ea9542023a5f928cc1f15d3f25f7ea0d0 | [
"MIT"
] | null | null | null | Actividad2/Codigo/Catalan.py | maps16/FComputacional1 | eb4a5b5ea9542023a5f928cc1f15d3f25f7ea0d0 | [
"MIT"
# Print the Catalan numbers below one million.
# C(0) = 1 and C(n+1) = C(n) * (4n + 2) / (n + 2); the division is always
# exact for this recurrence, so floor division keeps everything in integers.
x1, n = 1, 0
while x1 < 1E+6:
    # FIX: Python 3 print function (original used a Python 2 print statement).
    print(x1)
    # FIX: '//' instead of '/' — Python 3 true division would silently turn
    # x1 into a float and accumulate rounding error.
    x1 = x1 * (4 * n + 2) // (n + 2)
    n += 1
| 9.777778 | 25 | 0.375 | 19 | 88 | 1.736842 | 0.526316 | 0.121212 | 0.181818 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.245283 | 0.397727 | 88 | 8 | 26 | 11 | 0.377358 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.2 | 1 | 0 | 1 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
4726faf850af16482c5d2657c4e16cd40d873a6d | 124 | py | Python | src/riot_api/setup.py | AnnekinMeyburgh/lol-spreadsheets | dff24b2d99c968228f1870788fc2bb69eb36a148 | [
"MIT"
] | null | null | null | src/riot_api/setup.py | AnnekinMeyburgh/lol-spreadsheets | dff24b2d99c968228f1870788fc2bb69eb36a148 | [
"MIT"
] | null | null | null | src/riot_api/setup.py | AnnekinMeyburgh/lol-spreadsheets | dff24b2d99c968228f1870788fc2bb69eb36a148 | [
"MIT"
] | null | null | null | from riotwatcher import LolWatcher
import os
def setup_lol_watcher():
    """Build a LolWatcher client keyed by the RIOT_API_KEY environment variable."""
    api_key = os.environ.get('RIOT_API_KEY')
    return LolWatcher(api_key)
5b80d22ddd1d69ddd49039a99f1db91be9cafc58 | 8,739 | py | Python | inst/python/PB.py | samuel-souza/soccerdatas | a4b9357a71fda21a45819e551dc18c1b77424f23 | [
"MIT"
] | null | null | null | inst/python/PB.py | samuel-souza/soccerdatas | a4b9357a71fda21a45819e551dc18c1b77424f23 | [
"MIT"
] | null | null | null | inst/python/PB.py | samuel-souza/soccerdatas | a4b9357a71fda21a45819e551dc18c1b77424f23 | [
"MIT"
] | null | null | null | from selenium import webdriver
from time import sleep
import numpy as np
import pandas as pd
from webdriver_manager.firefox import GeckoDriverManager
import os
import platform
# --- Scraper setup: open the Globo Esporte page for the Campeonato Paraibano ---
url = 'https://globoesporte.globo.com/pb/futebol/campeonato-paraibano/'
# NOTE(review): 'sys' shadows the stdlib module name; left unchanged here.
sys = platform.system()
if sys == "Windows":
    browser = webdriver.Firefox(service_log_path= os.devnull)
else:
    # Non-Windows: let webdriver_manager fetch a matching geckodriver.
    browser = webdriver.Firefox(executable_path = GeckoDriverManager().install(),service_log_path= os.devnull)
browser.get(url)
browser.maximize_window()
m = browser.find_element_by_class_name('grid-24')
f = m.find_element_by_class_name('navegacao-fase')
fases = f.find_element_by_class_name('navegacao-fase__fase').text
fases = fases.upper()
# Navigate back from the currently shown phase to the first (group) phase so
# the rounds can be walked from the start; each left-arrow click goes one
# phase back.  Failures are silently ignored (best-effort navigation).
if fases == 'FINAL':
    try:
        lb = m.find_element_by_class_name('navegacao-fase__seta-esquerda')
        for i in range(3):
            lb.click()
            sleep(1)
    except:
        None
elif fases == 'SEMIFINAL':
    try:
        lb = m.find_element_by_class_name('navegacao-fase__seta-esquerda')
        for i in range(2):
            lb.click()
            sleep(1)
    except:
        None
elif fases == 'SEGUNDA FASE':
    try:
        lb = m.find_element_by_class_name('navegacao-fase__seta-esquerda')
        lb.click()
    except:
        None
else:
    botesq = m.find_element_by_class_name('lista-jogos__navegacao--seta-esquerda')
    botdir = m.find_element_by_class_name('lista-jogos__navegacao--seta-direita')
    sleep(1)
# Current round number, e.g. "5ª rodada" -> 5.
r = m.find_element_by_class_name('lista-jogos__navegacao--rodada')
rod = int(r.text.split()[0].replace('ª',''))
botesq = m.find_element_by_class_name('lista-jogos__navegacao--seta-esquerda')
botdir = m.find_element_by_class_name('lista-jogos__navegacao--seta-direita')
# Accumulators filled by checker(): uppercase lists hold finished matches,
# lowercase lists hold future (scoreless) matches; c / l hold home-ground flags.
MAN = []
X = []
Y = []
VIS = []
local = []
man = []
x = []
y = []
vis = []
c = []
l = []
def checker(ho, pm, pv, vi, lo):
    """
    Route one scraped fixture into the proper accumulator lists.

    Finished matches (both score strings present) go into the uppercase module
    lists (X/MAN/Y/VIS/c); matches still without a score go into the lowercase
    ones (x/man/y/vis/l).  The flag appended to c / l is 1 when the
    [team, venue] pair is a known home ground in ``times_estadios``, else 0.

    :param ho: home-team sigla
    :param pm: home score text ('' when not played yet)
    :param pv: away score text ('' when not played yet)
    :param vi: visiting-team sigla
    :param lo: venue name
    """
    home_ground = 1 if [ho, lo] in times_estadios else 0
    if pm and pv != '':
        X.append(pm)
        MAN.append(ho)
        Y.append(pv)
        VIS.append(vi)
        c.append(home_ground)
    else:
        x.append(pm)
        man.append(ho)
        y.append(pv)
        vis.append(vi)
        l.append(home_ground)
# [team sigla, stadium name] pairs naming each club's home ground; used by
# checker() to flag whether the mandante is playing at home.
times_estadios = [
    ['BOT','ALMEIDÃO'],
    ['SPC','CARNEIRÃO (PB)'],
    ['TRZ','AMIGÃO'],
    ['SOU','MARIZÃO (PB)'],
    ['CAM','AMIGÃO'],
    ['NAP','JOSÉ CAVALCANTI'],
    ['PER','AMIGÃO'],
    ['ATL','PERPETÃO']
]
# Rewind from the current round back to round 1.
for i in range(rod):
    botesq.click()
    sleep(0.35)
# Scrape the 4 fixtures of every round already played (rounds 1..rod).
for i in range(rod):
    for j in range(4):
        ul = m.find_element_by_class_name('lista-jogos')
        lis = ul.find_elements_by_tag_name('li')
        thm = lis[j].find_element_by_class_name('theme')
        inf = thm.find_element_by_class_name('jogo__informacoes')
        lcl = inf.find_element_by_class_name('jogo__informacoes--local')
        placar = thm.find_element_by_class_name('placar')
        mdt = placar.find_element_by_class_name('placar__equipes--mandante')
        mandante = mdt.find_element_by_class_name('equipes__sigla')
        vst = placar.find_element_by_class_name('placar__equipes--visitante')
        visitante = vst.find_element_by_class_name('equipes__sigla')
        plb = placar.find_element_by_class_name('placar-box')
        t1 = plb.find_element_by_class_name('placar-box__valor--mandante')
        t2 = plb.find_element_by_class_name('placar-box__valor--visitante')
        # checker() routes the fixture into played/future accumulators.
        checker(mandante.text,t1.text,t2.text,visitante.text,lcl.text)
    botdir.click()
    sleep(0.3)
# Remaining (not yet played) rounds of the 7-round first phase.
for i in range(7-rod):
    for j in range(4):
        ul = m.find_element_by_class_name('lista-jogos')
        lis = ul.find_elements_by_tag_name('li')
        thm = lis[j].find_element_by_class_name('theme')
        inf = thm.find_element_by_class_name('jogo__informacoes')
        lcl = inf.find_element_by_class_name('jogo__informacoes--local')
        placar = thm.find_element_by_class_name('placar')
        mdt = placar.find_element_by_class_name('placar__equipes--mandante')
        mandante = mdt.find_element_by_class_name('equipes__sigla')
        vst = placar.find_element_by_class_name('placar__equipes--visitante')
        visitante = vst.find_element_by_class_name('equipes__sigla')
        plb = placar.find_element_by_class_name('placar-box')
        t1 = plb.find_element_by_class_name('placar-box__valor--mandante')
        t2 = plb.find_element_by_class_name('placar-box__valor--visitante')
        checker(mandante.text,t1.text,t2.text,visitante.text,lcl.text)
    botdir.click()
    sleep(0.3)
# First phase complete (7 rounds x 4 fixtures = 28): step through the knockout
# brackets with the right phase arrow — presumably second phase, semifinal and
# final, two fixtures each.
if rod*4 == 28:
    rb = m.find_element_by_class_name('navegacao-fase__seta-direita')
    rb.click()
    for i in range(0,2,1):
        ul = m.find_element_by_class_name('tabela__mata-mata')
        lis = ul.find_elements_by_class_name('chave__jogos--1')
        thm = lis[i].find_element_by_class_name('theme')
        tl = thm.find_element_by_class_name('jogo__transmissao--link')
        inf = tl.find_element_by_class_name('jogo__informacoes')
        lcl = inf.find_element_by_class_name('jogo__informacoes--local')
        placar = tl.find_element_by_class_name('placar')
        mdt = placar.find_element_by_class_name('placar__equipes--mandante')
        mandante = mdt.find_element_by_class_name("equipes__sigla").get_attribute('textContent')
        vst = placar.find_element_by_class_name('placar__equipes--visitante')
        visitante = vst.find_element_by_class_name('equipes__sigla').get_attribute('textContent')
        plb = placar.find_element_by_class_name('placar-box')
        t1 = plb.find_element_by_class_name('placar-box__valor--mandante')
        t2 = plb.find_element_by_class_name('placar-box__valor--visitante')
        checker(mandante,t1.text,t2.text,visitante,lcl.text)
    sleep(2)
    rb.click()
    for i in range(0,2,1):
        ul = m.find_element_by_class_name('tabela__mata-mata')
        lis = ul.find_elements_by_class_name('chave__jogos--1')
        thm = lis[i].find_element_by_class_name('theme')
        tl = thm.find_element_by_class_name('jogo__transmissao--link')
        inf = tl.find_element_by_class_name('jogo__informacoes')
        lcl = inf.find_element_by_class_name('jogo__informacoes--local')
        placar = tl.find_element_by_class_name('placar')
        mdt = placar.find_element_by_class_name('placar__equipes--mandante')
        mandante = mdt.find_element_by_class_name("equipes__sigla").get_attribute('textContent')
        vst = placar.find_element_by_class_name('placar__equipes--visitante')
        visitante = vst.find_element_by_class_name('equipes__sigla').get_attribute('textContent')
        plb = placar.find_element_by_class_name('placar-box')
        t1 = plb.find_element_by_class_name('placar-box__valor--mandante')
        t2 = plb.find_element_by_class_name('placar-box__valor--visitante')
        checker(mandante,t1.text,t2.text,visitante,lcl.text)
    sleep(2)
    rb.click()
    # Last bracket: the markup differs slightly — 'theme' nodes are located
    # directly instead of via 'chave__jogos--1'.
    for i in range(0,2,1):
        ul = m.find_element_by_class_name('tabela__mata-mata')
        lis = ul.find_elements_by_class_name('theme')
        tl = lis[i].find_element_by_class_name('jogo__transmissao--link')
        inf = tl.find_element_by_class_name('jogo__informacoes')
        lcl = inf.find_element_by_class_name('jogo__informacoes--local')
        placar = tl.find_element_by_class_name('placar')
        mdt = placar.find_element_by_class_name('placar__equipes--mandante')
        mandante = mdt.find_element_by_class_name("equipes__sigla").get_attribute('textContent')
        vst = placar.find_element_by_class_name('placar__equipes--visitante')
        visitante = vst.find_element_by_class_name('equipes__sigla').get_attribute('textContent')
        plb = placar.find_element_by_class_name('placar-box')
        t1 = plb.find_element_by_class_name('placar-box__valor--mandante')
        t2 = plb.find_element_by_class_name('placar-box__valor--visitante')
        checker(mandante,t1.text,t2.text,visitante,lcl.text)
# Assemble the two tables and persist them as CSVs in the home directory.
fm = np.array([man,x,y,vis,l])
fm = fm.transpose()
future_matches = pd.DataFrame(fm, columns = ['Home','X','Y','Visitor','Local'])
db = np.array([MAN,X,Y,VIS,c])
db = db.transpose()
database = pd.DataFrame(db,columns=['Home','X','Y','Visitor','Local'])
browser.quit()
database.to_csv('~/dtbase.csv',index=False)
future_matches.to_csv('~/fmatches.csv',index=False)
| 34.003891 | 110 | 0.663119 | 1,210 | 8,739 | 4.415702 | 0.143802 | 0.10088 | 0.158525 | 0.249298 | 0.83979 | 0.815647 | 0.796369 | 0.78907 | 0.760621 | 0.745274 | 0 | 0.008388 | 0.20872 | 8,739 | 256 | 111 | 34.136719 | 0.764281 | 0 | 0 | 0.649038 | 0 | 0 | 0.207418 | 0.115614 | 0 | 0 | 0 | 0 | 0 | 1 | 0.004808 | false | 0 | 0.033654 | 0 | 0.038462 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5b90c885f461d319020ecd5a9c25a1afc75da71e | 27,058 | py | Python | demo/simple_rl/model.py | zqh0253/DI-drive | b5fd8ca3cf2d4d8a8a1b807dda718e9f6d760981 | [
"Apache-2.0"
] | null | null | null | demo/simple_rl/model.py | zqh0253/DI-drive | b5fd8ca3cf2d4d8a8a1b807dda718e9f6d760981 | [
"Apache-2.0"
] | null | null | null | demo/simple_rl/model.py | zqh0253/DI-drive | b5fd8ca3cf2d4d8a8a1b807dda718e9f6d760981 | [
"Apache-2.0"
] | null | null | null | import torch
import torch.nn as nn
from typing import List, Tuple, Union, Dict, Optional
from core.models import BEVSpeedConvEncoder, RGBSpeedConvEncoder
from ding.model.common.head import DuelingHead, RegressionHead, ReparameterizationHead, MultiHead, DiscreteHead
class DQNRLModel(nn.Module):
    """
    DQN model over BEV + speed observations: a conv encoder followed by a
    Q-value head (optionally dueling, optionally multi-discrete).
    """

    def __init__(
            self,
            obs_shape: Tuple = [5, 32, 32],
            action_shape: Union[int, Tuple] = 21,
            encoder_hidden_size_list: Tuple = [64, 128, 256],
            dueling: bool = True,
            head_hidden_size: Optional[int] = 512,
            head_layer_num: int = 1,
            activation: Optional[nn.Module] = nn.ReLU(),
            norm_type: Optional[str] = None
    ) -> None:
        super().__init__()
        # Conv encoder producing a flat embedding of size head_hidden_size.
        self._encoder = BEVSpeedConvEncoder(obs_shape, encoder_hidden_size_list, head_hidden_size, [3, 3, 3], [2, 2, 2])
        head_cls = DuelingHead if dueling else DiscreteHead
        if isinstance(action_shape, int):
            # Single discrete action space.
            self._head = head_cls(
                head_hidden_size, action_shape, head_layer_num, activation=activation, norm_type=norm_type
            )
        else:
            # One sub-head per action dimension.
            self._head = MultiHead(
                head_cls,
                head_hidden_size,
                action_shape,
                layer_num=head_layer_num,
                activation=activation,
                norm_type=norm_type
            )

    def forward(self, obs):
        """Encode the observation and return the head output."""
        return self._head(self._encoder(obs))
class DDPGRLModel(nn.Module):
    """
    DDPG actor-critic model over BEV + speed observations.

    The actor maps the encoded observation to a tanh-bounded continuous
    action; the critic concatenates the action onto the encoded observation
    and regresses a scalar Q-value.  ``twin_critic=True`` builds two critic
    heads (and a separate twin encoder unless the encoder is shared).
    """

    def __init__(
            self,
            obs_shape: Tuple = [5, 32, 32],
            action_shape: Union[int, tuple] = 2,
            share_encoder: bool = False,
            encoder_hidden_size_list: List = [64, 128, 256],
            encoder_embedding_size: int = 512,
            twin_critic: bool = False,
            actor_head_hidden_size: int = 512,
            actor_head_layer_num: int = 1,
            critic_head_hidden_size: int = 512,
            critic_head_layer_num: int = 1,
            activation: Optional[nn.Module] = nn.ReLU(),
            norm_type: Optional[str] = None,
    ) -> None:
        super().__init__()
        self._obs_shape = obs_shape
        self._act_shape = action_shape
        self.twin_critic = twin_critic
        self.share_encoder = share_encoder
        if self.share_encoder:
            # One encoder instance serves both actor and critic.
            self.actor_encoder = self.critic_encoder = BEVSpeedConvEncoder(
                self._obs_shape, encoder_hidden_size_list, encoder_embedding_size, [3, 3, 3], [2, 2, 2]
            )
        else:
            self.actor_encoder = BEVSpeedConvEncoder(
                self._obs_shape, encoder_hidden_size_list, encoder_embedding_size, [3, 3, 3], [2, 2, 2]
            )
            self.critic_encoder = BEVSpeedConvEncoder(
                self._obs_shape, encoder_hidden_size_list, encoder_embedding_size, [3, 3, 3], [2, 2, 2]
            )
        # Actor: embedding -> hidden -> tanh-bounded regression over action dims.
        self.actor_head = nn.Sequential(
            nn.Linear(encoder_embedding_size, actor_head_hidden_size), activation,
            RegressionHead(
                actor_head_hidden_size,
                action_shape,
                actor_head_layer_num,
                final_tanh=True,
                activation=activation,
                norm_type=norm_type
            )
        )
        self.twin_critic = twin_critic
        if self.twin_critic:
            if not self.share_encoder:
                self._twin_encoder = BEVSpeedConvEncoder(self._obs_shape, encoder_hidden_size_list, encoder_embedding_size, [3, 3, 3], [2, 2, 2])
            else:
                self._twin_encoder = self.actor_encoder
            # NOTE(review): a plain Python list — these heads only become
            # registered submodules via the nn.ModuleList built below.
            self.critic_head = [
                nn.Sequential(
                    nn.Linear(encoder_embedding_size + self._act_shape, critic_head_hidden_size), activation,
                    RegressionHead(
                        critic_head_hidden_size,
                        1,
                        critic_head_layer_num,
                        final_tanh=False,
                        activation=activation,
                        norm_type=norm_type
                    )
                ) for _ in range(2)
            ]
        else:
            self.critic_head = nn.Sequential(
                nn.Linear(encoder_embedding_size + self._act_shape, critic_head_hidden_size), activation,
                RegressionHead(
                    critic_head_hidden_size,
                    1,
                    critic_head_layer_num,
                    final_tanh=False,
                    activation=activation,
                    norm_type=norm_type
                )
            )
        # for convenience of call some apis(such as: self.critic.parameters()), but may cause
        # misunderstanding when print(self)
        self.actor = nn.ModuleList([self.actor_encoder, self.actor_head])
        if self.twin_critic:
            self.critic = nn.ModuleList([self.critic_encoder, *self.critic_head, self._twin_encoder])
        else:
            self.critic = nn.ModuleList([self.critic_encoder, self.critic_head])

    def forward(self, inputs, mode=None, **kwargs):
        """Dispatch to the compute_* method selected by ``mode``."""
        assert (mode in ['compute_actor_critic', 'compute_actor', 'compute_critic'])
        f = getattr(self, mode)
        return f(inputs, **kwargs)

    def compute_critic(self, inputs: Dict) -> Dict:
        """
        Q-value for ``inputs['obs']`` / ``inputs['action']``.  With twin
        critics the value is a list of two tensors, otherwise a single tensor.
        """
        x0 = self.critic_encoder(inputs['obs'])
        x0 = torch.cat([x0, inputs['action']], dim=1)
        if self.twin_critic:
            x1 = self._twin_encoder(inputs['obs'])
            x1 = torch.cat([x1, inputs['action']], dim=1)
            x = [m(xi)['pred'] for m, xi in [(self.critic_head[0], x0), (self.critic_head[1], x1)]]
        else:
            x = self.critic_head(x0)['pred']
        return {'q_value': x}

    def compute_actor(self, inputs: Dict) -> Dict:
        """Action prediction for the encoded observation."""
        x = self.actor_encoder(inputs)
        action = self.actor_head(x)['pred']
        return {'action': action}
class TD3RLModel(DDPGRLModel):
    """
    TD3 model: identical to :class:`DDPGRLModel` except that twin critics are
    mandatory (required for the clipped double-Q trick).
    """

    def __init__(
            self,
            obs_shape: Tuple = [5, 32, 32],
            action_shape: Union[int, tuple] = 2,
            share_encoder: bool = False,
            encoder_hidden_size_list: List = [64, 128, 256],
            encoder_embedding_size: int = 512,
            twin_critic: bool = True,
            actor_head_hidden_size: int = 512,
            actor_head_layer_num: int = 1,
            critic_head_hidden_size: int = 512,
            critic_head_layer_num: int = 1,
            activation: Optional[nn.Module] = nn.ReLU(),
            norm_type: Optional[str] = None,
    ) -> None:
        super().__init__(
            obs_shape=obs_shape,
            action_shape=action_shape,
            share_encoder=share_encoder,
            encoder_hidden_size_list=encoder_hidden_size_list,
            encoder_embedding_size=encoder_embedding_size,
            twin_critic=twin_critic,
            actor_head_hidden_size=actor_head_hidden_size,
            actor_head_layer_num=actor_head_layer_num,
            critic_head_hidden_size=critic_head_hidden_size,
            critic_head_layer_num=critic_head_layer_num,
            activation=activation,
            norm_type=norm_type,
        )
        # TD3 always needs both critics.
        assert twin_critic
class CSACRGBRLModel(nn.Module):
    """
    Command-conditioned SAC model over RGB camera + speed observations.

    Each head is a :class:`ChoiceHead` holding ``command_n`` (= 4) sub-heads;
    at forward time the sub-head matching each sample's navigation command is
    selected.  The actor emits (mu, sigma) through ReparameterizationHeads;
    the critic regresses Q on cat(embedding, action).  The default
    ``critic_head_hidden_size`` of 514 matches embedding (512) + action (2)
    dims — presumably intentional; verify when changing either default.

    NOTE(review): ``normalization``, ``continuous`` and
    ``encoder_hidden_size_list`` are accepted but not used in this class —
    apparently kept for config compatibility with the sibling models.
    """

    def __init__(
            self,
            obs_shape: Tuple = [3, 320, 180],
            task_pretrained: bool=False,
            img_pretrained: bool=False,
            fix_perception: bool=False,
            normalization = None,
            action_shape: Union[int, tuple] = 2,
            share_encoder: bool = False,
            continuous: bool = True,
            encoder_hidden_size_list: List = [64, 128, 256],
            encoder_embedding_size: int = 512,
            twin_critic: bool = False,
            actor_head_hidden_size: int = 512,
            actor_head_layer_num: int = 1,
            critic_head_hidden_size: int = 514,
            critic_head_layer_num: int = 1,
            activation: Optional[nn.Module] = nn.ReLU(),
            norm_type: Optional[str] = None,
            **kwargs,
    ) -> None:
        super().__init__()
        # Number of distinct high-level commands; one sub-head per command.
        self.command_n = 4
        self._act = nn.ReLU()
        self._obs_shape = obs_shape
        self._act_shape = action_shape
        self.twin_critic = twin_critic
        self.share_encoder = share_encoder
        if self.share_encoder:
            self.actor_encoder = self.critic_encoder = RGBSpeedConvEncoder(
                self._obs_shape, encoder_embedding_size, task_pretrained, img_pretrained, fix_perception
            )
        else:
            self.actor_encoder = RGBSpeedConvEncoder(
                self._obs_shape, encoder_embedding_size, task_pretrained, img_pretrained, fix_perception
            )
            self.critic_encoder = RGBSpeedConvEncoder(
                self._obs_shape, encoder_embedding_size, task_pretrained, img_pretrained, fix_perception
            )
        #self.actor = nn.Sequential(
        #    nn.Linear(encoder_embedding_size, actor_head_hidden_size), activation,
        #    ReparameterizationHead(
        #        actor_head_hidden_size,
        #        action_shape,
        #        actor_head_layer_num,
        #        sigma_type='conditioned',
        #        activation=activation,
        #        norm_type=norm_type
        #    )
        #)
        # Actor: command-selected ReparameterizationHead producing mu/sigma.
        self.actor = ChoiceHead(ReparameterizationHead, self.command_n,
                actor_head_hidden_size,
                action_shape,
                actor_head_layer_num,
                sigma_type='conditioned',
                activation=activation,
                norm_type=norm_type
        )
        self.twin_critic = twin_critic
        if self.twin_critic:
            if self.share_encoder:
                self._twin_encoder = self.actor_encoder
            else:
                self._twin_encoder = RGBSpeedConvEncoder(
                    self._obs_shape, encoder_embedding_size, task_pretrained, img_pretrained, fix_perception
                )
            # Two command-selected regression critics for the double-Q trick.
            self.critic = nn.ModuleList()
            for _ in range(2):
                self.critic.append(
                    ChoiceHead(
                        RegressionHead, self.command_n,
                        critic_head_hidden_size,
                        1,
                        critic_head_layer_num,
                        final_tanh=False,
                        activation=activation,
                        norm_type=norm_type
                    )
                )
        else:
            self.critic = ChoiceHead(
                RegressionHead, self.command_n,
                critic_head_hidden_size,
                1,
                critic_head_layer_num,
                final_tanh=False,
                activation=activation,
                norm_type=norm_type
            )

    def forward(self, inputs, mode=None, **kwargs):
        """Dispatch to compute_actor / compute_critic selected by ``mode``."""
        # NOTE(review): re-assigns the allowed-mode list on self at every call.
        self.mode = ['compute_actor', 'compute_critic']
        assert mode in self.mode, "not support forward mode: {}/{}".format(mode, self.mode)
        return getattr(self, mode)(inputs)

    def compute_critic(self, inputs: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
        """
        Command-conditioned Q-value(s) for ``inputs['obs']`` /
        ``inputs['action']``; a list of two tensors with twin critics.
        """
        x0 = self.critic_encoder(inputs['obs'])
        x0 = torch.cat([x0, inputs['action']], dim=1)
        if self.twin_critic:
            x1 = self._twin_encoder(inputs['obs'])
            x1 = torch.cat([x1, inputs['action']], dim=1)
            x = [m(xi, inputs['obs']['command'])['pred'] for m, xi in [(self.critic[0], x0), (self.critic[1], x1)]]
        else:
            x = self.critic(x0, inputs['obs']['command'])['pred']
        return {'q_value': x}

    def compute_actor(self, inputs) -> Dict[str, torch.Tensor]:
        """Gaussian policy parameters [mu, sigma] for the commanded sub-head."""
        x = self.actor_encoder(inputs)
        x = self.actor(x, inputs['command'])
        return {'logit': [x['mu'], x['sigma']]}
class ChoiceHead(nn.Module):
    """
    Wraps ``choices`` independently-parameterized copies of a head module and,
    at forward time, selects per-sample which copy's output to return based on
    an index tensor (e.g. the navigation command).
    """

    def __init__(self, model, choices, *init_param, **kwargs):
        super().__init__()
        # One head instance per choice, all built with the same arguments.
        self.model_list = nn.ModuleList([model(*init_param, **kwargs) for _ in range(choices)])

    def forward(self, x, idx):
        # ``idx`` appears 1-based (commands 1..choices): it is shifted down
        # by one before being used as a gather index.
        idx = idx.type(torch.int64) - 1
        output_list_dict = dict()
        # Run every head and collect each output key with a trailing
        # "choice" axis appended.
        for m in self.model_list:
            output = m(x)
            for k, v in output.items():
                if k not in output_list_dict:
                    output_list_dict[k] = list()
                output_list_dict[k].append(v.unsqueeze(-1))
        for k,v in output_list_dict.items():
            # Stack into (..., choices), broadcast idx up to the same rank,
            # then gather the chosen slice per sample and drop the choice axis.
            output_list_dict[k] = torch.cat(v, dim=-1)
            cur_idx = idx.clone()
            while 1:
                if len(cur_idx.shape) == len(output_list_dict[k].shape):
                    break
                cur_idx = cur_idx.unsqueeze(-1)
            cur_idx = cur_idx.repeat(1, *output_list_dict[k].shape[1:-1], 1)
            output_list_dict[k] = torch.gather(output_list_dict[k], dim=-1, index=cur_idx).squeeze(-1)
        return output_list_dict
class CPPORGBRLModel(nn.Module):
    """
    Command-conditioned PPO actor-critic over RGB camera + speed observations.

    Actor and value heads are :class:`ChoiceHead` wrappers holding
    ``command_n`` (= 4) sub-heads, selected per-sample by the navigation
    command at forward time.  ``continuous=True`` uses a
    ReparameterizationHead (mu/sigma); otherwise a Discrete/Multi head.

    NOTE(review): ``normalization`` and ``encoder_hidden_size_list`` are
    accepted but unused here — presumably kept for config compatibility.
    """

    def __init__(
            self,
            obs_shape: Tuple = [3, 320, 180],
            task_pretrained: bool=False,
            img_pretrained: bool=False,
            fix_perception: bool=False,
            normalization = None,
            action_shape: Union[int, Tuple] = 2,
            share_encoder: bool = True,
            continuous: bool = True,
            encoder_embedding_size: int = 512,
            encoder_hidden_size_list: List = [64, 128, 256],
            actor_head_hidden_size: int = 512,
            actor_head_layer_num: int = 1,
            critic_head_hidden_size: int = 512,
            critic_head_layer_num: int = 1,
            activation: Optional[nn.Module] = nn.ReLU(),
            norm_type: Optional[str] = None,
            sigma_type: Optional[str] = 'independent',
            bound_type: Optional[str] = None,
    ) -> None:
        super().__init__()
        # Number of distinct high-level commands; one sub-head per command.
        self.command_n = 4
        self._obs_shape = obs_shape
        self._act_shape = action_shape
        self.share_encoder = share_encoder
        if self.share_encoder:
            self.encoder = RGBSpeedConvEncoder(
                self._obs_shape, encoder_embedding_size, task_pretrained, img_pretrained, fix_perception
            )
        else:
            self.actor_encoder = RGBSpeedConvEncoder(
                self._obs_shape, encoder_embedding_size, task_pretrained, img_pretrained, fix_perception
            )
            self.critic_encoder = RGBSpeedConvEncoder(
                self._obs_shape, encoder_embedding_size, task_pretrained, img_pretrained, fix_perception
            )
        # Value branch: command-selected scalar regression.
        self.critic_head = ChoiceHead(RegressionHead, self.command_n,
            critic_head_hidden_size, 1, critic_head_layer_num, activation=activation, norm_type=norm_type)
        self.continuous = continuous
        if self.continuous:
            self.multi_head = False
            self.actor_head = ChoiceHead(ReparameterizationHead, self.command_n,
                actor_head_hidden_size,
                action_shape,
                actor_head_layer_num,
                sigma_type=sigma_type,
                activation=activation,
                norm_type=norm_type,
                bound_type=bound_type)
        else:
            # NOTE(review): discrete heads are NOT command-conditioned here,
            # unlike the continuous branch above.
            multi_head = not isinstance(action_shape, int)
            self.multi_head = multi_head
            if multi_head:
                self.actor_head = MultiHead(
                    DiscreteHead,
                    actor_head_hidden_size,
                    action_shape,
                    layer_num=actor_head_layer_num,
                    activation=activation,
                    norm_type=norm_type
                )
            else:
                self.actor_head = DiscreteHead(
                    actor_head_hidden_size,
                    action_shape,
                    actor_head_layer_num,
                    activation=activation,
                    norm_type=norm_type
                )
        # for convenience of call some apis(such as: self.critic.parameters()), but may cause
        # misunderstanding when print(self)
        if self.share_encoder:
            self.actor = nn.ModuleList([self.encoder, self.actor_head])
            self.critic = nn.ModuleList([self.encoder, self.critic_head])
        else:
            self.actor = nn.ModuleList([self.actor_encoder, self.actor_head])
            self.critic = nn.ModuleList([self.critic_encoder, self.critic_head])

    def forward(self, inputs, mode=None, **kwargs):
        """Dispatch to the compute_* method selected by ``mode``."""
        assert (mode in ['compute_actor_critic', 'compute_actor', 'compute_critic'])
        f = getattr(self, mode)
        return f(inputs, **kwargs)

    def compute_actor_critic(self, inputs) -> Dict[str, torch.Tensor]:
        """Policy logit and state value in a single encoder pass (if shared)."""
        if self.share_encoder:
            actor_embedding = critic_embedding = self.encoder(inputs)
        else:
            actor_embedding = self.actor_encoder(inputs)
            critic_embedding = self.critic_encoder(inputs)
        value = self.critic_head(critic_embedding, inputs['command'])
        actor_output = self.actor_head(actor_embedding, inputs['command'])
        if self.continuous:
            logit = [actor_output['mu'], actor_output['sigma']]
        else:
            logit = actor_output['logit']
        return {'logit': logit, 'value': value['pred']}

    def compute_actor(self, inputs: Dict) -> Dict:
        """Policy output only (logit, or [mu, sigma] when continuous)."""
        if self.share_encoder:
            x = self.encoder(inputs)
        else:
            x = self.actor_encoder(inputs)
        x = self.actor_head(x, inputs['command'])
        if self.continuous:
            x = {'logit': [x['mu'], x['sigma']]}
        return x

    def compute_critic(self, inputs: Dict) -> Dict:
        """State value only."""
        if self.share_encoder:
            x = self.encoder(inputs)
        else:
            x = self.critic_encoder(inputs)
        x = self.critic_head(x, inputs['command'])
        return {'value': x['pred']}
class PPORGBRLModel(nn.Module):
    """
    PPO actor-critic over RGB camera + speed observations.

    An (optionally shared) conv encoder feeds a scalar-regression value head
    and either a ReparameterizationHead (continuous policy, mu/sigma) or a
    Discrete/Multi head (discrete policy).
    """

    def __init__(
            self,
            obs_shape: Tuple = [3, 320, 180],
            action_shape: Union[int, Tuple] = 2,
            share_encoder: bool = True,
            continuous: bool = True,
            encoder_embedding_size: int = 512,
            encoder_hidden_size_list: List = [64, 128, 256],
            actor_head_hidden_size: int = 512,
            actor_head_layer_num: int = 1,
            critic_head_hidden_size: int = 512,
            critic_head_layer_num: int = 1,
            activation: Optional[nn.Module] = nn.ReLU(),
            norm_type: Optional[str] = None,
            sigma_type: Optional[str] = 'independent',
            bound_type: Optional[str] = None,
    ) -> None:
        super().__init__()
        self._obs_shape = obs_shape
        self._act_shape = action_shape
        self.share_encoder = share_encoder
        if self.share_encoder:
            self.encoder = RGBSpeedConvEncoder(
                self._obs_shape, encoder_embedding_size
            )
        else:
            self.actor_encoder = RGBSpeedConvEncoder(
                self._obs_shape, encoder_embedding_size
            )
            self.critic_encoder = RGBSpeedConvEncoder(
                self._obs_shape, encoder_embedding_size
            )
        # Value branch: scalar regression over the embedding.
        self.critic_head = RegressionHead(
            critic_head_hidden_size, 1, critic_head_layer_num, activation=activation, norm_type=norm_type
        )
        self.continuous = continuous
        if self.continuous:
            self.multi_head = False
            self.actor_head = ReparameterizationHead(
                actor_head_hidden_size,
                action_shape,
                actor_head_layer_num,
                sigma_type=sigma_type,
                activation=activation,
                norm_type=norm_type,
                bound_type=bound_type
            )
        else:
            multi_head = not isinstance(action_shape, int)
            self.multi_head = multi_head
            if multi_head:
                self.actor_head = MultiHead(
                    DiscreteHead,
                    actor_head_hidden_size,
                    action_shape,
                    layer_num=actor_head_layer_num,
                    activation=activation,
                    norm_type=norm_type
                )
            else:
                self.actor_head = DiscreteHead(
                    actor_head_hidden_size,
                    action_shape,
                    actor_head_layer_num,
                    activation=activation,
                    norm_type=norm_type
                )
        # for convenience of call some apis(such as: self.critic.parameters()), but may cause
        # misunderstanding when print(self)
        if self.share_encoder:
            self.actor = nn.ModuleList([self.encoder, self.actor_head])
            self.critic = nn.ModuleList([self.encoder, self.critic_head])
        else:
            self.actor = nn.ModuleList([self.actor_encoder, self.actor_head])
            self.critic = nn.ModuleList([self.critic_encoder, self.critic_head])

    def forward(self, inputs, mode=None, **kwargs):
        """Dispatch to the compute_* method selected by ``mode``."""
        assert (mode in ['compute_actor_critic', 'compute_actor', 'compute_critic'])
        f = getattr(self, mode)
        return f(inputs, **kwargs)

    def compute_actor_critic(self, inputs) -> Dict[str, torch.Tensor]:
        """Policy logit and state value in one pass (single encode if shared)."""
        if self.share_encoder:
            actor_embedding = critic_embedding = self.encoder(inputs)
        else:
            actor_embedding = self.actor_encoder(inputs)
            critic_embedding = self.critic_encoder(inputs)
        value = self.critic_head(critic_embedding)
        actor_output = self.actor_head(actor_embedding)
        if self.continuous:
            logit = [actor_output['mu'], actor_output['sigma']]
        else:
            logit = actor_output['logit']
        return {'logit': logit, 'value': value['pred']}

    def compute_actor(self, inputs: Dict) -> Dict:
        """Policy output only (logit, or [mu, sigma] when continuous)."""
        if self.share_encoder:
            x = self.encoder(inputs)
        else:
            x = self.actor_encoder(inputs)
        x = self.actor_head(x)
        if self.continuous:
            x = {'logit': [x['mu'], x['sigma']]}
        return x

    def compute_critic(self, inputs: Dict) -> Dict:
        """State value only."""
        if self.share_encoder:
            x = self.encoder(inputs)
        else:
            x = self.critic_encoder(inputs)
        x = self.critic_head(x)
        return {'value': x['pred']}
# class CPPORLModel(PPORLModel):
class PPORLModel(nn.Module):
    """
    PPO actor-critic over BEV + speed observations.

    An (optionally shared) conv encoder feeds a BatchNorm1d over the
    embedding, then a scalar-regression value head and either a
    ReparameterizationHead (continuous policy, mu/sigma) or a Discrete/Multi
    head (discrete policy).
    """

    def __init__(
            self,
            obs_shape: Tuple = [5, 32, 32],
            action_shape: Union[int, Tuple] = 2,
            share_encoder: bool = True,
            continuous: bool = True,
            encoder_embedding_size: int = 512,
            encoder_hidden_size_list: List = [64, 128, 256],
            actor_head_hidden_size: int = 512,
            actor_head_layer_num: int = 1,
            critic_head_hidden_size: int = 512,
            critic_head_layer_num: int = 1,
            activation: Optional[nn.Module] = nn.ReLU(),
            norm_type: Optional[str] = None,
            sigma_type: Optional[str] = 'independent',
            bound_type: Optional[str] = None,
    ) -> None:
        super().__init__()
        self._obs_shape = obs_shape
        self._act_shape = action_shape
        self.share_encoder = share_encoder
        if self.share_encoder:
            self.encoder = BEVSpeedConvEncoder(
                self._obs_shape, encoder_hidden_size_list, encoder_embedding_size, [3, 3, 3], [2, 2, 2]
            )
        else:
            self.actor_encoder = BEVSpeedConvEncoder(
                self._obs_shape, encoder_hidden_size_list, encoder_embedding_size, [3, 3, 3], [2, 2, 2]
            )
            self.critic_encoder = BEVSpeedConvEncoder(
                self._obs_shape, encoder_hidden_size_list, encoder_embedding_size, [3, 3, 3], [2, 2, 2]
            )
        # Value branch: scalar regression over the embedding.
        self.critic_head = RegressionHead(
            critic_head_hidden_size, 1, critic_head_layer_num, activation=activation, norm_type=norm_type
        )
        self.continuous = continuous
        if self.continuous:
            self.multi_head = False
            self.actor_head = ReparameterizationHead(
                actor_head_hidden_size,
                action_shape,
                actor_head_layer_num,
                sigma_type=sigma_type,
                activation=activation,
                norm_type=norm_type,
                bound_type=bound_type
            )
        else:
            multi_head = not isinstance(action_shape, int)
            self.multi_head = multi_head
            if multi_head:
                self.actor_head = MultiHead(
                    DiscreteHead,
                    actor_head_hidden_size,
                    action_shape,
                    layer_num=actor_head_layer_num,
                    activation=activation,
                    norm_type=norm_type
                )
            else:
                self.actor_head = DiscreteHead(
                    actor_head_hidden_size,
                    action_shape,
                    actor_head_layer_num,
                    activation=activation,
                    norm_type=norm_type
                )
        # for convenience of call some apis(such as: self.critic.parameters()), but may cause
        # misunderstanding when print(self)
        if self.share_encoder:
            self.actor = nn.ModuleList([self.encoder, self.actor_head])
            self.critic = nn.ModuleList([self.encoder, self.critic_head])
        else:
            self.actor = nn.ModuleList([self.actor_encoder, self.actor_head])
            self.critic = nn.ModuleList([self.critic_encoder, self.critic_head])
        # FIX: the BatchNorm width must follow the encoder's output size; it
        # was hard-coded to 512 and broke any non-default
        # ``encoder_embedding_size`` (default unchanged: 512).
        self.feat_bn = nn.BatchNorm1d(encoder_embedding_size)

    def forward(self, inputs, mode=None, **kwargs):
        """Dispatch to the compute_* method selected by ``mode``."""
        assert (mode in ['compute_actor_critic', 'compute_actor', 'compute_critic'])
        f = getattr(self, mode)
        return f(inputs, **kwargs)

    def compute_actor_critic(self, inputs) -> Dict[str, torch.Tensor]:
        """Policy logit and state value in one pass (single encode if shared)."""
        if self.share_encoder:
            actor_embedding = critic_embedding = self.encoder(inputs)
        else:
            actor_embedding = self.actor_encoder(inputs)
            critic_embedding = self.critic_encoder(inputs)
        # Normalize the embeddings before the heads.
        actor_embedding = self.feat_bn(actor_embedding)
        critic_embedding = self.feat_bn(critic_embedding)
        value = self.critic_head(critic_embedding)
        actor_output = self.actor_head(actor_embedding)
        if self.continuous:
            logit = [actor_output['mu'], actor_output['sigma']]
        else:
            logit = actor_output['logit']
        return {'logit': logit, 'value': value['pred']}

    def compute_actor(self, inputs: Dict) -> Dict:
        """Policy output only (logit, or [mu, sigma] when continuous)."""
        if self.share_encoder:
            x = self.encoder(inputs)
        else:
            x = self.actor_encoder(inputs)
        x = self.feat_bn(x)
        x = self.actor_head(x)
        if self.continuous:
            x = {'logit': [x['mu'], x['sigma']]}
        return x

    def compute_critic(self, inputs: Dict) -> Dict:
        """State value only."""
        if self.share_encoder:
            x = self.encoder(inputs)
        else:
            x = self.critic_encoder(inputs)
        x = self.feat_bn(x)
        x = self.critic_head(x)
        return {'value': x['pred']}
| 40.264881 | 145 | 0.564787 | 2,943 | 27,058 | 4.88651 | 0.056405 | 0.040053 | 0.039914 | 0.040887 | 0.881788 | 0.860928 | 0.847924 | 0.825464 | 0.819623 | 0.810027 | 0 | 0.015631 | 0.342708 | 27,058 | 671 | 146 | 40.324888 | 0.792972 | 0.029825 | 0 | 0.759868 | 0 | 0 | 0.021883 | 0 | 0 | 0 | 0 | 0 | 0.009868 | 1 | 0.046053 | false | 0 | 0.008224 | 0 | 0.100329 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5b98d07ada34dfb3e916166189f96fd1c5b70e9d | 218 | py | Python | icevision/models/torchvision_models/__init__.py | RibenaMapleSyrup/icevision | 6cbd6d103cb3f76bc21ae7651e9d958efe8c3a64 | [
"Apache-2.0"
] | 88 | 2020-05-02T12:28:02.000Z | 2021-09-30T07:17:03.000Z | icevision/models/torchvision_models/__init__.py | RibenaMapleSyrup/icevision | 6cbd6d103cb3f76bc21ae7651e9d958efe8c3a64 | [
"Apache-2.0"
] | 248 | 2020-05-01T18:46:31.000Z | 2020-07-31T20:55:01.000Z | icevision/models/torchvision_models/__init__.py | RibenaMapleSyrup/icevision | 6cbd6d103cb3f76bc21ae7651e9d958efe8c3a64 | [
"Apache-2.0"
] | 15 | 2020-06-07T15:59:56.000Z | 2021-02-27T09:46:39.000Z | import icevision.models.torchvision_models.faster_rcnn
import icevision.models.torchvision_models.mask_rcnn
import icevision.models.torchvision_models.retinanet
import icevision.models.torchvision_models.keypoint_rcnn
| 43.6 | 56 | 0.908257 | 27 | 218 | 7.074074 | 0.333333 | 0.314136 | 0.439791 | 0.670157 | 0.837696 | 0.439791 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036697 | 218 | 4 | 57 | 54.5 | 0.909524 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 8 |
5b9da04e1080324af065d1e17413d56d1141a7a4 | 4,417 | py | Python | 0CTF-TCTF/2021 Quals/uc_goood (WIP)/elf_gen.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | 1 | 2021-08-24T22:16:41.000Z | 2021-08-24T22:16:41.000Z | 0CTF-TCTF/2021 Quals/uc_goood (WIP)/elf_gen.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | null | null | null | 0CTF-TCTF/2021 Quals/uc_goood (WIP)/elf_gen.py | PurpEth/solved-hacking-problem | 6f289d1647eb9c091caa580c7aae673e3ba02952 | [
"Unlicense"
] | null | null | null | from shutil import copyfile
import os
from pwn import *
context.arch = "amd64"
CODE = 0xDEADBEEF000
STACK = 0xBABECAFE000
MAIN = b"\x48\x83\xec\x20\x66\xc7\x44\x24\x0e\x00\x00\x48\x8d\x5c\x24\x0e\x48\xc7\x44\x24\x10\x00\x00\x00\x00\x48\xc7\x44\x24\x18\x00\x00\x00\x00\xb9\x41\x00\x00\x00\x48\x8d\x15\x8b\x01\x00\x00\xbe\x01\x00\x00\x00\x31\xc0\xbf\x01\x00\x00\x00\xe8\xbb\x01\x00\x00\xb9\x02\x00\x00\x00\x48\x89\xda\x31\xf6\x31\xff\x31\xc0\xe8\xa8\x01\x00\x00\x8a\x44\x24\x0e\x3c\x32\x74\x39\x3c\x33\x74\x62\x3c\x31\x0f\x85\x04\x01\x00\x00\xb9\x12\x00\x00\x00\x48\x8d\x15\x35\x01\x00\x00\xbe\x01\x00\x00\x00\x31\xc0\xbf\x01\x00\x00\x00\xe8\x77\x01\x00\x00\x48\x83\xc4\x20\x48\xbf\x00\xf8\xee\xdb\xea\x0d\x00\x00\xff\x27\xb9\x12\x00\x00\x00\x48\x8d\x15\xf6\x00\x00\x00\xbe\x01\x00\x00\x00\x31\xc0\xbf\x01\x00\x00\x00\xe8\x4a\x01\x00\x00\x48\x83\xc4\x20\x48\xbf\x00\xf8\xee\xdb\xea\x0d\x00\x00\xff\x27\xb9\x07\x00\x00\x00\x48\x8d\x15\xc2\x00\x00\x00\xbe\x01\x00\x00\x00\x31\xc0\xbf\x01\x00\x00\x00\xe8\x1d\x01\x00\x00\x31\xf6\x31\xff\x48\x8d\x54\x24\x10\xb9\x08\x00\x00\x00\x31\xc0\xe8\x08\x01\x00\x00\xb9\x07\x00\x00\x00\xbe\x01\x00\x00\x00\x31\xc0\x48\x8d\x15\x82\x00\x00\x00\xbf\x01\x00\x00\x00\xe8\xeb\x00\x00\x00\x31\xf6\x31\xff\x31\xc0\x48\x8d\x54\x24\x18\xb9\x08\x00\x00\x00\xe8\xd6\x00\x00\x00\x48\x81\x7c\x24\x18\xff\x00\x00\x00\x0f\x87\xef\xfe\xff\xff\xb9\x07\x00\x00\x00\x48\x8d\x15\x41\x00\x00\x00\xbe\x01\x00\x00\x00\x31\xc0\xbf\x01\x00\x00\x00\xe8\xaa\x00\x00\x00\x48\x8b\x4c\x24\x18\x31\xf6\x31\xff\x48\x8b\x54\x24\x10\x31\xc0\xe8\x95\x00\x00\x00\xe9\xb8\xfe\xff\xff\xbe\xff\x00\x00\x00\xbf\x3c\x00\x00\x00\x31\xc0\xe8\x7f\x00\x00\x00\xe9\xa2\xfe\xff\xff\x64\x61\x74\x61\x3a\x20\x00\x73\x69\x7a\x65\x3a\x20\x00\x61\x64\x64\x72\x3a\x20\x00\x50\x61\x74\x68\x65\x74\x69\x63\x20\x68\x75\x6d\x61\x6e\x20\x3e\x0a\x00\x50\x6f\x77\x65\x72\x66\x75\x6c\x20\x61\x64\x6d\x69\x6e\x20\x3e\x0a\x00\x57\x65\x6c\x63\x6f\x6d\x65\x20\x74\x6f\x20\x75\x63\x5f\x67\x6f\x6f\x6f\x64\x0a\x31\x2e\x20\x61\x64\x6d\x69\x6e\x20\x74\x65\x73\x74\x0a\x32\x2e\x20\x75\x73\x65\x72\x20\x74\x65\x73\x74\x0a\x33\x2e\x20\x70\x61\x74\x63\x68\x20\x6
4\x61\x74\x61\x0a\x3f\x3a\x20\x00\x48\x89\xf8\x48\x89\xf7\x48\x89\xd6\x48\x89\xca\x4d\x89\xc2\x4d\x89\xc8\x4c\x8b\x4c\x24\x08\x0f\x05\xc3"
TAIL = b"\x31\xc0\xb9\x32\x00\x00\x00\x48\x8d\x15\x55\x00\x00\x00\xbe\x01\x00\x00\x00\xbf\x01\x00\x00\x00\x48\x83\xec\x18\x66\x89\x44\x24\x0e\x31\xc0\xe8\x6d\x00\x00\x00\x31\xf6\x31\xff\x31\xc0\x48\x8d\x54\x24\x0e\xb9\x02\x00\x00\x00\xe8\x58\x00\x00\x00\x80\x7c\x24\x0e\x79\x75\x11\x48\x83\xc4\x18\x48\xbf\x00\xf8\xee\xdb\xea\x0d\x00\x00\xff\x67\x10\x31\xf6\xbf\x3c\x00\x00\x00\x31\xc0\xe8\x32\x00\x00\x00\x43\x6f\x6e\x67\x72\x61\x74\x75\x6c\x61\x74\x69\x6f\x6e\x73\x21\x20\x54\x65\x73\x74\x20\x73\x75\x63\x63\x65\x65\x64\x21\x0a\x54\x72\x79\x20\x61\x67\x61\x69\x6e\x3f\x20\x28\x79\x2f\x5b\x6e\x5d\x29\x00\x48\x89\xf8\x48\x89\xf7\x48\x89\xd6\x48\x89\xca\x4d\x89\xc2\x4d\x89\xc8\x4c\x8b\x4c\x24\x08\x0f\x05\xc3"
ADMIN = b"\xb9\x10\x00\x00\x00\x48\x8d\x15\x37\x00\x00\x00\x31\xc0\xbe\x01\x00\x00\x00\xbf\x01\x00\x00\x00\x48\x83\xec\x08\xe8\x5f\x00\x00\x00\x48\x8d\x05\x2b\x00\x00\x00\x48\xa3\x33\xe2\xaf\xec\xab\x0b\x00\x00\x48\x83\xc4\x08\x48\xbf\x00\xf8\xee\xdb\xea\x0d\x00\x00\xff\x67\x08\x49\x6d\x61\x67\x69\x6e\x61\x74\x69\x6f\x6e\x20\x69\x73\x20\x00\x6b\x33\x33\x6e\x6c\x61\x62\x65\x63\x68\x6f\x20\x27\x6d\x6f\x72\x65\x20\x69\x6d\x70\x6f\x72\x74\x61\x6e\x74\x20\x74\x68\x61\x6e\x20\x6b\x6e\x6f\x77\x6c\x65\x64\x67\x65\x2e\x27\x00\x48\x89\xf8\x48\x89\xf7\x48\x89\xd6\x48\x89\xca\x4d\x89\xc2\x4d\x89\xc8\x4c\x8b\x4c\x24\x08\x0f\x05\xc3".ljust(
0x1000, b"\xf4"
)
# Each segment is one 0x1000-byte page: the payload at offset 0, padded
# with NOP bytes (0x90). The previous version filled the page byte by
# byte with `ord(b"\x90")` in a loop; slice assignment does the same in
# two statements.
def _make_segment(payload):
    """Return a 0x1000-byte bytearray holding *payload* followed by NOP padding."""
    buf = bytearray(b"\x90" * 0x1000)
    buf[:len(payload)] = payload
    return buf


# main: code page wrapped into an ELF at vma=CODE.
mem_buf = _make_segment(MAIN)
# Three 8-byte pointers placed at offset 0x800 — presumably consumed by the
# shellcode as (stack top, scratch page, code base); TODO confirm against
# the MAIN payload's disassembly.
addr = p64(CODE + 0xFF0) + p64(CODE + 0x2000) + p64(CODE)
mem_buf[0x800:0x800 + len(addr)] = addr
filename = make_elf(mem_buf, vma=CODE, extract=False)
copyfile(filename, "uc_goood.elf")
# NOTE(review): chmod is applied to the temp file, not the copied
# "uc_goood.elf" (copyfile does not preserve permissions) — kept as-is.
os.chmod(filename, 0o755)

# admin: raw 0x1000-byte segment written for the challenge to load.
with open("admin.segment", "wb") as f:
    f.write(_make_segment(ADMIN))

# tail: raw 0x1000-byte segment written for the challenge to load.
with open("tail.segment", "wb") as f:
    f.write(_make_segment(TAIL))
| 77.491228 | 2,138 | 0.726964 | 1,028 | 4,417 | 3.108949 | 0.171206 | 0.21965 | 0.146433 | 0.060075 | 0.512516 | 0.458073 | 0.413016 | 0.399875 | 0.331352 | 0.331352 | 0 | 0.34129 | 0.04211 | 4,417 | 56 | 2,139 | 78.875 | 0.414087 | 0.003396 | 0 | 0.305556 | 0 | 0.083333 | 0.797181 | 0.783083 | 0 | 1 | 0.0191 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.083333 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
5bcd977e0b3722d0833d884d6f020aaf4b13dcf7 | 3,462 | py | Python | tests/utils/test_cocoeval.py | fabiana001/polimorfo | a8c348e9df2aabc7f046db4f7917733586c183df | [
"Apache-2.0"
] | null | null | null | tests/utils/test_cocoeval.py | fabiana001/polimorfo | a8c348e9df2aabc7f046db4f7917733586c183df | [
"Apache-2.0"
] | null | null | null | tests/utils/test_cocoeval.py | fabiana001/polimorfo | a8c348e9df2aabc7f046db4f7917733586c183df | [
"Apache-2.0"
] | null | null | null | from pathlib import Path
from polimorfo.datasets.coco import CocoDataset
from polimorfo.utils import cocoeval
import numpy as np
BASE_PATH = Path(__file__).parent.parent / 'data'
def test_generate_predictions():
    """Evaluating a dataset against itself yields rows with perfect IOU."""
    annotations = BASE_PATH / 'hair_drier_toaster_bear.json'
    predictions = cocoeval.generate_predictions(annotations, annotations)
    assert len(predictions) > 0
    assert predictions['IOU'].mean() == 1.
def test_generate_predictions_one_class():
    """Restricting prediction generation to one category still yields positive IOU."""
    annotations = BASE_PATH / 'hair_drier_toaster_bear.json'
    predictions = cocoeval.generate_predictions(
        annotations, annotations, category_idxs=[1])
    assert len(predictions) > 0
    assert predictions['IOU'].mean() > 0.
def test_mean_average_precision_and_recall():
    """A dataset evaluated against itself has perfect mAP and mAR."""
    ds_path = BASE_PATH / 'hair_drier_toaster_bear.json'
    df = cocoeval.generate_predictions(ds_path, ds_path)
    # Renamed from `map`/`mar` so the builtin `map` is not shadowed.
    mean_ap, mean_ar = cocoeval.mean_average_precision_and_recall(df)
    assert mean_ap == 1.
    assert mean_ar == 1.
def test_mean_average_precision_and_recall_per_class():
    """Per-class metrics cover exactly the positive class ids in the frame."""
    annotations = BASE_PATH / 'hair_drier_toaster_bear.json'
    predictions = cocoeval.generate_predictions(annotations, annotations)
    metrics = cocoeval.mean_average_precision_and_recall_per_class(predictions)
    positive_ids = predictions['true_class_id'].unique()
    positive_ids = positive_ids[positive_ids > 0]
    print(metrics)
    assert len(metrics) == len(positive_ids)
def test_mean_average_precision_and_recall_per_class_with_name():
    """Per-class metrics keyed by category name map one-to-one to positive ids."""
    annotations = BASE_PATH / 'hair_drier_toaster_bear.json'
    dataset = CocoDataset(annotations)
    names_by_idx = {idx: meta['name'] for idx, meta in dataset.cats.items()}
    predictions = cocoeval.generate_predictions(annotations, annotations)
    metrics = cocoeval.mean_average_precision_and_recall_per_class(
        predictions, idx_class_dict=names_by_idx)
    positive_ids = predictions['true_class_id'].unique()
    positive_ids = positive_ids[positive_ids > 0]
    print(metrics)
    assert len(metrics) == len(positive_ids)
def test_mean_average_precision_and_recall_per_class_with_name_largest_range():
    """A coarse IOU range (step 0.5) still yields one metric per positive id."""
    annotations = BASE_PATH / 'hair_drier_toaster_bear.json'
    dataset = CocoDataset(annotations)
    names_by_idx = {idx: meta['name'] for idx, meta in dataset.cats.items()}
    predictions = cocoeval.generate_predictions(annotations, annotations)
    metrics = cocoeval.mean_average_precision_and_recall_per_class(
        predictions, idx_class_dict=names_by_idx, range_iou=np.arange(0, 1, 0.5))
    positive_ids = predictions['true_class_id'].unique()
    positive_ids = positive_ids[positive_ids > 0]
    print(metrics)
    assert len(metrics) == len(positive_ids)
def test_mean_average_precision_and_recall_per_class_with_name_min_score():
    """A 0.5 score threshold still yields one metric per positive class id."""
    annotations = BASE_PATH / 'hair_drier_toaster_bear.json'
    dataset = CocoDataset(annotations)
    names_by_idx = {idx: meta['name'] for idx, meta in dataset.cats.items()}
    predictions = cocoeval.generate_predictions(annotations, annotations)
    metrics = cocoeval.mean_average_precision_and_recall_per_class(
        predictions, idx_class_dict=names_by_idx, min_score=0.5)
    positive_ids = predictions['true_class_id'].unique()
    positive_ids = positive_ids[positive_ids > 0]
    print(metrics)
    assert len(metrics) == len(positive_ids)
def test_precision_recall_per_image():
    """Per-image precision and recall are both positive for a self-evaluation."""
    ds_path = BASE_PATH / 'hair_drier_toaster_bear.json'
    df = cocoeval.generate_predictions(ds_path, ds_path)
    # Renamed from `map`/`mar` so the builtin `map` is not shadowed.
    precision, recall = cocoeval.precision_recall_per_image(
        df, df['img_path'].loc[0])
    assert precision > 0
    assert recall > 0
5bf330ca93b1a7cebf6eed6a093389c7ac814c9d | 15,868 | py | Python | a10sdk/core/vcs/vcs_stat.py | deepfield/a10sdk-python | bfaa58099f51f085d5e91652d1d1a3fd5c529d5d | [
"Apache-2.0"
] | 16 | 2015-05-20T07:26:30.000Z | 2021-01-23T11:56:57.000Z | a10sdk/core/vcs/vcs_stat.py | deepfield/a10sdk-python | bfaa58099f51f085d5e91652d1d1a3fd5c529d5d | [
"Apache-2.0"
] | 6 | 2015-03-24T22:07:11.000Z | 2017-03-28T21:31:18.000Z | a10sdk/core/vcs/vcs_stat.py | deepfield/a10sdk-python | bfaa58099f51f085d5e91652d1d1a3fd5c529d5d | [
"Apache-2.0"
] | 23 | 2015-03-29T15:43:01.000Z | 2021-06-02T17:12:01.000Z | from a10sdk.common.A10BaseClass import A10BaseClass
class SamplingEnable(A10BaseClass):
    """Child "sampling-enable" object for aVCS statistics.

    Does not support CRUD operations on its own; use the parent ``Stat``
    object instead.

    :param counters1: enum string selecting which aVCS counter to sample
        (``"all"``, or one of the election/daemon counter names such as
        ``"elect_recv_err"``, ``"slave_cfg_upd"``, ``"daemon_n_recv"``, ...).
    :param DeviceProxy: device proxy for REST operations and session
        handling. Refer to ``common/device_proxy.py``.
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.b_key = "sampling-enable"
        self.DeviceProxy = ""
        self.counters1 = ""
        # Accept arbitrary attributes from the caller, A10 SDK style.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
class Stat(A10BaseClass):
    """aVCS statistics resource (show aVCS statistics information).

    Supports CRUD operations and inherits from ``common/A10BaseClass``.
    This is the ``"PARENT"`` class for this module.

    URL for this object:
        ``https://<Hostname|Ip address>//axapi/v3/vcs/stat``

    :param sampling_enable: list of sampling-enable child objects; each
        carries a ``counters1`` enum string naming one aVCS election or
        daemon counter to sample (see :class:`SamplingEnable`).
    :param DeviceProxy: device proxy for REST operations and session
        handling. Refer to ``common/device_proxy.py``.
    """

    def __init__(self, **kwargs):
        self.ERROR_MSG = ""
        self.required = []
        self.b_key = "stat"
        self.a10_url = "/axapi/v3/vcs/stat"
        self.DeviceProxy = ""
        self.sampling_enable = []
        # Accept arbitrary attributes from the caller, A10 SDK style.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
| 273.586207 | 7,354 | 0.811823 | 2,341 | 15,868 | 5.136694 | 0.067493 | 0.072848 | 0.147027 | 0.216549 | 0.952516 | 0.952516 | 0.952516 | 0.952516 | 0.947027 | 0.947027 | 0 | 0.001409 | 0.105684 | 15,868 | 57 | 7,355 | 278.385965 | 0.845959 | 0.952735 | 0 | 0.526316 | 0 | 0 | 0.053546 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.105263 | false | 0 | 0.052632 | 0 | 0.263158 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
75577a35ef2ed616e626836aaf17773987f75d2a | 2,404 | py | Python | sklearn_pandas/tests/test_date_transform.py | toddbenanzer/sklearn_pandas | 36e24c55ef4829aa261963201c346869097d4931 | [
"MIT"
] | null | null | null | sklearn_pandas/tests/test_date_transform.py | toddbenanzer/sklearn_pandas | 36e24c55ef4829aa261963201c346869097d4931 | [
"MIT"
] | null | null | null | sklearn_pandas/tests/test_date_transform.py | toddbenanzer/sklearn_pandas | 36e24c55ef4829aa261963201c346869097d4931 | [
"MIT"
] | null | null | null | import pytest
import datetime
from sklearn_pandas.transformers.date_transform import *
def test_easy_date_DateTransform():
    """ISO 'YYYY-MM-DD' strings are parsed into a datetime64 column."""
    raw = pd.DataFrame({'A': ['2019-03-01', ]})
    expected = pd.DataFrame(
        {'A': [datetime.date(2019, 3, 1), ]}, dtype='datetime64[ns]')
    pd.testing.assert_frame_equal(DateTransform().fit_transform(raw), expected)
def test_easy_date2_DateTransform():
    """Slash-separated 'YYYY/MM/DD' strings are parsed into datetime64."""
    raw = pd.DataFrame({'A': ['2019/03/01', ]})
    expected = pd.DataFrame(
        {'A': [datetime.date(2019, 3, 1), ]}, dtype='datetime64[ns]')
    pd.testing.assert_frame_equal(DateTransform().fit_transform(raw), expected)
def test_easy_date3_DateTransform():
    """Verbose 'Month DD, YYYY' strings are parsed into datetime64."""
    raw = pd.DataFrame({'A': ['March 01, 2019', ]})
    expected = pd.DataFrame(
        {'A': [datetime.date(2019, 3, 1), ]}, dtype='datetime64[ns]')
    pd.testing.assert_frame_equal(DateTransform().fit_transform(raw), expected)
def test_incomplete_date_DateTransform():
    """A date string with no year cannot be parsed and becomes NaT."""
    raw = pd.DataFrame({'A': ['March 01', ]})
    expected = pd.DataFrame({'A': [pd.NaT, ]}, dtype='datetime64[ns]')
    pd.testing.assert_frame_equal(DateTransform().fit_transform(raw), expected)
def test_simple_conversion_ExtractDatePart():
    """With default flags every date part is extracted as its own column."""
    raw = pd.DataFrame({'A': ['2019-03-01', ]})
    expected = pd.DataFrame({
        'A_year': [2019, ],
        'A_month': [3, ],
        'A_day': [1, ],
        'A_hour': [0, ],
        'A_minute': [0, ],
        'A_second': [0, ],
        'A_weekday': [4, ],
        'A_weekday_name': ['Friday', ],
        'A_quarter': [1, ],
        'A_dayofyear': [60, ],
        'A_weekofyear': [9, ],
    })
    pd.testing.assert_frame_equal(
        ExtractDatePart().fit_transform(raw), expected)
def test_subset_conversion_ExtractDatePart():
    """Disabling get_year/get_month drops exactly those two columns."""
    raw = pd.DataFrame({'A': ['2019-03-01', ]})
    # No 'A_year' / 'A_month' columns expected: both flags are off below.
    expected = pd.DataFrame({
        'A_day': [1, ],
        'A_hour': [0, ],
        'A_minute': [0, ],
        'A_second': [0, ],
        'A_weekday': [4, ],
        'A_weekday_name': ['Friday', ],
        'A_quarter': [1, ],
        'A_dayofyear': [60, ],
        'A_weekofyear': [9, ],
    })
    pd.testing.assert_frame_equal(
        ExtractDatePart(get_year=False, get_month=False).fit_transform(raw),
        expected)
| 32.931507 | 75 | 0.603993 | 291 | 2,404 | 4.728522 | 0.199313 | 0.09593 | 0.104651 | 0.061047 | 0.890262 | 0.890262 | 0.868459 | 0.819041 | 0.819041 | 0.819041 | 0 | 0.05066 | 0.21173 | 2,404 | 72 | 76 | 33.388889 | 0.675462 | 0.015391 | 0 | 0.706897 | 0 | 0 | 0.134095 | 0 | 0 | 0 | 0 | 0 | 0.103448 | 1 | 0.103448 | false | 0 | 0.051724 | 0 | 0.155172 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
f350bea4994728956b8e2f7b1ad690d781698579 | 173 | py | Python | core/__init__.py | cyrus01337/relay-bot | 7c862df87f77979999e9f50e2d7da3ddcb99ffd0 | [
"MIT"
] | null | null | null | core/__init__.py | cyrus01337/relay-bot | 7c862df87f77979999e9f50e2d7da3ddcb99ffd0 | [
"MIT"
] | null | null | null | core/__init__.py | cyrus01337/relay-bot | 7c862df87f77979999e9f50e2d7da3ddcb99ffd0 | [
"MIT"
] | 1 | 2021-01-01T11:42:29.000Z | 2021-01-01T11:42:29.000Z | from .connection import Connection
from .converters import Guild
from .errors import GuildNotFound
from .errors import WebhookInitFailed
from .errors import WebhookNotFound
| 28.833333 | 37 | 0.855491 | 20 | 173 | 7.4 | 0.45 | 0.202703 | 0.324324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.115607 | 173 | 5 | 38 | 34.6 | 0.96732 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
45f4c436579c147fdfa99b043fb2eeb92ce7fa16 | 9,218 | py | Python | NippoKun/report/tests/test_score.py | KIKUYA-Takumi/Nippokun | aa82f97aaf5b61d94b213425f28314a248914eb9 | [
"MIT"
] | null | null | null | NippoKun/report/tests/test_score.py | KIKUYA-Takumi/Nippokun | aa82f97aaf5b61d94b213425f28314a248914eb9 | [
"MIT"
] | 4 | 2016-10-19T00:23:21.000Z | 2016-11-04T01:29:08.000Z | NippoKun/report/tests/test_score.py | KIKUYA-Takumi/NippoKun | aa82f97aaf5b61d94b213425f28314a248914eb9 | [
"MIT"
] | null | null | null | from django.test import TestCase, Client, RequestFactory
from django.contrib.auth.models import User
from ..models import Score, Report
# Create your tests here.
class CreateScoreTest(TestCase):
    """Tests for posting Score objects via /report/<pk>/score/.

    The four score-value tests below show that posting 1 or 5 creates a
    Score row while 0 or 6 does not, i.e. only scores in 1..5 are accepted.
    The four near-identical POST blocks were factored into ``_post_score``.
    """

    def setUp(self):
        """Register and log in a user, then create one report to score."""
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john',
                          'password1': 'johnpass',
                          'password2': 'johnpass'})
        self.client.login(username='john', password='johnpass')
        request_factory = RequestFactory()
        self.request = request_factory.get('/report/mypage/')
        self.request.user = User.objects.get(pk=1)
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'test title',
                          'report_content': 'test'
                          })
        self.request.report = Report.objects.get(pk=1)

    def _post_score(self, score):
        """POST *score* for the report created in setUp; return the Score count."""
        self.client.post('/report/1/score/',
                         {'report': self.request.report,
                          'score_author': self.request.user,
                          'score': score,
                          'evaluate_point': 'good job',
                          'comment': 'comment'})
        return Score.objects.count()

    def test_create_score_0(self):
        # 0 is below the accepted range, so no Score row is created.
        self.assertEqual(self._post_score(0), 0)

    def test_create_score_1(self):
        self.assertEqual(self._post_score(1), 1)

    def test_create_score_5(self):
        self.assertEqual(self._post_score(5), 1)

    def test_create_score_6(self):
        # 6 is above the accepted range, so no Score row is created.
        self.assertEqual(self._post_score(6), 0)

    def test_create_score_redirect_to_score(self):
        # The score page for the existing report renders directly (200).
        response = self.client.get('/report/1/score/')
        self.assertEqual(response.status_code, 200)

    def test_create_score_redirect_to_crate_report(self):
        # Report pk=2 does not exist, so the view redirects (302).
        response = self.client.get('/report/2/score/')
        self.assertEqual(response.status_code, 302)
class DeleteScoreTest(TestCase):
    """Tests for removing a score via /report/1/delete/1/."""

    def setUp(self):
        # Sign up and log in via the real views.
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john',
                          'password1': 'johnpass',
                          'password2': 'johnpass'})
        self.client.login(username='john', password='johnpass')
        # Attach the created user and one report to a dummy request object.
        self.request = RequestFactory().get('/report/mypage/')
        self.request.user = User.objects.get(pk=1)
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'test title',
                          'report_content': 'test'})
        self.request.report = Report.objects.get(pk=1)

    def test_delete_score(self):
        # Create one score, then delete it and check the count drops by one.
        self.client.post('/report/1/score/',
                         {'report': self.request.report,
                          'score_author': self.request.user,
                          'score': 4,
                          'evaluate_point': 'good job',
                          'comment': 'comment'})
        self.request.score = Score.objects.get(pk=1)
        before_count = Score.objects.count()
        self.client.delete('/report/1/delete/1/')
        self.assertEqual(before_count, Score.objects.count() + 1)
class UpdateScoreTest(TestCase):
    """Tests for editing an existing score via /report/1/edition/1/."""

    def setUp(self):
        # Sign up and log in via the real views.
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john',
                          'password1': 'johnpass',
                          'password2': 'johnpass'})
        self.client.login(username='john', password='johnpass')
        # Attach the created user and one report to a dummy request object.
        self.request = RequestFactory().get('/report/mypage/')
        self.request.user = User.objects.get(pk=1)
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'test title',
                          'report_content': 'test'})
        self.request.report = Report.objects.get(pk=1)

    def _create_score(self):
        # Create the score (pk=1) that the tests below will edit.
        self.client.post('/report/1/score/',
                         {'report': self.request.report,
                          'score_author': self.request.user,
                          'score': 4,
                          'evaluate_point': 'good job',
                          'comment': 'comment'})
        self.request.score = Score.objects.get(pk=1)

    def test_score_update_score(self):
        self._create_score()
        # Change only the numeric score; keep the other fields as stored.
        self.client.post('/report/1/edition/1/',
                         {'report': self.request.report,
                          'score_author': self.request.user,
                          'score': 2,
                          'evaluate_point': self.request.score.evaluate_point,
                          'comment': self.request.score.comment})
        self.request.score = Score.objects.get(pk=1)
        self.assertEqual(self.request.score.score, 2)

    def test_score_update_evaluate_point(self):
        self._create_score()
        # Change only the evaluate_point; keep the other fields as stored.
        self.client.post('/report/1/edition/1/',
                         {'report': self.request.report,
                          'score_author': self.request.user,
                          'score': self.request.score.score,
                          'evaluate_point': 'nice',
                          'comment': self.request.score.comment})
        self.request.score = Score.objects.get(pk=1)
        self.assertEqual(self.request.score.evaluate_point, 'nice')
        # NOTE(review): the remainder edits the *report*, not the score, and
        # looks like it belongs in a separate test -- kept to preserve the
        # original behavior exactly.
        report = {
            'report_author': self.request.report.report_author,
            'report_title': self.request.report.report_title,
            'report_content': 'update content'
        }
        self.client.post('/report/1/edition/', report)
        self.request.report = Report.objects.get(pk=1)
        self.assertEqual(self.request.report.report_content, 'update content')
class ListScoreTest(TestCase):
    """Tests for the score list view (/report/<pk>/score_list/)."""

    def setUp(self):
        # Sign up and log in via the real views.
        self.client = Client()
        self.client.post('/report/user_register/',
                         {'username': 'john',
                          'password1': 'johnpass',
                          'password2': 'johnpass'})
        self.client.login(username='john', password='johnpass')
        # Attach the created user and one report to a dummy request object.
        self.request = RequestFactory().get('/report/mypage/')
        self.request.user = User.objects.get(pk=1)
        self.client.post('/report/report_entries/',
                         {'report_author': self.request.user,
                          'report_title': 'test title',
                          'report_content': 'test'})
        self.request.report = Report.objects.get(pk=1)
        # Seed the listing with two identical score submissions.
        score_form = {'report': self.request.report,
                      'score_author': self.request.user,
                      'score': 0,
                      'evaluate_point': 'good job',
                      'comment': 'comment'}
        self.client.post('/report/1/score/', score_form)
        self.client.post('/report/1/score/', score_form)

    def test_list_score_redirect_to_list_score(self):
        # Listing scores of an existing report renders directly.
        response = self.client.get('/report/1/score_list/')
        self.assertEqual(response.status_code, 200)

    def test_list_score_redirect_to_create_report(self):
        # Listing scores of a missing report redirects away.
        response = self.client.get('/report/2/score_list/')
        self.assertEqual(response.status_code, 302)
| 44.531401 | 78 | 0.502604 | 862 | 9,218 | 5.24478 | 0.083527 | 0.128954 | 0.061933 | 0.088476 | 0.878346 | 0.860871 | 0.821057 | 0.811989 | 0.7881 | 0.74165 | 0 | 0.012783 | 0.37199 | 9,218 | 206 | 79 | 44.747573 | 0.768181 | 0.002495 | 0 | 0.770492 | 0 | 0 | 0.181986 | 0.024149 | 0 | 0 | 0 | 0 | 0.065574 | 1 | 0.081967 | false | 0.065574 | 0.016393 | 0 | 0.120219 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 8 |
45f797ed1c502cda26fe95c7a9c8944a713449e8 | 4,078 | py | Python | python/tests/_utils/comparision_test.py | g5t/scipp | d819c930a5e438fd65e42e2e4e737743b8d39d37 | [
"BSD-3-Clause"
] | null | null | null | python/tests/_utils/comparision_test.py | g5t/scipp | d819c930a5e438fd65e42e2e4e737743b8d39d37 | [
"BSD-3-Clause"
] | null | null | null | python/tests/_utils/comparision_test.py | g5t/scipp | d819c930a5e438fd65e42e2e4e737743b8d39d37 | [
"BSD-3-Clause"
] | null | null | null | import numpy as np
import scipp as sc
import scipp._utils as su
import pytest
def test_wont_match_when_meta_size_unequal():
    """Mismatched attr counts raise unless attrs are excluded from isnear."""
    scalar = sc.scalar(value=1.0)
    rtol = 0 * sc.units.one
    atol = 1.0 * sc.units.one
    with_attr = sc.DataArray(data=scalar, attrs={'x': scalar})
    without_attr = sc.DataArray(data=scalar)
    with pytest.raises(RuntimeError):
        su.isnear(with_attr, without_attr, rtol=rtol, atol=atol)
    # Excluding attrs from the comparison must not raise.
    su.isnear(with_attr, without_attr, rtol=rtol, atol=atol,
              include_attrs=False)
def test_wont_match_when_meta_keys_unequal():
    """Attrs with different names raise unless attrs are excluded."""
    scalar = sc.scalar(value=1.0)
    rtol = 0 * sc.units.one
    atol = 1.0 * sc.units.one
    lhs = sc.DataArray(data=scalar, attrs={'x': scalar})
    rhs = sc.DataArray(data=scalar, attrs={'y': scalar})
    with pytest.raises(RuntimeError):
        su.isnear(lhs, rhs, rtol=rtol, atol=atol)
    # No exception expected once attrs are left out of the comparison.
    su.isnear(lhs, rhs, rtol=rtol, atol=atol, include_attrs=False)
def test_wont_match_when_meta_sizes_unequal():
    """Attrs of different shapes raise CoordError unless attrs are excluded."""
    scalar = sc.scalar(value=1.0)
    rtol = 0 * sc.units.one
    atol = 1.0 * sc.units.one
    lhs = sc.DataArray(data=scalar, attrs={'x': scalar})
    rhs = sc.DataArray(data=scalar,
                       attrs={'x': sc.array(dims=['x'], values=np.arange(2))})
    with pytest.raises(sc.CoordError):
        su.isnear(lhs, rhs, rtol=rtol, atol=atol)
    # Shape mismatch is ignored when attrs are excluded.
    su.isnear(lhs, rhs, rtol=rtol, atol=atol, include_attrs=False)
def test_data_scalar_no_coords():
    """Scalar data is near when atol or rtol covers the difference."""
    one = sc.units.one
    lhs = sc.DataArray(data=sc.scalar(value=1.0))
    assert su.isnear(lhs, lhs, rtol=0 * one, atol=1e-14 * one)
    rhs = lhs + 1.0 * one
    # A difference of exactly 1 is covered by atol=1 or rtol=1 ...
    assert su.isnear(lhs, rhs, rtol=0.0 * one, atol=1.0 * one)
    assert su.isnear(lhs, rhs, rtol=1.0 * one, atol=0.0 * one)
    # ... but not by a slightly smaller tolerance.
    assert not su.isnear(lhs, rhs, rtol=0 * one, atol=0.9999 * one)
def test_data_scalar_no_coords_no_data():
    """With include_data=False, an identical copy compares near trivially."""
    lhs = sc.DataArray(data=sc.scalar(value=1))
    rhs = lhs.copy()
    # Data is excluded from the comparison, so this should compare equal.
    assert su.isnear(lhs, rhs,
                     rtol=0 * sc.units.one,
                     atol=1e-14 * sc.units.one,
                     include_data=False)
def test_scalar_with_coord():
    """Coord differences count toward nearness just like data."""
    one = sc.units.one
    point = sc.scalar(value=1.0)
    lhs = sc.DataArray(data=point, coords={'x': point})
    assert su.isnear(lhs, lhs, rtol=0.0 * one, atol=1e-14 * one)
    # Same data, but the coord differs by exactly 1.
    rhs = sc.DataArray(data=point, coords={'x': point + point})
    assert su.isnear(lhs, rhs, rtol=0.0 * one, atol=1.0 * one)
    assert not su.isnear(lhs, rhs, rtol=0.0 * one, atol=0.9999 * one)
def test_with_many_coords():
    """Every coord must be within tolerance for the arrays to be near."""
    one = sc.units.one
    base = sc.array(dims=['x'], values=np.arange(10.0))
    shifted = sc.array(dims=['x'], values=np.arange(1, 11.0))
    lhs = sc.DataArray(data=base, coords={'a': base, 'b': base})
    rhs = sc.DataArray(data=base, coords={'a': base, 'b': shifted})
    assert su.isnear(lhs, lhs, rtol=0.0 * one, atol=1e-14 * one)
    # Coord 'b' differs element-wise by exactly 1.
    assert su.isnear(lhs, rhs, rtol=0.0 * one, atol=1.0 * one)
    assert not su.isnear(lhs, rhs, rtol=0.0 * one, atol=0.9999 * one)
def test_with_many_coords_and_attrs():
    """Attr differences count toward nearness unless include_attrs=False."""
    one = sc.units.one
    base = sc.array(dims=['x'], values=np.arange(10.0))
    shifted = sc.array(dims=['x'], values=np.arange(1, 11.0))
    lhs = sc.DataArray(data=base, coords={'a': base, 'b': base},
                       attrs={'c': base, 'd': base})
    rhs = sc.DataArray(data=base, coords={'a': base, 'b': base},
                       attrs={'c': base, 'd': shifted})
    assert su.isnear(lhs, lhs, rtol=0.0 * one, atol=1e-14 * one)
    # Attr 'd' differs element-wise by exactly 1.
    assert su.isnear(lhs, rhs, rtol=0.0 * one, atol=1.0 * one)
    assert not su.isnear(lhs, rhs, rtol=0.0 * one, atol=0.9999 * one)
    # Ignoring attrs makes the otherwise-too-tight tolerance pass.
    assert su.isnear(lhs, rhs,
                     rtol=0.0 * one,
                     atol=0.9999 * one,
                     include_attrs=False)
| 38.471698 | 78 | 0.581658 | 690 | 4,078 | 3.372464 | 0.114493 | 0.129351 | 0.184787 | 0.155995 | 0.908896 | 0.905887 | 0.865922 | 0.837559 | 0.811775 | 0.798023 | 0 | 0.040222 | 0.250123 | 4,078 | 105 | 79 | 38.838095 | 0.720733 | 0.051741 | 0 | 0.6 | 0 | 0 | 0.006216 | 0 | 0 | 0 | 0 | 0 | 0.176471 | 1 | 0.094118 | false | 0 | 0.047059 | 0 | 0.141176 | 0 | 0 | 0 | 0 | null | 0 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
342079ae1235a4a6c7d6bc4a837619832a96102d | 114 | py | Python | tests/context.py | Grulfen/game_of_life | a79670c498635e644b90ed94153ea99fc7f6ad03 | [
"MIT"
] | null | null | null | tests/context.py | Grulfen/game_of_life | a79670c498635e644b90ed94153ea99fc7f6ad03 | [
"MIT"
] | null | null | null | tests/context.py | Grulfen/game_of_life | a79670c498635e644b90ed94153ea99fc7f6ad03 | [
"MIT"
] | null | null | null | import os
import sys
sys.path.insert(0, os.path.abspath('..'))
import game_of_life.game_of_life as game_of_life
| 16.285714 | 48 | 0.77193 | 22 | 114 | 3.727273 | 0.5 | 0.219512 | 0.365854 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009804 | 0.105263 | 114 | 6 | 49 | 19 | 0.794118 | 0 | 0 | 0 | 0 | 0 | 0.017544 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
3453fdc3ba5783ed3f47a23ab5a1292026596880 | 4,843 | py | Python | application/forms.py | trapmarketing/lookatit | 21244a37ab5d6116315968dcdcf2a6984d733275 | [
"Apache-2.0"
] | null | null | null | application/forms.py | trapmarketing/lookatit | 21244a37ab5d6116315968dcdcf2a6984d733275 | [
"Apache-2.0"
] | null | null | null | application/forms.py | trapmarketing/lookatit | 21244a37ab5d6116315968dcdcf2a6984d733275 | [
"Apache-2.0"
] | null | null | null | from wtforms import Form, StringField, TextAreaField, validators, IntegerField, FieldList, FormField, BooleanField, \
FloatField
class Form_Record_Add(Form):
    """Record-creation form: a required bounded title plus an optional description."""

    # Title is mandatory and capped at 255 characters.
    title = StringField(
        'title',
        validators=[
            validators.DataRequired(),
            validators.Length(max=255, message='max 255 characters'),
        ])
    # Description is optional but capped at 200 characters.
    description = TextAreaField(
        'description',
        validators=[
            validators.Length(max=200, message='max 200 characters'),
        ])
class OrderForm(Form):
    """Order details for a product purchase.

    Fields mirror the charge payload: product name/description, amount,
    whether the charge recurs, and acceptance of the transaction type.
    """
    name = StringField('product_name', validators=[validators.DataRequired("product_name is required")])
    description = StringField('description', validators=[validators.DataRequired("description is required")])
    amount = FloatField('charge_amount', validators=[validators.DataRequired("amount is required")])
    # BUGFIX: BooleanField does not accept a ``choices`` argument (that is a
    # SelectField parameter); passing it raised TypeError when the form class
    # was instantiated.  NOTE(review): DataRequired on a BooleanField also
    # rejects an unchecked box (False) -- confirm that is intended for
    # ``recurring``, and whether ``tos`` (labelled 'transaction_type' with the
    # former choices main_sale/upsell1/upsell2) should really be a SelectField.
    recurring = BooleanField('recurring', validators=[validators.DataRequired("recurring required")])
    tos = BooleanField('transaction_type', validators=[validators.DataRequired()])
class InitialOrderForm(OrderForm):
    """Order form for the initial purchase; extends OrderForm with the Stripe token."""
    # Card token produced client-side (presumably by Stripe.js -- confirm);
    # required so the server can create the charge.
    stripeToken = StringField('stripeToken', validators=[validators.DataRequired()])
class BaseApiLeadForm(Form):
    """Lead contact details submitted to the API.

    All fields are required; Length/NumberRange validators guard against
    obviously malformed input.
    """
    email = StringField('email',
                        validators=[validators.DataRequired("Email is required"), validators.Length(min=3, max=55),
                                    validators.Email()])
    firstName = StringField('firstName', validators=[validators.DataRequired("First name is required"),
                                                     validators.Length(min=2, max=30)])
    lastName = StringField('lastName', validators=[validators.DataRequired("Last name is required"),
                                                   validators.Length(min=2, max=30)])
    address = StringField('address', validators=[validators.DataRequired("Address is required"),
                                                 validators.Length(min=10, max=255)])
    city = StringField('city',
                       validators=[validators.DataRequired("City is required"), validators.Length(min=1, max=55)])
    # BUGFIX: the error message said "Zip code is required" (copy-paste from
    # the zip field below).
    phone = StringField('phone', validators=[validators.DataRequired("Phone number is required")])
    # NOTE(review): DataRequired treats 0 as missing, so a literal zip of
    # 00000 can never validate even though NumberRange allows it -- confirm.
    zip = IntegerField('zip',
                       validators=[validators.DataRequired("Zip code is required"), validators.NumberRange(0, 99999)])
    state = StringField('state',
                        validators=[validators.DataRequired("State is required"), validators.Length(min=2, max=2)])
    country = StringField('country', validators=[validators.DataRequired("Country is required")])
class OnePagePurchaseForm(Form):
    """Combined lead + order + Stripe token form for the one-page checkout."""
    email = StringField('email',
                        validators=[validators.DataRequired("Email is required"), validators.Length(min=3, max=25),
                                    validators.Email()])
    firstName = StringField('fname', validators=[validators.DataRequired("First name is required"),
                                                 validators.Length(min=2, max=30)])
    lastName = StringField('lname', validators=[validators.DataRequired("Last name is required"),
                                                validators.Length(min=2, max=30)])
    address = StringField('address', validators=[validators.DataRequired("Address is required"),
                                                 validators.Length(min=10, max=255)])
    city = StringField('city',
                       validators=[validators.DataRequired("City is required"), validators.Length(min=1, max=55)])
    # BUGFIX: the error message said "Zip code is required" for the phone field.
    phone = IntegerField('phone_number', validators=[validators.DataRequired("Phone number is required")])
    # BUGFIX: Length() calls len() on the field data and raises TypeError for
    # an IntegerField; use NumberRange instead (matches BaseApiLeadForm).
    zip = IntegerField('zip_code',
                       validators=[validators.DataRequired("Zip code is required"), validators.NumberRange(0, 99999)])
    state = StringField('state',
                        validators=[validators.DataRequired("State is required"), validators.Length(min=2, max=2)])
    country = StringField('country', validators=[validators.DataRequired("Country is required")])
    name = StringField('product_name', validators=[validators.DataRequired("product_name is required")])
    description = StringField('description', validators=[validators.DataRequired("description is required")])
    amount = FloatField('charge_amount', validators=[validators.DataRequired("amount is required")])
    # BUGFIX: BooleanField takes no ``choices`` kwarg (SelectField-only);
    # passing it raised TypeError at class instantiation.
    recurring = BooleanField('recurring', validators=[validators.DataRequired("recurring required")])
    tos = BooleanField('transaction_type', validators=[validators.DataRequired()])
    stripeToken = StringField('stripeToken', validators=[validators.DataRequired()])
| 59.790123 | 121 | 0.638447 | 431 | 4,843 | 7.141531 | 0.169374 | 0.207927 | 0.322287 | 0.109812 | 0.811891 | 0.808967 | 0.766732 | 0.766732 | 0.728395 | 0.728395 | 0 | 0.017819 | 0.235185 | 4,843 | 80 | 122 | 60.5375 | 0.813175 | 0 | 0 | 0.622951 | 0 | 0 | 0.177782 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.016393 | 0 | 0.622951 | 0 | 0 | 0 | 0 | null | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 9 |
346104e6fe4f312a90a7f39dc04fda7d88bf4e91 | 162 | py | Python | swan_vis/__init__.py | mortazavilab/swan_vis | 7e5817b06d8a20792fee7af1bc59a1bf818bd3c4 | [
"MIT"
] | 34 | 2020-06-12T20:05:38.000Z | 2022-03-28T00:12:22.000Z | swan_vis/__init__.py | mortazavilab/swan_vis | 7e5817b06d8a20792fee7af1bc59a1bf818bd3c4 | [
"MIT"
] | 14 | 2020-06-14T23:04:05.000Z | 2022-03-25T17:06:18.000Z | swan_vis/__init__.py | mortazavilab/swan_vis | 7e5817b06d8a20792fee7af1bc59a1bf818bd3c4 | [
"MIT"
] | 5 | 2020-10-17T09:10:41.000Z | 2022-02-03T21:19:15.000Z | from swan_vis.utils import *
from swan_vis.talon_utils import *
from swan_vis.graph import *
from swan_vis.swangraph import *
from swan_vis.plottedgraph import *
| 27 | 35 | 0.814815 | 26 | 162 | 4.846154 | 0.346154 | 0.31746 | 0.436508 | 0.539683 | 0.349206 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.123457 | 162 | 5 | 36 | 32.4 | 0.887324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
346e84b97b6b4e7b056ca85cf283d1a1959a6d52 | 57,077 | py | Python | battles.py | kevinoshita/programming-practice-2021 | be89b869a99e7a6991f22e8db5e83b0f76e6313e | [
"MIT"
] | null | null | null | battles.py | kevinoshita/programming-practice-2021 | be89b869a99e7a6991f22e8db5e83b0f76e6313e | [
"MIT"
] | null | null | null | battles.py | kevinoshita/programming-practice-2021 | be89b869a99e7a6991f22e8db5e83b0f76e6313e | [
"MIT"
] | null | null | null | import random
import time
def battle_chap1():
    """Interactive turn-based duel against Ecurb (chapter 1).

    Player (100 HP) and Ecurb (50 HP) alternate turns; every move has a
    20% miss chance and damage/heal amounts are rolled per turn.  The
    fight replays until the player wins or declines the play-again
    prompt.  All input comes from stdin, all output goes to stdout;
    returns None.
    """
    print("Commencing battle")
    time.sleep(5)
    print("\n" * 2)
    play_again = True
    # Set up the play again loop
    while play_again:
        winner = None
        player_health = 100
        computer_health = 50
        # determine whose turn it is
        turn = random.randint(1,2) # heads or tails
        if turn == 1:
            player_turn = True
            computer_turn = False
            print("\nYou will go first.")
        else:
            player_turn = False
            computer_turn = True
            print("\nEcurb will go first.")
        print("\nYour health: ", player_health, "Ecurb's health: ", computer_health)
        # set up the main game loop
        # NOTE(review): this condition is effectively always true (both
        # sides start non-zero); the loop only exits via the break
        # statements below when a winner is set.
        while (player_health != 0 or computer_health != 0):
            heal_up = False # determine if heal has been used by the player. Resets false each loop.
            miss = False # determine if the chosen move will miss.
            # create a dictionary of the possible moves and randomly select the damage it does when selected
            player_movelist = {"Slash": random.randint(18, 25),
                               "Slice and Dice": random.randint(10, 35),
                               "Heal": random.randint(20, 25)}
            enemy_movelist = {"Stab": random.randint(9, 13),
                              "Deeper Stab": random.randint(5, 18),
                              "Heal": random.randint(10, 13)}
            if player_turn:
                print("\nPlease select a move:\n1. Slash (Deal damage between 18-25)\n2. Slice and Dice (Deal damage between 10-35)\n3. Heal (Restore between 20-25 health)\n")
                player_move = input("> ").lower()
                move_miss = random.randint(1,5) # 20% of missing
                if move_miss == 1:
                    miss = True
                else:
                    miss = False
                if miss:
                    # NOTE(review): the miss is rolled before input
                    # validation, so an invalid entry still counts as a
                    # missed turn when the 20% roll hits.
                    player_move = 0 # player misses and deals no damage
                    print("You missed!")
                else:
                    # player_move is re-bound from the menu string to the
                    # rolled damage/heal amount for this turn.
                    if player_move in ("1", "slash"):
                        player_move = player_movelist["Slash"]
                        print("\nYou used Slash. It dealt ", player_move, " damage.")
                    elif player_move in ("2", "slice and dice"):
                        player_move = player_movelist["Slice and Dice"]
                        print("\nYou used Slice and Dice. It dealt ", player_move, " damage.")
                    elif player_move in ("3", "heal"):
                        heal_up = True # heal activated
                        player_move = player_movelist["Heal"]
                        print("\nYou used Heal. It healed for ", player_move, " health.")
                    else:
                        # Invalid input: skip turn switching so the player
                        # retries without losing the turn.
                        print("\nThat is not a valid move. Please try again.")
                        continue
            else: # computer turn
                move_miss = random.randint(1,5)
                if move_miss == 1:
                    miss = True
                else:
                    miss = False
                if miss:
                    computer_move = 0 # the computer misses and deals no damage
                    print("Ecurb missed!")
                else:
                    # Above 15 HP Ecurb always attacks, choosing by the
                    # player's remaining health.
                    if computer_health > 15:
                        if player_health > 75:
                            computer_move = enemy_movelist["Stab"]
                            print("\nEcurb used Stab. It dealt ", computer_move, " damage.")
                        elif player_health > 35 and player_health <= 75: # computer decides whether to go big or play it safe
                            imoves = ["Stab", "Deeper Stab"]
                            imoves = random.choice(imoves)
                            computer_move = enemy_movelist[imoves]
                            print("\nEcurb used ", imoves, ". It dealt ", computer_move, " damage.")
                        elif player_health <= 35:
                            computer_move = enemy_movelist["Deeper Stab"]
                            print("\nEcurb used Deeper Stab. It dealt ", computer_move, " damage.")
                    else: # if the computer has little health, there is a 50% chance they will heal
                        heal_or_fight = random.randint(1,2)
                        if heal_or_fight == 1:
                            heal_up = True
                            computer_move = enemy_movelist["Heal"]
                            print("\nEcurb used Heal. It healed for ", computer_move, " health.")
                        else:
                            if player_health > 75:
                                computer_move = enemy_movelist["Stab"]
                                print("\nEcurb used Stab. It dealt ", computer_move, " damage.")
                            elif player_health > 35 and player_health <= 75:
                                imoves = ["Stab", "Deeper Stab"]
                                imoves = random.choice(imoves)
                                computer_move = enemy_movelist[imoves]
                                print("\nEcurb used ", imoves, ". It dealt ", computer_move, " damage.")
                            elif player_health <= 35:
                                computer_move = enemy_movelist["Deeper Stab"]
                                print("\nEcurb used Deeper Stab. It dealt ", computer_move, " damage.")
            # Apply the rolled move to the appropriate side.
            if heal_up:
                if player_turn:
                    player_health += player_move
                    if player_health > 100:
                        player_health = 100 # cap max health. No over healing!
                else:
                    computer_health += computer_move
                    if computer_health > 50:
                        computer_health = 50
            else:
                if player_turn:
                    computer_health -= player_move
                    # NOTE(review): "< 0" means a hit landing on exactly
                    # 0 HP is not detected as a win -- likely should be
                    # "<= 0"; confirm intended.
                    if computer_health < 0:
                        computer_health = 0 # cap minimum health at 0
                        winner = "Player"
                        break
                else:
                    player_health -= computer_move
                    # NOTE(review): same exact-zero gap as above.
                    if player_health < 0:
                        player_health = 0
                        winner = "Computer"
                        break
            print("\nYour health: ", player_health, "Ecurb's health: ", computer_health)
            # switch turns
            player_turn = not player_turn
            computer_turn = not computer_turn
        # once main game while loop breaks, determine winner and congratulate
        if winner == "Player":
            print("\nYour health: ", player_health, "Ecurb's health: ", computer_health)
            print("\nCongratulations! You have won. You proceed to insult Ecurb: 'foolishness disgusts me'.")
            break
        else:
            print("\nYour health: ", player_health, "Ecurb's health: ", computer_health)
            print("\nSorry, but your opponent wiped the floor with you. As Ecurb dealt the final stab, "
                  "your conscious fades.")
        print("\nWould you like to play again? (answer 'yes' or 'y' to play again. Otherwise, continue to next scenario.)")
        answer = input("> ").lower()
        if answer not in ("yes", "y"):
            play_again = False
    time.sleep(10)
    print("\n" * 5)
def battle_chap2():
    """Interactive turn-based fight against the elf (chapter 2).

    The player must NOT kill the elf: reducing her to 1-10 HP wins the
    fight, while dropping her to 0 counts as killing her by accident.
    Turns alternate with a 20% miss chance; the fight replays until the
    player wins or declines the play-again prompt.  All I/O is via
    stdin/stdout; returns None.
    """
    print("Commencing battle")
    time.sleep(5)
    print("The elf that bumped into you does not seem to have a menacing aura.")
    time.sleep(3)
    print("You decide to not kill her, believing that she just needs mutual understanding.")
    time.sleep(3)
    print("Killing her will end the game, leave her at an HP of 10 or lower to finish the battle.")
    time.sleep(4)
    print("\n" * 2)
    play_again = True
    # Set up the play again loop
    while play_again:
        winner = None
        player_health = 100
        computer_health = 70
        # determine whose turn it is
        turn = random.randint(1,2) # heads or tails
        if turn == 1:
            player_turn = True
            computer_turn = False
            print("\nYou will go first.")
        else:
            player_turn = False
            computer_turn = True
            print("\nElf will go first.")
        print("\nYour health: ", player_health, "Elf health: ", computer_health)
        # set up the main game loop (exits only via the winner breaks below)
        while (player_health != 0 or computer_health != 0):
            heal_up = False # determine if heal has been used by the player. Resets false each loop.
            miss = False # determine if the chosen move will miss.
            # create a dictionary of the possible moves and randomly select the damage it does when selected
            # BUGFIX: Heal previously rolled randint(0, 0) and restored
            # nothing, contradicting the move menu text "Restore between
            # 20-25 health".
            player_movelist = {"Slash": random.randint(18, 25),
                               "Slice and Dice": random.randint(10, 35),
                               "Heal": random.randint(20, 25)}
            enemy_movelist = {"Shot": random.randint(13, 18),
                              "Power Shot": random.randint(7, 25),
                              "Vital Shot": random.randint(99, 99),
                              "Heal": random.randint(14, 18)}
            if player_turn:
                print("\nPlease select a move:\n1. Slash (Deal damage between 18-25)\n2. Slice and Dice (Deal damage between 10-35)\n3. Heal (Restore between 20-25 health)\n")
                player_move = input("> ").lower()
                move_miss = random.randint(1,5) # 20% of missing
                if move_miss == 1:
                    miss = True
                else:
                    miss = False
                if miss:
                    player_move = 0 # player misses and deals no damage
                    print("You missed!")
                else:
                    if player_move in ("1", "slash"):
                        player_move = player_movelist["Slash"]
                        print("\nYou used Slash. It dealt ", player_move, " damage.")
                    elif player_move in ("2", "slice and dice"):
                        player_move = player_movelist["Slice and Dice"]
                        print("\nYou used Slice and Dice. It dealt ", player_move, " damage.")
                    elif player_move in ("3", "heal"):
                        heal_up = True # heal activated
                        player_move = player_movelist["Heal"]
                        print("\nYou used Heal. It healed for ", player_move, " health.")
                    else:
                        print("\nThat is not a valid move. Please try again.")
                        continue
            else: # computer turn
                move_miss = random.randint(1,5)
                if move_miss == 1:
                    miss = True
                else:
                    miss = False
                if miss:
                    computer_move = 0 # the computer misses and deals no damage
                    print("Elf missed!")
                else:
                    # Above 21 HP the elf always attacks, escalating as the
                    # player's health drops (Vital Shot is a finisher).
                    if computer_health > 21:
                        if player_health > 75:
                            computer_move = enemy_movelist["Shot"]
                            print("\nElf used Shot. It dealt ", computer_move, " damage.")
                        elif player_health > 35 and player_health <= 75: # computer decides whether to go big or play it safe
                            imoves = ["Shot", "Power Shot"]
                            imoves = random.choice(imoves)
                            computer_move = enemy_movelist[imoves]
                            print("\nElf used ", imoves, ". It dealt ", computer_move, " damage.")
                        elif player_health <= 35 and player_health > 10:
                            computer_move = enemy_movelist["Power Shot"]
                            print("\nElf used Power Shot. It dealt ", computer_move, " damage.")
                        elif player_health <= 10:
                            computer_move = enemy_movelist["Vital Shot"]
                            print("\nElf used Vital Shot. It dealt ", computer_move, " damage. You feel a very stinging"
                                  " pain in your heart.")
                    else: # if the computer has little health, there is a 50% chance they will heal
                        heal_or_fight = random.randint(1,2)
                        if heal_or_fight == 1:
                            heal_up = True
                            computer_move = enemy_movelist["Heal"]
                            print("\nElf used Heal. It healed for ", computer_move, " health.")
                        else:
                            if player_health > 75:
                                computer_move = enemy_movelist["Shot"]
                                print("\nElf used Shot. It dealt ", computer_move, " damage.")
                            elif player_health > 35 and player_health <= 75:
                                imoves = ["Shot", "Power Shot"]
                                imoves = random.choice(imoves)
                                computer_move = enemy_movelist[imoves]
                                print("\nElf used ", imoves, ". It dealt ", computer_move, " damage.")
                            elif player_health <= 35 and player_health > 10:
                                # BUGFIX: this branch looked up
                                # enemy_movelist["Deeper Stab"], a key that
                                # only exists in chapter 1's movelist, so a
                                # low-health elf attacking here raised
                                # KeyError.  The printed text already said
                                # "Power Shot".
                                computer_move = enemy_movelist["Power Shot"]
                                print("\nElf used Power Shot. It dealt ", computer_move, " damage.")
                            elif player_health <= 10:
                                computer_move = enemy_movelist["Vital Shot"]
                                print("\nElf used Vital Shot. It dealt ", computer_move, " damage. You feel a very "
                                      "stinging pain in your "
                                      "heart.")
            # Apply the rolled move to the appropriate side.
            if heal_up:
                if player_turn:
                    player_health += player_move
                    if player_health > 100:
                        player_health = 100 # cap max health. No over healing!
                else:
                    computer_health += computer_move
                    if computer_health > 70:
                        computer_health = 70
            else:
                if player_turn:
                    computer_health -= player_move
                    # BUGFIX: was "< 0", so reducing the elf to exactly 0 HP
                    # was not detected and the fight continued with a dead
                    # opponent.
                    if computer_health <= 0:
                        computer_health = 0 # cap minimum health at 0
                        winner = "Dead Computer"
                        break
                    elif computer_health > 0 and computer_health <= 10:
                        winner = "Player"
                        break
                else:
                    player_health -= computer_move
                    # BUGFIX: same exact-zero off-by-one for the player's HP.
                    if player_health <= 0:
                        player_health = 0
                        winner = "Computer"
                        break
            print("\nYour health: ", player_health, "Elf's health: ", computer_health)
            # switch turns
            player_turn = not player_turn
            computer_turn = not computer_turn
        # once main game while loop breaks, determine winner and congratulate
        if winner == "Player":
            print("\nYour health: ", player_health, "Elf's health: ", computer_health)
            print("\nCongratulations! You have won. You proceed to approach the elf.")
            break
        elif winner == "Dead Computer":
            print("\nYour health: ", player_health, "Elf's health: ", computer_health)
            print("\nYou've slain the innocent elf by accident.")
        else:
            print("\nYour health: ", player_health, "Elf's health: ", computer_health)
            print("\nSorry, but your opponent wiped the floor with you. The elf looks at you with furious eyes while"
                  " leaving you.")
        print("\nWould you like to play again? (answer 'yes' or 'y' to play again. Otherwise, continue to next scenario.)")
        answer = input("> ").lower()
        if answer not in ("yes", "y"):
            play_again = False
    time.sleep(10)
    print("\n" * 5)
def battle_chap2_1():
    """Scripted (intended-to-lose) fight: the Wanderer vs a 999 HP golem.

    The player's attacks deal only 1-2 damage, so the golem is
    effectively unbeatable; losing breaks out and advances the story,
    while a (practically impossible) win merely prints a joke message
    and offers a replay.  All I/O is via stdin/stdout; returns None.
    """
    print("Commencing battle")
    time.sleep(5)
    print("\n" * 2)
    play_again = True
    # Set up the play again loop
    while play_again:
        winner = None
        player_health = 100
        computer_health = 999
        # determine whose turn it is
        turn = random.randint(1,2) # heads or tails
        if turn == 1:
            player_turn = True
            computer_turn = False
            print("\nWanderer will go first.")
        else:
            player_turn = False
            computer_turn = True
            print("\nGolem will go first.")
        print("\nWanderer's health: ", player_health, "Golem's health: ", computer_health)
        # set up the main game loop (exits only via the winner breaks below)
        while (player_health != 0 or computer_health != 0):
            heal_up = False # determine if heal has been used by the player. Resets false each loop.
            miss = False # determine if the chosen move will miss.
            # create a dictionary of the possible moves and randomly select the damage it does when selected
            # Attack damage is deliberately nerfed to 1-2 for this fight.
            player_movelist = {"Slash": random.randint(1, 1),
                               "Slice and Dice": random.randint(1, 2),
                               "Heal": random.randint(20, 25)}
            enemy_movelist = {"Whacc": random.randint(24, 30),
                              "Bigger Whacc": random.randint(19, 38),
                              "Omega Whacc": random.randint(99, 99)}
            if player_turn:
                print("\nPlease select a move:\n1. Slash (Deals 1 damage)\n2. Slice and Dice (Deal damage between 1-2)\n3. Heal (Restore between 20-25 health)\n")
                player_move = input("> ").lower()
                move_miss = random.randint(1,5) # 20% of missing
                if move_miss == 1:
                    miss = True
                else:
                    miss = False
                if miss:
                    player_move = 0 # player misses and deals no damage
                    print("Wanderer missed!")
                else:
                    if player_move in ("1", "slash"):
                        player_move = player_movelist["Slash"]
                        print("\nWanderer used Slash. It dealt ", player_move, " damage.")
                    elif player_move in ("2", "slice and dice"):
                        player_move = player_movelist["Slice and Dice"]
                        print("\nWanderer used Slice and Dice. It dealt ", player_move, " damage.")
                    elif player_move in ("3", "heal"):
                        heal_up = True # heal activated
                        player_move = player_movelist["Heal"]
                        print("\nWanderer used Heal. It healed for ", player_move, " health.")
                    else:
                        # Invalid input: retry without losing the turn.
                        print("\nThat is not a valid move. Please try again.")
                        continue
            else: # computer turn
                move_miss = random.randint(1,5)
                if move_miss == 1:
                    miss = True
                else:
                    miss = False
                if miss:
                    computer_move = 0 # the computer misses and deals no damage
                    print("Golem missed!")
                else:
                    # The golem never heals; it escalates as the player weakens.
                    if player_health > 75:
                        computer_move = enemy_movelist["Whacc"]
                        print("\nGolem used Whacc. It dealt ", computer_move, " damage.")
                    elif player_health > 35 and player_health <= 75: # computer decides whether to go big or play it safe
                        imoves = ["Whacc", "Bigger Whacc"]
                        imoves = random.choice(imoves)
                        computer_move = enemy_movelist[imoves]
                        print("\nGolem used ", imoves, ". It dealt ", computer_move, " damage.")
                    elif player_health <= 35 and player_health > 10:
                        computer_move = enemy_movelist["Bigger Whacc"]
                        print("\nGolem used Bigger Whacc. It dealt ", computer_move, " damage.")
                    elif player_health <= 10:
                        computer_move = enemy_movelist["Omega Whacc"]
                        print("\nGolem used Omega Whacc. It dealt ", computer_move, " damage.")
            if heal_up:
                if player_turn:
                    player_health += player_move
                    if player_health > 100:
                        player_health = 100 # cap max health. No over healing!
                else:
                    # NOTE(review): dead code -- the golem has no Heal move,
                    # so heal_up is never True on the computer's turn.
                    computer_health += computer_move
                    if computer_health > 999:
                        computer_health = 999
            else:
                if player_turn:
                    computer_health -= player_move
                    # NOTE(review): "< 0" misses an exact-zero kill -- likely
                    # should be "<= 0"; confirm intended.
                    if computer_health < 0:
                        computer_health = 0 # cap minimum health at 0
                        winner = "Player"
                        break
                else:
                    player_health -= computer_move
                    # NOTE(review): same exact-zero gap as above.
                    if player_health < 0:
                        player_health = 0
                        winner = "Computer"
                        break
            print("\nWanderer's health: ", player_health, "Golem's health: ", computer_health)
            # switch turns
            player_turn = not player_turn
            computer_turn = not computer_turn
        # once main game while loop breaks, determine winner and congratulate
        # Only a loss breaks out of the replay loop; a win offers a replay.
        if winner == "Player":
            print("\nWanderer's health: ", player_health, "Golem's health: ", computer_health)
            print("\nYou're not supposed to win.")
        else:
            print("\nWanderer's health: ", player_health, "Golem's health: ", computer_health)
            print("\nSorry, but your opponent wiped the floor with you. The golem continues to wreck more havoc.")
            break
        print("\nWould you like to play again? (answer 'yes' or 'y' to play again. Otherwise, continue to next scenario.)")
        answer = input("> ").lower()
        if answer not in ("yes", "y"):
            play_again = False
    time.sleep(10)
    print("\n" * 5)
def battle_chap3A():
    """Chapter 3A battle: Wanderer vs. Blue-eyes White Dragon.

    Turn-based fight. The dragon cannot actually be killed: once its health
    drops below 850 the story declares the Wanderer exhausted and the fight
    ends in the player's favour. Losing offers a replay prompt; "winning"
    moves straight on to the next scenario.
    """
    print("Commencing battle")
    time.sleep(5)
    print("From your fight with Nairdirina and the golem, you have learned a new attack: Slashburst Flurry.")
    time.sleep(3)
    print("Accuracy has also been increased from 80% to 90%.")
    time.sleep(3)
    print("\n" * 2)
    play_again = True
    # Outer loop: one iteration per battle attempt (replay offered on a loss).
    while play_again:
        winner = None
        player_health = 100
        computer_health = 1000
        # Coin flip decides who moves first.
        if random.randint(1, 2) == 1:
            player_turn = True
            print("\nWanderer will go first.")
        else:
            player_turn = False
            print("\nBlue-eyes White Dragon will go first.")
        print("\nWanderer's health: ", player_health, "Blue-eyes White Dragon's health: ", computer_health)
        # Main battle loop. Looping on `winner` replaces the original
        # `player_health != 0 or computer_health != 0` condition, which was
        # effectively `while True` and relied entirely on the breaks below.
        while winner is None:
            heal_up = False  # set when the player picks Heal this turn
            # Re-roll every move's damage each turn.
            player_movelist = {"Slash": random.randint(18, 25),
                               "Slice and Dice": random.randint(10, 35),
                               "Heal": random.randint(20, 25),
                               "Slashburst Flurry": random.randint(8, 42)}
            enemy_movelist = {"Fireball": random.randint(20, 28),
                              "Fireblaze": random.randint(14, 38),
                              "Gust of Wind": random.randint(18, 25),
                              "Tail Slam": random.randint(10, 35),
                              "Hellfire Judgement": 999}  # guaranteed finisher
            if player_turn:
                print("\nPlease select a move:\n1. Slash (Deal damage between 18-25)\n2. Slice and Dice (Deal damage between 10-35)\n3. Heal (Restore between 20-25 health)\n4. Slashburst Flurry (Deal damage between 8-42)\n")
                player_move = input("> ").lower()
                if random.randint(1, 10) == 1:  # 10% chance of missing
                    player_move = 0  # a miss deals no damage (even on invalid input)
                    print("Wanderer missed!")
                elif player_move in ("1", "slash"):
                    player_move = player_movelist["Slash"]
                    print("\nWanderer used Slash. It dealt ", player_move, " damage.")
                elif player_move in ("2", "slice and dice"):
                    player_move = player_movelist["Slice and Dice"]
                    print("\nWanderer used Slice and Dice. It dealt ", player_move, " damage.")
                elif player_move in ("3", "heal"):
                    heal_up = True  # heal activated
                    player_move = player_movelist["Heal"]
                    print("\nWanderer used Heal. It healed for ", player_move, " health.")
                elif player_move in ("4", "slashburst flurry"):
                    player_move = player_movelist["Slashburst Flurry"]
                    print("\nWanderer used Slashburst Flurry. It dealt ", player_move, " damage.")
                else:
                    print("\nThat is not a valid move. Please try again.")
                    continue  # re-prompt without consuming the turn
            else:  # computer turn
                if random.randint(1, 5) == 1:  # 20% chance of missing
                    computer_move = 0  # a miss deals no damage
                    print("Blue-eyes White Dragon missed!")
                else:
                    # The dragon escalates as the player weakens.
                    if player_health > 75:
                        choice = random.choice(["Fireball", "Gust of Wind"])
                    elif player_health > 35:  # go big or play it safe
                        choice = random.choice(["Fireball", "Gust of Wind", "Fireblaze", "Tail Slam"])
                    elif player_health > 2:
                        choice = random.choice(["Fireblaze", "Tail Slam"])
                    else:
                        choice = "Hellfire Judgement"
                    computer_move = enemy_movelist[choice]
                    if choice == "Hellfire Judgement":
                        print("\nBlue-eyes White Dragon used Hellfire Judgement, it was SUPER EFFECTIVE!!! It "
                              "dealt ", computer_move, " damage.")
                    else:
                        print("\nBlue-eyes White Dragon used ", choice, ". It dealt ", computer_move, " damage.")
            if heal_up:
                # Only the player can heal in this battle; cap at max health.
                player_health = min(player_health + player_move, 100)
            elif player_turn:
                computer_health -= player_move
                if computer_health < 850:
                    # Story outcome: after ~150 damage the Wanderer is spent.
                    winner = "Player"
                    break
            else:
                player_health -= computer_move
                # Bug fix: reaching exactly 0 health must also end the fight
                # (the original only checked `< 0`, so a player on exactly 0
                # health could keep fighting).
                if player_health <= 0:
                    player_health = 0
                    winner = "Computer"
                    break
            print("\nWanderer's health: ", player_health, "Blue-eyes White Dragon's health: ", computer_health)
            player_turn = not player_turn  # switch turns
        # Battle over: report the outcome.
        print("\nWanderer's health: ", player_health, "Blue-eyes White Dragon's health: ", computer_health)
        if winner == "Player":
            print("\nYou realized that you have exhausted yourself, so you can't attack anymore.")
            break
        print("\nSorry, but your opponent wiped the floor with you. Blue-eyes White Dragon continues "
              "to scorch you alive.")
        print("\nWould you like to play again? (answer 'yes' or 'y' to play again. Otherwise, continue to next scenario.)")
        answer = input("> ").lower()
        if answer not in ("yes", "y"):
            play_again = False
    # Pause, then clear the screen before the next scenario.
    time.sleep(10)
    print("\n" * 5)
def battle_chap3B():
    """Chapter 3B battle: Wanderer vs. the Wurm.

    Same structure as battle_chap3A, except neither side can miss: the wurm
    is too big for the player to miss, and its own attacks always land.
    The wurm cannot actually be killed — once its health drops below 850 the
    story ends the fight in the player's favour. Losing offers a replay
    prompt.
    """
    print("Commencing battle")
    time.sleep(5)
    print("From your fight with Nairdirina and the golem, you have learned a new attack: Slashburst Flurry.")
    time.sleep(3)
    print("Accuracy is 100% because enemy is too big for you to miss.")
    time.sleep(3)
    print("\n" * 2)
    play_again = True
    # Outer loop: one iteration per battle attempt (replay offered on a loss).
    while play_again:
        winner = None
        player_health = 100
        computer_health = 1000
        # Coin flip decides who moves first.
        if random.randint(1, 2) == 1:
            player_turn = True
            print("\nWanderer will go first.")
        else:
            player_turn = False
            print("\nWurm will go first.")
        print("\nWanderer's health: ", player_health, "Wurm's health: ", computer_health)
        # Main battle loop; exits via the win/lose breaks below.
        while winner is None:
            heal_up = False  # set when the player picks Heal this turn
            # Re-roll every move's damage each turn.
            player_movelist = {"Slash": random.randint(18, 25),
                               "Slice and Dice": random.randint(10, 35),
                               "Heal": random.randint(20, 25),
                               "Slashburst Flurry": random.randint(8, 42)}
            enemy_movelist = {"Chomp": random.randint(20, 27),
                              "Suck": random.randint(12, 37),
                              "Burrow": random.randint(18, 25),
                              "Tremor": random.randint(10, 35),
                              "Terramorphic Erosion": 999}  # guaranteed finisher
            # No miss rolls in this battle: the original rolled
            # randint(1, 5) and compared it to 6, which can never match —
            # that dead code is removed on both sides.
            if player_turn:
                print("\nPlease select a move:\n1. Slash (Deal damage between 18-25)\n2. Slice and Dice (Deal damage between 10-35)\n3. Heal (Restore between 20-25 health)\n4. Slashburst Flurry (Deal damage between 8-42)\n")
                player_move = input("> ").lower()
                if player_move in ("1", "slash"):
                    player_move = player_movelist["Slash"]
                    print("\nWanderer used Slash. It dealt ", player_move, " damage.")
                elif player_move in ("2", "slice and dice"):
                    player_move = player_movelist["Slice and Dice"]
                    print("\nWanderer used Slice and Dice. It dealt ", player_move, " damage.")
                elif player_move in ("3", "heal"):
                    heal_up = True  # heal activated
                    player_move = player_movelist["Heal"]
                    print("\nWanderer used Heal. It healed for ", player_move, " health.")
                elif player_move in ("4", "slashburst flurry"):
                    player_move = player_movelist["Slashburst Flurry"]
                    print("\nWanderer used Slashburst Flurry. It dealt ", player_move, " damage.")
                else:
                    print("\nThat is not a valid move. Please try again.")
                    continue  # re-prompt without consuming the turn
            else:  # computer turn — the wurm's attacks cannot miss
                # The wurm escalates as the player weakens.
                if player_health > 75:
                    choice = random.choice(["Chomp", "Burrow"])
                elif player_health > 35:  # go big or play it safe
                    choice = random.choice(["Chomp", "Burrow", "Suck", "Tremor"])
                elif player_health > 2:
                    choice = random.choice(["Suck", "Tremor"])
                else:
                    choice = "Terramorphic Erosion"
                computer_move = enemy_movelist[choice]
                if choice == "Terramorphic Erosion":
                    print("\nWurm used Terramorphic Erosion, it was SUPER EFFECTIVE!!! It "
                          "dealt ", computer_move, " damage.")
                else:
                    print("\nWurm used ", choice, ". It dealt ", computer_move, " damage.")
            if heal_up:
                # Only the player can heal in this battle; cap at max health.
                player_health = min(player_health + player_move, 100)
            elif player_turn:
                computer_health -= player_move
                if computer_health < 850:
                    # Story outcome: after ~150 damage the fight ends.
                    winner = "Player"
                    break
            else:
                player_health -= computer_move
                # Bug fix: reaching exactly 0 health must also end the fight
                # (the original only checked `< 0`).
                if player_health <= 0:
                    player_health = 0
                    winner = "Computer"
                    break
            print("\nWanderer's health: ", player_health, "Wurm's health: ", computer_health)
            player_turn = not player_turn  # switch turns
        # Battle over: report the outcome.
        print("\nWanderer's health: ", player_health, "Wurm's health: ", computer_health)
        if winner == "Player":
            print("\nDue to its attacks, the distance between you and the wurm has gotten farther,"
                  " you can't attack anymore.")
            break
        print("\nSorry, but your opponent wiped the floor with you. Wurm procceeds to consume you.")
        print("\nWould you like to play again? (answer 'yes' or 'y' to play again. Otherwise, continue to next scenario.)")
        answer = input("> ").lower()
        if answer not in ("yes", "y"):
            play_again = False
    # Pause, then clear the screen before the next scenario.
    time.sleep(10)
    print("\n" * 5)
def battle_chap4_training():
    """Chapter 4 training duel: Wanderer vs. Commander Langdorf.

    A winnable, symmetric fight: both sides share the same move pool and
    start at 200 health. The commander may heal himself (50% chance) once
    he is at 35 health or below. The battle runs exactly once — there is
    no replay prompt.
    """
    print("Commencing battle")
    time.sleep(5)
    print("All your attacks deal higher damage and some names are changed.")
    time.sleep(3)
    print("Healing ability has also increased.")
    time.sleep(3)
    print("New attack has been added: Slice of luck")
    time.sleep(3)
    print("Health has also increased.")
    time.sleep(3)
    print("\n" * 2)
    play_again = True
    # Outer loop kept for structural consistency with the other battles;
    # both outcome branches break, so it runs exactly once.
    while play_again:
        winner = None
        player_health = 200
        computer_health = 200

        def commander_attack():
            """Pick, announce, and return the damage of the commander's attack.

            Escalates as the player weakens. Shared by the healthy and
            low-health code paths, which the original duplicated verbatim.
            """
            if player_health > 150:
                dmg = enemy_movelist["Strike"]
                print("\nCommander Langdorf used Strike. It dealt ", dmg, " damage.")
            elif player_health > 70:  # go big or play it safe
                move = random.choice(["Strike", "Cutting Edge Strike", "Raging Slash", "Slice of Luck"])
                dmg = enemy_movelist[move]
                print("\nCommander Langdorf used ", move, ". It dealt ", dmg, " damage.")
            else:
                move = random.choice(["Raging Slash", "Slice of Luck"])
                dmg = enemy_movelist[move]
                print("\nCommander Langdorf used ", move, ". It dealt ", dmg, " damage.")
            return dmg

        # Coin flip decides who moves first.
        if random.randint(1, 2) == 1:
            player_turn = True
            print("\nWanderer will go first.")
        else:
            player_turn = False
            print("\nCommander Langdorf will go first.")
        print("\nWanderer's health: ", player_health, "Commander Langdorf's health: ", computer_health)
        # Main battle loop. Looping on `winner` replaces the original
        # `!= 0 or != 0` condition, which was effectively `while True`.
        while winner is None:
            heal_up = False  # set when either side heals this turn
            # Re-roll every move's damage each turn (both sides share the pool).
            player_movelist = {"Strike": random.randint(28, 35),
                               "Cutting Edge Strike": random.randint(20, 45),
                               "Better Heal": random.randint(35, 50),
                               "Raging Slash": random.randint(18, 52),
                               "Slice of Luck": random.randint(0, 80)}
            enemy_movelist = {"Strike": random.randint(28, 35),
                              "Cutting Edge Strike": random.randint(20, 45),
                              "Better Heal": random.randint(35, 50),
                              "Raging Slash": random.randint(18, 52),
                              "Slice of Luck": random.randint(0, 80)}
            if player_turn:
                # Menu fix: Cutting Edge Strike actually rolls 20-45 damage
                # (the original menu claimed 35-50).
                print("\nPlease select a move:\n1. Strike (Deal damage between 28-35)\n2. Cutting Edge Strike (Deal damage between 20-45)\n3. Better Heal (Restore between 35-50 health)\n4. Raging Slash (Deal damage between 18-52)\n5. Slice of Luck (Deal damage between 0-80)\n")
                player_move = input("> ").lower()
                if random.randint(1, 10) == 1:  # 10% chance of missing
                    player_move = 0  # a miss deals no damage (even on invalid input)
                    print("Wanderer missed!")
                elif player_move in ("1", "strike"):
                    player_move = player_movelist["Strike"]
                    print("\nWanderer used Strike. It dealt ", player_move, " damage.")
                elif player_move in ("2", "cutting edge strike"):
                    player_move = player_movelist["Cutting Edge Strike"]
                    print("\nWanderer used Cutting Edge Strike. It dealt ", player_move, " damage.")
                elif player_move in ("3", "better heal"):
                    heal_up = True  # heal activated
                    player_move = player_movelist["Better Heal"]
                    print("\nWanderer used Better Heal. It healed for ", player_move, " health.")
                elif player_move in ("4", "raging slash"):
                    player_move = player_movelist["Raging Slash"]
                    print("\nWanderer used Raging Slash. It dealt ", player_move, " damage.")
                elif player_move in ("5", "slice of luck"):
                    player_move = player_movelist["Slice of Luck"]
                    print("\nWanderer used Slice of Luck. It dealt ", player_move, " damage.")
                else:
                    print("\nThat is not a valid move. Please try again.")
                    continue  # re-prompt without consuming the turn
            else:  # computer turn
                if random.randint(1, 10) == 1:  # 10% chance of missing
                    computer_move = 0  # a miss deals no damage
                    print("Commander Langdorf missed!")
                elif computer_health <= 35 and random.randint(1, 2) == 1:
                    # Low on health: 50% chance the commander heals instead.
                    heal_up = True
                    computer_move = enemy_movelist["Better Heal"]
                    print("\nCommander Langdorf used Heal. It healed for ", computer_move, " health.")
                else:
                    computer_move = commander_attack()
            if heal_up:
                if player_turn:
                    player_health = min(player_health + player_move, 200)  # no over-healing
                else:
                    computer_health = min(computer_health + computer_move, 200)
            elif player_turn:
                computer_health -= player_move
                # Bug fix: reaching exactly 0 health must also end the fight
                # (the original only checked `< 0`).
                if computer_health <= 0:
                    computer_health = 0
                    winner = "Player"
                    break
            else:
                player_health -= computer_move
                if player_health <= 0:
                    player_health = 0
                    winner = "Computer"
                    break
            print("\nWanderer's health: ", player_health, "Commander Langdorf's health: ", computer_health)
            player_turn = not player_turn  # switch turns
        # Battle over: report the outcome (single run, no replay).
        print("\nWanderer's health: ", player_health, "Commander Langdorf's health: ", computer_health)
        if winner == "Player":
            print("\nCongratulations! You have won. You have proved your competence to the Commander.")
            time.sleep(3)
            print("Commander Langdorf: 'Nice, just like my expectations.'")
        else:
            print("\nSorry, but your opponent wiped the floor with you. It seems you still have more to learn.")
            time.sleep(3)
            print("Commander Langdorf: 'Relax kid, you did well.'")
        break
    # Pause, then clear the screen before the next scenario.
    time.sleep(10)
    print("\n" * 5)
def battle_chap4_valorex():
print("Commencing battle")
time.sleep(5)
print("\n" * 2)
play_again = True
# Set up the play again loop
while play_again:
winner = None
player_health = 200
computer_health = 150
# determine whose turn it is
turn = random.randint(1,2) # heads or tails
if turn == 1:
player_turn = True
computer_turn = False
print("\nWanderer will go first.")
else:
player_turn = False
computer_turn = True
print("\nValorex will go first.")
print("\nWanderer's health: ", player_health, "Valorex's health: ", computer_health)
# set up the main game loop
while (player_health != 0 or computer_health != 0):
heal_up = False # determine if heal has been used by the player. Resets false each loop.
miss = False # determine if the chosen move will miss.
# create a dictionary of the possible moves and randomly select the damage it does when selected
player_movelist = {"Strike": random.randint(28, 35),
"Cutting Edge Strike": random.randint(20, 45),
"Better Heal": random.randint(35, 50),
"Raging Slash": random.randint(18, 52),
"Slice of Luck": random.randint(0, 80)}
enemy_movelist = {"??": random.randint(30, 37),
"???": random.randint(22, 47),
"+++": random.randint(35, 50),
"!!!": random.randint(20, 54)}
if player_turn:
print("\nPlease select a move:\n1. Strike (Deal damage between 28-35)\n2. Cutting Edge Strike (Deal damage between 35-50)\n3. Better Heal (Restore between 35-50 health)\n4. Raging Slash (Deal damage between 18-52)\n5. Slice of Luck (Deal damage between 0-80)\n")
player_move = input("> ").lower()
move_miss = random.randint(1,3) # 33.3% of missing
if move_miss == 1:
miss = True
else:
miss = False
if miss:
player_move = 0 # player misses and deals no damage
print("Wanderer missed!")
else:
if player_move in ("1", "strike"):
player_move = player_movelist["Strike"]
print("\nWanderer used Strike. It dealt ", player_move, " damage.")
elif player_move in ("2", "cutting edge strike"):
player_move = player_movelist["Cutting Edge Strike"]
print("\nWanderer used Cutting Edge Strike. It dealt ", player_move, " damage.")
elif player_move in ("3", "better heal"):
heal_up = True # heal activated
player_move = player_movelist["Better Heal"]
print("\nWanderer used Better Heal. It healed for ", player_move, " health.")
elif player_move in ("4", "raging slash"):
player_move = player_movelist["Raging Slash"]
print("\nWanderer used Raging Slash. It dealt ", player_move, " damage.")
elif player_move in ("5", "slice of luck"):
player_move = player_movelist["Slice of Luck"]
print("\nWanderer used Slice of Luck. It dealt ", player_move, " damage.")
else:
print("\nThat is not a valid move. Please try again.")
continue
else: # computer turn
move_miss = random.randint(1,10)
if move_miss == 1:
miss = True
else:
miss = False
if miss:
computer_move = 0 # the computer misses and deals no damage
print("Valorex missed!")
else:
if computer_health > 35:
if player_health > 150:
computer_move = enemy_movelist["??"]
print("\nValorex used ??. It dealt ", computer_move, " damage.")
elif player_health > 70 and player_health <= 150: # computer decides whether to go big or play it safe
imoves = ["??", "???", "!!!"]
imoves = random.choice(imoves)
computer_move = enemy_movelist[imoves]
print("\nValorex used ", imoves, ". It dealt ", computer_move, " damage.")
elif player_health <= 70:
computer_move = enemy_movelist["!!!"]
print("\nValorex used !!!. It dealt ", computer_move, " damage.")
else: # if the computer has little health, there is a 50% chance they will heal
heal_or_fight = random.randint(1, 2)
if heal_or_fight == 1:
heal_up = True
computer_move = enemy_movelist["+++"]
print("\nValorex used +++. It healed for ", computer_move, " health.")
else:
if player_health > 150:
computer_move = enemy_movelist["??"]
print("\nValorex used ??. It dealt ", computer_move, " damage.")
elif player_health > 70 and player_health <= 150: # computer decides whether to go big or play it safe
imoves = ["??", "???", "!!!"]
imoves = random.choice(imoves)
computer_move = enemy_movelist[imoves]
print("\nValorex used ", imoves, ". It dealt ", computer_move, " damage.")
elif player_health <= 70:
computer_move = enemy_movelist["!!!"]
print("\nValorex used !!!. It dealt ", computer_move, " damage.")
if heal_up:
if player_turn:
player_health += player_move
if player_health > 200:
player_health = 200 # cap max health. No over healing!
else:
computer_health += computer_move
if computer_health > 150:
computer_health = 150
else:
if player_turn:
computer_health -= player_move
if computer_health < 0:
computer_health = 1 # cap minimum health at 1
winner = "Player"
break
elif computer_health > 0 and computer_health <= 20:
winner = "Player"
break
else:
player_health -= computer_move
if player_health < 0:
player_health = 1 # cap minimum health at 1
winner = "Computer"
break
elif player_health > 0 and player_health <= 20:
winner = "Computer"
break
print("\nWanderer's health: ", player_health, "Valorex's health: ", computer_health)
# switch turns
player_turn = not player_turn
computer_turn = not computer_turn
# once main game while loop breaks, determine winner and congratulate
if winner == "Player":
print("\nWanderer's health: ", player_health, "Valorex's health: ", computer_health)
print("\nCongratulations! You have won.")
time.sleep(5)
print("As you are about to deliver the killing blow, Valorex does a backflip and throws a smoke bomb.")
time.sleep(5)
print("Valorex: 'Same as the old days, ey? You’re as readable as those fairy tales that your "
"mom told you every night.'")
time.sleep(5)
print("Wanderer: 'What? Wait, how do you know me? Who the hell are you?'")
time.sleep(4)
print("\n" * 5)
break
else:
print("\nWanderer's health: ", player_health, "Commander Langdorf's health: ", computer_health)
print("\nSorry, but your opponent wiped the floor with you.")
time.sleep(5)
print("Valorex is about to stab your neck, but as the dagger gets very close to your neck, the dagger"
" stops.")
time.sleep(5)
print("Valorex: 'See, 'peaceful' right? You live, I live.'")
time.sleep(3)
print("Wanderer: 'At least you are a man of your word, why don’t you finish me off?'")
time.sleep(4)
print("Valorex: 'Killing you now is not worth it. The next time we meet, checkmate.'")
time.sleep(4)
print("Valorex: 'I am actually here to confirm something, and now i know it.'")
time.sleep(4)
print("Wanderer: 'What do you know of me? Tell me this instance. Tell me who you are?'")
time.sleep(4)
print("\n" * 5)
break | 46.669665 | 279 | 0.487289 | 5,848 | 57,077 | 4.635602 | 0.068741 | 0.062415 | 0.022207 | 0.031724 | 0.899591 | 0.889409 | 0.884725 | 0.873142 | 0.865432 | 0.858165 | 0 | 0.026386 | 0.428964 | 57,077 | 1,223 | 280 | 46.669665 | 0.805357 | 0.0821 | 0 | 0.858452 | 0 | 0.019348 | 0.228132 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.007128 | false | 0 | 0.002037 | 0 | 0.009165 | 0.207739 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1ba4f0d423ff4eaea60f4bfd49cca051efb55901 | 5,899 | py | Python | tests/test_integration.py | cristianMeli/ubatch | fb3c6dccf0a9e25e25f5956e2e91ed70e9ea01ee | [
"Apache-2.0"
] | null | null | null | tests/test_integration.py | cristianMeli/ubatch | fb3c6dccf0a9e25e25f5956e2e91ed70e9ea01ee | [
"Apache-2.0"
] | null | null | null | tests/test_integration.py | cristianMeli/ubatch | fb3c6dccf0a9e25e25f5956e2e91ed70e9ea01ee | [
"Apache-2.0"
] | null | null | null | import pytest
from threading import Thread
from time import sleep
from ubatch.ubatch import UBatch
class MyTestException(Exception):
"""Use to test if an exceptions occurs process/thread continue running"""
pass
def in_batch(input_data):
if 6 in input_data:
raise MyTestException
return [x ** 2 for x in input_data]
def in_batch_parameters(input_data, mode):
if 6 in input_data:
raise MyTestException
return [x ** 2 if y == 'square' else x ** 3 for x, y in zip(input_data, mode)]
@pytest.mark.no_cover # TODO: not working!
@pytest.mark.timeout(5)
def test_multiple_put_outputs_consumed_at_once(reraise, mocker):
"""Test multiple thread using UBatch at same time
Simulate multiple put at same time, process batch should be called once
with all inputs.
UBatch._wait_buffer_ready logic ensure all elements in queue will
be consumed after checking timeout, this allow to put elements in queue
before starting UBatch simulating a constant flow of inputs in
UBatch.
Scenario:
11 threads put integers from 0 to 10 at same time, batch will be
consumed from 5 to 5 (max_batch_size). What happens is that
process_batch need to be called 3 times, first call has to be with
[0, 1, 2, 3, 4], second time call has to be with [5, 6, 7, 8, 9] and
the last call has to be with [10]
Use reraise to fail test if any thread assert false
"""
N_THREADS = 11
MAX_SIZE = 5
TIMEOUT = 0.1
mb = UBatch(max_size=MAX_SIZE, timeout=TIMEOUT)
mb.set_handler(handler=in_batch)
process_batch_spy = mocker.spy(mb, "_handler")
# Simulate threads waiting for outputs
def run(i):
with reraise:
try:
output = mb.ubatch(i)
except MyTestException:
if i in [5, 6, 7, 8, 9]:
assert True
else:
assert False
else:
# Test output received by thread is what we expect
assert output == i ** 2
# Create 5 threads waiting for outputs, this simulate flask thread
threads = [Thread(target=run, args=(i,)) for i in range(N_THREADS)]
# Start thread before staring UBatch process. ensuring queue
# have all threads data and process all data in only one batch.
# TODO: This assumes that thread start in order, so inputs will
# be 1, 2, 3, ...
for t in threads:
sleep(0.1)
t.start()
try:
mb.start()
# Wait for threads to get outputs
for t in threads:
t.join()
except Exception:
assert False
finally:
mb.stop()
calls = [
mocker.call([0, 1, 2, 3, 4]),
mocker.call([5, 6, 7, 8, 9]),
mocker.call([10]),
]
process_batch_spy.assert_has_calls(calls)
assert process_batch_spy.call_count == 3
@pytest.mark.no_cover
@pytest.mark.timeout(5)
def test_multiple_parameters(reraise, mocker):
"""Test multiple thread using UBatch at same time
Simulate multiple put at same time, process batch should be called once
with all inputs.
UBatch._wait_buffer_ready logic ensure all elements in queue will
be consumed after checking timeout, this allow to put elements in queue
before starting UBatch simulating a constant flow of inputs in
UBatch.
Scenario:
11 threads put integers from 0 to 10 at same time, and for each integer
asigns a mode that can be 'square' if the number is even or 'cube' if it's odd,
batch will be consumed from 5 to 5 (max_batch_size). What happens is that
process_batch need to be called 3 times, first call has to be with
input_data = [0, 1, 2, 3, 4] and mode = ['square', 'cube', 'square', 'cube', 'square'],
second time call has to be with input_data = [5, 6, 7, 8, 9] and mode = ['cube', 'square',
'cube', 'square', 'cube'] and the last call has to be with input_data = [10] and mode = ['square']
Use reraise to fail test if any thread assert false
"""
N_THREADS = 11
MAX_SIZE = 5
TIMEOUT = 0.1
mb = UBatch(max_size=MAX_SIZE, timeout=TIMEOUT)
mb.set_handler(handler=in_batch_parameters)
process_batch_spy = mocker.spy(mb, "_handler")
# Simulate threads waiting for outputs
def run(i):
with reraise:
try:
if i % 2 == 0:
output = mb.ubatch(i, 'square')
else:
output = mb.ubatch(i, 'cube')
except MyTestException:
if i in [5, 6, 7, 8, 9]:
assert True
else:
assert False
else:
# Test output received by thread is what we expect
if i % 2 == 0:
assert output == i ** 2
else:
assert output == i ** 3
# Create 5 threads waiting for outputs, this simulate flask thread
threads = [Thread(target=run, args=(i,)) for i in range(N_THREADS)]
# Start thread before staring UBatch process. ensuring queue
# have all threads data and process all data in only one batch.
# TODO: This assumes that thread start in order, so inputs will
# be 1, 2, 3, ...
for t in threads:
sleep(0.1)
t.start()
try:
mb.start()
# Wait for threads to get outputs
for t in threads:
t.join()
except Exception:
assert False
finally:
mb.stop()
calls = [
mocker.call([0, 1, 2, 3, 4], mode=['square', 'cube', 'square', 'cube', 'square']),
mocker.call([5, 6, 7, 8, 9], mode=['cube', 'square', 'cube', 'square', 'cube']),
mocker.call([10], mode=['square']),
]
process_batch_spy.assert_has_calls(calls)
assert process_batch_spy.call_count == 3
| 32.234973 | 106 | 0.606713 | 852 | 5,899 | 4.119718 | 0.190141 | 0.034188 | 0.017094 | 0.018803 | 0.825071 | 0.822222 | 0.78547 | 0.736182 | 0.721937 | 0.721937 | 0 | 0.027424 | 0.307679 | 5,899 | 182 | 107 | 32.412088 | 0.832027 | 0.438549 | 0 | 0.734694 | 0 | 0 | 0.027866 | 0 | 0 | 0 | 0 | 0.016484 | 0.132653 | 1 | 0.061224 | false | 0.010204 | 0.040816 | 0 | 0.132653 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1704f5d4d08068fedc1f3fe22e882407778aaf8c | 1,020 | py | Python | tests/test_git_utils.py | JorgeGarciaIrazabal/cf-scripts | 69f4f0268496281c2b9e2073e13566b985b06677 | [
"MIT"
] | 33 | 2018-02-28T04:05:46.000Z | 2022-01-13T15:34:43.000Z | tests/test_git_utils.py | JorgeGarciaIrazabal/cf-scripts | 69f4f0268496281c2b9e2073e13566b985b06677 | [
"MIT"
] | 1,371 | 2018-02-25T00:32:37.000Z | 2022-03-29T23:44:42.000Z | tests/test_git_utils.py | bgruening/cf-scripts | bca57b85be7c9b85a180210f74c90be293519371 | [
"MIT"
] | 62 | 2018-02-25T00:28:48.000Z | 2022-02-22T02:55:28.000Z | from conda_forge_tick.git_utils import trim_pr_josn_keys
def test_trim_pr_json_keys():
pr_json = {
"ETag": "blah",
"Last-Modified": "flah",
"id": 435,
"random": "string",
"head": {"reff": "foo"},
"base": {"repo": {"namee": "None", "name": "foo"}},
}
pr_json = trim_pr_josn_keys(pr_json)
assert "random" not in pr_json
assert pr_json["head"] == {}
assert pr_json["base"]["repo"] == {"name": "foo"}
assert pr_json["id"] == 435
def test_trim_pr_json_keys_src():
src_pr_json = {
"ETag": "blah",
"Last-Modified": "flah",
"id": 435,
"random": "string",
"head": {"reff": "foo"},
"base": {"repo": {"namee": "None", "name": "foo"}},
}
pr_json = trim_pr_josn_keys({"r": None}, src_pr_json=src_pr_json)
assert "random" not in pr_json
assert pr_json["head"] == {}
assert pr_json["base"]["repo"] == {"name": "foo"}
assert pr_json["id"] == 435
assert "r" not in pr_json
| 26.153846 | 69 | 0.540196 | 137 | 1,020 | 3.729927 | 0.262774 | 0.21135 | 0.1409 | 0.082192 | 0.829746 | 0.829746 | 0.747554 | 0.747554 | 0.747554 | 0.747554 | 0 | 0.015852 | 0.257843 | 1,020 | 38 | 70 | 26.842105 | 0.659181 | 0 | 0 | 0.666667 | 0 | 0 | 0.2 | 0 | 0 | 0 | 0 | 0 | 0.3 | 1 | 0.066667 | false | 0 | 0.033333 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ca15c6ad8376309dd52c38cd98af120efa436dab | 15,894 | py | Python | mgmt/migrations/0001_initial.py | AndreDrDre/AviationStockManagement | 8331edbacc936919ede04da12d7e529e5e38be1c | [
"CC0-1.0"
] | null | null | null | mgmt/migrations/0001_initial.py | AndreDrDre/AviationStockManagement | 8331edbacc936919ede04da12d7e529e5e38be1c | [
"CC0-1.0"
] | null | null | null | mgmt/migrations/0001_initial.py | AndreDrDre/AviationStockManagement | 8331edbacc936919ede04da12d7e529e5e38be1c | [
"CC0-1.0"
] | null | null | null | # Generated by Django 3.1.7 on 2021-06-24 08:41
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial database schema for the ``mgmt`` app.

    Auto-generated by Django 3.1.7.  Creates the inventory models
    (Employees, Parts, TailNumber, ToolChecker, WorkOrders, tooling,
    reorder/order history, Profile) and their relations.

    NOTE(review): several declarations look suspicious and are worth
    confirming against the current models (applied migrations must not
    be edited, so they are documented here rather than changed):
      * BooleanField defaults are the *strings* ``'False'``/``'True'``
        and IntegerField defaults the string ``'0'`` -- Django coerces
        these, but a bare string ``'False'`` is truthy.
      * ``default='AirFrame'`` on the ``tail_number`` choice fields is
        not one of the declared choices.
      * ``ManyToManyField(null=True)`` on ``parts.workorders`` has no
        effect (Django ignores ``null`` on M2M fields).
    """

    # This is the first migration of the app.
    initial = True

    dependencies = [
        # Several models below hold a ForeignKey to the project user model.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        # Simple lookup table of employee names.
        migrations.CreateModel(
            name='Employees',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=50, null=True)),
            ],
        ),
        # Central inventory model: one row per stocked part.
        migrations.CreateModel(
            name='Parts',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('waybill', models.CharField(blank=True, max_length=50, null=True)),
                ('urlWayBill', models.URLField(blank=True, max_length=50, null=True)),
                ('description', models.CharField(blank=True, max_length=50, null=True)),
                ('part_number', models.CharField(blank=True, max_length=50, null=True)),
                ('serial_number', models.CharField(blank=True, max_length=50, null=True)),
                ('price', models.IntegerField(blank=True, null=True)),
                ('jobCardNumber', models.CharField(blank=True, max_length=50, null=True)),
                ('Historical', models.BooleanField(blank=True, default='False', null=True)),
                ('batch_no', models.CharField(blank=True, max_length=50, null=True)),
                ('expiry_date', models.DateTimeField(blank=True, null=True)),
                ('quantity', models.IntegerField(blank=True, default='0', null=True)),
                ('reorder_level', models.IntegerField(blank=True, default='0', null=True)),
                ('receive_quantity', models.IntegerField(blank=True, default='0', null=True)),
                ('order_quantity', models.IntegerField(blank=True, default='0', null=True)),
                ('issue_quantity', models.IntegerField(blank=True, default='0', null=True)),
                ('ipc_reference', models.CharField(blank=True, max_length=50, null=True)),
                ('date_ordered', models.DateTimeField(auto_now=True)),
                ('date_received', models.DateTimeField(blank=True, null=True)),
                ('ticketed', models.BooleanField(blank=True, default='False', null=True)),
                ('recieve_part', models.BooleanField(default='False')),
                ('invoice_number', models.CharField(blank=True, max_length=50, null=True)),
                ('vendor', models.CharField(blank=True, max_length=50, null=True)),
                ('purchase_order_number', models.CharField(blank=True, max_length=50, null=True, verbose_name='PurchaseOrder #')),
                ('Repaired', models.BooleanField(blank=True, default='False', null=True)),
                ('Quarentine', models.BooleanField(blank=True, default='False', null=True)),
                ('exported', models.BooleanField(blank=True, default='False', null=True)),
                ('bin_number', models.CharField(blank=True, max_length=50, null=True)),
                ('cert_document', models.ImageField(blank=True, null=True, upload_to='Cert-Parts', verbose_name='Certification Document')),
                ('SRN', models.CharField(blank=True, max_length=50, null=True)),
                ('barcode', models.ImageField(blank=True, null=True, upload_to='barcode-Parts')),
                ('part_type', models.CharField(choices=[('Rotable', 'Rotable'), ('Tires', 'Tires'), ('AGS', 'AGS'), ('Consumables', 'Consumables'), ('Shelf-life', 'Shelf-life')], default='Rotable', max_length=20, verbose_name='Part-Type')),
                ('condition', models.CharField(choices=[('NEW', 'NEW'), ('OH', 'OH'), ('AR', 'AR'), ('SV', 'SV'), ('REPAIRABLE', 'REPAIRABLE'), ('DAMAGED', 'DAMAGED'), ('INCORRECT-DOC', 'INCORRECT-DOC'), ('WRONG-PART', 'WRONG-PART')], default='NEW', max_length=20)),
                ('repaired_by', models.CharField(choices=[('INHOUSE', 'INHOUSE'), ('SHOP', 'SHOP')], default='', max_length=20)),
                # NOTE(review): default='--' for a FK column looks unintentional.
                ('inspector', models.ForeignKey(blank=True, default='--', null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='inspector', to='mgmt.employees', verbose_name='Inspector:')),
                ('issued_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issued_by', to='mgmt.employees', verbose_name='Issued by:')),
                ('ordered_by', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='ordered_by', to='mgmt.employees', verbose_name='Ordered by:')),
            ],
        ),
        # Lookup table of aircraft tail numbers.
        migrations.CreateModel(
            name='TailNumber',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(blank=True, max_length=50, null=True)),
            ],
        ),
        # Flag model distinguishing calibrated from un-calibrated tools.
        migrations.CreateModel(
            name='ToolChecker',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tool_type', models.IntegerField(choices=[(0, 'Calibrated'), (1, 'Un-Calibrated')], default=1)),
            ],
        ),
        # Maintenance work orders, keyed to a tail number and an owning user.
        migrations.CreateModel(
            name='WorkOrders',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tail_number', models.CharField(choices=[('N192WW', 'N192WW'), ('N193WW', 'N193WW'), ('N194WW', 'N194WW'), ('N195WW', 'N195WW'), ('N196WW', 'N196WW'), ('N197WW', 'N197WW'), ('N198WW', 'N198WW'), ('N190WW', 'N190WW'), ('N906WW', 'N906WW'), ('N3830S', 'N3830S'), ('N323WW', 'N323WW'), ('N491AK', 'N491AK'), ('N492AK', 'N492AK'), ('N725WW', 'N725WW'), ('N789WW', 'N789WW'), ('N921WW', 'N921WW')], default='AirFrame', max_length=20)),
                ('status', models.CharField(choices=[('OPEN', 'OPEN'), ('COMPLETED', 'COMPLETED')], default='OPEN', max_length=20)),
                ('description', models.TextField(blank=True, null=True)),
                ('type_airframe', models.CharField(blank=True, max_length=50, null=True)),
                ('date_added', models.DateTimeField(auto_now=True)),
                ('date_closed', models.DateTimeField(blank=True, null=True)),
                ('ldgs_at_open', models.IntegerField(blank=True, default='0', null=True)),
                ('hours_at_open', models.IntegerField(blank=True, default='0', null=True)),
                ('workorder_number', models.CharField(blank=True, max_length=50, null=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Un-calibrated tool inventory, optionally tied to a work order.
        migrations.CreateModel(
            name='Tools_UnCalibrated',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(blank=True, max_length=50, null=True)),
                ('serial_number', models.CharField(blank=True, max_length=50, null=True)),
                ('part_number', models.CharField(blank=True, max_length=50, null=True)),
                ('recieved', models.DateTimeField(auto_now=True)),
                ('issued', models.BooleanField(blank=True, default='False', null=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('workorder_no', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='mgmt.workorders')),
            ],
        ),
        # Calibrated tool inventory: adds calibration dates/certificates.
        migrations.CreateModel(
            name='Tools_Calibrated',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(blank=True, max_length=50, null=True)),
                ('serial_number', models.CharField(blank=True, max_length=50, null=True)),
                ('part_number', models.CharField(blank=True, max_length=50, null=True)),
                ('recieved', models.DateTimeField(auto_now=True)),
                ('calibrated', models.BooleanField(blank=True, default='True', null=True)),
                ('calibrated_date', models.DateTimeField(blank=True, null=True)),
                ('expiry_date', models.DateTimeField(blank=True, null=True)),
                ('cert_no', models.CharField(blank=True, max_length=50, null=True)),
                ('calibration_certificate', models.ImageField(blank=True, null=True, upload_to='Cert-Tools', verbose_name='Certification Document')),
                ('range_no', models.CharField(blank=True, max_length=50, null=True)),
                ('issued', models.BooleanField(blank=True, default='False', null=True)),
                ('barcode', models.ImageField(blank=True, null=True, upload_to='barcode-calibrated')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
                ('workorder_no', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='mgmt.workorders')),
            ],
        ),
        # Items queued for reordering when stock falls below reorder_level.
        migrations.CreateModel(
            name='ReorderItems',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('part_type', models.CharField(choices=[('Rotable', 'Rotable'), ('Tires', 'Tires'), ('AGS', 'AGS'), ('Consumables', 'Consumables'), ('Shelf-life', 'Shelf-life')], default='Rotable', max_length=20, verbose_name='Part-Type')),
                ('description', models.CharField(blank=True, max_length=50, null=True)),
                ('part_number', models.CharField(blank=True, max_length=50, null=True)),
                ('ipc_reference', models.CharField(blank=True, max_length=50, null=True)),
                ('tail_number', models.CharField(choices=[('Stock', 'Stock'), ('N192WW', 'N192WW'), ('N193WW', 'N193WW'), ('N194WW', 'N194WW'), ('N195WW', 'N195WW'), ('N196WW', 'N196WW'), ('N197WW', 'N197WW'), ('N198WW', 'N198WW'), ('N190WW', 'N190WW'), ('N906WW', 'N906WW'), ('N3830S', 'N3830S'), ('N323WW', 'N323WW'), ('N491AK', 'N491AK'), ('N492AK', 'N492AK'), ('N725WW', 'N725WW'), ('N789WW', 'N789WW'), ('N921WW', 'N921WW')], default='AirFrame', max_length=20, verbose_name='Tail #')),
                ('ordered_by', models.CharField(choices=[('Terence', 'Terence'), ('James', 'James'), ('Denten', 'Denten'), ('Ivin', 'Ivin'), ('Ester', 'Ester')], default='', max_length=20, verbose_name='Ordered by:')),
                ('date_ordered', models.DateTimeField(auto_now=True)),
                ('price', models.IntegerField(blank=True, null=True)),
                ('reorder_level', models.IntegerField(blank=True, default='0', null=True)),
                ('quantity', models.IntegerField(blank=True, default='0', null=True)),
                ('order_quantity', models.IntegerField(blank=True, default='0', null=True)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # One-to-one user profile holding a display name.
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200, null=True)),
                ('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Through model joining Parts to WorkOrders with per-issue details.
        migrations.CreateModel(
            name='PartWorkOrders',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('issue_quantity', models.IntegerField(blank=True, default='0', null=True)),
                ('price', models.DecimalField(decimal_places=2, default=0.0, max_digits=15)),
                ('removed_from', models.CharField(blank=True, max_length=50, null=True)),
                ('removed_by', models.CharField(blank=True, max_length=50, null=True)),
                ('cert_document', models.ImageField(blank=True, null=True, upload_to='', verbose_name='Certification Document')),
                ('jobCardNumber', models.CharField(blank=True, max_length=50, null=True)),
                ('receivedRepair', models.BooleanField(blank=True, default='False', null=True)),
                ('issued_by', models.CharField(choices=[('Terence', 'Terence'), ('James', 'James'), ('Denten', 'Denten'), ('Ivin', 'Ivin')], default='', max_length=20, verbose_name='Issued by:')),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('part', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='mgmt.parts')),
                ('workorder', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='mgmt.workorders')),
            ],
        ),
        # Relations added to Parts after the dependent models exist.
        migrations.AddField(
            model_name='parts',
            name='tail_number',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, to='mgmt.tailnumber', verbose_name='Tail #'),
        ),
        migrations.AddField(
            model_name='parts',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='parts',
            name='workorders',
            field=models.ManyToManyField(null=True, related_name='parts', through='mgmt.PartWorkOrders', to='mgmt.WorkOrders'),
        ),
        # Audit log of past orders.
        migrations.CreateModel(
            name='OrderHistory',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(blank=True, max_length=50, null=True)),
                ('part_number', models.CharField(blank=True, max_length=50, null=True)),
                ('date_ordered', models.DateTimeField(auto_now=True)),
                ('order_quantity', models.IntegerField(blank=True, default='0', null=True)),
                ('ipc_reference', models.CharField(blank=True, max_length=50, null=True)),
                ('vendor', models.CharField(blank=True, max_length=50, null=True)),
                ('ordered_by', models.CharField(choices=[('Terence', 'Terence'), ('James', 'James'), ('Denten', 'Denten'), ('Ivin', 'Ivin')], default='', max_length=20, verbose_name='Ordered by:')),
                ('tail_number', models.CharField(choices=[('Stock', 'Stock'), ('N192WW', 'N192WW'), ('N193WW', 'N193WW'), ('N194WW', 'N194WW'), ('N195WW', 'N195WW'), ('N196WW', 'N196WW'), ('N197WW', 'N197WW'), ('N198WW', 'N198WW'), ('N190WW', 'N190WW'), ('N906WW', 'N906WW'), ('N3830S', 'N3830S'), ('N323WW', 'N323WW'), ('N491AK', 'N491AK'), ('N492AK', 'N492AK'), ('N725WW', 'N725WW'), ('N789WW', 'N789WW'), ('N921WW', 'N921WW')], default='AirFrame', max_length=20, verbose_name='Tail #')),
                ('part_type', models.CharField(choices=[('Rotable', 'Rotable'), ('Tires', 'Tires'), ('AGS', 'AGS'), ('Consumables', 'Consumables'), ('Shelf-life', 'Shelf-life')], default='Rotable', max_length=20, verbose_name='Part-Type')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 77.531707 | 490 | 0.604379 | 1,701 | 15,894 | 5.51264 | 0.114051 | 0.068252 | 0.04479 | 0.067186 | 0.820518 | 0.804308 | 0.772848 | 0.762504 | 0.719313 | 0.701184 | 0 | 0.034094 | 0.213854 | 15,894 | 204 | 491 | 77.911765 | 0.716367 | 0.002831 | 0 | 0.598985 | 1 | 0 | 0.185019 | 0.002777 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.015228 | 0 | 0.035533 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ca41c5df8c1af2529468818ff8bc1c03049366a7 | 3,860 | py | Python | apps/processing/pmo/migrations/0008_delete_agg_create_unique_index.py | gis4dis/poster_new | cee983bfcfc90f581b18712d558bc9d8a83a400a | [
"BSD-3-Clause"
] | 4 | 2017-10-17T10:52:27.000Z | 2020-08-30T10:13:46.000Z | apps/processing/pmo/migrations/0008_delete_agg_create_unique_index.py | gis4dis/poster_new | cee983bfcfc90f581b18712d558bc9d8a83a400a | [
"BSD-3-Clause"
] | 138 | 2017-10-13T09:09:02.000Z | 2020-06-05T18:55:33.000Z | apps/processing/pmo/migrations/0008_delete_agg_create_unique_index.py | gis4dis/poster_new | cee983bfcfc90f581b18712d558bc9d8a83a400a | [
"BSD-3-Clause"
] | 2 | 2018-01-21T19:44:51.000Z | 2018-02-15T11:27:39.000Z | # Generated by Django 2.1.5 on 2019-03-25 14:08
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Purge aggregated observations and add unique indexes (pmo app).

    The raw SQL below deletes every observation produced by an
    aggregation procedure (``sum_total`` / ``circle_mean`` /
    ``arithmetic_mean`` / ``avg_day`` / ``avg_hour``) together with the
    related-observations link rows that reference it, for both the
    watercourse and weather observation tables.  It then creates a
    unique index on each table; the index uses
    ``COALESCE(time_slots_id, -1)`` so that rows with a NULL
    ``time_slots_id`` still participate in the uniqueness constraint
    (plain NULLs would never collide in a standard unique index).
    """

    dependencies = [
        ('pmo', '0007_add_timeslots_delete_unique'),
    ]

    operations = [
        # Watercourse: remove link rows whose *source* side is an
        # aggregated observation (must go before the observations themselves
        # to satisfy FK constraints).
        migrations.RunSQL("""
        DELETE FROM pmo_watercourseobservation_related_observations
        WHERE from_watercourseobservation_id IN (
            SELECT id FROM pmo_watercourseobservation WHERE procedure_id IN (
                SELECT id FROM common_process WHERE name_id IN (
                    'apps.common.aggregate.sum_total',
                    'apps.common.aggregate.circle_mean',
                    'apps.common.aggregate.arithmetic_mean',
                    'avg_day',
                    'avg_hour'
                )
            )
        );
        """),
        # Watercourse: remove link rows whose *target* side is aggregated.
        migrations.RunSQL("""
        DELETE FROM pmo_watercourseobservation_related_observations
        WHERE to_watercourseobservation_id IN (
            SELECT id FROM pmo_watercourseobservation WHERE procedure_id IN (
                SELECT id FROM common_process WHERE name_id IN (
                    'apps.common.aggregate.sum_total',
                    'apps.common.aggregate.circle_mean',
                    'apps.common.aggregate.arithmetic_mean',
                    'avg_day',
                    'avg_hour'
                )
            )
        );
        """),
        # Watercourse: now delete the aggregated observations themselves.
        migrations.RunSQL("""
        DELETE FROM pmo_watercourseobservation WHERE procedure_id IN (
            SELECT id FROM common_process WHERE name_id IN (
                'apps.common.aggregate.sum_total',
                'apps.common.aggregate.circle_mean',
                'apps.common.aggregate.arithmetic_mean',
                'avg_day',
                'avg_hour'
            )
        );
        """),
        # Watercourse: one observation per (time range, property, feature,
        # procedure, time slot); NULL time slots are folded to -1.
        migrations.RunSQL("""
        CREATE UNIQUE INDEX pmo_watercourseobservation_uniq ON pmo_watercourseobservation
        (
            phenomenon_time_range,
            observed_property_id,
            feature_of_interest_id,
            procedure_id,
            COALESCE(time_slots_id, -1)
        );
        """),
        # Weather: same three deletes as above, for the weather tables.
        migrations.RunSQL("""
        DELETE FROM pmo_weatherobservation_related_observations
        WHERE from_weatherobservation_id IN (
            SELECT id FROM pmo_weatherobservation WHERE procedure_id IN (
                SELECT id FROM common_process WHERE name_id IN (
                    'apps.common.aggregate.sum_total',
                    'apps.common.aggregate.circle_mean',
                    'apps.common.aggregate.arithmetic_mean',
                    'avg_day',
                    'avg_hour'
                )
            )
        );
        """),
        migrations.RunSQL("""
        DELETE FROM pmo_weatherobservation_related_observations
        WHERE to_weatherobservation_id IN (
            SELECT id FROM pmo_weatherobservation WHERE procedure_id IN (
                SELECT id FROM common_process WHERE name_id IN (
                    'apps.common.aggregate.sum_total',
                    'apps.common.aggregate.circle_mean',
                    'apps.common.aggregate.arithmetic_mean',
                    'avg_day',
                    'avg_hour'
                )
            )
        );
        """),
        migrations.RunSQL("""
        DELETE FROM pmo_weatherobservation WHERE procedure_id IN (
            SELECT id FROM common_process WHERE name_id IN (
                'apps.common.aggregate.sum_total',
                'apps.common.aggregate.circle_mean',
                'apps.common.aggregate.arithmetic_mean',
                'avg_day',
                'avg_hour'
            )
        );
        """),
        # Weather: matching unique index.
        migrations.RunSQL("""
        CREATE UNIQUE INDEX pmo_weatherobservation_uniq ON pmo_weatherobservation
        (
            phenomenon_time_range,
            observed_property_id,
            feature_of_interest_id,
            procedure_id,
            COALESCE(time_slots_id, -1)
        );
        """),
    ]
| 33.565217 | 93 | 0.574093 | 367 | 3,860 | 5.754768 | 0.177112 | 0.085227 | 0.161932 | 0.056818 | 0.861742 | 0.861742 | 0.861742 | 0.861742 | 0.861742 | 0.769886 | 0 | 0.008323 | 0.346373 | 3,860 | 114 | 94 | 33.859649 | 0.828775 | 0.011658 | 0 | 0.735849 | 1 | 0 | 0.876213 | 0.369263 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.018868 | 0 | 0.04717 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
04affb41bb65392de472371bca20a6447842c252 | 69 | py | Python | src/ukbb_common/__init__.py | Nealelab/ukb_common | ee063971d48e15ea4c525d26cf6745930d7106dc | [
"MIT"
] | 8 | 2020-03-06T12:32:44.000Z | 2021-11-17T18:00:13.000Z | src/ukbb_common/__init__.py | Nealelab/ukb_common | ee063971d48e15ea4c525d26cf6745930d7106dc | [
"MIT"
] | 1 | 2021-11-02T20:09:05.000Z | 2021-11-03T13:10:05.000Z | src/ukbb_common/__init__.py | Nealelab/ukb_common | ee063971d48e15ea4c525d26cf6745930d7106dc | [
"MIT"
] | 3 | 2020-07-27T04:14:52.000Z | 2021-09-15T13:43:23.000Z | from ukbb_common.utils import *
from ukbb_common.resources import *
| 17.25 | 35 | 0.811594 | 10 | 69 | 5.4 | 0.6 | 0.296296 | 0.518519 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.130435 | 69 | 3 | 36 | 23 | 0.9 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
04dbbe84589d890223e85616dac7becb62bc0e43 | 111 | py | Python | django_prbac/tests/__init__.py | doordash/django-prbac | ae07c8b36ceb12ef244ac3f4e20fa513307ba40d | [
"BSD-3-Clause"
] | null | null | null | django_prbac/tests/__init__.py | doordash/django-prbac | ae07c8b36ceb12ef244ac3f4e20fa513307ba40d | [
"BSD-3-Clause"
] | null | null | null | django_prbac/tests/__init__.py | doordash/django-prbac | ae07c8b36ceb12ef244ac3f4e20fa513307ba40d | [
"BSD-3-Clause"
] | null | null | null | from .test_fields import *
from .test_forms import *
from .test_decorators import *
from .test_models import *
| 22.2 | 30 | 0.783784 | 16 | 111 | 5.1875 | 0.4375 | 0.385542 | 0.506024 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.144144 | 111 | 4 | 31 | 27.75 | 0.873684 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
04dde1f2e8369670dc23248b7454b2778eb89aa4 | 147 | py | Python | app/accounts/__init__.py | uploadcare/intercom-rank | 1b4eb8ab8589c7ed85dd295f97016213deafec93 | [
"MIT"
] | 12 | 2016-03-10T15:58:51.000Z | 2020-05-07T21:46:03.000Z | app/accounts/__init__.py | uploadcare/intercom-rank | 1b4eb8ab8589c7ed85dd295f97016213deafec93 | [
"MIT"
] | 34 | 2019-06-14T16:17:13.000Z | 2020-12-17T10:19:45.000Z | app/accounts/__init__.py | uploadcare/intercom-rank | 1b4eb8ab8589c7ed85dd295f97016213deafec93 | [
"MIT"
] | 1 | 2016-12-23T11:02:07.000Z | 2016-12-23T11:02:07.000Z | from .models import * # NOQA
from .views import * # NOQA
from .forms import * # NOQA
from .manage import * # NOQA
from .setup import * # NOQA
| 24.5 | 29 | 0.659864 | 20 | 147 | 4.85 | 0.4 | 0.515464 | 0.57732 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.238095 | 147 | 5 | 30 | 29.4 | 0.866071 | 0.163265 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
ca008b56ee64f81d0dd54226812db67d64c3854e | 83 | py | Python | img2dataset/__init__.py | borisdayma/img2dataset | a44341d3681489da329ce4df29832b5b386f5497 | [
"MIT"
] | 1 | 2021-09-10T05:32:12.000Z | 2021-09-10T05:32:12.000Z | img2dataset/__init__.py | borisdayma/img2dataset | a44341d3681489da329ce4df29832b5b386f5497 | [
"MIT"
] | null | null | null | img2dataset/__init__.py | borisdayma/img2dataset | a44341d3681489da329ce4df29832b5b386f5497 | [
"MIT"
] | null | null | null | from img2dataset.downloader import main
from img2dataset.downloader import download | 41.5 | 43 | 0.891566 | 10 | 83 | 7.4 | 0.6 | 0.405405 | 0.675676 | 0.837838 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.026316 | 0.084337 | 83 | 2 | 43 | 41.5 | 0.947368 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 7 |
6d270c6f4c5e9ad851c6fe040bfff361dfbd4147 | 8,100 | py | Python | scripts/app_shell.py | JLivingston01/py_research | 928f74287039a933d27c5a5dc3df8db4cb79c152 | [
"MIT"
] | 1 | 2022-02-21T00:47:41.000Z | 2022-02-21T00:47:41.000Z | scripts/app_shell.py | JLivingston01/py_research | 928f74287039a933d27c5a5dc3df8db4cb79c152 | [
"MIT"
] | null | null | null | scripts/app_shell.py | JLivingston01/py_research | 928f74287039a933d27c5a5dc3df8db4cb79c152 | [
"MIT"
] | null | null | null |
import dash
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output, State
import plotly.graph_objs as go
import dash_table
from flask import Flask
# Single Flask server that hosts both Dash apps plus the plain index route.
server = Flask(__name__)

# Shared CSS grid stylesheet used by both Dash apps.
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']

# First Dash app, mounted on the shared Flask server under /dash/.
app = dash.Dash(__name__,
                external_stylesheets=external_stylesheets,
                server=server,
                url_base_pathname='/dash/')
# Callbacks reference ids rendered inside the layout tree; suppress the
# start-up validation that would otherwise flag them as missing.
app.config['suppress_callback_exceptions'] = True
# Layout of the first Dash app: a header row, then a body with two rows
# of content panes.  Pane 0 holds the ticker dropdown; its selection is
# echoed into 'output_1' (pane 1) by the render_content callback.
app.layout = html.Div(  # START DOCUMENT
    children=[
        html.Div(  # START HEADER
            [
                html.Div([html.H1("LOGO GOES HERE")], className="three columns"),
                html.Div([html.H5('MENU AND INTRO INFO')], className="nine columns"),
            ],
            className='twelve columns'),  # END HEADER
        html.Div(  # START BODY
            [
                html.Div(  # START ROW
                    [
                        html.Div([
                            html.Div([html.Label("Select Ticker:")], style={'color': 'white'}),
                            dcc.Dropdown(
                                id='ticker',
                                options=[{'label': i, 'value': i} for i in ['A', 'E', 'I', 'O', 'U', 'Y']],
                                value='Y'
                            ),
                            html.H5("SELECTIONS"),
                            html.H5("DROPDOWNS"),
                            html.H5("SLIDER"),
                            html.H5("CHECK BOXES"),
                            html.H5("RADIO BUTTONS"),
                        ], className='three columns', id='pane_0'),
                        html.Div([
                            html.H1("CONTENT PANE 1"),
                            # Target container filled by the render_content callback.
                            html.Div(id='output_1'),
                        ], className='five columns', id='pane_1'),
                        html.Div([
                            html.H1("CONTENT PANE 2"),
                        ], className='four columns', id='pane_2'),
                    ],
                    className='twelve columns'),  # END ROW
                html.Div(  # START ROW
                    [
                        html.Div([
                            html.H1("CONTENT PANE 3"),
                        ], className='six columns', id='pane_3'),
                        html.Div([
                            html.H1("CONTENT PANE 4"),
                        ], className='six columns', id='pane_4'),
                    ],
                    className='twelve columns'),  # END ROW
            ],
            className='twelve columns'),  # END BODY
    ]
)  # END DOCUMENT
@app.callback(
    Output('output_1', 'children'),
    [Input(component_id='ticker', component_property='value')],
)
def render_content(ticker_value):
    """Echo the selected ticker into content pane 1 as an H5 heading."""
    heading = html.H5(ticker_value)
    return heading
# Second Dash app sharing the same Flask server, mounted under /dash2/.
app2 = dash.Dash(__name__,
                 external_stylesheets=external_stylesheets,
                 server=server,
                 url_base_pathname='/dash2/')
# Same as app: layout/callback ids are validated lazily.
app2.config['suppress_callback_exceptions'] = True
# Layout of the second Dash app: same shell as the first app, but with a
# different logo, dropdown options ['A','B','C'], and 'output_1' placed
# in pane 2 instead of pane 1 (filled by the render_content2 callback).
app2.layout = html.Div(  # START DOCUMENT
    children=[
        html.Div(  # START HEADER
            [
                html.Div([html.H1("LOGO GOES HERE FOR APP2")], className="three columns"),
                html.Div([html.H5('MENU AND INTRO INFO')], className="nine columns"),
            ],
            className='twelve columns'),  # END HEADER
        html.Div(  # START BODY
            [
                html.Div(  # START ROW
                    [
                        html.Div([
                            html.Div([html.Label("Select Ticker:")], style={'color': 'white'}),
                            dcc.Dropdown(
                                id='ticker',
                                options=[{'label': i, 'value': i} for i in ['A', 'B', 'C']],
                                value='C'
                            ),
                            html.H5("SELECTIONS"),
                            html.H5("DROPDOWNS"),
                            html.H5("SLIDER"),
                            html.H5("CHECK BOXES"),
                            html.H5("RADIO BUTTONS"),
                        ], className='three columns', id='pane_0'),
                        html.Div([
                            html.H1("CONTENT PANE 1"),
                        ], className='five columns', id='pane_1'),
                        html.Div([
                            html.H1("CONTENT PANE 2"),
                            # Target container filled by the render_content2 callback.
                            html.Div(id='output_1'),
                        ], className='four columns', id='pane_2'),
                    ],
                    className='twelve columns'),  # END ROW
                html.Div(  # START ROW
                    [
                        html.Div([
                            html.H1("CONTENT PANE 3"),
                        ], className='six columns', id='pane_3'),
                        html.Div([
                            html.H1("CONTENT PANE 4"),
                        ], className='six columns', id='pane_4'),
                    ],
                    className='twelve columns'),  # END ROW
            ],
            className='twelve columns'),  # END BODY
    ]
)  # END DOCUMENT
@app2.callback(
    Output('output_1', 'children'),
    [Input(component_id='ticker', component_property='value')],
)
def render_content2(ticker_value):
    """Echo the ticker selected in app2 into content pane 2 as an H5 heading."""
    heading = html.H5(ticker_value)
    return heading
#if __name__ == '__main__':
# app.run_server(debug=False,
# port=8080)
@server.route('/', methods=['GET', 'POST'])
def index():
    """Plain-HTML landing page linking to both mounted Dash apps."""
    return '''
    <html>
    <a href='/dash'> app 1 </a><br>
    <a href='/dash2'> app 2 </a><br>
    </html>
    '''
@server.route("/dash", methods=['GET', 'POST'])
def my_dash_app():
    """Serve the first Dash app's rendered index page at /dash."""
    rendered = app.index()
    return rendered
@server.route("/dash2", methods=['GET', 'POST'])
def my_dash_app2():
    """Serve the second Dash app's rendered index page at /dash2."""
    rendered = app2.index()
    return rendered
if __name__ == '__main__':
    # Start the Flask development server only when this file is executed
    # directly.  The original called server.run() at module level, which
    # would also fire on import (e.g. under a WSGI server or in tests).
    server.run(debug=False, port=8081)

# Alternative left from earlier experiments: serve a single Dash app via
# its built-in server instead of the shared Flask server:
#   app.run_server(debug=False, port=8080)
edddcd9a7407395a7a43c2db6089050e18bbab0f | 197 | py | Python | tests/arbpack/arbparser.py | mofm/pypyr | f417f69ba9a607d8a93019854105cfbc4dc9c36d | [
"Apache-2.0"
] | 261 | 2020-08-18T19:31:29.000Z | 2022-03-31T14:54:06.000Z | tests/arbpack/arbparser.py | mofm/pypyr | f417f69ba9a607d8a93019854105cfbc4dc9c36d | [
"Apache-2.0"
] | 89 | 2017-04-12T09:50:32.000Z | 2020-08-13T13:18:36.000Z | tests/arbpack/arbparser.py | mofm/pypyr | f417f69ba9a607d8a93019854105cfbc4dc9c36d | [
"Apache-2.0"
] | 15 | 2020-09-30T12:15:50.000Z | 2022-03-30T07:25:40.000Z | """Test custom parser which returns directly the same thing that was passed."""
def get_parsed_context(context_arg):
    """Echo parser: wrap the argument unchanged under the 'parsed_context' key."""
    parsed = {'parsed_context': context_arg}
    return parsed
| 28.142857 | 79 | 0.730964 | 27 | 197 | 5.148148 | 0.703704 | 0.215827 | 0.28777 | 0.330935 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.15736 | 197 | 6 | 80 | 32.833333 | 0.837349 | 0.497462 | 0 | 0 | 0 | 0 | 0.159091 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 7 |
edfddd230b48a6fe2064356db0a5f89721473f79 | 1,515 | py | Python | tests/data/test_degenerate_zero_length_01.py | ideasman42/isect_segments-bentley_ottmann | 19deb3c5be4c2b91689b87548a875054b43e9952 | [
"MIT"
] | 80 | 2015-12-04T15:06:49.000Z | 2022-03-02T18:08:15.000Z | test/data/test_degenerate_zero_length_01.py | lolistoy/sweepline | 82a2464f984c119dd438489c5f826e9693a7fabf | [
"MIT"
] | 25 | 2015-10-18T13:58:28.000Z | 2021-06-23T21:54:54.000Z | test/data/test_degenerate_zero_length_01.py | lolistoy/sweepline | 82a2464f984c119dd438489c5f826e9693a7fabf | [
"MIT"
] | 37 | 2016-07-06T01:38:33.000Z | 2022-02-19T03:53:14.000Z | data = (
((-1.000000, 0.000000), (0.000000, 1.000000)),
((-1.000000, 0.000000), (-1.000000, 0.000000)),
((0.000000, -1.000000), (-1.000000, 0.000000)),
((1.000000, 0.000000), (0.000000, -1.000000)),
((1.000000, 0.000000), (1.000000, 0.000000)),
((0.000000, 1.000000), (1.000000, 0.000000)),
((0.000000, 1.000000), (0.000000, 1.000000)),
((0.000000, -1.000000), (0.000000, -1.000000)),
((-0.900000, 0.000000), (0.000000, 0.900000)),
((-0.900000, 0.000000), (-0.900000, 0.000000)),
((0.000000, -0.900000), (-0.900000, 0.000000)),
((0.900000, 0.000000), (0.000000, -0.900000)),
((0.900000, 0.000000), (0.900000, 0.000000)),
((0.000000, 0.900000), (0.900000, 0.000000)),
((0.000000, 0.900000), (0.000000, 0.900000)),
((0.000000, -0.900000), (0.000000, -0.900000)),
((-0.800000, 0.000000), (0.000000, 0.800000)),
((-0.800000, 0.000000), (-0.800000, 0.000000)),
((0.000000, -0.800000), (-0.800000, 0.000000)),
((0.800000, 0.000000), (0.000000, -0.800000)),
((0.800000, 0.000000), (0.800000, 0.000000)),
((0.000000, 0.800000), (0.800000, 0.000000)),
((0.000000, 0.800000), (0.000000, 0.800000)),
((0.000000, -0.800000), (0.000000, -0.800000)),
((-0.700000, 0.000000), (0.000000, 0.700000)),
((-0.700000, 0.000000), (-0.700000, 0.000000)),
((0.000000, -0.700000), (-0.700000, 0.000000)),
((0.700000, 0.000000), (0.000000, -0.700000)),
((0.700000, 0.000000), (0.700000, 0.000000)),
((0.000000, 0.700000), (0.700000, 0.000000)),
((0.000000, 0.700000), (0.000000, 0.700000)),
((0.000000, -0.700000), (0.000000, -0.700000)),
)
| 43.285714 | 47 | 0.594059 | 257 | 1,515 | 3.501946 | 0.027237 | 0.505556 | 0.471111 | 0.311111 | 0.995556 | 0.995556 | 0.995556 | 0.995556 | 0.995556 | 0.995556 | 0 | 0.647867 | 0.087129 | 1,515 | 34 | 48 | 44.558824 | 0.002892 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
611e3e3f083acf453d916e41f31937e80b6251cf | 72 | py | Python | HackerRank/Python/Introduction/Write a function.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | HackerRank/Python/Introduction/Write a function.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | HackerRank/Python/Introduction/Write a function.py | anubhab-code/Competitive-Programming | de28cb7d44044b9e7d8bdb475da61e37c018ac35 | [
"MIT"
] | null | null | null | def is_leap(y):
return y % 4 == 0 and (y % 100 != 0 or y % 400 == 0) | 36 | 56 | 0.486111 | 16 | 72 | 2.125 | 0.6875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.204082 | 0.319444 | 72 | 2 | 56 | 36 | 0.489796 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0 | 0 | 0.5 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |
b64f6a8d57de35070c89a0675a74a9d36bd0d753 | 1,258 | py | Python | LeetcodeSolutionsPython/test.py | anCoderr/leetcodeSolutions | bdccf4c58f61b835b4758a0d5fb0f3c4bc803dfb | [
"MIT"
] | null | null | null | LeetcodeSolutionsPython/test.py | anCoderr/leetcodeSolutions | bdccf4c58f61b835b4758a0d5fb0f3c4bc803dfb | [
"MIT"
] | null | null | null | LeetcodeSolutionsPython/test.py | anCoderr/leetcodeSolutions | bdccf4c58f61b835b4758a0d5fb0f3c4bc803dfb | [
"MIT"
] | null | null | null | import math
from binary_indexed_tree import *
from segment_tree import *
# Smoke-test the two range-sum structures side by side: build each over the
# same array, apply the same point update (index 2 -> -2), then print the
# first five prefix-sum queries of each.
print('Segment Tree')
seg = SegmentTree()
seg_tree = seg.build_tree([2, 3, -1, 5, -2, 4, 8, 10])
seg.update_tree(2, -2, 5, seg_tree)
for end in range(5):
    print(seg.sum_query(0, end, 8, seg_tree))
print('\nBinary Indexed Tree')
fenwick = BinaryIndexedTree()
fenwick_tree = fenwick.build_tree([2, 3, -1, 5, -2, 4, 8, 10])
fenwick.update_tree(fenwick_tree, 2, -2)
for end in range(5):
    print(fenwick.get_sum(fenwick_tree, end))
| 31.45 | 62 | 0.72337 | 219 | 1,258 | 3.968037 | 0.123288 | 0.202532 | 0.220944 | 0.264672 | 0.735328 | 0.735328 | 0.734177 | 0.734177 | 0.734177 | 0.734177 | 0 | 0.055802 | 0.102544 | 1,258 | 39 | 63 | 32.25641 | 0.713906 | 0.357711 | 0 | 0 | 0 | 0 | 0.041719 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.142857 | 0 | 0.142857 | 0.571429 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 9 |
b67086de812e44a830bf8ba6b042765ceff5cc18 | 9,849 | py | Python | tests/integration/simple_copy_test/test_simple_copy.py | leahwicz/dbt-snowflake | c8f1021ff75fc4c66d467b63aadd3ab67ef46438 | [
"Apache-2.0"
] | null | null | null | tests/integration/simple_copy_test/test_simple_copy.py | leahwicz/dbt-snowflake | c8f1021ff75fc4c66d467b63aadd3ab67ef46438 | [
"Apache-2.0"
] | null | null | null | tests/integration/simple_copy_test/test_simple_copy.py | leahwicz/dbt-snowflake | c8f1021ff75fc4c66d467b63aadd3ab67ef46438 | [
"Apache-2.0"
] | null | null | null | import json
import os
from pytest import mark
from tests.integration.base import DBTIntegrationTest, use_profile
class BaseTestSimpleCopy(DBTIntegrationTest):
@property
def schema(self):
return "simple_copy"
@staticmethod
def dir(path):
return path.lstrip('/')
@property
def models(self):
return self.dir("models")
@property
def project_config(self):
return self.seed_quote_cfg_with({
'profile': '{{ "tes" ~ "t" }}'
})
def seed_quote_cfg_with(self, extra):
cfg = {
'config-version': 2,
'seeds': {
'quote_columns': False,
}
}
cfg.update(extra)
return cfg
class TestSimpleCopy(BaseTestSimpleCopy):
@property
def project_config(self):
return self.seed_quote_cfg_with({"data-paths": [self.dir("seed-initial")]})
@use_profile("snowflake")
def test__snowflake__simple_copy(self):
self.use_default_project({
"data-paths": [self.dir("seed-initial")],
"seeds": {
'quote_columns': False,
}
})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
results = self.run_dbt()
self.assertEqual(len(results), 7)
self.assertManyTablesEqual(["SEED", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"])
self.use_default_project({"data-paths": [self.dir("seed-update")]})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
results = self.run_dbt()
self.assertEqual(len(results), 7)
self.assertManyTablesEqual(["SEED", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"])
self.use_default_project({
"test-paths": [self.dir("tests")],
"data-paths": [self.dir("seed-update")],
})
self.run_dbt(['test'])
@use_profile("snowflake")
def test__snowflake__simple_copy__quoting_off(self):
self.use_default_project({
"quoting": {"identifier": False},
"data-paths": [self.dir("snowflake-seed-initial")],
})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
results = self.run_dbt()
self.assertEqual(len(results), 7)
self.assertManyTablesEqual(["SEED", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"])
self.use_default_project({
"data-paths": [self.dir("snowflake-seed-update")],
"quoting": {"identifier": False},
})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
results = self.run_dbt()
self.assertEqual(len(results), 7)
self.assertManyTablesEqual(["SEED", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"])
self.use_default_project({
"test-paths": [self.dir("tests")],
"data-paths": [self.dir("snowflake-seed-update")],
"quoting": {"identifier": False},
})
self.run_dbt(['test'])
@use_profile("snowflake")
def test__snowflake__seed__quoting_switch(self):
self.use_default_project({
"quoting": {"identifier": False},
"data-paths": [self.dir("snowflake-seed-initial")],
})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
self.use_default_project({
"data-paths": [self.dir("snowflake-seed-update")],
"quoting": {"identifier": True},
})
results = self.run_dbt(["seed"], expect_pass=False)
self.use_default_project({
"test-paths": [self.dir("tests")],
"data-paths": [self.dir("snowflake-seed-initial")],
})
self.run_dbt(['test'])
class TestSimpleCopyQuotingIdentifierOn(BaseTestSimpleCopy):
@property
def project_config(self):
return self.seed_quote_cfg_with({
'quoting': {
'identifier': True,
},
})
@use_profile("snowflake")
def test__snowflake__simple_copy__quoting_on(self):
self.use_default_project({
"data-paths": [self.dir("snowflake-seed-initial")],
})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
results = self.run_dbt()
self.assertEqual(len(results), 7)
self.assertManyTablesEqual(["seed", "view_model", "incremental", "materialized", "get_and_ref"])
self.use_default_project({
"data-paths": [self.dir("snowflake-seed-update")],
})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
results = self.run_dbt()
self.assertEqual(len(results), 7)
self.assertManyTablesEqual(["seed", "view_model", "incremental", "materialized", "get_and_ref"])
# can't run the test as this one's identifiers will be the wrong case
class BaseLowercasedSchemaTest(BaseTestSimpleCopy):
def unique_schema(self):
# bypass the forced uppercasing that unique_schema() does on snowflake
return super().unique_schema().lower()
class TestSnowflakeSimpleLowercasedSchemaCopy(BaseLowercasedSchemaTest):
@use_profile('snowflake')
def test__snowflake__simple_copy(self):
self.use_default_project({"data-paths": [self.dir("snowflake-seed-initial")]})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
results = self.run_dbt()
self.assertEqual(len(results), 7)
self.assertManyTablesEqual(["SEED", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"])
self.use_default_project({"data-paths": [self.dir("snowflake-seed-update")]})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
results = self.run_dbt()
self.assertEqual(len(results), 7)
self.assertManyTablesEqual(["SEED", "VIEW_MODEL", "INCREMENTAL", "MATERIALIZED", "GET_AND_REF"])
self.use_default_project({
"test-paths": [self.dir("tests")],
"data-paths": [self.dir("snowflake-seed-update")],
})
self.run_dbt(['test'])
class TestSnowflakeSimpleLowercasedSchemaQuoted(BaseLowercasedSchemaTest):
@property
def project_config(self):
return self.seed_quote_cfg_with({
'quoting': {'identifier': False, 'schema': True}
})
@use_profile("snowflake")
def test__snowflake__seed__quoting_switch_schema_lower(self):
self.use_default_project({
"data-paths": [self.dir("snowflake-seed-initial")],
})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
# this is intentional - should not error!
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
self.use_default_project({
"data-paths": [self.dir("snowflake-seed-update")],
"quoting": {"identifier": False, "schema": False},
})
results = self.run_dbt(["seed"], expect_pass=False)
class TestSnowflakeSimpleUppercasedSchemaQuoted(BaseTestSimpleCopy):
@property
def project_config(self):
return self.seed_quote_cfg_with({
'quoting': {'identifier': False, 'schema': True}
})
@use_profile("snowflake")
def test__snowflake__seed__quoting_switch_schema_upper(self):
self.use_default_project({
"data-paths": [self.dir("snowflake-seed-initial")],
})
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
# this is intentional - should not error!
results = self.run_dbt(["seed"])
self.assertEqual(len(results), 1)
self.use_default_project({
"data-paths": [self.dir("snowflake-seed-update")],
"quoting": {"identifier": False, "schema": False},
})
results = self.run_dbt(["seed"])
class TestSnowflakeIncrementalOverwrite(BaseTestSimpleCopy):
@property
def models(self):
return self.dir("models-snowflake")
@use_profile("snowflake")
def test__snowflake__incremental_overwrite(self):
self.use_default_project({
"data-paths": [self.dir("snowflake-seed-initial")],
})
results = self.run_dbt(["run"])
self.assertEqual(len(results), 1)
results = self.run_dbt(["run"], expect_pass=False)
self.assertEqual(len(results), 1)
# Setting the incremental_strategy should make this succeed
self.use_default_project({
"models": {
"incremental_strategy": "delete+insert"
},
"data-paths": [self.dir("snowflake-seed-update")],
})
results = self.run_dbt(["run"])
self.assertEqual(len(results), 1)
class TestIncrementalMergeColumns(BaseTestSimpleCopy):
@property
def models(self):
return self.dir("models-merge-update")
@property
def project_config(self):
return {
"seeds": {
"quote_columns": False
}
}
def seed_and_run(self):
self.run_dbt(["seed"])
self.run_dbt(["run"])
@use_profile("snowflake")
def test__snowflake__incremental_merge_columns(self):
self.use_default_project({
"data-paths": ["seeds-merge-cols-initial"],
"seeds": {
"quote_columns": False
}
})
self.seed_and_run()
self.use_default_project({
"data-paths": ["seeds-merge-cols-update"],
"seeds": {
"quote_columns": False
}
})
self.seed_and_run()
self.assertTablesEqual("incremental_update_cols", "expected_result")
| 31.66881 | 104 | 0.59539 | 1,017 | 9,849 | 5.549656 | 0.113078 | 0.040928 | 0.058469 | 0.081325 | 0.803863 | 0.78703 | 0.77073 | 0.751772 | 0.735648 | 0.66708 | 0 | 0.00343 | 0.260026 | 9,849 | 310 | 105 | 31.770968 | 0.770993 | 0.02782 | 0 | 0.726141 | 0 | 0 | 0.184032 | 0.045459 | 0 | 0 | 0 | 0 | 0.136929 | 1 | 0.095436 | false | 0.012448 | 0.016598 | 0.049793 | 0.20332 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
b67eac771b45bc04d74c65051052c460a3cb7a6e | 94 | py | Python | crease_ga/utils/__init__.py | arthijayaraman-lab/crease-ga | e757811d73687b3cd1df2d40d607a37116a20a7d | [
"MIT"
] | null | null | null | crease_ga/utils/__init__.py | arthijayaraman-lab/crease-ga | e757811d73687b3cd1df2d40d607a37116a20a7d | [
"MIT"
] | 6 | 2021-07-27T16:26:35.000Z | 2022-03-14T19:47:01.000Z | crease_ga/utils/__init__.py | arthijayaraman-lab/crease_ga | e757811d73687b3cd1df2d40d607a37116a20a7d | [
"MIT"
] | 2 | 2021-07-20T18:04:32.000Z | 2021-07-20T18:10:27.000Z | from crease_ga.utils.initial_pop import initial_pop
from crease_ga.utils.decode import decode
| 31.333333 | 51 | 0.87234 | 16 | 94 | 4.875 | 0.5 | 0.25641 | 0.307692 | 0.435897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.085106 | 94 | 2 | 52 | 47 | 0.906977 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
fcabe7b901c3e92d4933bfd0c3501d7bb777016e | 2,161 | py | Python | reservoirpy/welltestpy/ssrf.py | scuervo91/reservoirpy | a4db620baf3ff66a85c7f61b1919713a8642e6fc | [
"MIT"
] | 16 | 2020-05-07T01:57:04.000Z | 2021-11-27T12:45:59.000Z | reservoirpy/welltestpy/ssrf.py | scuervo91/reservoirpy | a4db620baf3ff66a85c7f61b1919713a8642e6fc | [
"MIT"
] | null | null | null | reservoirpy/welltestpy/ssrf.py | scuervo91/reservoirpy | a4db620baf3ff66a85c7f61b1919713a8642e6fc | [
"MIT"
] | 5 | 2020-05-12T07:28:24.000Z | 2021-12-10T21:24:59.000Z | import numpy as np
import pandas as pd
from typing import Union
def ssrf_dp(
    q: Union[int, float, np.ndarray],
    b: Union[int, float, np.ndarray],
    mu: Union[int, float, np.ndarray],
    k: Union[int, float, np.ndarray],
    h: Union[int, float, np.ndarray],
    re: Union[int, float, np.ndarray],
    rw: Union[int, float, np.ndarray],
    s: Union[int, float, np.ndarray]
):
    """Pressure drop (psi) of single-phase steady-state radial flow.

    Parameters
    ----------
    q : Union[int, float, np.ndarray]
        Flow rate in bbl/d
    b : Union[int, float, np.ndarray]
        Formation volumetric factor
    mu : Union[int, float, np.ndarray]
        Viscosity in cP
    k : Union[int, float, np.ndarray]
        Permeability in md
    h : Union[int, float, np.ndarray]
        Height (net pay thickness) in ft
    re : Union[int, float, np.ndarray]
        External (drainage) radius in ft
    rw : Union[int, float, np.ndarray]
        Wellbore radius in ft
    s : Union[int, float, np.ndarray]
        Skin factor (dimensionless)
    """
    # Darcy's radial-flow equation in field units:
    # dp = 141.2 q B mu / (k h) * (ln(re/rw) + s)
    rate_term = 141.2 * q * b * mu
    geometry_term = np.log(re / rw) + s
    return rate_term * (1 / (k * h)) * geometry_term
def ssrf_q(
    dp: Union[int, float, np.ndarray],
    b: Union[int, float, np.ndarray],
    mu: Union[int, float, np.ndarray],
    k: Union[int, float, np.ndarray],
    h: Union[int, float, np.ndarray],
    re: Union[int, float, np.ndarray],
    rw: Union[int, float, np.ndarray],
    s: Union[int, float, np.ndarray]
):
    """Flow rate (bbl/d) of single-phase steady-state radial flow.

    Parameters
    ----------
    dp : Union[int, float, np.ndarray]
        Pressure drop in psi
    b : Union[int, float, np.ndarray]
        Formation volumetric factor
    mu : Union[int, float, np.ndarray]
        Viscosity in cP
    k : Union[int, float, np.ndarray]
        Permeability in md
    h : Union[int, float, np.ndarray]
        Height (net pay thickness) in ft
    re : Union[int, float, np.ndarray]
        External (drainage) radius in ft
    rw : Union[int, float, np.ndarray]
        Wellbore radius in ft
    s : Union[int, float, np.ndarray]
        Skin factor (dimensionless)
    """
    # Inverse of the radial-flow pressure-drop equation in field units:
    # q = 0.00708 k h dp / (B mu (ln(re/rw) + s))
    transmissibility = 0.00708 * k * h * dp
    resistance = b * mu * (np.log(re / rw) + s)
    return transmissibility * (1 / resistance)
| 28.434211 | 91 | 0.583526 | 326 | 2,161 | 3.855828 | 0.184049 | 0.20366 | 0.330947 | 0.381862 | 0.881464 | 0.865553 | 0.825776 | 0.825776 | 0.825776 | 0.754177 | 0 | 0.007571 | 0.266543 | 2,161 | 75 | 92 | 28.813333 | 0.785489 | 0.527071 | 0 | 0.64 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.08 | false | 0 | 0.12 | 0 | 0.28 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
fcb9e04acc23b8ae39b9541835cfc8158836d029 | 110 | py | Python | backend/util/response/products_list/__init__.py | willrp/willstores-ws | 62c4f400f40fed1aef4f316c7e73dfecba98d026 | [
"MIT"
] | null | null | null | backend/util/response/products_list/__init__.py | willrp/willstores-ws | 62c4f400f40fed1aef4f316c7e73dfecba98d026 | [
"MIT"
] | null | null | null | backend/util/response/products_list/__init__.py | willrp/willstores-ws | 62c4f400f40fed1aef4f316c7e73dfecba98d026 | [
"MIT"
] | null | null | null | from .products_list_response import ProductsListResponse
from .products_list_schema import ProductsListSchema
| 36.666667 | 56 | 0.909091 | 12 | 110 | 8 | 0.666667 | 0.25 | 0.333333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.072727 | 110 | 2 | 57 | 55 | 0.941176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
fcd12fc1c100beefbb2501b4bbb101f95c3694d3 | 5,221 | py | Python | models/vgg_modules.py | jackaduma/SpeakerRecognition-ResNet-GhostVLAD | fc969b072ff86cd354c8aa0e6a560c3b71ac8fef | [
"MIT"
] | 2 | 2022-03-16T02:39:40.000Z | 2022-03-21T01:44:42.000Z | models/vgg_modules.py | jackaduma/SpeakerRecognition-ResNet-GhostVLAD | fc969b072ff86cd354c8aa0e6a560c3b71ac8fef | [
"MIT"
] | null | null | null | models/vgg_modules.py | jackaduma/SpeakerRecognition-ResNet-GhostVLAD | fc969b072ff86cd354c8aa0e6a560c3b71ac8fef | [
"MIT"
] | null | null | null | #!python
# -*- coding: utf-8 -*-
# @author: Kun
'''
Author: Kun
Date: 2021-10-11 11:46:18
LastEditTime: 2021-10-11 11:46:18
LastEditors: Kun
Description:
FilePath: /SpeakerRecognition-ResNet-GhostVLAD/models/vgg_modules.py
'''
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn import Conv2d, BatchNorm2d
# NOTE(review): weight_decay is not referenced anywhere in the visible code of
# this module — presumably consumed by a training script; confirm before removal.
weight_decay = 1e-4
# Epsilon added to the BatchNorm denominator for numerical stability.
eps_const_value = 1e-05
# Momentum used for BatchNorm running-statistics updates.
momentum_const_value = 0.1
class ConvBlock2D(nn.Module):
    """Bottleneck residual block with a learned (projection) shortcut.

    Main path: 1x1 reduce -> 3x3 -> 1x1 expand, each convolution followed by
    batch normalization.  Shortcut path: a strided 1x1 projection so the
    shortcut matches the main path in channel count and spatial size.  The
    two paths are summed and passed through a final ReLU.
    """

    def __init__(self, filters_list, input, stride=(2, 2), **kwargs):
        super(ConvBlock2D, self).__init__()
        reduce_ch, mid_ch, out_ch = filters_list
        # Bottleneck entry: strided 1x1 conv shrinks the channel count.
        self.conv_1x1_reduce = Conv2d(in_channels=input,
                                      out_channels=reduce_ch,
                                      kernel_size=(1, 1),
                                      stride=stride, groups=1, bias=False)
        self.conv_1x1_reduce_bn = BatchNorm2d(num_features=reduce_ch,
                                              eps=eps_const_value,
                                              momentum=momentum_const_value)
        # Shortcut branch: strided 1x1 projection to the output width.
        self.conv_1x1_proj = Conv2d(in_channels=input,
                                    out_channels=out_ch,
                                    kernel_size=(1, 1),
                                    stride=stride, groups=1, bias=False)
        self.conv_1x1_proj_bn = BatchNorm2d(num_features=out_ch,
                                            eps=eps_const_value,
                                            momentum=momentum_const_value)
        # Spatial 3x3 conv; padding is applied manually in forward().
        self.conv_3x3 = Conv2d(in_channels=reduce_ch,
                               out_channels=mid_ch,
                               kernel_size=(3, 3),
                               stride=(1, 1), groups=1, bias=False)
        self.conv_3x3_bn = BatchNorm2d(num_features=mid_ch,
                                       eps=eps_const_value,
                                       momentum=momentum_const_value)
        # Bottleneck exit: 1x1 conv restores the output channel count.
        self.conv_1x1_increase = Conv2d(in_channels=mid_ch,
                                        out_channels=out_ch,
                                        kernel_size=(1, 1),
                                        stride=(1, 1), groups=1, bias=False)
        self.conv_1x1_increase_bn = BatchNorm2d(num_features=out_ch,
                                                eps=eps_const_value,
                                                momentum=momentum_const_value)

    def forward(self, x):
        # Shortcut: projected, normalized copy of the input.
        shortcut = self.conv_1x1_proj_bn(self.conv_1x1_proj(x))
        # Main path: reduce -> pad+3x3 -> expand, with ReLUs between stages.
        out = self.conv_1x1_reduce_bn(self.conv_1x1_reduce(x))
        out = F.pad(F.relu(out, inplace=True), (1, 1, 1, 1))
        out = self.conv_3x3_bn(self.conv_3x3(out))
        out = self.conv_1x1_increase_bn(self.conv_1x1_increase(F.relu(out, inplace=True)))
        return F.relu(out + shortcut, inplace=True)
class IdentityBlock2D(nn.Module):
    """Bottleneck residual block with an identity shortcut.

    Same 1x1 reduce -> 3x3 -> 1x1 expand main path as ConvBlock2D, but no
    striding, and the input tensor is added back unchanged — the residual
    add requires the input channel count to match filters_list[2].
    """

    def __init__(self, filters_list, input, **kwargs):
        super(IdentityBlock2D, self).__init__()
        reduce_ch, mid_ch, out_ch = filters_list
        # Bottleneck entry: 1x1 conv shrinks the channel count.
        self.conv_1x1_reduce = Conv2d(in_channels=input,
                                      out_channels=reduce_ch,
                                      kernel_size=(1, 1),
                                      stride=(1, 1), groups=1, bias=False)
        self.conv_1x1_reduce_bn = BatchNorm2d(num_features=reduce_ch,
                                              eps=eps_const_value,
                                              momentum=momentum_const_value)
        # Spatial 3x3 conv; padding is applied manually in forward().
        self.conv_3x3 = Conv2d(in_channels=reduce_ch,
                               out_channels=mid_ch,
                               kernel_size=(3, 3),
                               stride=(1, 1), groups=1, bias=False)
        self.conv_3x3_bn = BatchNorm2d(num_features=mid_ch,
                                       eps=eps_const_value,
                                       momentum=momentum_const_value)
        # Bottleneck exit: 1x1 conv restores the output channel count.
        self.conv_1x1_increase = Conv2d(in_channels=mid_ch,
                                        out_channels=out_ch,
                                        kernel_size=(1, 1),
                                        stride=(1, 1), groups=1, bias=False)
        self.conv_1x1_increase_bn = BatchNorm2d(num_features=out_ch,
                                                eps=eps_const_value,
                                                momentum=momentum_const_value)

    def forward(self, x):
        # Main path: reduce -> pad+3x3 -> expand, with ReLUs between stages.
        out = self.conv_1x1_reduce_bn(self.conv_1x1_reduce(x))
        out = F.pad(F.relu(out, inplace=True), (1, 1, 1, 1))
        out = self.conv_3x3_bn(self.conv_3x3(out))
        out = self.conv_1x1_increase_bn(self.conv_1x1_increase(F.relu(out, inplace=True)))
        # Identity shortcut: add the untouched input back in.
        return F.relu(out + x, inplace=True)
| 41.436508 | 78 | 0.520398 | 571 | 5,221 | 4.472855 | 0.154116 | 0.109632 | 0.107674 | 0.066562 | 0.841034 | 0.834377 | 0.801488 | 0.801488 | 0.766641 | 0.766641 | 0 | 0.070038 | 0.390155 | 5,221 | 125 | 79 | 41.768 | 0.732098 | 0.104386 | 0 | 0.77381 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.047619 | false | 0 | 0.047619 | 0 | 0.142857 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1e22dd953481ce82bbc01efd98d72bff9e17cee2 | 309 | py | Python | gpflow/training/__init__.py | codelover-without-talent/GPflow | 1af7b1ca7da6687974150a1440d821a106b2159d | [
"Apache-2.0"
] | 1 | 2020-01-27T19:05:28.000Z | 2020-01-27T19:05:28.000Z | gpflow/training/__init__.py | codelover-without-talent/GPflow | 1af7b1ca7da6687974150a1440d821a106b2159d | [
"Apache-2.0"
] | null | null | null | gpflow/training/__init__.py | codelover-without-talent/GPflow | 1af7b1ca7da6687974150a1440d821a106b2159d | [
"Apache-2.0"
] | 2 | 2019-03-09T11:46:11.000Z | 2021-12-20T10:22:34.000Z | # pylint: disable=wildcard-import
from .scipy_optimizer import ScipyOptimizer
from .hmc import HMC
from .natgrad_optimizer import XiTransform
from .natgrad_optimizer import XiNat
from .natgrad_optimizer import XiSqrtMeanVar
from .natgrad_optimizer import NatGradOptimizer
from .tensorflow_optimizer import *
| 30.9 | 47 | 0.857605 | 37 | 309 | 7 | 0.405405 | 0.34749 | 0.30888 | 0.401544 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.10356 | 309 | 9 | 48 | 34.333333 | 0.935018 | 0.100324 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1e5498806012b21fb05f7d646c0e7ec27a517162 | 39 | py | Python | runtime/handler/__init__.py | akrantz01/backendless | 27acada7ab5ee4e81f9e23e0079cfb15b9f6b09e | [
"MIT"
] | 1 | 2020-10-17T04:39:29.000Z | 2020-10-17T04:39:29.000Z | runtime/handler/__init__.py | akrantz01/backendless | 27acada7ab5ee4e81f9e23e0079cfb15b9f6b09e | [
"MIT"
] | null | null | null | runtime/handler/__init__.py | akrantz01/backendless | 27acada7ab5ee4e81f9e23e0079cfb15b9f6b09e | [
"MIT"
] | null | null | null | from .generate import generate_handler
| 19.5 | 38 | 0.871795 | 5 | 39 | 6.6 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.102564 | 39 | 1 | 39 | 39 | 0.942857 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
1e6f3ced689d59dc052406c2b9c27612ce8ce61b | 35,252 | py | Python | tests/unit/test_mementoresource.py | shawnmjones/MementoEmbed | 4d1b2eafc934502ff8a9e3ad3efeec8c0ddc8602 | [
"MIT"
] | 11 | 2018-06-27T07:00:20.000Z | 2021-07-14T06:51:46.000Z | tests/unit/test_mementoresource.py | shawnmjones/MementoEmbed | 4d1b2eafc934502ff8a9e3ad3efeec8c0ddc8602 | [
"MIT"
] | 131 | 2018-06-07T22:42:20.000Z | 2021-11-15T01:08:53.000Z | tests/unit/test_mementoresource.py | shawnmjones/MementoEmbed | 4d1b2eafc934502ff8a9e3ad3efeec8c0ddc8602 | [
"MIT"
] | 2 | 2019-06-06T07:50:54.000Z | 2019-10-29T10:20:04.000Z | import os
import unittest
import zipfile
import io
from datetime import datetime
from urllib.parse import urljoin
from mementoembed.mementoresource import MementoResource, WaybackMemento, \
IMFMemento, ArchiveIsMemento, memento_resource_factory, NotAMementoError
# Absolute, symlink-resolved directory of this test module.
testdir = os.path.dirname(os.path.realpath(__file__))
class mock_response:
    """Minimal stand-in for a requests.Response object.

    Exposes only the attributes the code under test reads: headers, text,
    content, url, links, history, and status_code.
    """
    def __init__(self, headers, text, status, url, content=None, links=None):
        self.headers = headers
        self.text = text
        self.url = url
        # Avoid the shared-mutable-default pitfall: each response gets its
        # own links dict unless the caller supplies one.
        self.links = {} if links is None else links
        self.history = []
        if content is None:
            # BUG FIX: the original tested `type(content) == str` here, but
            # content is always None on this branch, so .content was never
            # derived from .text.  Mirror requests: bytes come from the text.
            if isinstance(text, str):
                self.content = text.encode('utf-8')
            else:
                self.content = content
        else:
            self.content = content
        self.status_code = status
class mock_request:
    """Minimal stand-in for the request object hung off a response.

    BUG FIX: the original __init__ did ``self.request = mock_request()``,
    which recursed until RecursionError on every instantiation.  The
    attribute is kept (code that walks ``.request`` still works) but now
    points back at the instance itself.
    """
    def __init__(self):
        self.url = "mock_request url"
        self.headers = {}
        self.request = self
class mock_httpcache:
    """In-memory substitute for the real HTTP cache.

    GET requests are answered straight from a preloaded uri -> response
    dictionary, so tests never touch the network.
    """
    def __init__(self, cachedict):
        self.cachedict = cachedict

    def get(self, uri, **args):
        # Extra keyword arguments (headers, timeouts, ...) are accepted for
        # signature compatibility with the real cache and ignored.
        return self.cachedict[uri]
class TestMementoResource(unittest.TestCase):
    def test_simplecase(self):
        """Baseline: a memento with standard Link/Memento-Datetime headers and
        no archive-specific markup should come back from the factory as a
        plain MementoResource whose content and raw content are identical.
        """
        urim = "http://myarchive.org/memento/http://example.com/something"
        expected_urig = "http://myarchive.org/timegate/http://example.com/something"
        expected_original_uri = "http://example.com/something"
        expected_content = """
        <html>
            <head>
                <title>Is this a good title?</title>
            </head>
            <body>
                Is this good text?
            </body>
        </html>"""
        cachedict = {
            urim:
            mock_response(
                headers = {
                    'content-type': 'text/html',
                    'memento-datetime': "Fri, 22 Jun 2018 21:16:36 GMT",
                    'link': """<{}>; rel="original",
                        <{}>; rel="timegate",
                        <http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
                        <{}>; rel="memento"
                        """.format(expected_original_uri, expected_urig, urim)
                },
                text = expected_content,
                status=200,
                url = urim,
                links = {
                    "original": {
                        "url": expected_original_uri
                    },
                    "timegate": {
                        "url": expected_urig
                    }
                }
            ),
            expected_urig: # requests follows all redirects, so we present the result at the end of the chain
            mock_response(
                headers = {
                    'content-type': 'text/html',
                    'memento-datetime': "Fri, 22 Jun 2018 21:16:36 GMT",
                    'link': """<{}>; rel="original",
                        <{}>; rel="timegate",
                        <http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
                        <{}>; rel="memento"
                        """.format(expected_original_uri, expected_urig, urim)
                },
                text = expected_content,
                status=200,
                url = urim,
                links = {
                    "original": {
                        "url": expected_original_uri
                    },
                    "timegate": {
                        "url": expected_urig
                    }
                }
            )
        }
        mh = mock_httpcache(cachedict)
        mr = memento_resource_factory(urim, mh)
        expected_mdt = datetime.strptime(
            "Fri, 22 Jun 2018 21:16:36 GMT",
            "%a, %d %b %Y %H:%M:%S GMT"
        )
        self.assertEqual(type(mr), MementoResource)
        self.assertEqual(mr.memento_datetime, expected_mdt)
        self.assertEqual(mr.timegate, expected_urig)
        self.assertEqual(mr.original_uri, expected_original_uri)
        self.assertEqual(mr.content, expected_content)
        self.assertEqual(mr.raw_content, expected_content)
    def test_waybackcase(self):
        """A Wayback-style URI-M (14-digit datetime in the path) should yield
        a WaybackMemento; raw content is served from the ``id_`` variant of
        the URI-M and differs from the rewritten content.
        """
        urim = "http://myarchive.org/memento/20080202062913/http://example.com/something"
        raw_urim = "http://myarchive.org/memento/20080202062913id_/http://example.com/something"
        expected_urig = "http://myarchive.org/timegate/http://example.com/something"
        expected_original_uri = "http://example.com/something"
        expected_content = """
        <html>
            <head>
                <title>Is this a good title?</title>
            </head>
            <body>
                <!-- ARCHIVE SPECIFIC STUFF -->
                Is this good text?
            </body>
        </html>"""
        expected_raw_content = """
        <html>
            <head>
                <title>Is this a good title?</title>
            </head>
            <body>
                Is this good text?
            </body>
        </html>"""
        cachedict = {
            urim:
            mock_response(
                headers = {
                    'content-type': 'text/html',
                    'memento-datetime': "Sat, 02 Feb 2008 06:29:13 GMT",
                    'link': """<{}>; rel="original",
                        <{}>; rel="timegate",
                        <http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
                        <{}>; rel="memento"
                        """.format(expected_original_uri, expected_urig, urim)
                },
                text = expected_content,
                status=200,
                url = urim,
                links = {
                    "original": {
                        "url": expected_original_uri
                    },
                    "timegate": {
                        "url": expected_urig
                    }
                }
            ),
            raw_urim:
            mock_response(
                headers = {
                    'content-type': 'text/html'
                },
                text = expected_raw_content,
                status=200,
                url = raw_urim
            )
        }
        mh = mock_httpcache(cachedict)
        mr = memento_resource_factory(urim, mh)
        expected_mdt = datetime.strptime(
            "Sat, 02 Feb 2008 06:29:13 GMT",
            "%a, %d %b %Y %H:%M:%S GMT"
        )
        self.assertEqual(type(mr), WaybackMemento)
        self.assertEqual(mr.memento_datetime, expected_mdt)
        self.assertEqual(mr.timegate, expected_urig)
        self.assertEqual(mr.original_uri, expected_original_uri)
        self.assertEqual(mr.content, expected_content)
        self.assertEqual(mr.raw_content, expected_raw_content)
    def test_imfcase(self):
        """A memento whose page embeds the original capture in an iframe
        should yield an IMFMemento; raw content is the document behind the
        iframe's src URI.
        """
        urim = "http://myarchive.org/memento/notraw/http://example.com/something"
        raw_urim = "http://myarchive.org/memento/raw/http://example.com/something"
        expected_urig = "http://myarchive.org/timegate/http://example.com/something"
        expected_original_uri = "http://example.com/something"
        expected_content = """
        <html>
            <head>
                <title>ARCHIVED: Is this a good title?</title>
            </head>
            <body>
                <p>Some Archive-specific stuff here</p>
                <iframe id="theWebpage" src="{}"></iframe>
            </body>
        </html>""".format(raw_urim)
        expected_raw_content = """
        <html>
            <head>
                <title>Is this a good title?</title>
            </head>
            <body>
                Is this good text?
            </body>
        </html>"""
        cachedict = {
            urim:
            mock_response(
                headers = {
                    'content-type': 'text/html',
                    'memento-datetime': "Sat, 02 Feb 2008 06:29:13 GMT",
                    'link': """<{}>; rel="original",
                        <{}>; rel="timegate",
                        <http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
                        <{}>; rel="memento"
                        """.format(expected_original_uri, expected_urig, urim)
                },
                text = expected_content,
                status=200,
                url = urim,
                links = {
                    "original": {
                        "url": expected_original_uri
                    },
                    "timegate": {
                        "url": expected_urig
                    }
                }
            ),
            raw_urim:
            mock_response(
                headers = {
                    'content-type': 'text/html'
                },
                text = expected_raw_content,
                status=200,
                url = raw_urim
            )
        }
        mh = mock_httpcache(cachedict)
        mr = memento_resource_factory(urim, mh)
        expected_mdt = datetime.strptime(
            "Sat, 02 Feb 2008 06:29:13 GMT",
            "%a, %d %b %Y %H:%M:%S GMT"
        )
        self.assertEqual(type(mr), IMFMemento)
        self.assertEqual(mr.memento_datetime, expected_mdt)
        self.assertEqual(mr.timegate, expected_urig)
        self.assertEqual(mr.original_uri, expected_original_uri)
        self.assertEqual(mr.content, expected_content)
        self.assertEqual(mr.raw_content, expected_raw_content)
    def test_archiveiscase(self):
        """An archive.is memento should yield an ArchiveIsMemento; its raw
        content is the index.html extracted from the archive's download zip,
        so it is bytes rather than str.
        """
        urim = "http://archive.is/abcd1234"
        zipurim = "http://archive.is/download/abcd1234.zip"
        expected_original_uri = "http://example.com/something"
        expected_urig = "http://myarchive.org/timegate/http://example.com/something"
        expected_raw_content = """
        <html>
            <head>
                <title>Is this a good title?</title>
            </head>
            <body>
                Is this good text?
            </body>
        </html>"""
        expected_content = """
        <html>
            <head>
                <title>ARCHIVED: Is this a good title?</title>
            </head>
            <body>
                <p>Some Archive-specific stuff here</p>
                <div id="SOLID">{}</div>
            </body>
        </html>""".format(expected_raw_content)
        # Build, in memory, the zip that the mocked download URI will serve.
        file_like_object = io.BytesIO()
        zf = zipfile.ZipFile(file_like_object, mode='w')
        zf.writestr('index.html', expected_raw_content)
        zf.close()
        zip_content = file_like_object.getvalue()
        cachedict = {
            urim:
            mock_response(
                headers = {
                    'content-type': 'text/html',
                    'memento-datetime': "Sat, 02 Feb 2008 06:29:13 GMT",
                    'link': """<{}>; rel="original",
                        <{}>; rel="timegate",
                        <http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
                        <{}>; rel="memento"
                        """.format(expected_original_uri, expected_urig, urim)
                },
                text = expected_content,
                status=200,
                url = urim,
                links = {
                    "original": {
                        "url": expected_original_uri
                    },
                    "timegate": {
                        "url": expected_urig
                    }
                }
            ),
            zipurim:
            mock_response(
                headers = {
                    'content-type': 'text/html'
                },
                text = "",
                content = zip_content,
                status=200,
                url = zipurim
            )
        }
        mh = mock_httpcache(cachedict)
        mr = memento_resource_factory(urim, mh)
        expected_mdt = datetime.strptime(
            "Sat, 02 Feb 2008 06:29:13 GMT",
            "%a, %d %b %Y %H:%M:%S GMT"
        )
        self.maxDiff = None
        self.assertEqual(type(mr), ArchiveIsMemento)
        self.assertEqual(mr.memento_datetime, expected_mdt)
        self.assertEqual(mr.timegate, expected_urig)
        self.assertEqual(mr.original_uri, expected_original_uri)
        self.assertEqual(mr.content, expected_content)
        self.assertEqual(mr.raw_content, bytes(expected_raw_content.encode('utf-8')))
def test_bad_headers(self):
    """memento_resource_factory raises NotAMementoError on bad headers.

    Three cases are exercised, each paired with a 404 for the raw
    (id_-style) URI-M:
      1. a Link header but no Memento-Datetime header,
      2. a Memento-Datetime but a Link header missing rel="original",
      3. a Memento-Datetime and no Link header at all.
    """
    urim = "http://myarchive.org/memento/20080202062913/http://example.com/something"
    raw_urim = "http://myarchive.org/memento/20080202062913id_/http://example.com/something"
    urir = "http://example.com/something"
    expected_urig = "http://myarchive.org/timegate/http://example.com/something"
    content = """
<html>
<head>
<title>Is this a good title?</title>
</head>
<!-- ARCHIVE SPECIFIC STUFF -->
<frameset rows="*" cols="130,*" framespacing="0" border="0">
<frame src="frame1.htm">
<frame src="pages/frame2.htm">
<frame src="/content/frame3.htm">
<frame src="http://example2.com/content/frame4.htm">
</frameset>
</html>"""

    def assert_not_a_memento(memento_headers):
        # Every case uses the same cache shape: a 200 for the URI-M with
        # the (defective) headers under test, plus a 404 raw URI-M.
        cachedict = {
            urim:
            mock_response(
                headers = memento_headers,
                text = content,
                status=200,
                url = urim
            ),
            raw_urim:
            mock_response(
                headers = {},
                text = "",
                status = 404,
                url = raw_urim
            )
        }
        mh = mock_httpcache(cachedict)
        self.assertRaises( NotAMementoError, memento_resource_factory, urim, mh )

    # Case 1: Link relations present, but no Memento-Datetime header.
    assert_not_a_memento({
        'link': """<{}>; rel="original",
<{}>; rel="timegate",
<http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
<{}>; rel="memento"
""".format(urir, expected_urig, urim)
    })
    # Case 2: Memento-Datetime present, but Link lacks rel="original".
    assert_not_a_memento({
        'content-type': 'text/html',
        'memento-datetime': "Sat, 02 Feb 2008 06:29:13 GMT",
        'link': """<{}>; rel="timegate",
<http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
<{}>; rel="memento"
""".format(expected_urig, urim)
    })
    # Case 3: Memento-Datetime present, but no Link header at all.
    assert_not_a_memento({
        'memento-datetime': "Sat, 02 Feb 2008 06:29:13 GMT"
    })
def test_archiveiscase_datetime_in_uri(self):
    """An archive.is URI-M with an embedded datetime is an ArchiveIsMemento.

    The id_-style raw URI-M returns 404 here, so raw content must come
    from the archive's zip download; ``raw_content`` is the index.html
    member of that zip (fixture files under samples/).
    """
    urim = "http://archive.is/20130508132946/http://flexispy.com/"
    zipurim = "http://archive.is/download/pSSpa.zip"
    expected_original_uri = "http://flexispy.com/"
    expected_urig = "http://archive.is/timegate/http://flexispy.com/"
    with open("{}/samples/archive.is-1.html".format(testdir), 'rb') as f:
        expected_content = f.read()
    with open("{}/samples/archive.is-1.raw.zip".format(testdir), 'rb') as f:
        zip_content = f.read()
    # Open the zip from the in-memory bytes instead of re-using the
    # already fully-read file handle (the original relied on ZipFile
    # seeking back on an exhausted handle); the context manager also
    # closes the archive deterministically.
    with zipfile.ZipFile(io.BytesIO(zip_content)) as zf:
        expected_raw_content = zf.read("index.html")
    cachedict = {
        urim:
        mock_response(
            headers = {
                'content-type': 'text/html',
                'memento-datetime': "Sat, 02 Feb 2008 06:29:13 GMT",
                'link': """<{}>; rel="original",
<{}>; rel="timegate",
<http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
<{}>; rel="memento"
""".format(expected_original_uri, expected_urig, urim)
            },
            text = expected_content,
            status=200,
            url = urim,
            links = {
                "original": {
                    "url": expected_original_uri
                },
                "timegate": {
                    "url": expected_urig
                }
            }
        ),
        # The raw (id_) URI-M is unavailable, forcing the zip fallback.
        "http://archive.is/20130508132946id_/http://flexispy.com/":
        mock_response(
            headers = {},
            text= "",
            status=404,
            url = "http://archive.is/20130508132946id_/http://flexispy.com/"
        ),
        zipurim:
        mock_response(
            headers = {
                'content-type': 'text/html',
            },
            text = "",
            content = zip_content,
            status=200,
            url = zipurim
        )
    }
    mh = mock_httpcache(cachedict)
    mr = memento_resource_factory(urim, mh)
    expected_mdt = datetime.strptime(
        "Sat, 02 Feb 2008 06:29:13 GMT",
        "%a, %d %b %Y %H:%M:%S GMT"
    )
    self.maxDiff = None
    self.assertEqual(type(mr), ArchiveIsMemento)
    self.assertEqual(mr.memento_datetime, expected_mdt)
    self.assertEqual(mr.timegate, expected_urig)
    self.assertEqual(mr.original_uri, expected_original_uri)
    self.assertEqual(mr.content, expected_content)
    self.assertEqual(mr.raw_content, expected_raw_content)
def test_meta_redirect(self):
    """A memento that is only a meta-refresh page is resolved to its target.

    The factory must follow the <meta http-equiv="refresh"> URL and then
    report the *target* memento's timegate, original URI, content, and
    raw content rather than the redirect page's.
    """
    urim = "https://archive-example.org/web/20180401102030/http://example.com/redirpage"
    redirurim = "https://archive-example.org/web/20180308084654/http://example.com/testpage"
    metaredirecthtml="""<html>
<meta http-equiv="refresh" content="0; URL='{}'"/>
</html>""".format(redirurim)
    expected_content = "<html><body>somecontent</body></html>"
    expected_original_uri = "http://example.com/redirpage"
    expected_urig = "https://archive-example.org/web/timegate/http://example.com/redirpage"
    redir_expected_original_uri = "http://example.com/testpage"
    redir_expected_urig = "https://archive-example.org/web/timegate/http://example.com/testpage"
    redirurim_raw = "https://archive-example.org/web/20180308084654id_/http://example.com/testpage"
    # NOTE: the original code first assigned expected_raw_content =
    # expected_content and immediately overwrote it; only this value is
    # ever used, so the dead assignment has been removed.
    expected_raw_content = "<html><body>raw content</body></html>"
    cachedict = {
        urim:
        mock_response(
            headers = {
                'content-type': 'text/html',
                'memento-datetime': "Sat, 02 Feb 2008 06:29:13 GMT",
                'link': """<{}>; rel="original",
<{}>; rel="timegate",
<http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
<{}>; rel="memento"
""".format(expected_original_uri, expected_urig, urim)
            },
            text = metaredirecthtml,
            content = metaredirecthtml,
            status = 200,
            url = urim,
            links = {
                "original": {
                    "url": expected_original_uri
                },
                "timegate": {
                    "url": expected_urig
                }
            }
        ),
        redirurim:
        mock_response(
            headers = {
                'content-type': 'text/html',
                'memento-datetime': "Sat, 02 Feb 2008 06:29:13 GMT",
                'link': """<{}>; rel="original",
<{}>; rel="timegate",
<http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
<{}>; rel="memento"
""".format(redir_expected_original_uri, redir_expected_urig, urim)
            },
            text = expected_content,
            content = expected_content,
            status = 200,
            url = redirurim,
            links = {
                "original": {
                    "url": redir_expected_original_uri
                },
                "timegate": {
                    "url": redir_expected_urig
                }
            }
        ),
        redirurim_raw:
        mock_response(
            headers = {
                'content-type': 'text/html',
            },
            text = expected_raw_content,
            content = expected_raw_content,
            status = 200,
            url = redirurim_raw
        )
    }
    mh = mock_httpcache(cachedict)
    mr = memento_resource_factory(urim, mh)
    expected_mdt = datetime.strptime(
        "Sat, 02 Feb 2008 06:29:13 GMT",
        "%a, %d %b %Y %H:%M:%S GMT"
    )
    self.assertEqual(type(mr), WaybackMemento)
    self.assertEqual(mr.memento_datetime, expected_mdt)
    # All reported attributes must reflect the redirect *target*.
    self.assertEqual(mr.timegate, redir_expected_urig)
    self.assertEqual(mr.original_uri, redir_expected_original_uri)
    self.assertEqual(mr.content, expected_content)
    self.assertEqual(mr.raw_content, expected_raw_content)
def test_permacc_hashstyle_uris(self):
    """A perma.cc hash-style URI-M resolves to a WaybackMemento whose
    attributes come from the perma-archives.org wayback instance."""
    urim = "http://perma.cc/RZP7-3P4P"
    expected_original_uri = "http://www.environment.gov.au/minister/hunt/2014/mr20141215a.html"
    expected_urim = "https://perma-archives.org/warc/20151028031045/http://www.environment.gov.au/minister/hunt/2014/mr20141215a.html"
    expected_raw_uri = "https://perma-archives.org/warc/20151028031045id_/http://www.environment.gov.au/minister/hunt/2014/mr20141215a.html"
    expected_urig = "https://perma-archives.org/warc/timegate/http://www.environment.gov.au/minister/hunt/2014/mr20141215a.html"
    expected_content = "hi"
    expected_raw_content = "hi there"
    mdt_string = "Sat, 02 Feb 2008 06:29:13 GMT"

    def fresh_links():
        # Each cache entry carries its own links dict (same shape).
        return {
            "original": {
                "url": expected_original_uri
            },
            "timegate": {
                "url": expected_urig
            }
        }

    def entry_headers(memento_uri):
        # All entries advertise identical relations; only the
        # rel="memento" target differs between them.
        return {
            'content-type': 'text/html',
            'memento-datetime': mdt_string,
            'link': """<{}>; rel="original",
<{}>; rel="timegate",
<http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
<{}>; rel="memento"
""".format(expected_original_uri, expected_urig, memento_uri)
        }

    cachedict = {
        urim: mock_response(
            headers = entry_headers(urim),
            text = expected_content,
            status = 200,
            url = urim,
            links = fresh_links()
        ),
        expected_raw_uri: mock_response(
            headers = entry_headers(expected_urim),
            text = expected_raw_content,
            status = 200,
            url = expected_raw_uri,
            links = fresh_links()
        ),
        # requests follows all redirects, so we present the result at
        # the end of the chain
        expected_urig: mock_response(
            headers = entry_headers(expected_urim),
            text = expected_content,
            status = 200,  # after following redirects
            url = expected_urim,
            links = fresh_links()
        ),
        expected_urim: mock_response(
            headers = entry_headers(expected_urim),
            text = expected_content,
            status = 200,  # after following redirects
            url = expected_urim,
            links = fresh_links()
        )
    }
    mh = mock_httpcache(cachedict)
    mr = memento_resource_factory(urim, mh)
    expected_mdt = datetime.strptime(
        mdt_string,
        "%a, %d %b %Y %H:%M:%S GMT"
    )
    self.maxDiff = None
    self.assertEqual(type(mr), WaybackMemento)
    self.assertEqual(mr.memento_datetime, expected_mdt)
    self.assertEqual(mr.timegate, expected_urig)
    self.assertEqual(mr.original_uri, expected_original_uri)
    self.assertEqual(mr.content, expected_content)
    self.assertEqual(mr.raw_content, expected_raw_content)
# def test_waybackframesets(self):
# # TODO: rework this test so that it passes
# self.skipTest("Integration tests work, but this unit test does not produce the correct behavior")
# urim = "http://myarchive.org/memento/20080202062913/http://example.com/something"
# urir = "http://example.com/something"
# raw_urim = "http://myarchive.org/memento/20080202062913id_/http://example.com/something"
# expected_urig = "http://myarchive.org/timegate/http://example.com/something"
# expected_original_uri = "http://example.com/something"
# content = """
# <html>
# <head>
# <title>Is this a good title?</title>
# </head>
# <!-- ARCHIVE SPECIFIC STUFF -->
# <frameset rows="*" cols="130,*" framespacing="0" border="0">
# <frame src="frame1.htm">
# <frame src="pages/frame2.htm">
# <frame src="/content/frame3.htm">
# <frame src="http://example2.com/content/frame4.htm">
# </frameset>
# </html>"""
# raw_content = """
# <html>
# <head>
# <title>Is this a good title?</title>
# </head>
# <frameset rows="*" cols="130,*" framespacing="0" border="0">
# <frame src="frame1.htm">
# <frame src="pages/frame2.htm">
# <frame src="/content/frame3.htm">
# <frame src="http://example2.com/content/frame4.htm">
# </frameset>
# </html>"""
# timegate_stem = "http://myarchive.org/timegate/"
# memento_stem = "http://myarchive.org/memento/"
# cachedict = {
# urim:
# mock_response(
# headers = {
# 'content-type': 'text/html',
# 'memento-datetime': "Sat, 02 Feb 2008 06:29:13 GMT",
# 'link': """<{}>; rel="original",
# <{}>; rel="timegate",
# <http://myarchive.org/timemap/http://example.com/something>; rel="timemap",
# <{}>; rel="memento"
# """.format(expected_original_uri, expected_urig, urim)
# },
# text = content,
# status=200
# ),
# raw_urim:
# mock_response(
# headers = {
# 'content-type': 'text/html'
# },
# text = raw_content,
# status=200
# ),
# "{}/{}".format(memento_stem, urljoin(urir, "frame1.htm")):
# mock_response(
# headers = {
# 'content-type': 'text/html'
# },
# text = "<html><body><p>frame1</p></body></html>",
# status=200
# ),
# "{}{}".format(timegate_stem, urljoin(urir, "frame1.htm")):
# mock_response(
# headers = { 'location': "{}/{}".format(memento_stem, urljoin(urir, "frame1.htm")) },
# text = "",
# status=302
# ),
# "{}{}".format(memento_stem, urljoin(urir, "pages/frame2.htm")):
# mock_response(
# headers = {},
# text = "<html><body><div>frame2</div></body></html>",
# status=200
# ),
# "{}{}".format(timegate_stem, urljoin(urir, "pages/frame2.htm")):
# mock_response(
# headers = { 'location': "{}{}".format(memento_stem, urljoin(urir, "pages/frame2.htm")) },
# text = "",
# status=302
# ),
# "{}{}".format(memento_stem, urljoin(urir, "/content/frame3.htm")):
# mock_response(
# headers = {},
# text = "<html><body><span><p>frame3</p></span></body></html>",
# status=200
# ),
# "{}{}".format(timegate_stem, urljoin(urir, "/content/frame3.htm")):
# mock_response(
# headers = { 'location': "{}{}".format(memento_stem, urljoin(urir, "/content/frame3.htm")) },
# text = "",
# status=302
# ),
# "http://myarchive.org/memento/20080202062913/http://example2.com/content/frame4.htm":
# mock_response(
# headers = {},
# text = "<html><body><div><span><p>frame4</p></span></div></body></html>",
# status=200
# ),
# "{}{}".format(timegate_stem, "http://example2.com/content/frame4.htm"):
# mock_response(
# headers = { 'location': "http://myarchive.org/memento/20080202062913/http://example2.com/content/frame4.htm" },
# text = "",
# status=302
# )
# }
# expected_raw_content = """<html><head><title>Is this a good title?</title></head><body>
# <p>frame1</p>
# <div>frame2</div>
# <span><p>frame3</p></span>
# <div><span><p>frame4</p></span></div>
# </body></html>"""
# expected_content = expected_raw_content
# mh = mock_httpcache(cachedict)
# mr = memento_resource_factory(urim, mh)
# expected_mdt = datetime.strptime(
# "Sat, 02 Feb 2008 06:29:13 GMT",
# "%a, %d %b %Y %H:%M:%S GMT"
# )
# self.maxDiff = None
# self.assertEqual(type(mr), WaybackMemento)
# self.assertEqual(mr.memento_datetime, expected_mdt)
# self.assertEqual(mr.timegate, expected_urig)
# self.assertEqual(mr.original_uri, expected_original_uri)
# self.assertEqual(mr.content, expected_content)
# self.assertEqual(mr.raw_content, expected_raw_content)
| 37.622199 | 144 | 0.445478 | 2,940 | 35,252 | 5.197279 | 0.086735 | 0.035995 | 0.040314 | 0.055694 | 0.842408 | 0.81106 | 0.793063 | 0.77644 | 0.763809 | 0.733966 | 0 | 0.035991 | 0.433309 | 35,252 | 936 | 145 | 37.662393 | 0.728888 | 0.175565 | 0 | 0.703976 | 0 | 0.025037 | 0.332987 | 0.005744 | 0 | 0 | 0 | 0.001068 | 0.066274 | 1 | 0.017673 | false | 0 | 0.010309 | 0.001473 | 0.035346 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1e76acbed4c1affb4b1cb62e68fbdb562206d71f | 1,155 | py | Python | test/internal/test_restlet.py | dokka-ai/netsuite-sdk-py2.7 | 93260dea1f02a6b1785b77ffcdd7f8fe3c9d0b76 | [
"MIT"
] | null | null | null | test/internal/test_restlet.py | dokka-ai/netsuite-sdk-py2.7 | 93260dea1f02a6b1785b77ffcdd7f8fe3c9d0b76 | [
"MIT"
] | null | null | null | test/internal/test_restlet.py | dokka-ai/netsuite-sdk-py2.7 | 93260dea1f02a6b1785b77ffcdd7f8fe3c9d0b76 | [
"MIT"
] | 1 | 2021-02-22T11:52:20.000Z | 2021-02-22T11:52:20.000Z |
def test_call_restlet(ns):
    """A valid script/deploy pair returns the integration payload dict."""
    restlet_url = "https://6758546.restlets.api.netsuite.com/app/site/hosting/restlet.nl?script=9&deploy=1"
    integration_type = 'getVendorsCurrencies'
    result = ns.call_get_restlet(restlet_url, integration_type)
    assert isinstance(result, dict)
    assert result['integrationType'] == integration_type
    assert isinstance(result['data'], dict)
def test_call_restlet_wrong_script(ns):
    """A nonexistent script id yields an error payload, not an exception."""
    bad_script_url = "https://6758546.restlets.api.netsuite.com/app/site/hosting/restlet.nl?script=10&deploy=1"
    result = ns.call_get_restlet(bad_script_url, 'getVendorsCurrencies')
    assert isinstance(result, dict)
    assert 'error' in result
def test_call_restlet_wrong_deploy(ns):
    """A nonexistent deploy id yields an error payload, not an exception."""
    bad_deploy_url = "https://6758546.restlets.api.netsuite.com/app/site/hosting/restlet.nl?script=9&deploy=2"
    result = ns.call_get_restlet(bad_deploy_url, 'getVendorsCurrencies')
    assert isinstance(result, dict)
    assert 'error' in result
def test_call_restlet_wrong_integration_type(ns):
    """An unknown integration type reports failure and a non-dict data field."""
    restlet_url = "https://6758546.restlets.api.netsuite.com/app/site/hosting/restlet.nl?script=9&deploy=1"
    result = ns.call_get_restlet(restlet_url, 'getVendorsCurrencies1')
    assert isinstance(result, dict)
    assert result['status'] == 'failed'
    assert not isinstance(result['data'], dict)
| 44.423077 | 148 | 0.757576 | 154 | 1,155 | 5.532468 | 0.246753 | 0.112676 | 0.129108 | 0.084507 | 0.789906 | 0.762911 | 0.711268 | 0.711268 | 0.642019 | 0.642019 | 0 | 0.036786 | 0.105628 | 1,155 | 25 | 149 | 46.2 | 0.787996 | 0 | 0 | 0.333333 | 0 | 0.222222 | 0.428943 | 0.018198 | 0 | 0 | 0 | 0 | 0.555556 | 1 | 0.222222 | false | 0 | 0 | 0 | 0.222222 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
1e9fa13d3e15067fe514a23cdf49bf72490b1088 | 57,342 | py | Python | angr/procedures/definitions/win32_winfax.py | r4b3rt/angr | c133cfd4f83ffea2a1d9e064241e9459eaabc55f | [
"BSD-2-Clause"
] | null | null | null | angr/procedures/definitions/win32_winfax.py | r4b3rt/angr | c133cfd4f83ffea2a1d9e064241e9459eaabc55f | [
"BSD-2-Clause"
] | null | null | null | angr/procedures/definitions/win32_winfax.py | r4b3rt/angr | c133cfd4f83ffea2a1d9e064241e9459eaabc55f | [
"BSD-2-Clause"
] | null | null | null | # pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
# Module-level logger for this auto-generated SimLibrary definition.
_l = logging.getLogger(name=__name__)
# Build the SimLibrary describing winfax.dll: register the default calling
# conventions per architecture (stdcall on x86, Microsoft x64 ABI on AMD64)
# and the DLL name angr resolves imports against.
lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("winfax.dll")
prototypes = \
{
#
'FaxConnectFaxServerA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["MachineName", "FaxHandle"]),
#
'FaxConnectFaxServerW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["MachineName", "FaxHandle"]),
#
'FaxClose': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle"]),
#
'FaxOpenPort': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "DeviceId", "Flags", "FaxPortHandle"]),
#
'FaxCompleteJobParamsA': SimTypeFunction([SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PSTR"), "RecipientName": SimTypeBottom(label="PSTR"), "Tsid": SimTypeBottom(label="PSTR"), "SenderName": SimTypeBottom(label="PSTR"), "SenderCompany": SimTypeBottom(label="PSTR"), "SenderDept": SimTypeBottom(label="PSTR"), "BillingCode": SimTypeBottom(label="PSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PSTR"), "DocumentName": SimTypeBottom(label="PSTR"), "CallHandle": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 3)}, name="FAX_JOB_PARAMA", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CoverPageName": SimTypeBottom(label="PSTR"), "UseServerCoverPage": SimTypeBottom(label="BOOL"), "RecName": SimTypeBottom(label="PSTR"), "RecFaxNumber": SimTypeBottom(label="PSTR"), "RecCompany": SimTypeBottom(label="PSTR"), "RecStreetAddress": SimTypeBottom(label="PSTR"), "RecCity": SimTypeBottom(label="PSTR"), "RecState": SimTypeBottom(label="PSTR"), "RecZip": SimTypeBottom(label="PSTR"), "RecCountry": SimTypeBottom(label="PSTR"), "RecTitle": SimTypeBottom(label="PSTR"), "RecDepartment": SimTypeBottom(label="PSTR"), "RecOfficeLocation": SimTypeBottom(label="PSTR"), "RecHomePhone": SimTypeBottom(label="PSTR"), "RecOfficePhone": SimTypeBottom(label="PSTR"), "SdrName": SimTypeBottom(label="PSTR"), "SdrFaxNumber": SimTypeBottom(label="PSTR"), "SdrCompany": SimTypeBottom(label="PSTR"), "SdrAddress": SimTypeBottom(label="PSTR"), "SdrTitle": SimTypeBottom(label="PSTR"), "SdrDepartment": SimTypeBottom(label="PSTR"), 
"SdrOfficeLocation": SimTypeBottom(label="PSTR"), "SdrHomePhone": SimTypeBottom(label="PSTR"), "SdrOfficePhone": SimTypeBottom(label="PSTR"), "Note": SimTypeBottom(label="PSTR"), "Subject": SimTypeBottom(label="PSTR"), "TimeSent": SimTypeBottom(label="SYSTEMTIME"), "PageCount": SimTypeInt(signed=False, label="UInt32")}, name="FAX_COVERPAGE_INFOA", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["JobParams", "CoverpageInfo"]),
#
'FaxCompleteJobParamsW': SimTypeFunction([SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PWSTR"), "RecipientName": SimTypeBottom(label="PWSTR"), "Tsid": SimTypeBottom(label="PWSTR"), "SenderName": SimTypeBottom(label="PWSTR"), "SenderCompany": SimTypeBottom(label="PWSTR"), "SenderDept": SimTypeBottom(label="PWSTR"), "BillingCode": SimTypeBottom(label="PWSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PWSTR"), "DocumentName": SimTypeBottom(label="PWSTR"), "CallHandle": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 3)}, name="FAX_JOB_PARAMW", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CoverPageName": SimTypeBottom(label="PWSTR"), "UseServerCoverPage": SimTypeBottom(label="BOOL"), "RecName": SimTypeBottom(label="PWSTR"), "RecFaxNumber": SimTypeBottom(label="PWSTR"), "RecCompany": SimTypeBottom(label="PWSTR"), "RecStreetAddress": SimTypeBottom(label="PWSTR"), "RecCity": SimTypeBottom(label="PWSTR"), "RecState": SimTypeBottom(label="PWSTR"), "RecZip": SimTypeBottom(label="PWSTR"), "RecCountry": SimTypeBottom(label="PWSTR"), "RecTitle": SimTypeBottom(label="PWSTR"), "RecDepartment": SimTypeBottom(label="PWSTR"), "RecOfficeLocation": SimTypeBottom(label="PWSTR"), "RecHomePhone": SimTypeBottom(label="PWSTR"), "RecOfficePhone": SimTypeBottom(label="PWSTR"), "SdrName": SimTypeBottom(label="PWSTR"), "SdrFaxNumber": SimTypeBottom(label="PWSTR"), "SdrCompany": SimTypeBottom(label="PWSTR"), "SdrAddress": SimTypeBottom(label="PWSTR"), "SdrTitle": SimTypeBottom(label="PWSTR"), "SdrDepartment": 
SimTypeBottom(label="PWSTR"), "SdrOfficeLocation": SimTypeBottom(label="PWSTR"), "SdrHomePhone": SimTypeBottom(label="PWSTR"), "SdrOfficePhone": SimTypeBottom(label="PWSTR"), "Note": SimTypeBottom(label="PWSTR"), "Subject": SimTypeBottom(label="PWSTR"), "TimeSent": SimTypeBottom(label="SYSTEMTIME"), "PageCount": SimTypeInt(signed=False, label="UInt32")}, name="FAX_COVERPAGE_INFOW", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["JobParams", "CoverpageInfo"]),
#
'FaxSendDocumentA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PSTR"), "RecipientName": SimTypeBottom(label="PSTR"), "Tsid": SimTypeBottom(label="PSTR"), "SenderName": SimTypeBottom(label="PSTR"), "SenderCompany": SimTypeBottom(label="PSTR"), "SenderDept": SimTypeBottom(label="PSTR"), "BillingCode": SimTypeBottom(label="PSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PSTR"), "DocumentName": SimTypeBottom(label="PSTR"), "CallHandle": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 3)}, name="FAX_JOB_PARAMA", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CoverPageName": SimTypeBottom(label="PSTR"), "UseServerCoverPage": SimTypeBottom(label="BOOL"), "RecName": SimTypeBottom(label="PSTR"), "RecFaxNumber": SimTypeBottom(label="PSTR"), "RecCompany": SimTypeBottom(label="PSTR"), "RecStreetAddress": SimTypeBottom(label="PSTR"), "RecCity": SimTypeBottom(label="PSTR"), "RecState": SimTypeBottom(label="PSTR"), "RecZip": SimTypeBottom(label="PSTR"), "RecCountry": SimTypeBottom(label="PSTR"), "RecTitle": SimTypeBottom(label="PSTR"), "RecDepartment": SimTypeBottom(label="PSTR"), "RecOfficeLocation": SimTypeBottom(label="PSTR"), "RecHomePhone": SimTypeBottom(label="PSTR"), "RecOfficePhone": SimTypeBottom(label="PSTR"), "SdrName": SimTypeBottom(label="PSTR"), "SdrFaxNumber": SimTypeBottom(label="PSTR"), "SdrCompany": SimTypeBottom(label="PSTR"), "SdrAddress": SimTypeBottom(label="PSTR"), 
"SdrTitle": SimTypeBottom(label="PSTR"), "SdrDepartment": SimTypeBottom(label="PSTR"), "SdrOfficeLocation": SimTypeBottom(label="PSTR"), "SdrHomePhone": SimTypeBottom(label="PSTR"), "SdrOfficePhone": SimTypeBottom(label="PSTR"), "Note": SimTypeBottom(label="PSTR"), "Subject": SimTypeBottom(label="PSTR"), "TimeSent": SimTypeBottom(label="SYSTEMTIME"), "PageCount": SimTypeInt(signed=False, label="UInt32")}, name="FAX_COVERPAGE_INFOA", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "FileName", "JobParams", "CoverpageInfo", "FaxJobId"]),
#
'FaxSendDocumentW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PWSTR"), "RecipientName": SimTypeBottom(label="PWSTR"), "Tsid": SimTypeBottom(label="PWSTR"), "SenderName": SimTypeBottom(label="PWSTR"), "SenderCompany": SimTypeBottom(label="PWSTR"), "SenderDept": SimTypeBottom(label="PWSTR"), "BillingCode": SimTypeBottom(label="PWSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PWSTR"), "DocumentName": SimTypeBottom(label="PWSTR"), "CallHandle": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 3)}, name="FAX_JOB_PARAMW", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CoverPageName": SimTypeBottom(label="PWSTR"), "UseServerCoverPage": SimTypeBottom(label="BOOL"), "RecName": SimTypeBottom(label="PWSTR"), "RecFaxNumber": SimTypeBottom(label="PWSTR"), "RecCompany": SimTypeBottom(label="PWSTR"), "RecStreetAddress": SimTypeBottom(label="PWSTR"), "RecCity": SimTypeBottom(label="PWSTR"), "RecState": SimTypeBottom(label="PWSTR"), "RecZip": SimTypeBottom(label="PWSTR"), "RecCountry": SimTypeBottom(label="PWSTR"), "RecTitle": SimTypeBottom(label="PWSTR"), "RecDepartment": SimTypeBottom(label="PWSTR"), "RecOfficeLocation": SimTypeBottom(label="PWSTR"), "RecHomePhone": SimTypeBottom(label="PWSTR"), "RecOfficePhone": SimTypeBottom(label="PWSTR"), "SdrName": SimTypeBottom(label="PWSTR"), "SdrFaxNumber": SimTypeBottom(label="PWSTR"), "SdrCompany": SimTypeBottom(label="PWSTR"), "SdrAddress": 
SimTypeBottom(label="PWSTR"), "SdrTitle": SimTypeBottom(label="PWSTR"), "SdrDepartment": SimTypeBottom(label="PWSTR"), "SdrOfficeLocation": SimTypeBottom(label="PWSTR"), "SdrHomePhone": SimTypeBottom(label="PWSTR"), "SdrOfficePhone": SimTypeBottom(label="PWSTR"), "Note": SimTypeBottom(label="PWSTR"), "Subject": SimTypeBottom(label="PWSTR"), "TimeSent": SimTypeBottom(label="SYSTEMTIME"), "PageCount": SimTypeInt(signed=False, label="UInt32")}, name="FAX_COVERPAGE_INFOW", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "FileName", "JobParams", "CoverpageInfo", "FaxJobId"]),
#
'FaxSendDocumentForBroadcastA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeFunction([SimTypeBottom(label="HANDLE"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PSTR"), "RecipientName": SimTypeBottom(label="PSTR"), "Tsid": SimTypeBottom(label="PSTR"), "SenderName": SimTypeBottom(label="PSTR"), "SenderCompany": SimTypeBottom(label="PSTR"), "SenderDept": SimTypeBottom(label="PSTR"), "BillingCode": SimTypeBottom(label="PSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PSTR"), "DocumentName": SimTypeBottom(label="PSTR"), "CallHandle": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 3)}, name="FAX_JOB_PARAMA", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CoverPageName": SimTypeBottom(label="PSTR"), "UseServerCoverPage": SimTypeBottom(label="BOOL"), "RecName": SimTypeBottom(label="PSTR"), "RecFaxNumber": SimTypeBottom(label="PSTR"), "RecCompany": SimTypeBottom(label="PSTR"), "RecStreetAddress": SimTypeBottom(label="PSTR"), "RecCity": SimTypeBottom(label="PSTR"), "RecState": SimTypeBottom(label="PSTR"), "RecZip": SimTypeBottom(label="PSTR"), "RecCountry": SimTypeBottom(label="PSTR"), "RecTitle": SimTypeBottom(label="PSTR"), "RecDepartment": SimTypeBottom(label="PSTR"), "RecOfficeLocation": SimTypeBottom(label="PSTR"), "RecHomePhone": 
SimTypeBottom(label="PSTR"), "RecOfficePhone": SimTypeBottom(label="PSTR"), "SdrName": SimTypeBottom(label="PSTR"), "SdrFaxNumber": SimTypeBottom(label="PSTR"), "SdrCompany": SimTypeBottom(label="PSTR"), "SdrAddress": SimTypeBottom(label="PSTR"), "SdrTitle": SimTypeBottom(label="PSTR"), "SdrDepartment": SimTypeBottom(label="PSTR"), "SdrOfficeLocation": SimTypeBottom(label="PSTR"), "SdrHomePhone": SimTypeBottom(label="PSTR"), "SdrOfficePhone": SimTypeBottom(label="PSTR"), "Note": SimTypeBottom(label="PSTR"), "Subject": SimTypeBottom(label="PSTR"), "TimeSent": SimTypeBottom(label="SYSTEMTIME"), "PageCount": SimTypeInt(signed=False, label="UInt32")}, name="FAX_COVERPAGE_INFOA", pack=False, align=None), offset=0)], SimTypeBottom(label="BOOL"), arg_names=["FaxHandle", "RecipientNumber", "Context", "JobParams", "CoverpageInfo"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "FileName", "FaxJobId", "FaxRecipientCallback", "Context"]),
#
'FaxSendDocumentForBroadcastW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeFunction([SimTypeBottom(label="HANDLE"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PWSTR"), "RecipientName": SimTypeBottom(label="PWSTR"), "Tsid": SimTypeBottom(label="PWSTR"), "SenderName": SimTypeBottom(label="PWSTR"), "SenderCompany": SimTypeBottom(label="PWSTR"), "SenderDept": SimTypeBottom(label="PWSTR"), "BillingCode": SimTypeBottom(label="PWSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PWSTR"), "DocumentName": SimTypeBottom(label="PWSTR"), "CallHandle": SimTypeInt(signed=False, label="UInt32"), "Reserved": SimTypeFixedSizeArray(SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), 3)}, name="FAX_JOB_PARAMW", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CoverPageName": SimTypeBottom(label="PWSTR"), "UseServerCoverPage": SimTypeBottom(label="BOOL"), "RecName": SimTypeBottom(label="PWSTR"), "RecFaxNumber": SimTypeBottom(label="PWSTR"), "RecCompany": SimTypeBottom(label="PWSTR"), "RecStreetAddress": SimTypeBottom(label="PWSTR"), "RecCity": SimTypeBottom(label="PWSTR"), "RecState": SimTypeBottom(label="PWSTR"), "RecZip": SimTypeBottom(label="PWSTR"), "RecCountry": SimTypeBottom(label="PWSTR"), "RecTitle": SimTypeBottom(label="PWSTR"), "RecDepartment": SimTypeBottom(label="PWSTR"), "RecOfficeLocation": SimTypeBottom(label="PWSTR"), 
"RecHomePhone": SimTypeBottom(label="PWSTR"), "RecOfficePhone": SimTypeBottom(label="PWSTR"), "SdrName": SimTypeBottom(label="PWSTR"), "SdrFaxNumber": SimTypeBottom(label="PWSTR"), "SdrCompany": SimTypeBottom(label="PWSTR"), "SdrAddress": SimTypeBottom(label="PWSTR"), "SdrTitle": SimTypeBottom(label="PWSTR"), "SdrDepartment": SimTypeBottom(label="PWSTR"), "SdrOfficeLocation": SimTypeBottom(label="PWSTR"), "SdrHomePhone": SimTypeBottom(label="PWSTR"), "SdrOfficePhone": SimTypeBottom(label="PWSTR"), "Note": SimTypeBottom(label="PWSTR"), "Subject": SimTypeBottom(label="PWSTR"), "TimeSent": SimTypeBottom(label="SYSTEMTIME"), "PageCount": SimTypeInt(signed=False, label="UInt32")}, name="FAX_COVERPAGE_INFOW", pack=False, align=None), offset=0)], SimTypeBottom(label="BOOL"), arg_names=["FaxHandle", "RecipientNumber", "Context", "JobParams", "CoverpageInfo"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "FileName", "FaxJobId", "FaxRecipientCallback", "Context"]),
#
'FaxEnumJobsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "JobId": SimTypeInt(signed=False, label="UInt32"), "UserName": SimTypeBottom(label="PSTR"), "JobType": SimTypeInt(signed=False, label="UInt32"), "QueueStatus": SimTypeInt(signed=False, label="UInt32"), "Status": SimTypeInt(signed=False, label="UInt32"), "Size": SimTypeInt(signed=False, label="UInt32"), "PageCount": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PSTR"), "RecipientName": SimTypeBottom(label="PSTR"), "Tsid": SimTypeBottom(label="PSTR"), "SenderName": SimTypeBottom(label="PSTR"), "SenderCompany": SimTypeBottom(label="PSTR"), "SenderDept": SimTypeBottom(label="PSTR"), "BillingCode": SimTypeBottom(label="PSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PSTR"), "DocumentName": SimTypeBottom(label="PSTR")}, name="FAX_JOB_ENTRYA", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "JobEntry", "JobsReturned"]),
#
'FaxEnumJobsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "JobId": SimTypeInt(signed=False, label="UInt32"), "UserName": SimTypeBottom(label="PWSTR"), "JobType": SimTypeInt(signed=False, label="UInt32"), "QueueStatus": SimTypeInt(signed=False, label="UInt32"), "Status": SimTypeInt(signed=False, label="UInt32"), "Size": SimTypeInt(signed=False, label="UInt32"), "PageCount": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PWSTR"), "RecipientName": SimTypeBottom(label="PWSTR"), "Tsid": SimTypeBottom(label="PWSTR"), "SenderName": SimTypeBottom(label="PWSTR"), "SenderCompany": SimTypeBottom(label="PWSTR"), "SenderDept": SimTypeBottom(label="PWSTR"), "BillingCode": SimTypeBottom(label="PWSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PWSTR"), "DocumentName": SimTypeBottom(label="PWSTR")}, name="FAX_JOB_ENTRYW", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "JobEntry", "JobsReturned"]),
#
'FaxGetJobA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "JobId": SimTypeInt(signed=False, label="UInt32"), "UserName": SimTypeBottom(label="PSTR"), "JobType": SimTypeInt(signed=False, label="UInt32"), "QueueStatus": SimTypeInt(signed=False, label="UInt32"), "Status": SimTypeInt(signed=False, label="UInt32"), "Size": SimTypeInt(signed=False, label="UInt32"), "PageCount": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PSTR"), "RecipientName": SimTypeBottom(label="PSTR"), "Tsid": SimTypeBottom(label="PSTR"), "SenderName": SimTypeBottom(label="PSTR"), "SenderCompany": SimTypeBottom(label="PSTR"), "SenderDept": SimTypeBottom(label="PSTR"), "BillingCode": SimTypeBottom(label="PSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PSTR"), "DocumentName": SimTypeBottom(label="PSTR")}, name="FAX_JOB_ENTRYA", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "JobId", "JobEntry"]),
#
'FaxGetJobW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "JobId": SimTypeInt(signed=False, label="UInt32"), "UserName": SimTypeBottom(label="PWSTR"), "JobType": SimTypeInt(signed=False, label="UInt32"), "QueueStatus": SimTypeInt(signed=False, label="UInt32"), "Status": SimTypeInt(signed=False, label="UInt32"), "Size": SimTypeInt(signed=False, label="UInt32"), "PageCount": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PWSTR"), "RecipientName": SimTypeBottom(label="PWSTR"), "Tsid": SimTypeBottom(label="PWSTR"), "SenderName": SimTypeBottom(label="PWSTR"), "SenderCompany": SimTypeBottom(label="PWSTR"), "SenderDept": SimTypeBottom(label="PWSTR"), "BillingCode": SimTypeBottom(label="PWSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PWSTR"), "DocumentName": SimTypeBottom(label="PWSTR")}, name="FAX_JOB_ENTRYW", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "JobId", "JobEntry"]),
#
'FaxSetJobA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "JobId": SimTypeInt(signed=False, label="UInt32"), "UserName": SimTypeBottom(label="PSTR"), "JobType": SimTypeInt(signed=False, label="UInt32"), "QueueStatus": SimTypeInt(signed=False, label="UInt32"), "Status": SimTypeInt(signed=False, label="UInt32"), "Size": SimTypeInt(signed=False, label="UInt32"), "PageCount": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PSTR"), "RecipientName": SimTypeBottom(label="PSTR"), "Tsid": SimTypeBottom(label="PSTR"), "SenderName": SimTypeBottom(label="PSTR"), "SenderCompany": SimTypeBottom(label="PSTR"), "SenderDept": SimTypeBottom(label="PSTR"), "BillingCode": SimTypeBottom(label="PSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PSTR"), "DocumentName": SimTypeBottom(label="PSTR")}, name="FAX_JOB_ENTRYA", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "JobId", "Command", "JobEntry"]),
#
'FaxSetJobW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "JobId": SimTypeInt(signed=False, label="UInt32"), "UserName": SimTypeBottom(label="PWSTR"), "JobType": SimTypeInt(signed=False, label="UInt32"), "QueueStatus": SimTypeInt(signed=False, label="UInt32"), "Status": SimTypeInt(signed=False, label="UInt32"), "Size": SimTypeInt(signed=False, label="UInt32"), "PageCount": SimTypeInt(signed=False, label="UInt32"), "RecipientNumber": SimTypeBottom(label="PWSTR"), "RecipientName": SimTypeBottom(label="PWSTR"), "Tsid": SimTypeBottom(label="PWSTR"), "SenderName": SimTypeBottom(label="PWSTR"), "SenderCompany": SimTypeBottom(label="PWSTR"), "SenderDept": SimTypeBottom(label="PWSTR"), "BillingCode": SimTypeBottom(label="PWSTR"), "ScheduleAction": SimTypeInt(signed=False, label="UInt32"), "ScheduleTime": SimTypeBottom(label="SYSTEMTIME"), "DeliveryReportType": SimTypeInt(signed=False, label="UInt32"), "DeliveryReportAddress": SimTypeBottom(label="PWSTR"), "DocumentName": SimTypeBottom(label="PWSTR")}, name="FAX_JOB_ENTRYW", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "JobId", "Command", "JobEntry"]),
#
'FaxGetPageData': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "JobId", "Buffer", "BufferSize", "ImageWidth", "ImageHeight"]),
#
'FaxGetDeviceStatusA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CallerId": SimTypeBottom(label="PSTR"), "Csid": SimTypeBottom(label="PSTR"), "CurrentPage": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "DeviceName": SimTypeBottom(label="PSTR"), "DocumentName": SimTypeBottom(label="PSTR"), "JobType": SimTypeInt(signed=False, label="UInt32"), "PhoneNumber": SimTypeBottom(label="PSTR"), "RoutingString": SimTypeBottom(label="PSTR"), "SenderName": SimTypeBottom(label="PSTR"), "RecipientName": SimTypeBottom(label="PSTR"), "Size": SimTypeInt(signed=False, label="UInt32"), "StartTime": SimTypeBottom(label="FILETIME"), "Status": SimTypeInt(signed=False, label="UInt32"), "StatusString": SimTypeBottom(label="PSTR"), "SubmittedTime": SimTypeBottom(label="FILETIME"), "TotalPages": SimTypeInt(signed=False, label="UInt32"), "Tsid": SimTypeBottom(label="PSTR"), "UserName": SimTypeBottom(label="PSTR")}, name="FAX_DEVICE_STATUSA", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "DeviceStatus"]),
#
'FaxGetDeviceStatusW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CallerId": SimTypeBottom(label="PWSTR"), "Csid": SimTypeBottom(label="PWSTR"), "CurrentPage": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "DeviceName": SimTypeBottom(label="PWSTR"), "DocumentName": SimTypeBottom(label="PWSTR"), "JobType": SimTypeInt(signed=False, label="UInt32"), "PhoneNumber": SimTypeBottom(label="PWSTR"), "RoutingString": SimTypeBottom(label="PWSTR"), "SenderName": SimTypeBottom(label="PWSTR"), "RecipientName": SimTypeBottom(label="PWSTR"), "Size": SimTypeInt(signed=False, label="UInt32"), "StartTime": SimTypeBottom(label="FILETIME"), "Status": SimTypeInt(signed=False, label="UInt32"), "StatusString": SimTypeBottom(label="PWSTR"), "SubmittedTime": SimTypeBottom(label="FILETIME"), "TotalPages": SimTypeInt(signed=False, label="UInt32"), "Tsid": SimTypeBottom(label="PWSTR"), "UserName": SimTypeBottom(label="PWSTR")}, name="FAX_DEVICE_STATUSW", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "DeviceStatus"]),
#
'FaxAbort': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "JobId"]),
#
'FaxGetConfigurationA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "Retries": SimTypeInt(signed=False, label="UInt32"), "RetryDelay": SimTypeInt(signed=False, label="UInt32"), "DirtyDays": SimTypeInt(signed=False, label="UInt32"), "Branding": SimTypeBottom(label="BOOL"), "UseDeviceTsid": SimTypeBottom(label="BOOL"), "ServerCp": SimTypeBottom(label="BOOL"), "PauseServerQueue": SimTypeBottom(label="BOOL"), "StartCheapTime": SimStruct({"Hour": SimTypeShort(signed=False, label="UInt16"), "Minute": SimTypeShort(signed=False, label="UInt16")}, name="FAX_TIME", pack=False, align=None), "StopCheapTime": SimStruct({"Hour": SimTypeShort(signed=False, label="UInt16"), "Minute": SimTypeShort(signed=False, label="UInt16")}, name="FAX_TIME", pack=False, align=None), "ArchiveOutgoingFaxes": SimTypeBottom(label="BOOL"), "ArchiveDirectory": SimTypeBottom(label="PSTR"), "Reserved": SimTypeBottom(label="PSTR")}, name="FAX_CONFIGURATIONA", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "FaxConfig"]),
#
'FaxGetConfigurationW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "Retries": SimTypeInt(signed=False, label="UInt32"), "RetryDelay": SimTypeInt(signed=False, label="UInt32"), "DirtyDays": SimTypeInt(signed=False, label="UInt32"), "Branding": SimTypeBottom(label="BOOL"), "UseDeviceTsid": SimTypeBottom(label="BOOL"), "ServerCp": SimTypeBottom(label="BOOL"), "PauseServerQueue": SimTypeBottom(label="BOOL"), "StartCheapTime": SimStruct({"Hour": SimTypeShort(signed=False, label="UInt16"), "Minute": SimTypeShort(signed=False, label="UInt16")}, name="FAX_TIME", pack=False, align=None), "StopCheapTime": SimStruct({"Hour": SimTypeShort(signed=False, label="UInt16"), "Minute": SimTypeShort(signed=False, label="UInt16")}, name="FAX_TIME", pack=False, align=None), "ArchiveOutgoingFaxes": SimTypeBottom(label="BOOL"), "ArchiveDirectory": SimTypeBottom(label="PWSTR"), "Reserved": SimTypeBottom(label="PWSTR")}, name="FAX_CONFIGURATIONW", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "FaxConfig"]),
#
'FaxSetConfigurationA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "Retries": SimTypeInt(signed=False, label="UInt32"), "RetryDelay": SimTypeInt(signed=False, label="UInt32"), "DirtyDays": SimTypeInt(signed=False, label="UInt32"), "Branding": SimTypeBottom(label="BOOL"), "UseDeviceTsid": SimTypeBottom(label="BOOL"), "ServerCp": SimTypeBottom(label="BOOL"), "PauseServerQueue": SimTypeBottom(label="BOOL"), "StartCheapTime": SimStruct({"Hour": SimTypeShort(signed=False, label="UInt16"), "Minute": SimTypeShort(signed=False, label="UInt16")}, name="FAX_TIME", pack=False, align=None), "StopCheapTime": SimStruct({"Hour": SimTypeShort(signed=False, label="UInt16"), "Minute": SimTypeShort(signed=False, label="UInt16")}, name="FAX_TIME", pack=False, align=None), "ArchiveOutgoingFaxes": SimTypeBottom(label="BOOL"), "ArchiveDirectory": SimTypeBottom(label="PSTR"), "Reserved": SimTypeBottom(label="PSTR")}, name="FAX_CONFIGURATIONA", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "FaxConfig"]),
#
'FaxSetConfigurationW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "Retries": SimTypeInt(signed=False, label="UInt32"), "RetryDelay": SimTypeInt(signed=False, label="UInt32"), "DirtyDays": SimTypeInt(signed=False, label="UInt32"), "Branding": SimTypeBottom(label="BOOL"), "UseDeviceTsid": SimTypeBottom(label="BOOL"), "ServerCp": SimTypeBottom(label="BOOL"), "PauseServerQueue": SimTypeBottom(label="BOOL"), "StartCheapTime": SimStruct({"Hour": SimTypeShort(signed=False, label="UInt16"), "Minute": SimTypeShort(signed=False, label="UInt16")}, name="FAX_TIME", pack=False, align=None), "StopCheapTime": SimStruct({"Hour": SimTypeShort(signed=False, label="UInt16"), "Minute": SimTypeShort(signed=False, label="UInt16")}, name="FAX_TIME", pack=False, align=None), "ArchiveOutgoingFaxes": SimTypeBottom(label="BOOL"), "ArchiveDirectory": SimTypeBottom(label="PWSTR"), "Reserved": SimTypeBottom(label="PWSTR")}, name="FAX_CONFIGURATIONW", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "FaxConfig"]),
#
'FaxGetLoggingCategoriesA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"Name": SimTypeBottom(label="PSTR"), "Category": SimTypeInt(signed=False, label="UInt32"), "Level": SimTypeInt(signed=False, label="UInt32")}, name="FAX_LOG_CATEGORYA", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "Categories", "NumberCategories"]),
#
'FaxGetLoggingCategoriesW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"Name": SimTypeBottom(label="PWSTR"), "Category": SimTypeInt(signed=False, label="UInt32"), "Level": SimTypeInt(signed=False, label="UInt32")}, name="FAX_LOG_CATEGORYW", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "Categories", "NumberCategories"]),
#
'FaxSetLoggingCategoriesA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypeBottom(label="PSTR"), "Category": SimTypeInt(signed=False, label="UInt32"), "Level": SimTypeInt(signed=False, label="UInt32")}, name="FAX_LOG_CATEGORYA", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "Categories", "NumberCategories"]),
#
'FaxSetLoggingCategoriesW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Name": SimTypeBottom(label="PWSTR"), "Category": SimTypeInt(signed=False, label="UInt32"), "Level": SimTypeInt(signed=False, label="UInt32")}, name="FAX_LOG_CATEGORYW", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "Categories", "NumberCategories"]),
#
'FaxEnumPortsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "State": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Rings": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "DeviceName": SimTypeBottom(label="PSTR"), "Tsid": SimTypeBottom(label="PSTR"), "Csid": SimTypeBottom(label="PSTR")}, name="FAX_PORT_INFOA", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "PortInfo", "PortsReturned"]),
#
'FaxEnumPortsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "State": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Rings": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "DeviceName": SimTypeBottom(label="PWSTR"), "Tsid": SimTypeBottom(label="PWSTR"), "Csid": SimTypeBottom(label="PWSTR")}, name="FAX_PORT_INFOW", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "PortInfo", "PortsReturned"]),
#
'FaxGetPortA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "State": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Rings": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "DeviceName": SimTypeBottom(label="PSTR"), "Tsid": SimTypeBottom(label="PSTR"), "Csid": SimTypeBottom(label="PSTR")}, name="FAX_PORT_INFOA", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "PortInfo"]),
#
'FaxGetPortW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "State": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Rings": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "DeviceName": SimTypeBottom(label="PWSTR"), "Tsid": SimTypeBottom(label="PWSTR"), "Csid": SimTypeBottom(label="PWSTR")}, name="FAX_PORT_INFOW", pack=False, align=None), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "PortInfo"]),
#
'FaxSetPortA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "State": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Rings": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "DeviceName": SimTypeBottom(label="PSTR"), "Tsid": SimTypeBottom(label="PSTR"), "Csid": SimTypeBottom(label="PSTR")}, name="FAX_PORT_INFOA", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "PortInfo"]),
#
'FaxSetPortW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "State": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Rings": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "DeviceName": SimTypeBottom(label="PWSTR"), "Tsid": SimTypeBottom(label="PWSTR"), "Csid": SimTypeBottom(label="PWSTR")}, name="FAX_PORT_INFOW", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "PortInfo"]),
#
'FaxEnumRoutingMethodsA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "Enabled": SimTypeBottom(label="BOOL"), "DeviceName": SimTypeBottom(label="PSTR"), "Guid": SimTypeBottom(label="PSTR"), "FriendlyName": SimTypeBottom(label="PSTR"), "FunctionName": SimTypeBottom(label="PSTR"), "ExtensionImageName": SimTypeBottom(label="PSTR"), "ExtensionFriendlyName": SimTypeBottom(label="PSTR")}, name="FAX_ROUTING_METHODA", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "RoutingMethod", "MethodsReturned"]),
#
'FaxEnumRoutingMethodsW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DeviceId": SimTypeInt(signed=False, label="UInt32"), "Enabled": SimTypeBottom(label="BOOL"), "DeviceName": SimTypeBottom(label="PWSTR"), "Guid": SimTypeBottom(label="PWSTR"), "FriendlyName": SimTypeBottom(label="PWSTR"), "FunctionName": SimTypeBottom(label="PWSTR"), "ExtensionImageName": SimTypeBottom(label="PWSTR"), "ExtensionFriendlyName": SimTypeBottom(label="PWSTR")}, name="FAX_ROUTING_METHODW", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "RoutingMethod", "MethodsReturned"]),
#
'FaxEnableRoutingMethodA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "RoutingGuid", "Enabled"]),
#
'FaxEnableRoutingMethodW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypeInt(signed=True, label="Int32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "RoutingGuid", "Enabled"]),
#
'FaxEnumGlobalRoutingInfoA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "Guid": SimTypeBottom(label="PSTR"), "FriendlyName": SimTypeBottom(label="PSTR"), "FunctionName": SimTypeBottom(label="PSTR"), "ExtensionImageName": SimTypeBottom(label="PSTR"), "ExtensionFriendlyName": SimTypeBottom(label="PSTR")}, name="FAX_GLOBAL_ROUTING_INFOA", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "RoutingInfo", "MethodsReturned"]),
#
'FaxEnumGlobalRoutingInfoW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "Guid": SimTypeBottom(label="PWSTR"), "FriendlyName": SimTypeBottom(label="PWSTR"), "FunctionName": SimTypeBottom(label="PWSTR"), "ExtensionImageName": SimTypeBottom(label="PWSTR"), "ExtensionFriendlyName": SimTypeBottom(label="PWSTR")}, name="FAX_GLOBAL_ROUTING_INFOW", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "RoutingInfo", "MethodsReturned"]),
#
'FaxSetGlobalRoutingInfoA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "Guid": SimTypeBottom(label="PSTR"), "FriendlyName": SimTypeBottom(label="PSTR"), "FunctionName": SimTypeBottom(label="PSTR"), "ExtensionImageName": SimTypeBottom(label="PSTR"), "ExtensionFriendlyName": SimTypeBottom(label="PSTR")}, name="FAX_GLOBAL_ROUTING_INFOA", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "RoutingInfo"]),
#
'FaxSetGlobalRoutingInfoW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "Priority": SimTypeInt(signed=False, label="UInt32"), "Guid": SimTypeBottom(label="PWSTR"), "FriendlyName": SimTypeBottom(label="PWSTR"), "FunctionName": SimTypeBottom(label="PWSTR"), "ExtensionImageName": SimTypeBottom(label="PWSTR"), "ExtensionFriendlyName": SimTypeBottom(label="PWSTR")}, name="FAX_GLOBAL_ROUTING_INFOW", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "RoutingInfo"]),
#
'FaxGetRoutingInfoA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "RoutingGuid", "RoutingInfoBuffer", "RoutingInfoBufferSize"]),
#
'FaxGetRoutingInfoW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeChar(label="Byte"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "RoutingGuid", "RoutingInfoBuffer", "RoutingInfoBufferSize"]),
#
'FaxSetRoutingInfoA': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "RoutingGuid", "RoutingInfoBuffer", "RoutingInfoBufferSize"]),
#
'FaxSetRoutingInfoW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxPortHandle", "RoutingGuid", "RoutingInfoBuffer", "RoutingInfoBufferSize"]),
#
'FaxInitializeEventQueue': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "CompletionPort", "CompletionKey", "hWnd", "MessageStart"]),
#
'FaxFreeBuffer': SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeBottom(label="Void"), arg_names=["Buffer"]),
#
'FaxStartPrintJobA': SimTypeFunction([SimTypePointer(SimTypeChar(label="Byte"), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DocName": SimTypeBottom(label="PSTR"), "RecipientName": SimTypeBottom(label="PSTR"), "RecipientNumber": SimTypeBottom(label="PSTR"), "SenderName": SimTypeBottom(label="PSTR"), "SenderCompany": SimTypeBottom(label="PSTR"), "SenderDept": SimTypeBottom(label="PSTR"), "SenderBillingCode": SimTypeBottom(label="PSTR"), "Reserved": SimTypeBottom(label="PSTR"), "DrEmailAddress": SimTypeBottom(label="PSTR"), "OutputFileName": SimTypeBottom(label="PSTR")}, name="FAX_PRINT_INFOA", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "hDC": SimTypeBottom(label="HDC"), "ServerName": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 16)}, name="FAX_CONTEXT_INFOA", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["PrinterName", "PrintInfo", "FaxJobId", "FaxContextInfo"]),
#
'FaxStartPrintJobW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "DocName": SimTypeBottom(label="PWSTR"), "RecipientName": SimTypeBottom(label="PWSTR"), "RecipientNumber": SimTypeBottom(label="PWSTR"), "SenderName": SimTypeBottom(label="PWSTR"), "SenderCompany": SimTypeBottom(label="PWSTR"), "SenderDept": SimTypeBottom(label="PWSTR"), "SenderBillingCode": SimTypeBottom(label="PWSTR"), "Reserved": SimTypeBottom(label="PWSTR"), "DrEmailAddress": SimTypeBottom(label="PWSTR"), "OutputFileName": SimTypeBottom(label="PWSTR")}, name="FAX_PRINT_INFOW", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "hDC": SimTypeBottom(label="HDC"), "ServerName": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 16)}, name="FAX_CONTEXT_INFOW", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["PrinterName", "PrintInfo", "FaxJobId", "FaxContextInfo"]),
#
'FaxPrintCoverPageA': SimTypeFunction([SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "hDC": SimTypeBottom(label="HDC"), "ServerName": SimTypeFixedSizeArray(SimTypeBottom(label="CHAR"), 16)}, name="FAX_CONTEXT_INFOA", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CoverPageName": SimTypeBottom(label="PSTR"), "UseServerCoverPage": SimTypeBottom(label="BOOL"), "RecName": SimTypeBottom(label="PSTR"), "RecFaxNumber": SimTypeBottom(label="PSTR"), "RecCompany": SimTypeBottom(label="PSTR"), "RecStreetAddress": SimTypeBottom(label="PSTR"), "RecCity": SimTypeBottom(label="PSTR"), "RecState": SimTypeBottom(label="PSTR"), "RecZip": SimTypeBottom(label="PSTR"), "RecCountry": SimTypeBottom(label="PSTR"), "RecTitle": SimTypeBottom(label="PSTR"), "RecDepartment": SimTypeBottom(label="PSTR"), "RecOfficeLocation": SimTypeBottom(label="PSTR"), "RecHomePhone": SimTypeBottom(label="PSTR"), "RecOfficePhone": SimTypeBottom(label="PSTR"), "SdrName": SimTypeBottom(label="PSTR"), "SdrFaxNumber": SimTypeBottom(label="PSTR"), "SdrCompany": SimTypeBottom(label="PSTR"), "SdrAddress": SimTypeBottom(label="PSTR"), "SdrTitle": SimTypeBottom(label="PSTR"), "SdrDepartment": SimTypeBottom(label="PSTR"), "SdrOfficeLocation": SimTypeBottom(label="PSTR"), "SdrHomePhone": SimTypeBottom(label="PSTR"), "SdrOfficePhone": SimTypeBottom(label="PSTR"), "Note": SimTypeBottom(label="PSTR"), "Subject": SimTypeBottom(label="PSTR"), "TimeSent": SimTypeBottom(label="SYSTEMTIME"), "PageCount": SimTypeInt(signed=False, label="UInt32")}, name="FAX_COVERPAGE_INFOA", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxContextInfo", "CoverPageInfo"]),
#
'FaxPrintCoverPageW': SimTypeFunction([SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "hDC": SimTypeBottom(label="HDC"), "ServerName": SimTypeFixedSizeArray(SimTypeChar(label="Char"), 16)}, name="FAX_CONTEXT_INFOW", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"SizeOfStruct": SimTypeInt(signed=False, label="UInt32"), "CoverPageName": SimTypeBottom(label="PWSTR"), "UseServerCoverPage": SimTypeBottom(label="BOOL"), "RecName": SimTypeBottom(label="PWSTR"), "RecFaxNumber": SimTypeBottom(label="PWSTR"), "RecCompany": SimTypeBottom(label="PWSTR"), "RecStreetAddress": SimTypeBottom(label="PWSTR"), "RecCity": SimTypeBottom(label="PWSTR"), "RecState": SimTypeBottom(label="PWSTR"), "RecZip": SimTypeBottom(label="PWSTR"), "RecCountry": SimTypeBottom(label="PWSTR"), "RecTitle": SimTypeBottom(label="PWSTR"), "RecDepartment": SimTypeBottom(label="PWSTR"), "RecOfficeLocation": SimTypeBottom(label="PWSTR"), "RecHomePhone": SimTypeBottom(label="PWSTR"), "RecOfficePhone": SimTypeBottom(label="PWSTR"), "SdrName": SimTypeBottom(label="PWSTR"), "SdrFaxNumber": SimTypeBottom(label="PWSTR"), "SdrCompany": SimTypeBottom(label="PWSTR"), "SdrAddress": SimTypeBottom(label="PWSTR"), "SdrTitle": SimTypeBottom(label="PWSTR"), "SdrDepartment": SimTypeBottom(label="PWSTR"), "SdrOfficeLocation": SimTypeBottom(label="PWSTR"), "SdrHomePhone": SimTypeBottom(label="PWSTR"), "SdrOfficePhone": SimTypeBottom(label="PWSTR"), "Note": SimTypeBottom(label="PWSTR"), "Subject": SimTypeBottom(label="PWSTR"), "TimeSent": SimTypeBottom(label="SYSTEMTIME"), "PageCount": SimTypeInt(signed=False, label="UInt32")}, name="FAX_COVERPAGE_INFOW", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxContextInfo", "CoverPageInfo"]),
#
'FaxRegisterServiceProviderW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["DeviceProvider", "FriendlyName", "ImageName", "TspName"]),
#
'FaxUnregisterServiceProviderW': SimTypeFunction([SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["DeviceProvider"]),
#
'FaxRegisterRoutingExtensionW': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeFunction([SimTypeBottom(label="HANDLE"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypeBottom(label="PWSTR"), SimTypeBottom(label="PWSTR"), SimTypeBottom(label="PWSTR"), SimTypeBottom(label="PWSTR")], SimTypeBottom(label="BOOL"), arg_names=["FaxHandle", "Context", "MethodName", "FriendlyName", "FunctionName", "Guid"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "ExtensionName", "FriendlyName", "ImageName", "CallBack", "Context"]),
#
'FaxAccessCheck': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["FaxHandle", "AccessMask"]),
}
lib.set_prototypes(prototypes)
| 427.925373 | 3,055 | 0.74319 | 5,736 | 57,342 | 7.398536 | 0.050035 | 0.210377 | 0.093501 | 0.142137 | 0.950398 | 0.947618 | 0.947123 | 0.935176 | 0.929356 | 0.924761 | 0 | 0.015083 | 0.064595 | 57,342 | 133 | 3,056 | 431.142857 | 0.77611 | 0.000488 | 0 | 0 | 0 | 0 | 0.277902 | 0.018304 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.071429 | 0 | 0.071429 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
94e3d802541d2ea069e70d31a6a5617cadc1faa5 | 166 | py | Python | optjournal/__init__.py | sile/optjournal | 76970c1bd9f1f9e51d2dd4cc28573528e31d7986 | [
"MIT"
] | 4 | 2020-07-27T07:01:38.000Z | 2022-01-25T01:34:05.000Z | optjournal/__init__.py | sile/optjournal | 76970c1bd9f1f9e51d2dd4cc28573528e31d7986 | [
"MIT"
] | 1 | 2020-10-26T12:19:13.000Z | 2020-10-26T12:19:13.000Z | optjournal/__init__.py | sile/optjournal | 76970c1bd9f1f9e51d2dd4cc28573528e31d7986 | [
"MIT"
] | 1 | 2021-04-23T22:41:18.000Z | 2021-04-23T22:41:18.000Z | from optjournal._file_system import FileSystemDatabase # NOQA
from optjournal._rdb import RDBDatabase # NOQA
from optjournal._storage import JournalStorage # NOQA
| 41.5 | 62 | 0.837349 | 19 | 166 | 7.105263 | 0.578947 | 0.311111 | 0.266667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.126506 | 166 | 3 | 63 | 55.333333 | 0.931034 | 0.084337 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
bf9c2cdcea9260682b2793959e9c74fa1d1b7f85 | 3,171 | py | Python | fastrunner/migrations/0004_auto_20200814_1605.py | FuxiongYang/faster | 91cb3f3912f68ea0dde9a5b6d6ef8179310fbbdb | [
"MIT"
] | 227 | 2020-12-25T12:26:27.000Z | 2022-03-31T16:00:45.000Z | fastrunner/migrations/0004_auto_20200814_1605.py | FuxiongYang/faster | 91cb3f3912f68ea0dde9a5b6d6ef8179310fbbdb | [
"MIT"
] | 3 | 2021-01-18T02:51:34.000Z | 2022-03-03T06:17:44.000Z | fastrunner/migrations/0004_auto_20200814_1605.py | FuxiongYang/faster | 91cb3f3912f68ea0dde9a5b6d6ef8179310fbbdb | [
"MIT"
] | 32 | 2021-01-04T01:53:55.000Z | 2022-03-04T12:25:35.000Z | # Generated by Django 2.1.11 on 2020-08-14 16:05
from django.db import migrations, models
class Migration(migrations.Migration):

    dependencies = [
        ('fastrunner', '0003_casestep_source_api_id'),
    ]

    # Every audited model receives the same pair of nullable audit
    # columns (creator / updater), so the sixteen identical AddField
    # stanzas are generated programmatically.  The resulting operations
    # list contains the same objects in the same order as before:
    # per model, first 'creator' then 'updater'.
    _audited_models = (
        'api', 'case', 'casestep', 'config',
        'hostip', 'project', 'report', 'variables',
    )

    operations = [
        migrations.AddField(
            model_name=model,
            name=field,
            field=models.CharField(max_length=20, null=True, verbose_name=label),
        )
        for model in _audited_models
        for field, label in (('creator', '创建人'), ('updater', '更新人'))
    ]
| 33.734043 | 81 | 0.555976 | 318 | 3,171 | 5.380503 | 0.150943 | 0.168323 | 0.215079 | 0.252484 | 0.908241 | 0.908241 | 0.835184 | 0.835184 | 0.835184 | 0.835184 | 0 | 0.02393 | 0.314727 | 3,171 | 93 | 82 | 34.096774 | 0.763461 | 0.014506 | 0 | 0.91954 | 1 | 0 | 0.09446 | 0.008646 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.011494 | 0 | 0.045977 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
bfcdaf742f87d46902169228d0b416904d4e45ee | 145 | py | Python | sklearn_xarray/externals/__init__.py | liminleitt/phausamanne | fa259698eb14c75711818f141082d24bc99d645b | [
"BSD-3-Clause"
] | 94 | 2017-12-04T16:23:08.000Z | 2022-03-29T07:38:48.000Z | sklearn_xarray/externals/__init__.py | liminleitt/phausamanne | fa259698eb14c75711818f141082d24bc99d645b | [
"BSD-3-Clause"
] | 52 | 2017-12-01T15:29:13.000Z | 2022-02-11T15:21:17.000Z | sklearn_xarray/externals/__init__.py | liminleitt/phausamanne | fa259698eb14c75711818f141082d24bc99d645b | [
"BSD-3-Clause"
] | 9 | 2018-05-02T18:38:07.000Z | 2021-02-23T05:45:44.000Z | """" ``sklearn_xarray.externals`` """
try:
import numpy_groupies
except ImportError:
from . import _numpy_groupies_np as numpy_groupies
| 20.714286 | 54 | 0.737931 | 17 | 145 | 5.941176 | 0.705882 | 0.386139 | 0.376238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.158621 | 145 | 6 | 55 | 24.166667 | 0.827869 | 0.206897 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
7835bd38f7392bb4523380899f696102c33c440a | 102 | py | Python | neaturl/__init__.py | martintupy/pretty-url | a5831e99b1ad42591fbb0e64d6c75f12e0b21663 | [
"Apache-2.0"
] | null | null | null | neaturl/__init__.py | martintupy/pretty-url | a5831e99b1ad42591fbb0e64d6c75f12e0b21663 | [
"Apache-2.0"
] | null | null | null | neaturl/__init__.py | martintupy/pretty-url | a5831e99b1ad42591fbb0e64d6c75f12e0b21663 | [
"Apache-2.0"
] | null | null | null | import sys
from urllib.parse import unquote_plus
def main():
    """CLI entry point: print the decoded form of the first argument.

    ``unquote_plus`` turns ``%xx`` escapes back into characters and
    ``+`` into spaces.  When no argument is supplied, print a usage
    message to stderr and exit with status 1 instead of crashing with
    an IndexError traceback.
    """
    if len(sys.argv) < 2:
        print("usage: neaturl <encoded-url>", file=sys.stderr)
        sys.exit(1)
    print(unquote_plus(sys.argv[1]))
| 11.333333 | 37 | 0.72549 | 16 | 102 | 4.5 | 0.75 | 0.305556 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011765 | 0.166667 | 102 | 8 | 38 | 12.75 | 0.835294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | true | 0 | 0.5 | 0 | 0.75 | 0.25 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
15722750805dac228ecae2725925267ba7360211 | 19,357 | py | Python | roles/lib_openshift/src/test/unit/test_oc_env.py | coberry87/openshift-ansible | 16899f0783905a841a5bbbe0716395863c9e81b5 | [
"Apache-2.0"
] | 1 | 2019-09-17T06:18:05.000Z | 2019-09-17T06:18:05.000Z | roles/lib_openshift/src/test/unit/test_oc_env.py | coberry87/openshift-ansible | 16899f0783905a841a5bbbe0716395863c9e81b5 | [
"Apache-2.0"
] | 4 | 2020-02-26T20:22:29.000Z | 2021-09-23T23:25:46.000Z | roles/lib_openshift/src/test/unit/test_oc_env.py | Elwalle/openshift-ansible-release-3.11 | f29735863133a6b8816faf561ed410a1d2bad76c | [
"Apache-2.0"
] | 5 | 2019-09-05T02:31:48.000Z | 2020-11-20T04:11:55.000Z | #!/usr/bin/env python
'''
Unit tests for oc_env
'''
import os
import six
import sys
import unittest
import mock
# Removing invalid variable names for tests so that I can
# keep them brief
# pylint: disable=invalid-name,no-name-in-module
# Disable import-error b/c our libraries aren't loaded in jenkins
# pylint: disable=import-error,wrong-import-position
# place class in our python path
module_path = os.path.join('/'.join(os.path.realpath(__file__).split('/')[:-4]), 'library') # noqa: E501
sys.path.insert(0, module_path)
from oc_env import OCEnv, locate_oc_binary # noqa: E402
class OCEnvTest(unittest.TestCase):
    '''
    Test class for OCEnv

    Every run_ansible test mocks out OCEnv._run (the subprocess layer),
    Utils.create_tmpfile_copy (kubeconfig handling) and locate_oc_binary,
    so OCEnv can be exercised without a real cluster or oc client.
    '''

    @mock.patch('oc_env.locate_oc_binary')
    @mock.patch('oc_env.Utils.create_tmpfile_copy')
    @mock.patch('oc_env.OCEnv._run')
    def test_listing_all_env_vars(self, mock_cmd, mock_tmpfile_copy, mock_oc_binary):
        ''' Testing listing all environment variables from a dc'''
        # Arrange

        # run_ansible input parameters
        params = {
            'state': 'list',
            'namespace': 'default',
            'name': 'router',
            'kind': 'dc',
            'env_vars': None,
            'kubeconfig': '/etc/origin/master/admin.kubeconfig',
            'debug': False,
        }

        # Canned `oc get dc router -o json` output the mocked _run returns.
        dc_results = '''{
            "apiVersion": "v1",
            "kind": "DeploymentConfig",
            "metadata": {
                "creationTimestamp": "2017-02-02T15:58:49Z",
                "generation": 8,
                "labels": {
                    "router": "router"
                },
                "name": "router",
                "namespace": "default",
                "resourceVersion": "513678"
            },
            "spec": {
                "replicas": 2,
                "selector": {
                    "router": "router"
                },
                "template": {
                    "metadata": {
                        "creationTimestamp": null,
                        "labels": {
                            "router": "router"
                        }
                    },
                    "spec": {
                        "containers": [
                            {
                                "env": [
                                    {
                                        "name": "DEFAULT_CERTIFICATE_DIR",
                                        "value": "/etc/pki/tls/private"
                                    },
                                    {
                                        "name": "DEFAULT_CERTIFICATE_PATH",
                                        "value": "/etc/pki/tls/private/tls.crt"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HOSTNAME"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HTTPS_VSERVER"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HTTP_VSERVER"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_INSECURE",
                                        "value": "false"
                                    }
                                ],
                                "name": "router"
                            }
                        ]
                    }
                },
                "test": false,
                "triggers": [
                    {
                        "type": "ConfigChange"
                    }
                ]
            }
        }'''

        # Return values of our mocked function call. These get returned once per call.
        mock_cmd.side_effect = [
            (0, dc_results, ''),  # First call to the mock
        ]

        mock_oc_binary.side_effect = [
            'oc'
        ]

        mock_tmpfile_copy.side_effect = [
            '/tmp/mock_adminkubeconfig',
        ]

        # Act
        results = OCEnv.run_ansible(params, False)

        # Assert
        self.assertFalse(results['changed'])
        # for/else idiom: the else clause fires only if the loop completed
        # without break, i.e. the expected env var was never found.
        for env_var in results['module_results']:
            if env_var == {'name': 'DEFAULT_CERTIFICATE_DIR', 'value': '/etc/pki/tls/private'}:
                break
        else:
            self.fail('Did not find environment variables in results.')

        self.assertEqual(results['state'], 'list')

        # Making sure our mocks were called as we expected
        mock_cmd.assert_has_calls([
            mock.call(['oc', 'get', 'dc', 'router', '-o', 'json', '-n', 'default'], None),
        ])

    @mock.patch('oc_env.locate_oc_binary')
    @mock.patch('oc_env.Utils.create_tmpfile_copy')
    @mock.patch('oc_env.OCEnv._run')
    def test_adding_env_vars(self, mock_cmd, mock_tmpfile_copy, mock_oc_binary):
        ''' Test add environment variables to a dc'''
        # Arrange

        # run_ansible input parameters
        params = {
            'state': 'present',
            'namespace': 'default',
            'name': 'router',
            'kind': 'dc',
            'kubeconfig': '/etc/origin/master/admin.kubeconfig',
            'debug': False,
            'env_vars': {'SOMEKEY': 'SOMEVALUE'},
        }

        # dc as it exists before the module runs (no SOMEKEY yet).
        dc_results = '''{
            "apiVersion": "v1",
            "kind": "DeploymentConfig",
            "metadata": {
                "creationTimestamp": "2017-02-02T15:58:49Z",
                "generation": 8,
                "labels": {
                    "router": "router"
                },
                "name": "router",
                "namespace": "default",
                "resourceVersion": "513678"
            },
            "spec": {
                "replicas": 2,
                "selector": {
                    "router": "router"
                },
                "template": {
                    "metadata": {
                        "creationTimestamp": null,
                        "labels": {
                            "router": "router"
                        }
                    },
                    "spec": {
                        "containers": [
                            {
                                "env": [
                                    {
                                        "name": "DEFAULT_CERTIFICATE_DIR",
                                        "value": "/etc/pki/tls/private"
                                    },
                                    {
                                        "name": "DEFAULT_CERTIFICATE_PATH",
                                        "value": "/etc/pki/tls/private/tls.crt"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HOSTNAME"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HTTPS_VSERVER"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HTTP_VSERVER"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_INSECURE",
                                        "value": "false"
                                    }
                                ],
                                "name": "router"
                            }
                        ]
                    }
                },
                "test": false,
                "triggers": [
                    {
                        "type": "ConfigChange"
                    }
                ]
            }
        }'''

        # dc after the edit: identical except the appended SOMEKEY env var.
        dc_results_after = '''{
            "apiVersion": "v1",
            "kind": "DeploymentConfig",
            "metadata": {
                "creationTimestamp": "2017-02-02T15:58:49Z",
                "generation": 8,
                "labels": {
                    "router": "router"
                },
                "name": "router",
                "namespace": "default",
                "resourceVersion": "513678"
            },
            "spec": {
                "replicas": 2,
                "selector": {
                    "router": "router"
                },
                "template": {
                    "metadata": {
                        "creationTimestamp": null,
                        "labels": {
                            "router": "router"
                        }
                    },
                    "spec": {
                        "containers": [
                            {
                                "env": [
                                    {
                                        "name": "DEFAULT_CERTIFICATE_DIR",
                                        "value": "/etc/pki/tls/private"
                                    },
                                    {
                                        "name": "DEFAULT_CERTIFICATE_PATH",
                                        "value": "/etc/pki/tls/private/tls.crt"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HOSTNAME"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HTTPS_VSERVER"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HTTP_VSERVER"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_INSECURE",
                                        "value": "false"
                                    },
                                    {
                                        "name": "SOMEKEY",
                                        "value": "SOMEVALUE"
                                    }
                                ],
                                "name": "router"
                            }
                        ]
                    }
                },
                "test": false,
                "triggers": [
                    {
                        "type": "ConfigChange"
                    }
                ]
            }
        }'''

        # Return values of our mocked function call. These get returned once per call.
        # Two "before" reads (existence + current state), then two reads of
        # the updated object after the edit is applied.
        mock_cmd.side_effect = [
            (0, dc_results, ''),
            (0, dc_results, ''),
            (0, dc_results_after, ''),
            (0, dc_results_after, ''),
        ]

        mock_oc_binary.side_effect = [
            'oc'
        ]

        mock_tmpfile_copy.side_effect = [
            '/tmp/mock_adminkubeconfig',
        ]

        # Act
        results = OCEnv.run_ansible(params, False)

        # Assert
        self.assertTrue(results['changed'])
        # for/else: fail only if SOMEKEY was never seen in the results.
        for env_var in results['module_results']:
            if env_var == {'name': 'SOMEKEY', 'value': 'SOMEVALUE'}:
                break
        else:
            self.fail('Did not find environment variables in results.')

        self.assertEqual(results['state'], 'present')

        # Making sure our mocks were called as we expected
        mock_cmd.assert_has_calls([
            mock.call(['oc', 'get', 'dc', 'router', '-o', 'json', '-n', 'default'], None),
        ])

    @mock.patch('oc_env.locate_oc_binary')
    @mock.patch('oc_env.Utils.create_tmpfile_copy')
    @mock.patch('oc_env.OCEnv._run')
    def test_removing_env_vars(self, mock_cmd, mock_tmpfile_copy, mock_oc_binary):
        ''' Test removing environment variables from a dc'''
        # Arrange

        # run_ansible input parameters
        params = {
            'state': 'absent',
            'namespace': 'default',
            'name': 'router',
            'kind': 'dc',
            'kubeconfig': '/etc/origin/master/admin.kubeconfig',
            'debug': False,
            'env_vars': {'SOMEKEY': 'SOMEVALUE'},
        }

        # dc before removal: contains the SOMEKEY env var to be deleted.
        dc_results_before = '''{
            "apiVersion": "v1",
            "kind": "DeploymentConfig",
            "metadata": {
                "creationTimestamp": "2017-02-02T15:58:49Z",
                "generation": 8,
                "labels": {
                    "router": "router"
                },
                "name": "router",
                "namespace": "default",
                "resourceVersion": "513678"
            },
            "spec": {
                "replicas": 2,
                "selector": {
                    "router": "router"
                },
                "template": {
                    "metadata": {
                        "creationTimestamp": null,
                        "labels": {
                            "router": "router"
                        }
                    },
                    "spec": {
                        "containers": [
                            {
                                "env": [
                                    {
                                        "name": "DEFAULT_CERTIFICATE_DIR",
                                        "value": "/etc/pki/tls/private"
                                    },
                                    {
                                        "name": "DEFAULT_CERTIFICATE_PATH",
                                        "value": "/etc/pki/tls/private/tls.crt"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HOSTNAME"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HTTPS_VSERVER"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_HTTP_VSERVER"
                                    },
                                    {
                                        "name": "ROUTER_EXTERNAL_HOST_INSECURE",
                                        "value": "false"
                                    },
                                    {
                                        "name": "SOMEKEY",
                                        "value": "SOMEVALUE"
                                    }
                                ],
                                "name": "router"
                            }
                        ]
                    }
                },
                "test": false,
                "triggers": [
                    {
                        "type": "ConfigChange"
                    }
                ]
            }
        }'''

        # Return values of our mocked function call. These get returned once per call.
        # Two reads of the existing dc, then an empty reply for the delete/edit.
        mock_cmd.side_effect = [
            (0, dc_results_before, ''),
            (0, dc_results_before, ''),
            (0, '', ''),
        ]

        mock_oc_binary.side_effect = [
            'oc'
        ]

        mock_tmpfile_copy.side_effect = [
            '/tmp/mock_adminkubeconfig',
        ]

        # Act
        results = OCEnv.run_ansible(params, False)

        # Assert
        self.assertTrue(results['changed'])
        self.assertEqual(results['state'], 'absent')

        # Making sure our mocks were called as we expected
        mock_cmd.assert_has_calls([
            mock.call(['oc', 'get', 'dc', 'router', '-o', 'json', '-n', 'default'], None),
        ])

    # --- locate_oc_binary lookups, py2 flavour (os.path.exists based) ---

    @unittest.skipIf(six.PY3, 'py2 test only')
    @mock.patch('os.path.exists')
    @mock.patch('os.environ.get')
    def test_binary_lookup_fallback(self, mock_env_get, mock_path_exists):
        ''' Testing binary lookup fallback '''
        # Empty PATH and no file hits: fall back to the bare 'oc' name.
        mock_env_get.side_effect = lambda _v, _d: ''
        mock_path_exists.side_effect = lambda _: False

        self.assertEqual(locate_oc_binary(), 'oc')

    @unittest.skipIf(six.PY3, 'py2 test only')
    @mock.patch('os.path.exists')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_path(self, mock_env_get, mock_path_exists):
        ''' Testing binary lookup in path '''
        oc_bin = '/usr/bin/oc'

        # Only the /usr/bin/oc candidate "exists" on disk.
        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
        mock_path_exists.side_effect = lambda f: f == oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    @unittest.skipIf(six.PY3, 'py2 test only')
    @mock.patch('os.path.exists')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_usr_local(self, mock_env_get, mock_path_exists):
        ''' Testing binary lookup in /usr/local/bin '''
        oc_bin = '/usr/local/bin/oc'

        # /usr/local/bin is not on PATH; lookup must still find the binary.
        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
        mock_path_exists.side_effect = lambda f: f == oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    @unittest.skipIf(six.PY3, 'py2 test only')
    @mock.patch('os.path.exists')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_home(self, mock_env_get, mock_path_exists):
        ''' Testing binary lookup in ~/bin '''
        oc_bin = os.path.expanduser('~/bin/oc')

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
        mock_path_exists.side_effect = lambda f: f == oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    # --- locate_oc_binary lookups, py3 flavour (shutil.which based) ---

    @unittest.skipIf(six.PY2, 'py3 test only')
    @mock.patch('shutil.which')
    @mock.patch('os.environ.get')
    def test_binary_lookup_fallback_py3(self, mock_env_get, mock_shutil_which):
        ''' Testing binary lookup fallback '''
        # which() finds nothing anywhere: fall back to the bare 'oc' name.
        mock_env_get.side_effect = lambda _v, _d: ''
        mock_shutil_which.side_effect = lambda _f, path=None: None

        self.assertEqual(locate_oc_binary(), 'oc')

    @unittest.skipIf(six.PY2, 'py3 test only')
    @mock.patch('shutil.which')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_path_py3(self, mock_env_get, mock_shutil_which):
        ''' Testing binary lookup in path '''
        oc_bin = '/usr/bin/oc'

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
        mock_shutil_which.side_effect = lambda _f, path=None: oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    @unittest.skipIf(six.PY2, 'py3 test only')
    @mock.patch('shutil.which')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_usr_local_py3(self, mock_env_get, mock_shutil_which):
        ''' Testing binary lookup in /usr/local/bin '''
        oc_bin = '/usr/local/bin/oc'

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
        mock_shutil_which.side_effect = lambda _f, path=None: oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)

    @unittest.skipIf(six.PY2, 'py3 test only')
    @mock.patch('shutil.which')
    @mock.patch('os.environ.get')
    def test_binary_lookup_in_home_py3(self, mock_env_get, mock_shutil_which):
        ''' Testing binary lookup in ~/bin '''
        oc_bin = os.path.expanduser('~/bin/oc')

        mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
        mock_shutil_which.side_effect = lambda _f, path=None: oc_bin

        self.assertEqual(locate_oc_binary(), oc_bin)
| 35.194545 | 105 | 0.399339 | 1,522 | 19,357 | 4.834428 | 0.137319 | 0.036695 | 0.039141 | 0.047839 | 0.90337 | 0.900245 | 0.889644 | 0.889644 | 0.882849 | 0.85757 | 0 | 0.012952 | 0.489435 | 19,357 | 549 | 106 | 35.258652 | 0.731559 | 0.067004 | 0 | 0.642857 | 0 | 0 | 0.652659 | 0.078921 | 0 | 0 | 0 | 0 | 0.039171 | 1 | 0.025346 | false | 0 | 0.013825 | 0 | 0.041475 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
15965f4e06b1e1e57a6ca0d41f0acf634adf73aa | 19,205 | py | Python | nflpool/controllers/picks_controller.py | michaelizergit/nflpool | 157a00a1807aaff3ecc08f6cdf3075c2de90a295 | [
"MIT"
] | 9 | 2016-12-25T16:42:20.000Z | 2021-03-19T02:52:55.000Z | nflpool/controllers/picks_controller.py | michaelizergit/nflpool | 157a00a1807aaff3ecc08f6cdf3075c2de90a295 | [
"MIT"
] | 64 | 2017-08-20T14:54:05.000Z | 2020-03-11T19:07:18.000Z | nflpool/controllers/picks_controller.py | michaelizergit/nflpool | 157a00a1807aaff3ecc08f6cdf3075c2de90a295 | [
"MIT"
] | 6 | 2019-01-11T01:20:57.000Z | 2021-03-19T02:52:57.000Z | import pyramid_handlers
from nflpool.controllers.base_controller import BaseController
from nflpool.services.playerpicks_service import PlayerPicksService
from nflpool.viewmodels.playerpicks_viewmodel import PlayerPicksViewModel
from nflpool.data.dbsession import DbSessionFactory
from nflpool.data.player_picks import PlayerPicks
from nflpool.data.seasoninfo import SeasonInfo
from nflpool.data.account import Account
from nflpool.services.slack_service import SlackService
from nflpool.services.time_service import TimeService
from nflpool.services.gameday_service import GameDayService
from nflpool.services.view_picks_service import ViewPicksService
class PicksController(BaseController):
    """Pyramid handler class for the /picks routes.

    Handles viewing, submitting and changing a player's NFLPool picks.
    All handlers require a signed-in user (``self.logged_in_user_id``
    comes from BaseController) and redirect to the sign-in page otherwise.
    """

    @pyramid_handlers.action(renderer="templates/picks/index.pt")
    def index(self):
        """GET /picks — landing page; requires sign-in."""
        if not self.logged_in_user_id:
            print("Cannot view account page, you must be logged in")
            self.redirect("/account/signin")

        return {}

    @pyramid_handlers.action(renderer="templates/picks/completed.pt")
    def completed(self):
        """GET /picks/completed — confirmation page after picks are saved."""
        if not self.logged_in_user_id:
            print("Cannot view account page, you must be logged in")
            self.redirect("/account/signin")

        # display_player_picks = DisplayPlayerPicks.display_picks(self.logged_in_user_id)

        session = DbSessionFactory.create_session()

        season_row = (
            session.query(SeasonInfo.current_season)
            .filter(SeasonInfo.id == "1")
            .first()
        )
        season = season_row.current_season

        get_first_name = (
            session.query(Account.first_name)
            .filter(Account.id == self.logged_in_user_id)
            .first()
        )
        first_name = get_first_name[0]

        return {"season": season, "first_name": first_name}

    # Get player picks for the current season
    @pyramid_handlers.action(
        renderer="templates/picks/submit-picks.pt",
        request_method="GET",
        name="submit-picks",
    )
    def submit_player_picks(self):
        """GET /picks/submit-picks — render the pick-submission form.

        Redirects to /picks/too-late once the season opener has started,
        and to /picks/change-picks if this user already submitted picks
        for the current season.
        """
        if not self.logged_in_user_id:
            print("Cannot view picks page, you must be logged in")
            self.redirect("/account/signin")

        now_time = TimeService.get_time()

        session = DbSessionFactory.create_session()

        season_row = (
            session.query(SeasonInfo.current_season)
            .filter(SeasonInfo.id == "1")
            .first()
        )
        season = season_row.current_season

        season_info = session.query(SeasonInfo).all()

        first_game = GameDayService.season_opener_date()
        picks_due = GameDayService.picks_due()
        time_due = GameDayService.time_due()

        # Methods used prior to Pendulum
        # string_date = first_game[0] + ' 21:59'
        # first_game_time = datetime.datetime.strptime(string_date, "%Y-%m-%d %H:%M")

        if now_time > first_game:
            print("Season has already started")
            self.redirect("/picks/too-late")

        else:
            # Check if user has already submitted picks
            # Countdown figures shown on the form.
            days = GameDayService.delta_days()
            hours = GameDayService.delta_hours()
            minutes = GameDayService.delta_minutes()
            current_datetime = now_time.to_day_datetime_string()

            user_query = (
                session.query(PlayerPicks.user_id)
                .filter(PlayerPicks.user_id == self.logged_in_user_id)
                .filter(PlayerPicks.season == season)
                .first()
            )

            if user_query is None:
                # Data / Service access
                # Division team lists: (conference, division) where
                # 0 = AFC, 1 = NFC and divisions are 1..4 (E/N/S/W).
                afc_east_list = PlayerPicksService.get_team_list(0, 1)
                afc_north_list = PlayerPicksService.get_team_list(0, 2)
                afc_south_list = PlayerPicksService.get_team_list(0, 3)
                afc_west_list = PlayerPicksService.get_team_list(0, 4)
                nfc_east_list = PlayerPicksService.get_team_list(1, 1)
                nfc_north_list = PlayerPicksService.get_team_list(1, 2)
                nfc_south_list = PlayerPicksService.get_team_list(1, 3)
                nfc_west_list = PlayerPicksService.get_team_list(1, 4)

                # Individual-stat candidate lists, filtered by position codes.
                afc_qb_list = PlayerPicksService.get_player_list(0, "QB")
                nfc_qb_list = PlayerPicksService.get_player_list(1, "QB")
                afc_rb_list = PlayerPicksService.get_player_list(0, "RB")
                nfc_rb_list = PlayerPicksService.get_player_list(1, "RB")
                afc_rec_list = PlayerPicksService.get_rec_list(0, "WR", "TE")
                nfc_rec_list = PlayerPicksService.get_rec_list(1, "WR", "TE")
                afc_sacks_list = PlayerPicksService.get_sacks(
                    0, "DE", "DT", "ILB", "LB", "MLB", "NT", "OLB"
                )
                nfc_sacks_list = PlayerPicksService.get_sacks(
                    1, "DE", "DT", "ILB", "LB", "MLB", "NT", "OLB"
                )
                afc_int_list = PlayerPicksService.get_int(
                    0, "CB", "DB", "FS", "SS", "MLB", "LB", "OLB", "ILB"
                )
                nfc_int_list = PlayerPicksService.get_int(
                    1, "CB", "DB", "FS", "SS", "MLB", "LB", "OLB", "ILB"
                )

                afc_wildcard_list = PlayerPicksService.get_afc_wildcard()
                nfc_wildcard_list = PlayerPicksService.get_nfc_wildcard()

                all_team_list = PlayerPicksService.get_all_teams()

                # Get the user ID
                user_id = self.logged_in_user_id

                get_first_name = (
                    session.query(Account.first_name)
                    .filter(Account.id == self.logged_in_user_id)
                    .first()
                )
                first_name = get_first_name[0]

                # Return the models
                return {
                    "season": season,
                    "user_id": user_id,
                    "first_name": first_name,
                    "afc_east": afc_east_list,
                    "afc_north": afc_north_list,
                    "afc_south": afc_south_list,
                    "afc_west": afc_west_list,
                    "nfc_east": nfc_east_list,
                    "nfc_north": nfc_north_list,
                    "nfc_south": nfc_south_list,
                    "nfc_west": nfc_west_list,
                    "afc_qb_list": afc_qb_list,
                    "nfc_qb_list": nfc_qb_list,
                    "afc_rb_list": afc_rb_list,
                    "nfc_rb_list": nfc_rb_list,
                    "afc_rec_list": afc_rec_list,
                    "nfc_rec_list": nfc_rec_list,
                    "afc_sacks_list": afc_sacks_list,
                    "nfc_sacks_list": nfc_sacks_list,
                    "afc_int_list": afc_int_list,
                    "nfc_int_list": nfc_int_list,
                    "afc_wildcard_list": afc_wildcard_list,
                    "nfc_wildcard_list": nfc_wildcard_list,
                    "all_team_list": all_team_list,
                    "picks_due": picks_due,
                    "time_due": time_due,
                    "days": days,
                    "hours": hours,
                    "minutes": minutes,
                    "current_datetime": current_datetime,
                    "season_info": season_info,
                }

            else:
                print("You have already submitted picks for this season")
                self.redirect("/picks/change-picks")

    # POST /picks/submit_picks
    @pyramid_handlers.action(
        renderer="templates/picks/submit-picks.pt",
        request_method="POST",
        name="submit-picks",
    )
    def submit_player_picks_post(self):
        """POST /picks/submit-picks — persist a new set of picks.

        Reads the form into PlayerPicksViewModel, stores the picks via
        PlayerPicksService, notifies Slack and redirects to the
        confirmation page.
        """
        vm = PlayerPicksViewModel()
        vm.from_dict(self.request.POST)

        # Pass a player's picks to the service to be inserted in the db
        vm.user_id = self.logged_in_user_id

        session = DbSessionFactory.create_session()

        get_first_name = (
            session.query(Account.first_name)
            .filter(Account.id == self.logged_in_user_id)
            .first()
        )
        first_name = get_first_name[0]

        get_last_name = (
            session.query(Account.last_name)
            .filter(Account.id == self.logged_in_user_id)
            .first()
        )
        last_name = get_last_name[0]

        # NOTE(review): the return value is never used afterwards; the call
        # is presumably kept for its insert side effect — confirm.
        player_picks = PlayerPicksService.get_player_picks(
            vm.afc_east_winner_pick,
            vm.afc_east_second,
            vm.afc_east_last,
            vm.afc_north_winner_pick,
            vm.afc_north_second,
            vm.afc_north_last,
            vm.afc_south_winner_pick,
            vm.afc_south_second,
            vm.afc_south_last,
            vm.afc_west_winner_pick,
            vm.afc_west_second,
            vm.afc_west_last,
            vm.nfc_east_winner_pick,
            vm.nfc_east_second,
            vm.nfc_east_last,
            vm.nfc_north_winner_pick,
            vm.nfc_north_second,
            vm.nfc_north_last,
            vm.nfc_south_winner_pick,
            vm.nfc_south_second,
            vm.nfc_south_last,
            vm.nfc_west_winner_pick,
            vm.nfc_west_second,
            vm.nfc_west_last,
            vm.afc_qb_pick,
            vm.nfc_qb_pick,
            vm.afc_rb_pick,
            vm.nfc_rb_pick,
            vm.afc_rec_pick,
            vm.nfc_rec_pick,
            vm.afc_sacks_pick,
            vm.nfc_sacks_pick,
            vm.afc_int_pick,
            vm.nfc_int_pick,
            vm.afc_wildcard1_pick,
            vm.afc_wildcard2_pick,
            vm.nfc_wildcard1_pick,
            vm.nfc_wildcard2_pick,
            vm.afc_pf_pick,
            vm.nfc_pf_pick,
            vm.specialteams_td_pick,
            vm.user_id,
        )

        # Log that a user submitted picks
        self.log.notice("Picks submitted by {}.".format(self.logged_in_user.email))

        message = f"Picks submitted by NFLPool user: {first_name} {last_name}"
        print(message)
        SlackService.send_message(message)

        # redirect
        self.redirect("/picks/completed")

    @pyramid_handlers.action(
        renderer="templates/picks/too-late.pt", request_method="GET", name="too-late"
    )
    def too_late(self):
        """GET /picks/too-late — shown when the season has already started."""
        if not self.logged_in_user_id:
            print("Cannot view account page, you must be logged in")
            self.redirect("/account/signin")

        session = DbSessionFactory.create_session()

        season_row = (
            session.query(SeasonInfo.current_season)
            .filter(SeasonInfo.id == "1")
            .first()
        )
        season = season_row.current_season

        return {"season": season}

    # Change player picks for the current season
    @pyramid_handlers.action(
        renderer="templates/picks/change-picks.pt",
        request_method="GET",
        name="change-picks",
    )
    def change_player_picks(self):
        """GET /picks/change-picks — render the form pre-filled with the
        user's existing picks.

        Redirects to /picks/submit-picks if no picks exist yet, or to
        /picks/too-late once the season opener has started.
        """
        if not self.logged_in_user_id:
            print("Cannot view picks page, you must be logged in")
            self.redirect("/account/signin")

        # Check if user has already submitted picks
        session = DbSessionFactory.create_session()

        season_row = (
            session.query(SeasonInfo.current_season)
            .filter(SeasonInfo.id == "1")
            .first()
        )
        season = season_row.current_season

        user_query = (
            session.query(PlayerPicks.user_id)
            .filter(PlayerPicks.user_id == self.logged_in_user_id)
            .filter(PlayerPicks.season == season)
            .first()
        )

        if user_query is None:
            print("You have not submitted picks for this season")
            self.redirect("/picks/submit-picks")

        else:
            now_time = TimeService.get_time()

            if now_time > GameDayService.season_opener_date():
                self.redirect("/picks/too-late")

            else:
                picks_due = GameDayService.picks_due()
                time_due = GameDayService.time_due()
                days = GameDayService.delta_days()
                hours = GameDayService.delta_hours()
                minutes = GameDayService.delta_minutes()
                current_datetime = now_time.to_day_datetime_string()
                season_info = session.query(SeasonInfo).all()

                # Data / Service access
                # Same candidate lists as submit_player_picks:
                # (conference, division) with 0 = AFC, 1 = NFC.
                afc_east_list = PlayerPicksService.get_team_list(0, 1)
                afc_north_list = PlayerPicksService.get_team_list(0, 2)
                afc_south_list = PlayerPicksService.get_team_list(0, 3)
                afc_west_list = PlayerPicksService.get_team_list(0, 4)
                nfc_east_list = PlayerPicksService.get_team_list(1, 1)
                nfc_north_list = PlayerPicksService.get_team_list(1, 2)
                nfc_south_list = PlayerPicksService.get_team_list(1, 3)
                nfc_west_list = PlayerPicksService.get_team_list(1, 4)

                afc_qb_list = PlayerPicksService.get_player_list(0, "QB")
                nfc_qb_list = PlayerPicksService.get_player_list(1, "QB")
                afc_rb_list = PlayerPicksService.get_player_list(0, "RB")
                nfc_rb_list = PlayerPicksService.get_player_list(1, "RB")
                afc_rec_list = PlayerPicksService.get_rec_list(0, "WR", "TE")
                nfc_rec_list = PlayerPicksService.get_rec_list(1, "WR", "TE")
                afc_sacks_list = PlayerPicksService.get_sacks(
                    0, "DE", "DT", "ILB", "LB", "MLB", "NT", "OLB"
                )
                nfc_sacks_list = PlayerPicksService.get_sacks(
                    1, "DE", "DT", "ILB", "LB", "MLB", "NT", "OLB"
                )
                afc_int_list = PlayerPicksService.get_int(
                    0, "CB", "DB", "FS", "SS", "MLB", "LB", "OLB", "ILB"
                )
                nfc_int_list = PlayerPicksService.get_int(
                    1, "CB", "DB", "FS", "SS", "MLB", "LB", "OLB", "ILB"
                )

                afc_wildcard_list = PlayerPicksService.get_afc_wildcard()
                nfc_wildcard_list = PlayerPicksService.get_nfc_wildcard()

                all_team_list = PlayerPicksService.get_all_teams()

                # Get the user ID
                user_id = self.logged_in_user_id

                get_first_name = (
                    session.query(Account.first_name)
                    .filter(Account.id == self.logged_in_user_id)
                    .first()
                )
                first_name = get_first_name[0]

                # Get the user's original picks
                all_picks = ViewPicksService.change_picks(
                    self.logged_in_user_id, season
                )

                # Return the models
                return {
                    "season": season,
                    "user_id": user_id,
                    "first_name": first_name,
                    "afc_east": afc_east_list,
                    "afc_north": afc_north_list,
                    "afc_south": afc_south_list,
                    "afc_west": afc_west_list,
                    "nfc_east": nfc_east_list,
                    "nfc_north": nfc_north_list,
                    "nfc_south": nfc_south_list,
                    "nfc_west": nfc_west_list,
                    "afc_qb_list": afc_qb_list,
                    "nfc_qb_list": nfc_qb_list,
                    "afc_rb_list": afc_rb_list,
                    "nfc_rb_list": nfc_rb_list,
                    "afc_rec_list": afc_rec_list,
                    "nfc_rec_list": nfc_rec_list,
                    "afc_sacks_list": afc_sacks_list,
                    "nfc_sacks_list": nfc_sacks_list,
                    "afc_int_list": afc_int_list,
                    "nfc_int_list": nfc_int_list,
                    "afc_wildcard_list": afc_wildcard_list,
                    "nfc_wildcard_list": nfc_wildcard_list,
                    "all_team_list": all_team_list,
                    "all_picks": all_picks,
                    "picks_due": picks_due,
                    "time_due": time_due,
                    "days": days,
                    "hours": hours,
                    "minutes": minutes,
                    "current_datetime": current_datetime,
                    "season_info": season_info,
                }

    # POST /picks/submit_picks
    @pyramid_handlers.action(
        renderer="templates/picks/change-picks.pt",
        request_method="POST",
        name="change-picks",
    )
    def change_player_picks_post(self):
        """POST /picks/change-picks — overwrite the user's stored picks.

        Reads the form into PlayerPicksViewModel, updates the picks via
        PlayerPicksService, notifies Slack and redirects to /account.
        """
        # Pass a player's picks to the service to be inserted in the db
        vm = PlayerPicksViewModel()
        vm.from_dict(self.request.POST)

        session = DbSessionFactory.create_session()

        season_row = (
            session.query(SeasonInfo.current_season)
            .filter(SeasonInfo.id == "1")
            .first()
        )
        season = season_row.current_season

        vm.user_id = self.logged_in_user_id
        vm.season = season

        # NOTE(review): now_time is assigned but never used here — confirm
        # whether a deadline check was intended before updating picks.
        now_time = TimeService.get_time()

        # NOTE(review): as in submit_player_picks_post, the return value is
        # unused; the call is kept for its update side effect — confirm.
        player_picks = PlayerPicksService.change_player_picks(
            vm.afc_east_winner_pick,
            vm.afc_east_second,
            vm.afc_east_last,
            vm.afc_north_winner_pick,
            vm.afc_north_second,
            vm.afc_north_last,
            vm.afc_south_winner_pick,
            vm.afc_south_second,
            vm.afc_south_last,
            vm.afc_west_winner_pick,
            vm.afc_west_second,
            vm.afc_west_last,
            vm.nfc_east_winner_pick,
            vm.nfc_east_second,
            vm.nfc_east_last,
            vm.nfc_north_winner_pick,
            vm.nfc_north_second,
            vm.nfc_north_last,
            vm.nfc_south_winner_pick,
            vm.nfc_south_second,
            vm.nfc_south_last,
            vm.nfc_west_winner_pick,
            vm.nfc_west_second,
            vm.nfc_west_last,
            vm.afc_qb_pick,
            vm.nfc_qb_pick,
            vm.afc_rb_pick,
            vm.nfc_rb_pick,
            vm.afc_rec_pick,
            vm.nfc_rec_pick,
            vm.afc_sacks_pick,
            vm.nfc_sacks_pick,
            vm.afc_int_pick,
            vm.nfc_int_pick,
            vm.afc_wildcard1_pick,
            vm.afc_wildcard2_pick,
            vm.nfc_wildcard1_pick,
            vm.nfc_wildcard2_pick,
            vm.afc_pf_pick,
            vm.nfc_pf_pick,
            vm.specialteams_td_pick,
            vm.user_id,
        )

        # Log that a user changed picks
        self.log.notice("Picks changed by {}.".format(self.logged_in_user.email))

        get_first_name = (
            session.query(Account.first_name)
            .filter(Account.id == self.logged_in_user_id)
            .first()
        )
        first_name = get_first_name[0]

        get_last_name = (
            session.query(Account.last_name)
            .filter(Account.id == self.logged_in_user_id)
            .first()
        )
        last_name = get_last_name[0]

        message = f"Picks updated by NFLPool user: {first_name} {last_name}"
        print(message)
        SlackService.send_message(message)

        # redirect
        self.redirect("/account")
| 37.436647 | 96 | 0.559854 | 2,141 | 19,205 | 4.664643 | 0.081738 | 0.030039 | 0.105137 | 0.035246 | 0.866827 | 0.852208 | 0.822469 | 0.797837 | 0.77661 | 0.7694 | 0 | 0.006181 | 0.351315 | 19,205 | 512 | 97 | 37.509766 | 0.795473 | 0.04176 | 0 | 0.814727 | 0 | 0 | 0.097757 | 0.011049 | 0 | 0 | 0 | 0 | 0 | 1 | 0.016627 | false | 0 | 0.028504 | 0 | 0.059382 | 0.023753 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
ec65301f500a8d0ac133208af607a68d886b116b | 3,998 | py | Python | models.py | WebVision-Capstone/WebVision-Cap | 7ccdf5403b79cc51e061918623ce2c4b4c996c19 | [
"MIT"
] | null | null | null | models.py | WebVision-Capstone/WebVision-Cap | 7ccdf5403b79cc51e061918623ce2c4b4c996c19 | [
"MIT"
] | null | null | null | models.py | WebVision-Capstone/WebVision-Cap | 7ccdf5403b79cc51e061918623ce2c4b4c996c19 | [
"MIT"
] | 1 | 2020-12-12T16:02:06.000Z | 2020-12-12T16:02:06.000Z | """Models
"""
from typing import Tuple, List
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.applications.inception_v3 import InceptionV3
from tensorflow.keras.applications import ResNet152V2
def unfreeze_idx(layers: List[tf.keras.layers.Layer],
                 layer_name: str
                 ) -> int:
    """Return the index of the first layer *after* the named layer.

    The result is intended as a slice boundary: layers before it stay
    frozen, layers from it onward are unfrozen.

    :param layers: ordered list of layers (anything with a ``.name``)
    :param layer_name: name of the last layer that should remain frozen
    :return: index immediately following the matching layer
    :raises ValueError: if no layer is named ``layer_name``.  The original
        implementation silently returned 1 in that case, which would
        unfreeze almost the entire model by accident.
    """
    for idx, layer in enumerate(layers):
        if layer.name == layer_name:
            return idx + 1
    raise ValueError(f"layer {layer_name!r} not found")
def get_inception_v3(target_size: Tuple[int],
                     unfreeze_layer: str = ''
                     ) -> Model:
    """Build an InceptionV3 backbone with a fresh 5000-way softmax head.

    :param target_size: the image size (height, width); a 3-channel
        dimension is appended automatically
    :param unfreeze_layer: layer to start unfreezing the model (e.g. 'mixed5');
        an empty string freezes the whole backbone
    :return: uncompiled keras model
    :raises ValueError: if ``unfreeze_layer`` is given but no backbone layer
        has that name.  Previously this path crashed with an
        UnboundLocalError because the index variable was never assigned.
    """
    pre_trained_model = InceptionV3(input_shape = target_size + tuple([3]),
                                    include_top = False)

    if unfreeze_layer != '':
        # find the last module before opening for training
        last_trainable_layer = None
        for i, layer in enumerate(pre_trained_model.layers):
            if layer.name == unfreeze_layer:
                last_trainable_layer = i + 1
                break
        if last_trainable_layer is None:
            raise ValueError(f"layer {unfreeze_layer!r} not found in InceptionV3")

        print("Unfreezing from "
              + pre_trained_model.layers[last_trainable_layer-1].name)

        # lock the early layers
        for layer in pre_trained_model.layers[:last_trainable_layer]:
            layer.trainable = False
        # unlock the later layers
        for layer in pre_trained_model.layers[last_trainable_layer:]:
            layer.trainable = True
    else:
        for layer in pre_trained_model.layers:
            layer.trainable = False

    # use mixed10 as the last layer from inception3
    last_layer = pre_trained_model.get_layer('mixed10')
    last_output = last_layer.output

    # Pool the backbone output to 1 dimension, regularise, then classify.
    x = tf.keras.layers.GlobalAveragePooling2D()(last_output)
    x = tf.keras.layers.Dropout(0.2)(x)
    output = tf.keras.layers.Dense(5000, activation='softmax', name='output')(x)

    return Model(inputs=[pre_trained_model.input], outputs=[output])
def get_resent_152v2(target_size: Tuple[int],
                     unfreeze_layer: str = ''
                     ) -> Model:
    """Build a ResNet152V2 backbone with a fresh 5000-way softmax head.

    :param target_size: the image size (height, width); a 3-channel
        dimension is appended automatically
    :param unfreeze_layer: layer to start unfreezing the model (e.g. 'mixed5');
        an empty string freezes the whole backbone
    :return: uncompiled keras model
    :raises ValueError: if ``unfreeze_layer`` is given but no backbone layer
        has that name.  Previously this path crashed with an
        UnboundLocalError because the index variable was never assigned.
    """
    pre_trained_model = ResNet152V2(input_shape = target_size + tuple([3]),
                                    include_top = False)

    if unfreeze_layer != '':
        # find the last module before opening for training
        last_trainable_layer = None
        for i, layer in enumerate(pre_trained_model.layers):
            if layer.name == unfreeze_layer:
                last_trainable_layer = i + 1
                break
        if last_trainable_layer is None:
            raise ValueError(f"layer {unfreeze_layer!r} not found in ResNet152V2")

        print("Unfreezing from "
              + pre_trained_model.layers[last_trainable_layer-1].name)

        # lock the early layers
        for layer in pre_trained_model.layers[:last_trainable_layer]:
            layer.trainable = False
        # unlock the later layers
        for layer in pre_trained_model.layers[last_trainable_layer:]:
            layer.trainable = True
    else:
        for layer in pre_trained_model.layers:
            layer.trainable = False

    # use post_relu as the last layer from ResNet152V2
    last_layer = pre_trained_model.get_layer('post_relu')
    last_output = last_layer.output

    # Pool the backbone output to 1 dimension, regularise, then classify.
    x = tf.keras.layers.GlobalAveragePooling2D()(last_output)
    x = tf.keras.layers.Dropout(0.2)(x)
    output = tf.keras.layers.Dense(5000, activation='softmax', name='output')(x)

    return Model(inputs=[pre_trained_model.input], outputs=[output])
| 33.316667 | 80 | 0.634067 | 489 | 3,998 | 4.985685 | 0.188139 | 0.065628 | 0.098441 | 0.086136 | 0.834701 | 0.826497 | 0.826497 | 0.826497 | 0.826497 | 0.80886 | 0 | 0.01973 | 0.277389 | 3,998 | 119 | 81 | 33.596639 | 0.824161 | 0.187594 | 0 | 0.731343 | 0 | 0 | 0.023278 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.044776 | false | 0 | 0.074627 | 0 | 0.164179 | 0.029851 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
01a2c6c56ef47984aa626d663d20bc833ba72eb8 | 3,361 | py | Python | meetings/tests/test_free_times.py | asaltveit/proj8-free-times | 0f60ab265e302133aa4c20d9f9cb661b11a177ef | [
"Artistic-2.0"
] | null | null | null | meetings/tests/test_free_times.py | asaltveit/proj8-free-times | 0f60ab265e302133aa4c20d9f9cb661b11a177ef | [
"Artistic-2.0"
] | null | null | null | meetings/tests/test_free_times.py | asaltveit/proj8-free-times | 0f60ab265e302133aa4c20d9f9cb661b11a177ef | [
"Artistic-2.0"
] | null | null | null | from free_times import calculate_free
import nose # Testing framework
import logging
def test_free_cover():
    """
    Free block covers busy block.
    """
    window_start = '2017-11-09T08:00:00-08:00'
    window_end = '2017-11-09T12:00:00-08:00'
    busy = [{'start': '2017-11-09T10:00:00-08:00',
             'end': '2017-11-09T11:20:00-08:00'}]
    # The busy span splits the window into two free spans.
    expected = [
        ['2017-11-09T08:00:00-08:00', '2017-11-09T10:00:00-08:00'],
        ['2017-11-09T11:20:00-08:00', '2017-11-09T12:00:00-08:00'],
    ]
    assert calculate_free(busy, window_start, window_end) == expected
def test_no_overlap():
    """
    Busy block does not over lap free block.
    """
    window_start = '2017-11-09T08:00:00-08:00'
    window_end = '2017-11-09T09:00:00-08:00'
    busy = [{'start': '2017-11-09T10:00:00-08:00',
             'end': '2017-11-09T11:20:00-08:00'}]
    # Busy time lies entirely after the window, so the window stays free.
    expected = [['2017-11-09T08:00:00-08:00', '2017-11-09T09:00:00-08:00']]
    assert calculate_free(busy, window_start, window_end) == expected
def test_start_overlap():
    """
    End of busy block overlaps start of free block.
    """
    window_start = '2017-11-09T10:00:00-08:00'
    window_end = '2017-11-09T14:00:00-08:00'
    busy = [{'start': '2017-11-09T08:00:00-08:00',
             'end': '2017-11-09T12:00:00-08:00'}]
    # Only the tail of the window, after the busy block ends, remains free.
    expected = [['2017-11-09T12:00:00-08:00', '2017-11-09T14:00:00-08:00']]
    assert calculate_free(busy, window_start, window_end) == expected
def test_end_overlap():
    """
    Start of busy block overlaps end of free block.
    """
    window_start = '2017-11-09T10:00:00-08:00'
    window_end = '2017-11-09T14:00:00-08:00'
    busy = [{'start': '2017-11-09T12:30:00-08:00',
             'end': '2017-11-09T16:00:00-08:00'}]
    # Only the head of the window, before the busy block starts, remains free.
    expected = [['2017-11-09T10:00:00-08:00', '2017-11-09T12:30:00-08:00']]
    assert calculate_free(busy, window_start, window_end) == expected
def test_all_overlap():
    """A busy block spanning the entire window leaves no free time at all."""
    busy = [{'start': '2017-11-09T12:30:00-08:00', 'end': '2017-11-09T16:00:00-08:00'}]
    window_start = '2017-11-09T12:30:00-08:00'
    window_end = '2017-11-09T16:00:00-08:00'
    assert calculate_free(busy, window_start, window_end) == []
def test_two_blocks():
    """Two busy blocks produce the free gaps between and after them."""
    busy = [
        {'start': '2017-11-09T10:00:00-08:00', 'end': '2017-11-09T11:00:00-08:00'},
        {'start': '2017-11-09T12:30:00-08:00', 'end': '2017-11-09T13:00:00-08:00'},
    ]
    window_start = '2017-11-09T10:30:00-08:00'
    window_end = '2017-11-09T16:00:00-08:00'
    expected = [
        ['2017-11-09T11:00:00-08:00', '2017-11-09T12:30:00-08:00'],
        ['2017-11-09T13:00:00-08:00', '2017-11-09T16:00:00-08:00'],
    ]
    assert calculate_free(busy, window_start, window_end) == expected
def test_multiple_blocks():
    """Several (unordered) busy blocks yield all free gaps inside the window."""
    busy = [
        {'start': '2017-11-09T10:00:00-08:00', 'end': '2017-11-09T12:00:00-08:00'},
        {'start': '2017-11-09T12:30:00-08:00', 'end': '2017-11-09T13:00:00-08:00'},
        {'start': '2017-11-09T08:00:00-08:00', 'end': '2017-11-09T09:00:00-08:00'},
        {'start': '2017-11-09T15:00:00-08:00', 'end': '2017-11-09T18:00:00-08:00'},
    ]
    window_start = '2017-11-09T09:00:00-08:00'
    window_end = '2017-11-09T16:00:00-08:00'
    expected = [
        ['2017-11-09T09:00:00-08:00', '2017-11-09T10:00:00-08:00'],
        ['2017-11-09T12:00:00-08:00', '2017-11-09T12:30:00-08:00'],
        ['2017-11-09T13:00:00-08:00', '2017-11-09T15:00:00-08:00'],
    ]
    assert calculate_free(busy, window_start, window_end) == expected
| 38.193182 | 194 | 0.669146 | 638 | 3,361 | 3.401254 | 0.079937 | 0.154839 | 0.154839 | 0.162212 | 0.874194 | 0.857604 | 0.857604 | 0.815207 | 0.786636 | 0.770968 | 0 | 0.335664 | 0.106516 | 3,361 | 87 | 195 | 38.632184 | 0.386946 | 0.086879 | 0 | 0.4 | 0 | 0 | 0.49534 | 0.466045 | 0 | 0 | 0 | 0 | 0.14 | 1 | 0.14 | false | 0 | 0.06 | 0 | 0.2 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 10 |
bf0adbed1f1994a764b7e4bd38bc5b1fc48b8bc1 | 1,887 | py | Python | backend/moonstreamapi/test_stream_boundaries.py | zomglings/moonstream | 954f6014f782157ff3d708d0697457c4306a6588 | [
"Apache-2.0"
] | 67 | 2021-07-22T11:09:30.000Z | 2022-03-30T07:38:19.000Z | backend/moonstreamapi/test_stream_boundaries.py | zomglings/moonstream | 954f6014f782157ff3d708d0697457c4306a6588 | [
"Apache-2.0"
] | 246 | 2021-07-19T15:40:59.000Z | 2022-03-24T20:30:55.000Z | backend/moonstreamapi/test_stream_boundaries.py | zomglings/moonstream | 954f6014f782157ff3d708d0697457c4306a6588 | [
"Apache-2.0"
] | 21 | 2021-07-25T18:36:05.000Z | 2022-03-30T16:30:24.000Z | """
Tests for stream boundary utilities.
"""
import unittest
from . import stream_boundaries
from .data import StreamBoundary
class TestValidateStreamBoundary(unittest.TestCase):
    """Tests for stream_boundaries.validate_stream_boundary."""

    def test_valid_stream_boundary(self):
        """A bounded 4-unit window is valid under a 10-unit limit."""
        stream_boundary = StreamBoundary(
            start_time=1, end_time=5, include_start=True, include_end=True
        )
        valid, _ = stream_boundaries.validate_stream_boundary(
            stream_boundary, 10, raise_when_invalid=False
        )
        self.assertTrue(valid)

    def test_invalid_stream_boundary(self):
        """A bounded 4-unit window is invalid under a 1-unit limit."""
        stream_boundary = StreamBoundary(
            start_time=1, end_time=5, include_start=True, include_end=True
        )
        valid, _ = stream_boundaries.validate_stream_boundary(
            stream_boundary, 1, raise_when_invalid=False
        )
        self.assertFalse(valid)

    def test_invalid_stream_boundary_error(self):
        """With raise_when_invalid=True, an invalid boundary raises InvalidStreamBoundary."""
        stream_boundary = StreamBoundary(
            start_time=1, end_time=5, include_start=True, include_end=True
        )
        # BUG FIX: the raising call must happen *inside* assertRaises; the
        # original called it first (so the exception escaped the test) and the
        # with-block merely referenced a variable, asserting nothing.
        with self.assertRaises(stream_boundaries.InvalidStreamBoundary):
            stream_boundaries.validate_stream_boundary(
                stream_boundary, 1, raise_when_invalid=True
            )

    def test_unconstrainted_invalid_stream_boundary(self):
        """A default (unconstrained) boundary is invalid under a 1-unit limit."""
        stream_boundary = StreamBoundary()
        valid, _ = stream_boundaries.validate_stream_boundary(
            stream_boundary, 1, raise_when_invalid=False
        )
        self.assertFalse(valid)

    def test_unconstrained_invalid_stream_boundary_error(self):
        """With raise_when_invalid=True, an unconstrained boundary raises InvalidStreamBoundary."""
        stream_boundary = StreamBoundary()
        # BUG FIX: same as above — call moved inside the assertRaises context.
        with self.assertRaises(stream_boundaries.InvalidStreamBoundary):
            stream_boundaries.validate_stream_boundary(
                stream_boundary, 1, raise_when_invalid=True
            )
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()
| 33.105263 | 74 | 0.692104 | 197 | 1,887 | 6.213198 | 0.213198 | 0.240196 | 0.073529 | 0.130719 | 0.815359 | 0.794935 | 0.785131 | 0.762255 | 0.719771 | 0.719771 | 0 | 0.008351 | 0.238474 | 1,887 | 56 | 75 | 33.696429 | 0.843424 | 0.019078 | 0 | 0.522727 | 0 | 0 | 0.004341 | 0 | 0 | 0 | 0 | 0 | 0.113636 | 1 | 0.113636 | false | 0 | 0.068182 | 0 | 0.204545 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
17733547712411f5729d3fb9aeeb1f99f1d6ed19 | 4,265 | py | Python | test/pyaz/network/routeserver/peering/__init__.py | bigdatamoore/py-az-cli | 54383a4ee7cc77556f6183e74e992eec95b28e01 | [
"MIT"
] | null | null | null | test/pyaz/network/routeserver/peering/__init__.py | bigdatamoore/py-az-cli | 54383a4ee7cc77556f6183e74e992eec95b28e01 | [
"MIT"
] | 9 | 2021-09-24T16:37:24.000Z | 2021-12-24T00:39:19.000Z | test/pyaz/network/routeserver/peering/__init__.py | bigdatamoore/py-az-cli | 54383a4ee7cc77556f6183e74e992eec95b28e01 | [
"MIT"
] | null | null | null | import json, subprocess
from .... pyaz_utils import get_cli_name, get_params
def create(resource_group, routeserver, name, peer_asn, peer_ip, no_wait=None):
    """
    Run `az network routeserver peering create` with the given arguments.

    Returns the CLI's JSON output parsed into Python objects on success;
    raises Exception carrying the CLI's stderr on failure.
    """
    params = get_params(locals())
    command = "az network routeserver peering create " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone
    # if any argument is attacker-controlled; an argv list would be safer.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # fix: removed unreachable print(stdout) that followed the return
        return json.loads(stdout)
    else:
        # fix: removed unreachable print(stderr) that followed the raise
        raise Exception(stderr)
def update(resource_group, routeserver, name, peer_asn=None, peer_ip=None, set=None, add=None, remove=None, force_string=None):
    """
    Run `az network routeserver peering update` with the given arguments.

    Returns the CLI's parsed JSON output on success; raises Exception with
    the CLI's stderr on failure.
    """
    params = get_params(locals())
    command = "az network routeserver peering update " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone
    # if any argument is attacker-controlled; an argv list would be safer.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # fix: removed unreachable print(stdout) that followed the return
        return json.loads(stdout)
    else:
        # fix: removed unreachable print(stderr) that followed the raise
        raise Exception(stderr)
def delete(resource_group, routeserver, name, yes=None, no_wait=None):
    """
    Run `az network routeserver peering delete` with the given arguments.

    Returns the CLI's parsed JSON output on success; raises Exception with
    the CLI's stderr on failure.
    """
    params = get_params(locals())
    command = "az network routeserver peering delete " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone
    # if any argument is attacker-controlled; an argv list would be safer.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # fix: removed unreachable print(stdout) that followed the return
        return json.loads(stdout)
    else:
        # fix: removed unreachable print(stderr) that followed the raise
        raise Exception(stderr)
def show(resource_group, routeserver, name):
    """
    Run `az network routeserver peering show` with the given arguments.

    Returns the CLI's parsed JSON output on success; raises Exception with
    the CLI's stderr on failure.
    """
    params = get_params(locals())
    command = "az network routeserver peering show " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone
    # if any argument is attacker-controlled; an argv list would be safer.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # fix: removed unreachable print(stdout) that followed the return
        return json.loads(stdout)
    else:
        # fix: removed unreachable print(stderr) that followed the raise
        raise Exception(stderr)
def wait(resource_group, routeserver, name, timeout=None, interval=None, deleted=None, created=None, updated=None, exists=None, custom=None):
    """
    Run `az network routeserver peering wait` with the given arguments.

    Returns the CLI's parsed JSON output on success; raises Exception with
    the CLI's stderr on failure.
    """
    params = get_params(locals())
    command = "az network routeserver peering wait " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone
    # if any argument is attacker-controlled; an argv list would be safer.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # fix: removed unreachable print(stdout) that followed the return
        return json.loads(stdout)
    else:
        # fix: removed unreachable print(stderr) that followed the raise
        raise Exception(stderr)
def list(resource_group, routeserver):
    """
    Run `az network routeserver peering list` with the given arguments.

    Returns the CLI's parsed JSON output on success; raises Exception with
    the CLI's stderr on failure. (The name shadows the builtin `list`, but
    it mirrors the az CLI subcommand and is part of the public interface.)
    """
    params = get_params(locals())
    command = "az network routeserver peering list " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone
    # if any argument is attacker-controlled; an argv list would be safer.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # fix: removed unreachable print(stdout) that followed the return
        return json.loads(stdout)
    else:
        # fix: removed unreachable print(stderr) that followed the raise
        raise Exception(stderr)
def list_learned_routes(resource_group, routeserver, name):
    """
    Run `az network routeserver peering list-learned-routes` with the given arguments.

    Returns the CLI's parsed JSON output on success; raises Exception with
    the CLI's stderr on failure.
    """
    params = get_params(locals())
    command = "az network routeserver peering list-learned-routes " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone
    # if any argument is attacker-controlled; an argv list would be safer.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # fix: removed unreachable print(stdout) that followed the return
        return json.loads(stdout)
    else:
        # fix: removed unreachable print(stderr) that followed the raise
        raise Exception(stderr)
def list_advertised_routes(resource_group, routeserver, name):
    """
    Run `az network routeserver peering list-advertised-routes` with the given arguments.

    Returns the CLI's parsed JSON output on success; raises Exception with
    the CLI's stderr on failure.
    """
    params = get_params(locals())
    command = "az network routeserver peering list-advertised-routes " + params
    print(command)
    # NOTE(review): shell=True with a string-built command is injection-prone
    # if any argument is attacker-controlled; an argv list would be safer.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        # fix: removed unreachable print(stdout) that followed the return
        return json.loads(stdout)
    else:
        # fix: removed unreachable print(stderr) that followed the raise
        raise Exception(stderr)
| 36.767241 | 141 | 0.674091 | 516 | 4,265 | 5.511628 | 0.122093 | 0.078762 | 0.056259 | 0.059072 | 0.873066 | 0.873066 | 0.848453 | 0.848453 | 0.848453 | 0.848453 | 0 | 0.004765 | 0.212661 | 4,265 | 115 | 142 | 37.086957 | 0.842168 | 0 | 0 | 0.830189 | 0 | 0 | 0.095428 | 0.005158 | 0 | 0 | 0 | 0 | 0 | 1 | 0.075472 | false | 0 | 0.018868 | 0 | 0.169811 | 0.226415 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bd9640f8c1e931a52a1356a6631054bd20548914 | 86,590 | py | Python | GUI/code/Adv_params_GUI.py | jlhitt1993/pytentiostat | 8cc4364a625144ce4d73bae6407d4a0c5d1e20cd | [
"BSD-3-Clause"
] | 5 | 2019-02-08T23:39:20.000Z | 2021-07-13T19:25:11.000Z | GUI/code/Adv_params_GUI.py | jlhitt1993/pytentiostat | 8cc4364a625144ce4d73bae6407d4a0c5d1e20cd | [
"BSD-3-Clause"
] | 150 | 2019-02-14T18:49:56.000Z | 2021-08-25T10:31:36.000Z | GUI/code/Adv_params_GUI.py | jlhitt1993/pytentiostat | 8cc4364a625144ce4d73bae6407d4a0c5d1e20cd | [
"BSD-3-Clause"
] | 12 | 2019-02-07T14:21:23.000Z | 2021-11-01T16:00:30.000Z | from PySide2 import QtCore, QtGui, QtWidgets
class Ui_Adv_Params(object):
def setupUi(self, Adv_Params):
"""
Initializes the Advanced Parameters window
"""
Adv_Params.setObjectName("Adv_Params")
Adv_Params.resize(357, 371)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap("../pics/icon_pytentiostat.ico"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Adv_Params.setWindowIcon(icon)
self.centralwidget = QtWidgets.QWidget(Adv_Params)
self.centralwidget.setObjectName("centralwidget")
self.experiment_conversion_factor_label = QtWidgets.QLabel(self.centralwidget)
self.experiment_conversion_factor_label.setEnabled(True)
self.experiment_conversion_factor_label.setGeometry(QtCore.QRect(30, 60, 191, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.experiment_conversion_factor_label.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_conversion_factor_label.setFont(font)
self.experiment_conversion_factor_label.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.experiment_conversion_factor_label.setAcceptDrops(False)
self.experiment_conversion_factor_label.setAutoFillBackground(True)
self.experiment_conversion_factor_label.setFrameShape(QtWidgets.QFrame.Box)
self.experiment_conversion_factor_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.experiment_conversion_factor_label.setLineWidth(1)
self.experiment_conversion_factor_label.setMidLineWidth(1)
self.experiment_conversion_factor_label.setScaledContents(False)
self.experiment_conversion_factor_label.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse)
self.experiment_conversion_factor_label.setObjectName("experiment_conversion_factor_label")
self.experiment_averag_number_label = QtWidgets.QLabel(self.centralwidget)
self.experiment_averag_number_label.setEnabled(True)
self.experiment_averag_number_label.setGeometry(QtCore.QRect(30, 260, 191, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.experiment_averag_number_label.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_averag_number_label.setFont(font)
self.experiment_averag_number_label.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.experiment_averag_number_label.setAcceptDrops(False)
self.experiment_averag_number_label.setAutoFillBackground(True)
self.experiment_averag_number_label.setFrameShape(QtWidgets.QFrame.Box)
self.experiment_averag_number_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.experiment_averag_number_label.setLineWidth(1)
self.experiment_averag_number_label.setMidLineWidth(1)
self.experiment_averag_number_label.setScaledContents(False)
self.experiment_averag_number_label.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse)
self.experiment_averag_number_label.setObjectName("experiment_averag_number_label")
self.experiment_setpoint_gain = QtWidgets.QLineEdit(self.centralwidget)
self.experiment_setpoint_gain.setGeometry(QtCore.QRect(260, 100, 71, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
self.experiment_setpoint_gain.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_setpoint_gain.setFont(font)
self.experiment_setpoint_gain.setStyleSheet("")
self.experiment_setpoint_gain.setFrame(True)
self.experiment_setpoint_gain.setAlignment(QtCore.Qt.AlignCenter)
self.experiment_setpoint_gain.setObjectName("experiment_setpoint_gain")
self.experiment_shunt_resistor_label = QtWidgets.QLabel(self.centralwidget)
self.experiment_shunt_resistor_label.setEnabled(True)
self.experiment_shunt_resistor_label.setGeometry(QtCore.QRect(30, 180, 191, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.experiment_shunt_resistor_label.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_shunt_resistor_label.setFont(font)
self.experiment_shunt_resistor_label.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.experiment_shunt_resistor_label.setAcceptDrops(False)
self.experiment_shunt_resistor_label.setAutoFillBackground(True)
self.experiment_shunt_resistor_label.setFrameShape(QtWidgets.QFrame.Box)
self.experiment_shunt_resistor_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.experiment_shunt_resistor_label.setLineWidth(1)
self.experiment_shunt_resistor_label.setMidLineWidth(1)
self.experiment_shunt_resistor_label.setScaledContents(False)
self.experiment_shunt_resistor_label.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse)
self.experiment_shunt_resistor_label.setObjectName("experiment_shunt_resistor_label")
self.advanced_parameters_label = QtWidgets.QLabel(self.centralwidget)
self.advanced_parameters_label.setEnabled(True)
self.advanced_parameters_label.setGeometry(QtCore.QRect(30, 20, 191, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 127, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(212, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(85, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.advanced_parameters_label.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.advanced_parameters_label.setFont(font)
self.advanced_parameters_label.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.advanced_parameters_label.setAcceptDrops(False)
self.advanced_parameters_label.setAutoFillBackground(True)
self.advanced_parameters_label.setFrameShape(QtWidgets.QFrame.Box)
self.advanced_parameters_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.advanced_parameters_label.setLineWidth(1)
self.advanced_parameters_label.setMidLineWidth(1)
self.advanced_parameters_label.setScaledContents(False)
self.advanced_parameters_label.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse)
self.advanced_parameters_label.setObjectName("advanced_parameters_label")
self.experiment_shunt_resistor = QtWidgets.QLineEdit(self.centralwidget)
self.experiment_shunt_resistor.setGeometry(QtCore.QRect(260, 180, 71, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
self.experiment_shunt_resistor.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_shunt_resistor.setFont(font)
self.experiment_shunt_resistor.setStyleSheet("")
self.experiment_shunt_resistor.setFrame(True)
self.experiment_shunt_resistor.setAlignment(QtCore.Qt.AlignCenter)
self.experiment_shunt_resistor.setObjectName("experiment_shunt_resistor")
self.experiment_setpoint_offset = QtWidgets.QLineEdit(self.centralwidget)
self.experiment_setpoint_offset.setGeometry(QtCore.QRect(260, 140, 71, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
self.experiment_setpoint_offset.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_setpoint_offset.setFont(font)
self.experiment_setpoint_offset.setStyleSheet("")
self.experiment_setpoint_offset.setFrame(True)
self.experiment_setpoint_offset.setAlignment(QtCore.Qt.AlignCenter)
self.experiment_setpoint_offset.setObjectName("experiment_setpoint_offset")
self.experiment_time_step_label = QtWidgets.QLabel(self.centralwidget)
self.experiment_time_step_label.setEnabled(True)
self.experiment_time_step_label.setGeometry(QtCore.QRect(30, 220, 191, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.experiment_time_step_label.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_time_step_label.setFont(font)
self.experiment_time_step_label.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.experiment_time_step_label.setAcceptDrops(False)
self.experiment_time_step_label.setAutoFillBackground(True)
self.experiment_time_step_label.setFrameShape(QtWidgets.QFrame.Box)
self.experiment_time_step_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.experiment_time_step_label.setLineWidth(1)
self.experiment_time_step_label.setMidLineWidth(1)
self.experiment_time_step_label.setScaledContents(False)
self.experiment_time_step_label.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse)
self.experiment_time_step_label.setObjectName("experiment_time_step_label")
self.experiment_setpoint_gain_label = QtWidgets.QLabel(self.centralwidget)
self.experiment_setpoint_gain_label.setEnabled(True)
self.experiment_setpoint_gain_label.setGeometry(QtCore.QRect(30, 100, 191, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.experiment_setpoint_gain_label.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_setpoint_gain_label.setFont(font)
self.experiment_setpoint_gain_label.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.experiment_setpoint_gain_label.setAcceptDrops(False)
self.experiment_setpoint_gain_label.setAutoFillBackground(True)
self.experiment_setpoint_gain_label.setFrameShape(QtWidgets.QFrame.Box)
self.experiment_setpoint_gain_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.experiment_setpoint_gain_label.setLineWidth(1)
self.experiment_setpoint_gain_label.setMidLineWidth(1)
self.experiment_setpoint_gain_label.setScaledContents(False)
self.experiment_setpoint_gain_label.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse)
self.experiment_setpoint_gain_label.setObjectName("experiment_setpoint_gain_label")
self.experiment_conversion_factor = QtWidgets.QLineEdit(self.centralwidget)
self.experiment_conversion_factor.setGeometry(QtCore.QRect(260, 60, 71, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
self.experiment_conversion_factor.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_conversion_factor.setFont(font)
self.experiment_conversion_factor.setStyleSheet("")
self.experiment_conversion_factor.setFrame(True)
self.experiment_conversion_factor.setAlignment(QtCore.Qt.AlignCenter)
self.experiment_conversion_factor.setObjectName("experiment_conversion_factor")
self.experiment_setpoint_offset_label = QtWidgets.QLabel(self.centralwidget)
self.experiment_setpoint_offset_label.setEnabled(True)
self.experiment_setpoint_offset_label.setGeometry(QtCore.QRect(30, 140, 191, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(161, 188, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(84, 151, 213))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(44, 80, 114))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(33, 60, 85))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(67, 121, 171))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.experiment_setpoint_offset_label.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_setpoint_offset_label.setFont(font)
self.experiment_setpoint_offset_label.setContextMenuPolicy(QtCore.Qt.NoContextMenu)
self.experiment_setpoint_offset_label.setAcceptDrops(False)
self.experiment_setpoint_offset_label.setAutoFillBackground(True)
self.experiment_setpoint_offset_label.setFrameShape(QtWidgets.QFrame.Box)
self.experiment_setpoint_offset_label.setFrameShadow(QtWidgets.QFrame.Sunken)
self.experiment_setpoint_offset_label.setLineWidth(1)
self.experiment_setpoint_offset_label.setMidLineWidth(1)
self.experiment_setpoint_offset_label.setScaledContents(False)
self.experiment_setpoint_offset_label.setTextInteractionFlags(QtCore.Qt.LinksAccessibleByMouse)
self.experiment_setpoint_offset_label.setObjectName("experiment_setpoint_offset_label")
self.save_experiment_file_button = QtWidgets.QPushButton(self.centralwidget)
self.save_experiment_file_button.setGeometry(QtCore.QRect(260, 300, 71, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(91, 166, 232))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 217, 21))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Link, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.NoRole, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(248, 221, 23))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 217, 21))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Link, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.NoRole, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 120, 215))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Highlight, brush)
brush = QtGui.QBrush(QtGui.QColor(247, 217, 21))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Link, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.NoRole, brush)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.save_experiment_file_button.setFont(font)
self.save_experiment_file_button.setContextMenuPolicy(QtCore.Qt.PreventContextMenu)
self.save_experiment_file_button.setAcceptDrops(False)
self.save_experiment_file_button.setWhatsThis("")
self.save_experiment_file_button.setAutoFillBackground(True)
self.save_experiment_file_button.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
self.save_experiment_file_button.setInputMethodHints(QtCore.Qt.ImhNone)
self.save_experiment_file_button.setAutoRepeatDelay(301)
self.save_experiment_file_button.setAutoRepeatInterval(96)
self.save_experiment_file_button.setAutoDefault(False)
self.save_experiment_file_button.setDefault(False)
self.save_experiment_file_button.setFlat(False)
self.save_experiment_file_button.setObjectName("save_experiment_file_button")
self.experiment_time_step = QtWidgets.QLineEdit(self.centralwidget)
self.experiment_time_step.setGeometry(QtCore.QRect(260, 220, 71, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
self.experiment_time_step.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_time_step.setFont(font)
self.experiment_time_step.setStyleSheet("")
self.experiment_time_step.setFrame(True)
self.experiment_time_step.setAlignment(QtCore.Qt.AlignCenter)
self.experiment_time_step.setObjectName("experiment_time_step")
self.experiment_averag_number = QtWidgets.QLineEdit(self.centralwidget)
self.experiment_averag_number.setGeometry(QtCore.QRect(260, 260, 71, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
self.experiment_averag_number.setPalette(palette)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.experiment_averag_number.setFont(font)
self.experiment_averag_number.setStyleSheet("")
self.experiment_averag_number.setFrame(True)
self.experiment_averag_number.setAlignment(QtCore.Qt.AlignCenter)
self.experiment_averag_number.setObjectName("experiment_averag_number")
Adv_Params.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(Adv_Params)
self.statusbar.setStyleSheet("background-color: rgb(236, 236, 236);\n""background-color: rgb(228, 228, 228);")
self.statusbar.setObjectName("statusbar")
Adv_Params.setStatusBar(self.statusbar)
self.actionAg_AgCl = QtWidgets.QAction(Adv_Params)
self.actionAg_AgCl.setObjectName("actionAg_AgCl")
self.actionSCE = QtWidgets.QAction(Adv_Params)
self.actionSCE.setObjectName("actionSCE")
self.retranslateUi(Adv_Params)
QtCore.QMetaObject.connectSlotsByName(Adv_Params)
def retranslateUi(self, Adv_Params):
_translate = QtCore.QCoreApplication.translate
Adv_Params.setWindowTitle(_translate("Adv_Params", "Adv Params"))
self.experiment_conversion_factor_label.setText(_translate("Adv_Params", "<html><head/><body><p align=\"center\">Conversion Factor</p></body></html>"))
self.experiment_averag_number_label.setText(_translate("Adv_Params", "<html><head/><body><p align=\"center\">Average Number</p></body></html>"))
self.experiment_setpoint_gain.setStatusTip(_translate("Adv_Params", "setpoint gain for experiment"))
self.experiment_shunt_resistor_label.setText(_translate("Adv_Params", "<html><head/><body><p align=\"center\">Shunt Resistor /mohm</p></body></html>"))
self.advanced_parameters_label.setText(_translate("Adv_Params", "<html><head/><body><p align=\"center\">Advanced Parameters</p></body></html>"))
self.experiment_shunt_resistor.setStatusTip(_translate("Adv_Params", "shunt resistor for experiment."))
self.experiment_setpoint_offset.setStatusTip(_translate("Adv_Params", "setpoint offset for experiment."))
self.experiment_time_step_label.setText(_translate("Adv_Params", "<html><head/><body><p align=\"center\">Time Step /s</p></body></html>"))
self.experiment_setpoint_gain_label.setText(_translate("Adv_Params", "<html><head/><body><p align=\"center\">Setpoint Gain</p></body></html>"))
self.experiment_conversion_factor.setStatusTip(_translate("Adv_Params", "conversion factor for experiment"))
self.experiment_setpoint_offset_label.setText(_translate("Adv_Params", "<html><head/><body><p align=\"center\">Setpoint Offset</p></body></html>"))
self.save_experiment_file_button.setStatusTip(_translate("Adv_Params", "Click this button to save parameters."))
self.save_experiment_file_button.setText(_translate("Adv_Params", "Save "))
self.experiment_time_step.setStatusTip(_translate("Adv_Params", "time step for experiment."))
self.experiment_averag_number.setStatusTip(_translate("Adv_Params", "average number for experiment."))
self.actionAg_AgCl.setText(_translate("Adv_Params", "Ag/AgCl"))
self.actionSCE.setText(_translate("Adv_Params", "SCE"))
if __name__ == "__main__":
    import sys

    def _main():
        """Build the Adv Params window and run the Qt event loop."""
        app = QtWidgets.QApplication(sys.argv)
        window = QtWidgets.QMainWindow()
        ui = Ui_Adv_Params()
        ui.setupUi(window)
        window.show()
        # exec_() blocks until the window closes; propagate its exit code.
        sys.exit(app.exec_())

    _main()
| 60.850316 | 159 | 0.705578 | 10,208 | 86,590 | 5.927606 | 0.021552 | 0.164141 | 0.099159 | 0.130146 | 0.942653 | 0.906741 | 0.859558 | 0.835793 | 0.824985 | 0.824985 | 0 | 0.035771 | 0.175113 | 86,590 | 1,422 | 160 | 60.893108 | 0.811374 | 0.000485 | 0 | 0.837819 | 0 | 0 | 0.01714 | 0.007894 | 0 | 0 | 0 | 0 | 0 | 1 | 0.001416 | false | 0 | 0.001416 | 0 | 0.003541 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
bda716c9ce9c2bf86742f18033830fc136a21d0c | 13,313 | py | Python | tests/serializers/test_djongomodel_arrayreference.py | jino-cod/rest_meets_djongo | bec3ee0843222d6c62b189673d07e426c789af01 | [
"MIT"
] | 22 | 2019-10-01T15:31:42.000Z | 2021-05-04T08:01:08.000Z | tests/serializers/test_djongomodel_arrayreference.py | jino-cod/rest_meets_djongo | bec3ee0843222d6c62b189673d07e426c789af01 | [
"MIT"
] | 10 | 2019-12-07T17:21:50.000Z | 2021-06-10T17:49:53.000Z | tests/serializers/test_djongomodel_arrayreference.py | jino-cod/rest_meets_djongo | bec3ee0843222d6c62b189673d07e426c789af01 | [
"MIT"
] | 5 | 2020-05-12T16:11:06.000Z | 2022-01-02T02:08:23.000Z | from bson import ObjectId
from pytest import mark
from rest_framework import serializers as drf_ser
from rest_meets_djongo import serializers as rmd_ser
from tests.models import ArrayRelatedModel, ArrayRelationModel
from tests.utils import format_dict
@mark.django_db
class TestIntegration(object):
def test_root_retrieve(self):
"""
Confirm that existing instances of models w/ ArrayModelFields can
still be retrieved and serialized correctly
"""
# Set up the initial data
rel_data_1 = {
'email': 'jojo@gmail.com'
}
rel_instance_1 = ArrayRelatedModel.objects.create(**rel_data_1)
rel_data_1.update({'pk': rel_instance_1.pk})
rel_data_2 = {
'email': 'gogo@gmail.com'
}
rel_instance_2 = ArrayRelatedModel.objects.create(**rel_data_2)
rel_data_2.update({'pk': rel_instance_2.pk})
rel_list = [rel_instance_1, rel_instance_2]
instance = ArrayRelationModel.objects.create()
instance.arr_relation.add(*rel_list)
# Attempt to serialize an instance of the model using the data above
class TestSerializer(rmd_ser.DjongoModelSerializer):
class Meta:
model = ArrayRelationModel
fields = '__all__'
serializer = TestSerializer(instance)
# Compare observed serialization to what we would expect
expected_data = {
'_id': str(instance._id),
'int_val': instance.int_val,
'arr_relation': [
rel_data_1['pk'],
rel_data_2['pk']
]
}
expected_str = format_dict(expected_data)
observed_str = format_dict(serializer.data)
assert expected_str == observed_str
def test_deep_retrieve(self):
"""
Confirm that existing instances of models w/ ArrayModelFields can
still be retrieved and serialized correctly
"""
# Set up the initial data
rel_data_1 = {
'email': 'jojo@gmail.com'
}
rel_instance_1 = ArrayRelatedModel.objects.create(**rel_data_1)
rel_data_2 = {
'email': 'gogo@gmail.com'
}
rel_instance_2 = ArrayRelatedModel.objects.create(**rel_data_2)
rel_list = [rel_instance_1, rel_instance_2]
instance = ArrayRelationModel.objects.create()
instance.arr_relation.add(*rel_list)
# Attempt to serialize an instance of the model using the data above
class TestSerializer(rmd_ser.DjongoModelSerializer):
class Meta:
model = ArrayRelationModel
fields = '__all__'
depth = 1
serializer = TestSerializer(instance)
# Compare observed serialization with expected serialization
expected_data = {
'_id': str(instance._id),
'int_val': instance.int_val,
'arr_relation': [
{
'_id': str(rel_instance_1.pk),
'email': rel_instance_1.email
},
{
'_id': str(rel_instance_2.pk),
'email': rel_instance_2.email
}
]
}
expected_str = format_dict(expected_data)
observed_str = format_dict(serializer.data)
assert expected_str == observed_str
def test_root_create(self):
"""
Confirm that new instances of models w/ ArrayModelFields fields
can still be generated and saved correctly from raw data
"""
# Set up the initial data
rel_data_1 = {
'email': 'jojo@gmail.com'
}
rel_instance_1 = ArrayRelatedModel.objects.create(**rel_data_1)
rel_data_1.update({'pk': rel_instance_1.pk})
rel_data_2 = {
'email': 'gogo@gmail.com'
}
rel_instance_2 = ArrayRelatedModel.objects.create(**rel_data_2)
rel_data_2.update({'pk': rel_instance_2.pk})
data = {
'int_val': -4321,
# Default create is read_only
}
# Serializer should validate
class TestSerializer(rmd_ser.DjongoModelSerializer):
class Meta:
model = ArrayRelationModel
fields = '__all__'
serializer = TestSerializer(data=data)
assert serializer.is_valid(), serializer.errors
# Serializer should be able to save the data, sans relations
instance = serializer.save()
assert list(instance.arr_relation.all()) == []
# Confirm that this default read-only setup can be overridden
class NewTestSerializer(rmd_ser.DjongoModelSerializer):
arr_relation = drf_ser.PrimaryKeyRelatedField(
queryset=ArrayRelatedModel.objects.all(),
many=True
)
def create(self, validated_data):
rel_pks = validated_data.pop('arr_relation', [])
obj = ArrayRelationModel.objects.create(**validated_data)
obj.arr_relation.add(*rel_pks)
obj.save()
return obj
class Meta:
model = ArrayRelationModel
fields = '__all__'
data.update({
'arr_relation': [rel_instance_1.pk, rel_instance_2.pk]
})
serializer = NewTestSerializer(data=data)
assert serializer.is_valid(), serializer.errors
# Serializer should be able to save the data
instance = serializer.save()
expected_data = {
'_id': str(instance.pk),
'arr_relation': [rel_instance_1.pk, rel_instance_2.pk],
'int_val': instance.int_val
}
assert format_dict(serializer.data) == format_dict(expected_data)
def test_deep_create(self):
"""
Confirm that new instances of models w/ ArrayModelFields fields
can still be generated and saved correctly from raw data
"""
# Set up the initial data
rel_data_1 = {
'email': 'jojo@gmail.com'
}
rel_instance_1 = ArrayRelatedModel.objects.create(**rel_data_1)
rel_data_1.update({'pk': rel_instance_1.pk})
rel_data_2 = {
'email': 'gogo@gmail.com'
}
rel_instance_2 = ArrayRelatedModel.objects.create(**rel_data_2)
rel_data_2.update({'pk': rel_instance_2.pk})
data = {
'int_val': -4321,
}
# Serializer should validate
class TestSerializer(rmd_ser.DjongoModelSerializer):
class Meta:
model = ArrayRelationModel
fields = '__all__'
depth = 1
serializer = TestSerializer(data=data)
assert serializer.is_valid(), serializer.errors
# Serializer should be able to save the data, sans relations
instance = serializer.save()
assert list(instance.arr_relation.all()) == []
# Confirm that this default read-only setup can be overridden
class NewTestSerializer(rmd_ser.DjongoModelSerializer):
arr_relation_pks = drf_ser.PrimaryKeyRelatedField(
queryset=ArrayRelatedModel.objects.all(),
many=True,
write_only=True
)
def create(self, validated_data):
rel_pks = validated_data.pop('arr_relation_pks', [])
obj = ArrayRelationModel.objects.create(**validated_data)
obj.arr_relation.add(*rel_pks)
obj.save()
return obj
class Meta:
model = ArrayRelationModel
fields = '__all__'
depth = 1
data.update({
'arr_relation_pks': [rel_instance_1.pk, rel_instance_2.pk]
})
serializer = NewTestSerializer(data=data)
assert serializer.is_valid(), serializer.errors
# Serializer should be able to save the data
instance = serializer.save()
expected_data = {
'_id': str(instance.pk),
'int_val': instance.int_val,
'arr_relation': [
{
'_id': str(rel_instance_1.pk),
'email': rel_instance_1.email
},
{
'_id': str(rel_instance_2.pk),
'email': rel_instance_2.email
}],
}
assert format_dict(serializer.data) == format_dict(expected_data)
def test_root_update(self):
"""
Confirm that existing instances of models w/ ArrayReferenceFields
can still be updated when provided with new raw data
"""
# Set up the initial data
rel_data_1 = {
'email': 'jojo@gmail.com'
}
rel_instance_1 = ArrayRelatedModel.objects.create(**rel_data_1)
rel_data_1.update({'pk': rel_instance_1.pk})
rel_data_2 = {
'email': 'gogo@gmail.com'
}
rel_instance_2 = ArrayRelatedModel.objects.create(**rel_data_2)
rel_data_2.update({'pk': rel_instance_2.pk})
old_data = {
'int_val': -4321,
}
instance = ArrayRelationModel.objects.create(**old_data)
instance.arr_relation.add(rel_instance_1, rel_instance_2)
# Try to perform an instance update
new_rel_data = {
'email': 'new_user@new.com',
}
new_rel_instance = ArrayRelatedModel.objects.create(**new_rel_data)
new_data = {
'int_val': 999,
'arr_relation': [new_rel_instance.pk]
}
class TestSerializer(rmd_ser.DjongoModelSerializer):
class Meta:
model = ArrayRelationModel
fields = '__all__'
serializer = TestSerializer(instance, data=new_data)
assert serializer.is_valid(), serializer.errors
# Confirm that the serializer can still save w/ the updated
# relations
serializer.save()
expected_data = {
'_id': str(instance.pk),
'int_val': '999',
'arr_relation': [
ObjectId(new_rel_instance.pk)
]
}
assert format_dict(serializer.data) == format_dict(expected_data)
def test_deep_update(self):
"""
Confirm that existing instances of models w/ ArrayReferenceFields
can still be updated when provided with new raw data
"""
# Set up the initial data
rel_data_1 = {
'email': 'jojo@gmail.com'
}
rel_instance_1 = ArrayRelatedModel.objects.create(**rel_data_1)
rel_data_1.update({'pk': rel_instance_1.pk})
rel_data_2 = {
'email': 'gogo@gmail.com'
}
rel_instance_2 = ArrayRelatedModel.objects.create(**rel_data_2)
rel_data_2.update({'pk': rel_instance_2.pk})
old_data = {
'int_val': -4321,
}
instance = ArrayRelationModel.objects.create(**old_data)
instance.arr_relation.add(rel_instance_1, rel_instance_2)
# Try to perform an instance update
new_rel_data = {
'email': 'new_user@new.com',
}
new_rel_instance = ArrayRelatedModel.objects.create(**new_rel_data)
new_data = {
'int_val': 999,
'arr_relation': [new_rel_instance.pk]
}
class TestSerializer(rmd_ser.DjongoModelSerializer):
class Meta:
model = ArrayRelationModel
fields = '__all__'
depth = 1
# Confirm that the serializer can save, but strips reference data
serializer = TestSerializer(instance, data=new_data)
assert serializer.is_valid(), serializer.errors
# Confirm that the serializer can still save w/ the updated relations
serializer.save()
expected_data = {
'_id': str(instance.pk),
'int_val': '999',
'arr_relation': [
{
'_id': str(rel_instance_1.pk),
'email': rel_instance_1.email,
},
{
'_id': str(rel_instance_2.pk),
'email': rel_instance_2.email
}
]
}
assert format_dict(serializer.data) == format_dict(expected_data)
# Confirm that this default format can be overridden
class NewTestSerializer(rmd_ser.DjongoModelSerializer):
arr_relation = drf_ser.PrimaryKeyRelatedField(
queryset=ArrayRelatedModel.objects.all(),
read_only=False,
many=True
)
class Meta:
model = ArrayRelationModel
fields = '__all__'
depth = 1
def update(self, inst, validated_data):
rel_pks = validated_data.pop('arr_relation')
inst.arr_relation.add(*rel_pks)
inst.save()
return inst
serializer = NewTestSerializer(instance, data=new_data)
assert serializer.is_valid(), serializer.errors
instance = serializer.save()
| 30.188209 | 77 | 0.572223 | 1,386 | 13,313 | 5.220779 | 0.102453 | 0.080569 | 0.039801 | 0.031509 | 0.918878 | 0.910862 | 0.899254 | 0.899254 | 0.891929 | 0.875207 | 0 | 0.013346 | 0.341471 | 13,313 | 440 | 78 | 30.256818 | 0.812022 | 0.136483 | 0 | 0.721831 | 0 | 0 | 0.059996 | 0 | 0 | 0 | 0 | 0 | 0.052817 | 1 | 0.03169 | false | 0 | 0.021127 | 0 | 0.130282 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
da6c8117f071dd6e5bd6b513be896a4399b35fbb | 4,506 | py | Python | models/wrapper.py | vskadandale/multichannel-unet-bss | 5d979a7669def510ece15fc18eeb185d73aa30be | [
"MIT"
] | 13 | 2020-06-30T11:32:54.000Z | 2022-03-29T10:06:50.000Z | models/wrapper.py | kvsphantom/multitask-unet-bss | 5d979a7669def510ece15fc18eeb185d73aa30be | [
"MIT"
] | 3 | 2020-10-28T14:51:46.000Z | 2021-12-29T12:50:05.000Z | models/wrapper.py | kvsphantom/multitask-unet-bss | 5d979a7669def510ece15fc18eeb185d73aa30be | [
"MIT"
] | 5 | 2020-07-05T10:03:41.000Z | 2022-03-29T10:47:15.000Z | import torch
from utils.utils import warpgrid
import torch.nn.functional as F
from settings import *
class Wrapper(torch.nn.Module):
    """Wraps a mask-predicting separation model with the shared STFT
    pre-processing used for training/evaluation.

    The wrapper warps input magnitude spectrograms onto a 256-bin
    log-frequency grid, derives ground-truth ratio masks, feeds the
    log-magnitude of the mixture to the wrapped model, and returns both
    predicted and ground-truth quantities.
    """

    def __init__(self, model, main_device=0):
        # model: the mask-predicting network; main_device: CUDA device index
        # that inputs and the precomputed warp grid are moved to.
        super(Wrapper, self).__init__()
        self.L = len(SOURCES_SUBSET)  # number of separated sources
        self.model = model
        self.main_device = main_device
        # Precomputed sampling grid for the common (full) batch size;
        # warpgrid comes from utils.utils (not visible here).
        self.grid_warp = torch.from_numpy(
            warpgrid(BATCH_SIZE, 256, STFT_WIDTH, warp=True)).to(self.main_device)

    def forward(self, x):
        # x: stacked magnitude spectrograms; last channel is the mixture,
        # the first self.L channels are the individual sources
        # (assumed Bx(L+1)x512xSTFT_WIDTH — TODO confirm against caller).
        if x.shape[0] == BATCH_SIZE:
            mags = F.grid_sample(x, self.grid_warp)
        else:  # for the last batch, where the number of samples is generally smaller than the batch_size
            custom_grid_warp = torch.from_numpy(
                warpgrid(x.shape[0], 256, STFT_WIDTH, warp=True)).to(self.main_device)
            mags = F.grid_sample(x, custom_grid_warp)
        # Ground-truth ratio masks: source magnitude / mixture magnitude,
        # clipped in place to [0, 10] to avoid blow-ups near silent bins.
        gt_masks = torch.div(mags[:, :-1], mags[:, -1].unsqueeze(1).expand(x.shape[0], self.L, *mags.shape[2:]))
        gt_masks.clamp_(0., 10.)
        # Model input: log-magnitude of the warped mixture; detached so no
        # gradient flows through the warping path.
        log_mags = torch.log(mags[:, -1].unsqueeze(1)).detach()
        gt_mags = x[:, :-1]           # unwarped ground-truth source magnitudes
        mix_mag = x[:, -1].unsqueeze(1)  # unwarped mixture magnitude
        pred_masks = self.model(log_mags)
        pred_masks = torch.relu(pred_masks)  # masks are non-negative
        mag_mix_sq = mags[:, -1].unsqueeze(1)  # warped mixture magnitude
        pred_mags_sq = pred_masks * mag_mix_sq  # predicted warped source magnitudes
        gt_mags_sq = gt_masks * mag_mix_sq      # ground-truth warped source magnitudes
        network_output = [gt_mags_sq, pred_mags_sq, gt_mags, mix_mag, gt_masks,
                          pred_masks]  # BxKx256x256, BxKx256x256, BxKx512x256, Bx1x512x256, BxKx256x256, BxKx256x256
        return network_output
class SpecChannelUnetNoMaskWrapper(torch.nn.Module):
    """Variant of ``Wrapper`` for a model that predicts source magnitudes
    directly (no masks): the raw warped mixture magnitude (not its log)
    is fed to the model and its ReLU'd output is used as the predicted
    magnitudes. Ground-truth masks are still computed for loss/metrics,
    and stand in for the (non-existent) predicted masks in the output.
    """

    def __init__(self, model, main_device=0):
        super(SpecChannelUnetNoMaskWrapper, self).__init__()
        self.L = len(SOURCES_SUBSET)  # number of separated sources
        self.model = model
        self.main_device = main_device
        # Precomputed log-frequency warp grid for the common batch size.
        self.grid_warp = torch.from_numpy(
            warpgrid(BATCH_SIZE, 256, STFT_WIDTH, warp=True)).to(self.main_device)

    def forward(self, x):
        # x: Bx(L+1)xHxW magnitudes, last channel is the mixture
        # (shape assumed from the slicing below — TODO confirm).
        if x.shape[0] == BATCH_SIZE:
            mags = F.grid_sample(x, self.grid_warp)
        else:  # for the last batch, where the number of samples is generally smaller than the batch_size
            custom_grid_warp = torch.from_numpy(
                warpgrid(x.shape[0], 256, STFT_WIDTH, warp=True)).to(self.main_device)
            mags = F.grid_sample(x, custom_grid_warp)
        # Ground-truth ratio masks, clipped in place to [0, 10].
        gt_masks = torch.div(mags[:, :-1], mags[:, -1].unsqueeze(1).expand(x.shape[0], self.L, *mags.shape[2:]))
        gt_masks.clamp_(0., 10.)
        gt_mags = x[:, :-1]              # unwarped ground-truth source magnitudes
        mix_mag = x[:, -1].unsqueeze(1)  # unwarped mixture magnitude
        # The model consumes the warped mixture magnitude directly and
        # outputs source magnitudes (ReLU enforces non-negativity).
        pred_mags_sq = self.model(mags[:, -1].unsqueeze(1))
        pred_mags_sq = torch.relu(pred_mags_sq)
        mag_mix_sq = mags[:, -1].unsqueeze(1)
        gt_mags_sq = gt_masks * mag_mix_sq
        # No predicted masks exist here, so gt_masks fills both mask slots.
        network_output = [gt_mags_sq, pred_mags_sq, gt_mags, mix_mag, gt_masks, gt_masks]  # BxKx256x256, BxKx256x256, BxKx512x256, Bx1x512x256, BxKx256x256, BxKx256x256
        return network_output
class CUNetWrapper(torch.nn.Module):
    """Variant of ``Wrapper`` for a conditioned U-Net: ``forward`` takes a
    ``(spectrograms, conditions)`` pair and passes the condition vector to
    the wrapped model alongside the log mixture magnitude.
    """

    def __init__(self, model, main_device=0):
        super(CUNetWrapper, self).__init__()
        self.L = len(SOURCES_SUBSET)  # number of separated sources
        self.model = model
        self.main_device = main_device
        # Precomputed log-frequency warp grid for the common batch size.
        self.grid_warp = torch.from_numpy(
            warpgrid(BATCH_SIZE, 256, STFT_WIDTH, warp=True)).to(self.main_device)

    def forward(self, x):
        # Unlike the other wrappers, x is a pair: (magnitudes, conditions).
        x, conditions = x
        if x.shape[0] == BATCH_SIZE:
            mags = F.grid_sample(x, self.grid_warp)
        else:  # for the last batch, where the number of samples is generally smaller than the batch_size
            custom_grid_warp = torch.from_numpy(
                warpgrid(x.shape[0], 256, STFT_WIDTH, warp=True)).to(self.main_device)
            mags = F.grid_sample(x, custom_grid_warp)
        # NOTE(review): no explicit .expand() here (cf. Wrapper) — relies on
        # broadcasting of the 1-channel mixture; result should be identical.
        gt_masks = torch.div(mags[:, :-1], mags[:, -1].unsqueeze(1))
        gt_masks.clamp_(0., 10.)
        # Detached log mixture magnitude as model input.
        log_mags = torch.log(mags[:, -1].unsqueeze(1)).detach()
        gt_mags = x[:, :-1]              # unwarped ground-truth source magnitudes
        mix_mag = x[:, -1].unsqueeze(1)  # unwarped mixture magnitude
        pred_masks = self.model(log_mags, conditions)
        pred_masks = torch.relu(pred_masks)  # masks are non-negative
        mag_mix_sq = mags[:, -1].unsqueeze(1)
        pred_mags_sq = pred_masks * mag_mix_sq
        gt_mags_sq = gt_masks * mag_mix_sq
        network_output = [gt_mags_sq, pred_mags_sq, gt_mags, mix_mag, gt_masks,
                          pred_masks]  # BxKx256x256, BxKx256x256, BxKx512x256, Bx1x512x256, BxKx256x256, BxKx256x256
        return network_output
| 42.914286 | 169 | 0.632268 | 635 | 4,506 | 4.204724 | 0.124409 | 0.05618 | 0.049438 | 0.050562 | 0.912734 | 0.912734 | 0.910487 | 0.892509 | 0.892509 | 0.892509 | 0 | 0.053377 | 0.247448 | 4,506 | 104 | 170 | 43.326923 | 0.734002 | 0.110297 | 0 | 0.802326 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.069767 | false | 0 | 0.046512 | 0 | 0.186047 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
e54c2a120717bf0156392861bc7ef03528da21ba | 3,385 | py | Python | Django/recurView/ARNN/migrations/0008_auto_20190313_2131.py | ltomas837/DjangoProject | 31a88fa9e8819ab3f03f689e50bbf8a341b4fc49 | [
"MIT"
] | 3 | 2019-03-29T17:32:50.000Z | 2019-12-12T18:39:32.000Z | Django/recurView/ARNN/migrations/0008_auto_20190313_2131.py | ltomas837/DjangoProject | 31a88fa9e8819ab3f03f689e50bbf8a341b4fc49 | [
"MIT"
] | 4 | 2020-02-12T00:00:08.000Z | 2021-06-10T22:52:15.000Z | Django/recurView/ARNN/migrations/0008_auto_20190313_2131.py | ltomas837/DjangoProject | 31a88fa9e8819ab3f03f689e50bbf8a341b4fc49 | [
"MIT"
] | 1 | 2019-04-11T23:58:48.000Z | 2019-04-11T23:58:48.000Z | # Generated by Django 2.1.3 on 2019-03-13 20:31
from django.conf import settings
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated Django migration (2019-03-13).

    Adds the ``Observable`` and ``Task`` models, replaces
    ``Corpus.data`` with separate ``data_in``/``data_out`` file fields,
    and attaches ``corpus``/``owner`` foreign keys to the new models.
    Do not edit the generated operations by hand.
    """

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('ARNN', '0007_auto_20190312_2218'),
    ]

    operations = [
        # New model: Observable (same field layout as Task below).
        migrations.CreateModel(
            name='Observable',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                ('creation_date', models.DateTimeField(auto_now_add=True)),
                ('type', models.CharField(choices=[('1', 'Train'), ('2', 'Test')], max_length=1)),
                ('start', models.IntegerField(validators=[django.core.validators.MinValueValidator(0)], verbose_name='Starting step')),
                ('stop', models.IntegerField(validators=[django.core.validators.MinValueValidator(0)], verbose_name='Stopping step')),
            ],
            options={
                'ordering': ['creation_date'],
                'abstract': False,
            },
        ),
        # New model: Task.
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                ('creation_date', models.DateTimeField(auto_now_add=True)),
                ('type', models.CharField(choices=[('1', 'Train'), ('2', 'Test')], max_length=1)),
                ('start', models.IntegerField(validators=[django.core.validators.MinValueValidator(0)], verbose_name='Starting step')),
                ('stop', models.IntegerField(validators=[django.core.validators.MinValueValidator(0)], verbose_name='Stopping step')),
            ],
            options={
                'ordering': ['creation_date'],
                'abstract': False,
            },
        ),
        # Corpus: split the single 'data' field into data_in / data_out.
        migrations.RemoveField(
            model_name='corpus',
            name='data',
        ),
        migrations.AddField(
            model_name='corpus',
            name='data_in',
            field=models.FileField(default='Panda', max_length=500, upload_to=''),
        ),
        migrations.AddField(
            model_name='corpus',
            name='data_out',
            field=models.FileField(default='Panda', max_length=500, upload_to=''),
        ),
        # Foreign keys onto the new models.
        migrations.AddField(
            model_name='task',
            name='corpus',
            field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='ARNN.Corpus'),
        ),
        migrations.AddField(
            model_name='task',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='observable',
            name='corpus',
            field=models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to='ARNN.Corpus'),
        ),
        migrations.AddField(
            model_name='observable',
            name='owner',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
| 41.280488 | 135 | 0.580502 | 331 | 3,385 | 5.782477 | 0.280967 | 0.032915 | 0.0721 | 0.08464 | 0.817659 | 0.805643 | 0.776907 | 0.748171 | 0.748171 | 0.748171 | 0 | 0.020867 | 0.277991 | 3,385 | 81 | 136 | 41.790123 | 0.762275 | 0.013294 | 0 | 0.746667 | 1 | 0 | 0.107849 | 0.00689 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.053333 | 0 | 0.093333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
e57fa7176256327fc54dd5a9932a29a44d5253f9 | 5,881 | py | Python | startup/11-bimorph.py | NSLS-II-SMI/profile_collection | c1e2236a7520f605ac85e7591f05682add06357c | [
"BSD-3-Clause"
] | null | null | null | startup/11-bimorph.py | NSLS-II-SMI/profile_collection | c1e2236a7520f605ac85e7591f05682add06357c | [
"BSD-3-Clause"
] | 13 | 2018-09-25T19:35:08.000Z | 2021-01-15T20:42:26.000Z | startup/11-bimorph.py | NSLS-II-SMI/profile_collection | c1e2236a7520f605ac85e7591f05682add06357c | [
"BSD-3-Clause"
] | 3 | 2019-09-06T01:40:59.000Z | 2020-07-01T20:27:39.000Z | import re
from ophyd import EpicsMotor, EpicsSignalRO, EpicsSignal, Device, Component as Cpt, PseudoPositioner
class HFM_voltage(Device):
    """Bimorph electrode voltages of the horizontally focusing mirror (HFM).

    Each of the 16 electrodes exposes a readback PV (``chN`` ->
    ``GET-VOUTN``) and a target-setpoint PV (``chN_trg`` -> ``SET-VTRGTN``).
    ``shift_rel``/``set_tar`` act on all channels at once. Writing the
    targets and then poking ``set_tar`` makes the power supply ramp.
    """

    # --- per-electrode readback / target PV pairs (channels 0..15) ---
    ch0 = Cpt(EpicsSignal, 'GET-VOUT0')
    ch0_trg = Cpt(EpicsSignal, 'SET-VTRGT0')
    ch1 = Cpt(EpicsSignal, 'GET-VOUT1')
    ch1_trg = Cpt(EpicsSignal, 'SET-VTRGT1')
    ch2 = Cpt(EpicsSignal, 'GET-VOUT2')
    ch2_trg = Cpt(EpicsSignal, 'SET-VTRGT2')
    ch3 = Cpt(EpicsSignal, 'GET-VOUT3')
    ch3_trg = Cpt(EpicsSignal, 'SET-VTRGT3')
    ch4 = Cpt(EpicsSignal, 'GET-VOUT4')
    ch4_trg = Cpt(EpicsSignal, 'SET-VTRGT4')
    ch5 = Cpt(EpicsSignal, 'GET-VOUT5')
    ch5_trg = Cpt(EpicsSignal, 'SET-VTRGT5')
    ch6 = Cpt(EpicsSignal, 'GET-VOUT6')
    ch6_trg = Cpt(EpicsSignal, 'SET-VTRGT6')
    ch7 = Cpt(EpicsSignal, 'GET-VOUT7')
    ch7_trg = Cpt(EpicsSignal, 'SET-VTRGT7')
    ch8 = Cpt(EpicsSignal, 'GET-VOUT8')
    ch8_trg = Cpt(EpicsSignal, 'SET-VTRGT8')
    ch9 = Cpt(EpicsSignal, 'GET-VOUT9')
    ch9_trg = Cpt(EpicsSignal, 'SET-VTRGT9')
    ch10 = Cpt(EpicsSignal, 'GET-VOUT10')
    ch10_trg = Cpt(EpicsSignal, 'SET-VTRGT10')
    ch11 = Cpt(EpicsSignal, 'GET-VOUT11')
    ch11_trg = Cpt(EpicsSignal, 'SET-VTRGT11')
    ch12 = Cpt(EpicsSignal, 'GET-VOUT12')
    ch12_trg = Cpt(EpicsSignal, 'SET-VTRGT12')
    ch13 = Cpt(EpicsSignal, 'GET-VOUT13')
    ch13_trg = Cpt(EpicsSignal, 'SET-VTRGT13')
    ch14 = Cpt(EpicsSignal, 'GET-VOUT14')
    ch14_trg = Cpt(EpicsSignal, 'SET-VTRGT14')
    ch15 = Cpt(EpicsSignal, 'GET-VOUT15')
    ch15_trg = Cpt(EpicsSignal, 'SET-VTRGT15')
    # --- all-channel controls ---
    shift_rel = Cpt(EpicsSignal, 'SET-ALLSHIFT')  # shift every channel by a relative amount
    set_tar = Cpt(EpicsSignal, 'SET-ALLTRGT')     # apply all stored targets

    # This is the default hfm mirror voltage for smi swaxs hutch
    # (np is assumed to come from `from settings import *` — TODO confirm).
    default_hfm_v2 = np.asarray([-151, 261, 250, 293, 175, 236, 168, 231, 242, 200, 291, 222, 215, 157, 311, 36])

    def set_target(self, mode='SWAXS'):
        """Plan: write the default SWAXS voltages (offset by -80 V) into
        every channel target, pausing 5 s after each write.

        NOTE(review): ``mode`` is accepted but never used here, unlike
        ``VFM_voltage.set_target`` — confirm whether other modes were
        intended for the HFM.
        """
        # Matches the chN / chN_trg attribute names; group 1 is the channel
        # number used to index default_hfm_v2.
        ch_pattern = re.compile("ch(?P<number>\d{1,2})")
        for att_an in dir(self):
            ch_pattern_match = ch_pattern.match(att_an)
            if ch_pattern_match and 'trg' in att_an:
                # -80 to move directly to the good voltage for the low-divergence configuration
                yield from bps.mv(getattr(self, att_an), -80 + self.default_hfm_v2[int(ch_pattern_match[1])])
                yield from bps.sleep(5)

    def move_target(self):
        """Plan: trigger the power supply to ramp to the stored targets."""
        yield from bps.mv(self.set_tar, 0)

    def shift_relative(self, relative_value=0):
        """Plan: shift all channel voltages by ``relative_value`` volts."""
        yield from bps.mv(self.shift_rel, relative_value)

    def move_abs(self, mode='SWAXS'):
        """Plan: load the default targets, wait, then apply them."""
        yield from self.set_target(mode = mode)
        yield from bps.sleep(5)
        yield from self.move_target()
hfm_voltage = HFM_voltage('HFM:', name='hfm_voltage')  # singleton device bound to the HFM bimorph IOC prefix
class VFM_voltage(Device):
    """Bimorph electrode voltages of the vertically focusing mirror (VFM).

    Same channel layout as ``HFM_voltage`` (16 readback/target PV pairs
    plus all-channel shift/apply controls), with per-hutch default
    voltage sets selectable via ``mode`` ('SWAXS' or 'OPLS').
    """

    # --- per-electrode readback / target PV pairs (channels 0..15) ---
    ch0 = Cpt(EpicsSignal, 'GET-VOUT0')
    ch0_trg = Cpt(EpicsSignal, 'SET-VTRGT0')
    ch1 = Cpt(EpicsSignal, 'GET-VOUT1')
    ch1_trg = Cpt(EpicsSignal, 'SET-VTRGT1')
    ch2 = Cpt(EpicsSignal, 'GET-VOUT2')
    ch2_trg = Cpt(EpicsSignal, 'SET-VTRGT2')
    ch3 = Cpt(EpicsSignal, 'GET-VOUT3')
    ch3_trg = Cpt(EpicsSignal, 'SET-VTRGT3')
    ch4 = Cpt(EpicsSignal, 'GET-VOUT4')
    ch4_trg = Cpt(EpicsSignal, 'SET-VTRGT4')
    ch5 = Cpt(EpicsSignal, 'GET-VOUT5')
    ch5_trg = Cpt(EpicsSignal, 'SET-VTRGT5')
    ch6 = Cpt(EpicsSignal, 'GET-VOUT6')
    ch6_trg = Cpt(EpicsSignal, 'SET-VTRGT6')
    ch7 = Cpt(EpicsSignal, 'GET-VOUT7')
    ch7_trg = Cpt(EpicsSignal, 'SET-VTRGT7')
    ch8 = Cpt(EpicsSignal, 'GET-VOUT8')
    ch8_trg = Cpt(EpicsSignal, 'SET-VTRGT8')
    ch9 = Cpt(EpicsSignal, 'GET-VOUT9')
    ch9_trg = Cpt(EpicsSignal, 'SET-VTRGT9')
    ch10 = Cpt(EpicsSignal, 'GET-VOUT10')
    ch10_trg = Cpt(EpicsSignal, 'SET-VTRGT10')
    ch11 = Cpt(EpicsSignal, 'GET-VOUT11')
    ch11_trg = Cpt(EpicsSignal, 'SET-VTRGT11')
    ch12 = Cpt(EpicsSignal, 'GET-VOUT12')
    ch12_trg = Cpt(EpicsSignal, 'SET-VTRGT12')
    ch13 = Cpt(EpicsSignal, 'GET-VOUT13')
    ch13_trg = Cpt(EpicsSignal, 'SET-VTRGT13')
    ch14 = Cpt(EpicsSignal, 'GET-VOUT14')
    ch14_trg = Cpt(EpicsSignal, 'SET-VTRGT14')
    ch15 = Cpt(EpicsSignal, 'GET-VOUT15')
    ch15_trg = Cpt(EpicsSignal, 'SET-VTRGT15')
    # --- all-channel controls ---
    shift_rel = Cpt(EpicsSignal, 'SET-ALLSHIFT')  # shift every channel by a relative amount
    set_tar = Cpt(EpicsSignal, 'SET-ALLTRGT')     # apply all stored targets

    # This is the default vfm mirror voltage for smi swaxs hutch
    # (plain list here, unlike HFM's np.asarray — both are only indexed).
    default_vfm_v2 = [39, -102, 277, 234, 325, 163, 392, 280, 365, 273, 196, 400, 219, 304, 51, -327]
    # Historical alternatives kept for reference:
    # default_vfm_v2 = -430 + np.asarray([ 39, 85, 311, 310, -15, 485, 68, 447, 291, 130, 606, 170, 272, 437, 192, -308]) #Ca edge
    # default_vfm_v2 = [-281, -235, -9, -10, -335, 165, -252, 127, -29, -190, 286, -150, -48, 117, -128, -628] #S edge
    # This is the default vfm mirror voltage for opls hutch
    default_vfm_opls = [-206, -191, 6, 71, -316, 184, -223, 120, 45, -130, 202, -111, 17, 62, -75, -553]

    def set_target(self, mode='SWAXS'):
        """Plan: write the per-hutch default voltages into every channel
        target (5 s pause after each write).

        mode: 'SWAXS' -> default_vfm_v2, 'OPLS' -> default_vfm_opls;
        anything else just prints a warning and writes nothing.
        """
        # Matches the chN / chN_trg attribute names; group 1 is the channel
        # number used to index the default-voltage list.
        ch_pattern = re.compile("ch(?P<number>\d{1,2})")
        for att_an in dir(self):
            ch_pattern_match = ch_pattern.match(att_an)
            if ch_pattern_match and 'trg' in att_an:
                if mode == 'SWAXS':
                    yield from bps.mv(getattr(self, att_an), self.default_vfm_v2[int(ch_pattern_match[1])])
                    yield from bps.sleep(5)
                elif mode == 'OPLS':
                    yield from bps.mv(getattr(self, att_an), self.default_vfm_opls[int(ch_pattern_match[1])])
                    yield from bps.sleep(5)
                else:
                    print('Unknown mode, your should choose between SWAXS or OPLS')

    def move_target(self):
        """Plan: trigger the power supply to ramp to the stored targets."""
        yield from bps.mv(self.set_tar, 0)

    def shift_relative(self, relative_value=0):
        """Plan: shift all channel voltages by ``relative_value`` volts."""
        yield from bps.mv(self.shift_rel, relative_value)

    def move_abs(self, mode='SWAXS'):
        """Plan: load the defaults for ``mode``, wait, then apply them."""
        yield from self.set_target(mode = mode)
        yield from bps.sleep(5)
        yield from self.move_target()
vfm_voltage = VFM_voltage('VFM:', name='vfm_voltage')  # singleton device bound to the VFM bimorph IOC prefix
| 42.615942 | 139 | 0.635776 | 824 | 5,881 | 4.407767 | 0.259709 | 0.262115 | 0.168502 | 0.176211 | 0.822137 | 0.822137 | 0.822137 | 0.806718 | 0.78359 | 0.78359 | 0 | 0.092993 | 0.221051 | 5,881 | 137 | 140 | 42.927007 | 0.699847 | 0.081619 | 0 | 0.858407 | 0 | 0 | 0.155572 | 0.007788 | 0 | 0 | 0 | 0 | 0 | 1 | 0.070796 | false | 0 | 0.017699 | 0 | 0.734513 | 0.00885 | 0 | 0 | 0 | null | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 10 |
e58721167f7ecb40260c7a68063c74b9bba31bf0 | 1,809 | py | Python | special_function/help_msg.py | luoluo964/QQrobotFramework | 7c5128dfffc424775af279f8dade365430c40e49 | [
"MIT"
] | 9 | 2021-04-13T12:25:36.000Z | 2021-11-28T07:11:40.000Z | special_function/help_msg.py | luoluo964/QQrobotFramework | 7c5128dfffc424775af279f8dade365430c40e49 | [
"MIT"
] | 1 | 2021-09-03T08:15:29.000Z | 2021-09-03T08:15:29.000Z | special_function/help_msg.py | luoluo964/QQrobotFramework | 7c5128dfffc424775af279f8dade365430c40e49 | [
"MIT"
] | 4 | 2021-06-15T02:17:17.000Z | 2021-08-29T13:09:44.000Z | # coding=utf-8
# Some characters below must be URL-encoded before sending:
#   |        -> %7C
#   '        -> %27
#   newline  -> %0a
#   space    -> %20
def private_chat_help():
    """Return the help message shown in private (one-on-one) chats.

    The text mixes CQ-code face icons with URL-encoded characters
    (%7C = '|', %27 = "'", %0a = newline, %20 = space) so the QQ HTTP
    API renders it correctly.
    """
    segments = (
        "[CQ:face,id=63][CQ:face,id=63][CQ:face,id=63]%20专有指令:%0a%0a",
        "1-调教:%27#学习%20[目标语]%20[自动回复语]%27%20%0a",
        "2-翻译:%27翻译%20[待翻译内容]%27%20%0a",
        "3-手机号码信息:%27号码信息%20[手机号码]%27%20%0a",
        "4-壁纸:%27壁纸%7C高清壁纸%27%20%0a",
        "5-头像推荐:%27头像%20[女%7C男%7C动漫]%27%20%0a",
        "6-新闻:%27新闻%27%20%0a",
        "%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[每日英语]%27%20%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[每日必应壁纸]%27%20%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[网易云每日推荐]%27%20%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[每日微博热搜]%27%20%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[每日生活小窍门]%27%20%0a",
        "[CQ:face,id=203]将%27#订阅%27换成%27#取消订阅%27,即可取消相应订阅%0a",
        "%0a",
        "%20关闭机器人:%27#关机%27%20%0a",
        "更多指令和订阅请自行触发[CQ:face,id=21]",
    )
    return "".join(segments)
def group_chat_help():
    """Return the help message shown in group chats.

    Identical to :func:`private_chat_help` except for the header (single
    newline) and the absence of the private-only "调教" (teach) command.
    URL-encoded characters: %7C = '|', %27 = "'", %0a = newline,
    %20 = space.
    """
    segments = (
        "[CQ:face,id=63][CQ:face,id=63][CQ:face,id=63]%20专有指令:%0a",
        "1-翻译:%27翻译%20[待翻译内容]%27%20%0a",
        "2-手机号码信息:%27号码信息%20[手机号码]%27%20%0a",
        "3-壁纸:%27壁纸%7C高清壁纸%27%20%0a",
        "4-头像推荐:%27头像%20[女%7C男%7C动漫]%27%20%0a",
        "5-新闻:%27新闻%27%20%0a",
        "%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[每日英语]%27%20%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[每日必应壁纸]%27%20%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[网易云每日推荐]%27%20%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[每日微博热搜]%27%20%0a",
        "[CQ:face,id=69]订阅频道:%27#订阅%20[每日生活小窍门]%27%20%0a",
        "[CQ:face,id=203]将%27#订阅%27换成%27#取消订阅%27,即可取消相应订阅%0a",
        "%0a",
        "%20关闭机器人:%27#关机%27%20%0a",
        "更多指令和订阅请自行触发[CQ:face,id=21]",
    )
    return "".join(segments)
| 36.918367 | 73 | 0.619127 | 345 | 1,809 | 3.234783 | 0.188406 | 0.25 | 0.123656 | 0.267921 | 0.918459 | 0.899642 | 0.899642 | 0.854839 | 0.752688 | 0.752688 | 0 | 0.185232 | 0.116639 | 1,809 | 48 | 74 | 37.6875 | 0.513141 | 0.029851 | 0 | 0.594595 | 0 | 0.405405 | 0.639748 | 0.611111 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054054 | false | 0 | 0 | 0 | 0.108108 | 0 | 0 | 0 | 0 | null | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 11 |
e5be153be6c3e6b55d7dfa6f75efe6dbd24c377a | 1,862 | py | Python | pytorch_fft/fft/autograd.py | taras-sereda/pytorch_fft | 2f314d0b9d618954880c74a8ff28d562d31d2e4e | [
"Apache-2.0"
] | null | null | null | pytorch_fft/fft/autograd.py | taras-sereda/pytorch_fft | 2f314d0b9d618954880c74a8ff28d562d31d2e4e | [
"Apache-2.0"
] | null | null | null | pytorch_fft/fft/autograd.py | taras-sereda/pytorch_fft | 2f314d0b9d618954880c74a8ff28d562d31d2e4e | [
"Apache-2.0"
] | null | null | null | import torch
from .fft import fft,ifft,fft2,ifft2,fft3,ifft3
class Fft(torch.autograd.Function):
    """1-D FFT as an (old-style, instance-based) autograd Function.

    Backward is the conjugate inverse FFT scaled by the transform
    length; conjugation is expressed by negating the imaginary part on
    the way in and out.
    """

    def forward(self, X_re, X_im):
        return fft(X_re, X_im)

    def backward(self, grad_output_re, grad_output_im):
        scale = grad_output_re.size(-1)
        re, im = ifft(grad_output_re, -grad_output_im)
        return re * scale, im * (-scale)
class Ifft(torch.autograd.Function):
    """1-D inverse FFT as an (old-style) autograd Function.

    Backward is the conjugate forward FFT (no scaling; the 1/N factor
    lives inside ``ifft`` itself).
    """

    def forward(self, k_re, k_im):
        return ifft(k_re, k_im)

    def backward(self, grad_output_re, grad_output_im):
        re, im = fft(grad_output_re, -grad_output_im)
        return re, -im
class Fft2d(torch.autograd.Function):
    """2-D FFT as an (old-style) autograd Function.

    Backward is the conjugate 2-D inverse FFT scaled by the number of
    transformed elements (product of the last two dimensions).
    """

    def forward(self, X_re, X_im):
        return fft2(X_re, X_im)

    def backward(self, grad_output_re, grad_output_im):
        scale = grad_output_re.size(-1) * grad_output_re.size(-2)
        re, im = ifft2(grad_output_re, -grad_output_im)
        return re * scale, im * (-scale)
class Ifft2d(torch.autograd.Function):
    """2-D inverse FFT as an (old-style) autograd Function.

    Backward is the conjugate 2-D forward FFT (no scaling).
    """

    def forward(self, k_re, k_im):
        return ifft2(k_re, k_im)

    def backward(self, grad_output_re, grad_output_im):
        re, im = fft2(grad_output_re, -grad_output_im)
        return re, -im
class Fft3d(torch.autograd.Function):
    """3-D FFT as an (old-style) autograd Function.

    Backward is the conjugate 3-D inverse FFT scaled by the number of
    transformed elements (product of the last three dimensions).
    """

    def forward(self, X_re, X_im):
        return fft3(X_re, X_im)

    def backward(self, grad_output_re, grad_output_im):
        scale = (grad_output_re.size(-1)
                 * grad_output_re.size(-2)
                 * grad_output_re.size(-3))
        re, im = ifft3(grad_output_re, -grad_output_im)
        return re * scale, im * (-scale)
class Ifft3d(torch.autograd.Function):
    """3-D inverse FFT as an (old-style) autograd Function.

    Backward is the conjugate 3-D forward FFT (no scaling).
    """

    def forward(self, k_re, k_im):
        return ifft3(k_re, k_im)

    def backward(self, grad_output_re, grad_output_im):
        re, im = fft3(grad_output_re, -grad_output_im)
        return re, -im
| 26.985507 | 87 | 0.634264 | 308 | 1,862 | 3.561688 | 0.11039 | 0.273473 | 0.196901 | 0.175023 | 0.852325 | 0.852325 | 0.852325 | 0.852325 | 0.852325 | 0.852325 | 0 | 0.015748 | 0.249731 | 1,862 | 68 | 88 | 27.382353 | 0.769506 | 0 | 0 | 0.510638 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.255319 | false | 0 | 0.042553 | 0.12766 | 0.680851 | 0 | 0 | 0 | 0 | null | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 12 |
0080728d8a588dcfcd824ca160e73792807d64f3 | 196 | py | Python | splitviewsmodels/models/__init__.py | jagodjango/kode | 81fc8f833da4695556ad6ca92ddd3c564c8ae884 | [
"MIT"
] | 4 | 2020-02-28T10:25:53.000Z | 2021-11-24T03:36:08.000Z | splitviewsmodels/models/__init__.py | oonid/jagodjango | 81fc8f833da4695556ad6ca92ddd3c564c8ae884 | [
"MIT"
] | null | null | null | splitviewsmodels/models/__init__.py | oonid/jagodjango | 81fc8f833da4695556ad6ca92ddd3c564c8ae884 | [
"MIT"
] | 6 | 2020-02-07T11:55:54.000Z | 2022-01-12T07:03:56.000Z | from .ta import * # impor semua model (tabel) and fungsi di file ta sebagai definisi dari models
from .tb import * # impor semua model (tabel) and fungsi di file ta sebagai definisi dari models
| 65.333333 | 97 | 0.755102 | 32 | 196 | 4.625 | 0.5 | 0.148649 | 0.216216 | 0.283784 | 0.918919 | 0.918919 | 0.918919 | 0.918919 | 0.918919 | 0.918919 | 0 | 0 | 0.193878 | 196 | 2 | 98 | 98 | 0.936709 | 0.780612 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 11 |
dabaebaf2bab5deeaa94505d748f9d4359aa2a5c | 152 | py | Python | mytoolbox/mysubpackage/mysubsubpackage/mymodule4.py | randolf-scholz/Sphinx-Autosummary-Recursion | e2f707fb0d18639ee077101892f67a76a909f4ec | [
"MIT"
] | 39 | 2020-09-09T18:23:20.000Z | 2022-03-12T09:42:54.000Z | mytoolbox/mysubpackage/mysubsubpackage/mymodule4.py | randolf-scholz/Sphinx-Autosummary-Recursion | e2f707fb0d18639ee077101892f67a76a909f4ec | [
"MIT"
] | 3 | 2020-08-19T17:09:48.000Z | 2021-11-02T21:53:34.000Z | mytoolbox/mysubpackage/mysubsubpackage/mymodule4.py | randolf-scholz/Sphinx-Autosummary-Recursion | e2f707fb0d18639ee077101892f67a76a909f4ec | [
"MIT"
] | 51 | 2020-06-29T03:00:29.000Z | 2022-03-28T13:40:15.000Z | """
Module containing a fourth class.
"""
from mytoolbox.mymodule1 import myClass1
class myClass4(myClass1):
    """The fourth class: a plain subclass of ``myClass1`` with no
    additions of its own."""
| 15.2 | 40 | 0.717105 | 19 | 152 | 5.736842 | 0.789474 | 0.201835 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.031496 | 0.164474 | 152 | 9 | 41 | 16.888889 | 0.826772 | 0.388158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.333333 | 0.333333 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
dabfe1edcd1f29a6ca5d1fdad00689f2a036168f | 190 | py | Python | lib/models/__init__.py | gopitk/CvT | 2cc8deb07d5afcc7f246abaca18ce0352b3a10f4 | [
"MIT"
] | 250 | 2021-05-26T07:33:23.000Z | 2022-03-31T14:02:03.000Z | lib/models/__init__.py | ZHOUWeilian/CvT | 4cedb05b343e13ab08c0a29c5166b6e94c751112 | [
"MIT"
] | 14 | 2021-05-27T23:30:28.000Z | 2022-03-09T08:24:38.000Z | lib/models/__init__.py | ZHOUWeilian/CvT | 4cedb05b343e13ab08c0a29c5166b6e94c751112 | [
"MIT"
] | 34 | 2021-05-27T11:59:03.000Z | 2022-03-15T07:11:42.000Z | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .cls_cvt import *
from .registry import *
from .build import build_model
| 19 | 38 | 0.831579 | 26 | 190 | 5.461538 | 0.461538 | 0.211268 | 0.338028 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.142105 | 190 | 9 | 39 | 21.111111 | 0.871166 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0.166667 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
daeb2160e2547a6b699a5d23c866e1d4de3741f3 | 3,138 | py | Python | faeAuditor/pageResults/migrations/0004_auto_20181026_1039.py | opena11y/fae-auditor | ea9099b37b77ddc30092b0cdd962647c92b143a7 | [
"Apache-2.0"
] | 2 | 2018-02-28T19:03:28.000Z | 2021-09-30T13:40:23.000Z | faeAuditor/pageResults/migrations/0004_auto_20181026_1039.py | opena11y/fae-auditor | ea9099b37b77ddc30092b0cdd962647c92b143a7 | [
"Apache-2.0"
] | 6 | 2020-02-11T21:53:58.000Z | 2022-02-10T07:57:58.000Z | faeAuditor/pageResults/migrations/0004_auto_20181026_1039.py | opena11y/fae-auditor | ea9099b37b77ddc30092b0cdd962647c92b143a7 | [
"Apache-2.0"
] | 1 | 2019-12-05T06:05:20.000Z | 2019-12-05T06:05:20.000Z | # -*- coding: utf-8 -*-
# Generated by Django 1.11.8 on 2018-10-26 15:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration (2018-10-26).

    Adds ``implementation_score_fail`` / ``implementation_score_mc``
    (decimal scores, sentinel default -1) and ``implementation_summ_fail``
    (integer count, default 0) to the page result models. Note: the
    generated operations add no ``implementation_summ_fail`` to
    ``pageruleresult``. Do not edit the generated operations by hand.
    """

    dependencies = [
        ('pageResults', '0003_auto_20181025_1515'),
    ]

    operations = [
        # pageguidelineresult: two scores + fail summary.
        migrations.AddField(
            model_name='pageguidelineresult',
            name='implementation_score_fail',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        migrations.AddField(
            model_name='pageguidelineresult',
            name='implementation_score_mc',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        migrations.AddField(
            model_name='pageguidelineresult',
            name='implementation_summ_fail',
            field=models.IntegerField(default=0),
        ),
        # pageresult: two scores + fail summary.
        migrations.AddField(
            model_name='pageresult',
            name='implementation_score_fail',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        migrations.AddField(
            model_name='pageresult',
            name='implementation_score_mc',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        migrations.AddField(
            model_name='pageresult',
            name='implementation_summ_fail',
            field=models.IntegerField(default=0),
        ),
        # pagerulecategoryresult: two scores + fail summary.
        migrations.AddField(
            model_name='pagerulecategoryresult',
            name='implementation_score_fail',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        migrations.AddField(
            model_name='pagerulecategoryresult',
            name='implementation_score_mc',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        migrations.AddField(
            model_name='pagerulecategoryresult',
            name='implementation_summ_fail',
            field=models.IntegerField(default=0),
        ),
        # pageruleresult: scores only (no summ_fail in the generated ops).
        migrations.AddField(
            model_name='pageruleresult',
            name='implementation_score_fail',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        migrations.AddField(
            model_name='pageruleresult',
            name='implementation_score_mc',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        # pagerulescoperesult: two scores + fail summary.
        migrations.AddField(
            model_name='pagerulescoperesult',
            name='implementation_score_fail',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        migrations.AddField(
            model_name='pagerulescoperesult',
            name='implementation_score_mc',
            field=models.DecimalField(decimal_places=1, default=-1, max_digits=4),
        ),
        migrations.AddField(
            model_name='pagerulescoperesult',
            name='implementation_summ_fail',
            field=models.IntegerField(default=0),
        ),
    ]
]
| 36.488372 | 82 | 0.611855 | 293 | 3,138 | 6.313993 | 0.1843 | 0.136216 | 0.174054 | 0.204324 | 0.894054 | 0.894054 | 0.894054 | 0.894054 | 0.836757 | 0.815676 | 0 | 0.029685 | 0.280752 | 3,138 | 85 | 83 | 36.917647 | 0.789987 | 0.02167 | 0 | 0.897436 | 1 | 0 | 0.198239 | 0.138572 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.025641 | 0 | 0.064103 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 9 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.