| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
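The `qsc_*_quality_signal` columns are heuristic quality signals computed over `content`, followed by a parallel block of `qsc_*` columns (without the suffix) that look like per-signal filter flags, and finally `effective` and `hits`. As a rough illustration of what the word-level signals measure — a sketch under assumed definitions, not the pipeline's actual implementation, whose tokenizer may differ:

```python
import re

def word_signals(content: str) -> dict:
    """Illustrative re-computation of a few qsc_code_* signals.

    Assumed definitions: "words" are \\w+ runs; fractions fall back to 0.0
    for empty input. The dataset's real rules are not documented in this dump.
    """
    words = re.findall(r"\w+", content)
    n_chars = len(content)
    return {
        "num_words": len(words),
        "mean_word_length": sum(map(len, words)) / len(words) if words else 0.0,
        "frac_words_unique": len(set(words)) / len(words) if words else 0.0,
        "frac_chars_whitespace": (
            sum(c.isspace() for c in content) / n_chars if n_chars else 0.0
        ),
    }
```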

---

- hexsha: bc2dcdd77208fd3bc7153fc05f77fe846212c2fd | size: 200 | ext: py | lang: Python
- max_stars: `src/wai/common/file/__init__.py` @ waikato-datamining/wai-common (head bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34, licenses ["MIT"]) | count: null | events: null → null
- max_issues: `src/wai/common/file/__init__.py` @ waikato-datamining/wai-common (head bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34, licenses ["MIT"]) | count: 8 | events: 2020-07-01T02:11:31.000Z → 2020-12-17T01:57:17.000Z
- max_forks: `src/wai/common/file/__init__.py` @ waikato-datamining/wai-common (head bf3d7ae6e01bcb7ffe9f5c2b5d10a05908a68c34, licenses ["MIT"]) | count: null | events: null → null

content:

```python
from ._NamedColumnSelection import NamedColumnSelection, SELECTION_TYPE
from ._functions import load_dir
from ._PathContextManager import PathContextManager, offset_cwd
from ._util import ensure_path
```

- avg_line_length: 40 | max_line_length: 71 | alphanum_fraction: 0.88
- quality signals (41 qsc_*_quality_signal values, schema order): 22 | 200 | 7.636364 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09 | 200 | 4 | 72 | 50 | 0.923077 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- filter columns (41 qsc_* values, schema order): 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 5
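For this first record the Python-specific signals line up with the file itself: qsc_codepython_cate_ast_quality_signal = 1 (the file parses) and qsc_codepython_frac_lines_import_quality_signal = 1 (all four lines are imports). A hedged sketch of how those two values could be derived — assumed definitions, not the dataset's documented ones:

```python
import ast

def python_import_signals(content: str) -> dict:
    """Plausible (assumed) definitions for two qsc_codepython_* signals."""
    try:
        tree = ast.parse(content)
    except SyntaxError:
        return {"cate_ast": 0, "frac_lines_import": 0.0}
    lines = [ln for ln in content.splitlines() if ln.strip()]
    n_imports = sum(isinstance(node, (ast.Import, ast.ImportFrom)) for node in tree.body)
    return {
        "cate_ast": 1,
        "frac_lines_import": n_imports / len(lines) if lines else 0.0,
    }
```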

---

- hexsha: bc3d8a9588570e4f9585cb45afa47cd589e20ffd | size: 114 | ext: py | lang: Python
- max_stars: `introduction.py` @ manojb7/general (head d3e94bb41cd9d7e00e00661e67503902704d5c0b, licenses ["MIT"]) | count: null | events: null → null
- max_issues: `introduction.py` @ manojb7/general (head d3e94bb41cd9d7e00e00661e67503902704d5c0b, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `introduction.py` @ manojb7/general (head d3e94bb41cd9d7e00e00661e67503902704d5c0b, licenses ["MIT"]) | count: null | events: null → null

content:

```python
#!/usr/bin/env python3

import pandas
import calculate

print("Hello world")
print(calculate.x)
print("Hi Again")
```

- avg_line_length: 12.666667 | max_line_length: 22 | alphanum_fraction: 0.745614
- quality signals (41 qsc_*_quality_signal values, schema order): 17 | 114 | 5 | 0.764706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.009901 | 0.114035 | 114 | 9 | 23 | 12.666667 | 0.831683 | 0.184211 | 0 | 0 | 0 | 0 | 0.204301 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.4 | 0 | 0.4 | 0.6
- filter columns (41 qsc_* values, schema order): 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 1
- effective: 0 | hits: 5

---

- hexsha: 70b43f3039b451e6234256a435108b33dbab2629 | size: 2,013 | ext: py | lang: Python
- max_stars: `tests/controllers/subscription_controller_test.py` @ terrazoon/stoic-service (head 0ce5af867aac4d35bdb924a5c8d50fc283fae3d0, licenses ["MIT"]) | count: 1 | events: 2020-04-16T01:02:25.000Z → 2020-04-16T01:02:25.000Z
- max_issues: `tests/controllers/subscription_controller_test.py` @ terrazoon/stoic-service (head 0ce5af867aac4d35bdb924a5c8d50fc283fae3d0, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `tests/controllers/subscription_controller_test.py` @ terrazoon/stoic-service (head 0ce5af867aac4d35bdb924a5c8d50fc283fae3d0, licenses ["MIT"]) | count: null | events: null → null

content:

```python
import unittest

import boto3
import mock
import pytest
from moto import mock_dynamodb, mock_dynamodb2

from src.controllers.subscription_controller import unsubscribe, subscribe


class SubscriptionControllerTest(unittest.TestCase):

    @mock_dynamodb2
    def test_unsubscribe(self):
        table_name = 'emailAddresses'
        dynamodb = boto3.resource('dynamodb', 'us-east-1')
        table = dynamodb.create_table(
            TableName=table_name,
            KeySchema=[
                {
                    'AttributeName': 'email',
                    'KeyType': 'HASH'
                },
            ],
            AttributeDefinitions=[
                {
                    'AttributeName': 'email',
                    'AttributeType': 'S'
                },
            ],
            ProvisionedThroughput={
                'ReadCapacityUnits': 1,
                'WriteCapacityUnits': 1
            }
        )

        event = {"pathParameters": {"email": "abc@x.y"}}
        response = unsubscribe(event, None)

        assert response['statusCode'] == 200
        assert response['body'] == '"abc@x.y"'

    @mock_dynamodb2
    def test_subscribe(self):
        table_name = 'emailAddresses'
        dynamodb = boto3.resource('dynamodb', 'us-east-1')
        table = dynamodb.create_table(
            TableName=table_name,
            KeySchema=[
                {
                    'AttributeName': 'email',
                    'KeyType': 'HASH'
                },
            ],
            AttributeDefinitions=[
                {
                    'AttributeName': 'email',
                    'AttributeType': 'S'
                },
            ],
            ProvisionedThroughput={
                'ReadCapacityUnits': 1,
                'WriteCapacityUnits': 1
            }
        )

        event = {"pathParameters": {"email": "abc@x.y"}}
        response = subscribe(event, None)

        assert response['statusCode'] == 200
        assert response['body'] == '"abc@x.y"'
```

- avg_line_length: 28.352113 | max_line_length: 74 | alphanum_fraction: 0.491803
- quality signals (41 qsc_*_quality_signal values, schema order): 143 | 2,013 | 6.832168 | 0.363636 | 0.036847 | 0.020471 | 0.040942 | 0.714432 | 0.714432 | 0.714432 | 0.714432 | 0.714432 | 0.714432 | 0 | 0.014851 | 0.397914 | 2,013 | 70 | 75 | 28.757143 | 0.791254 | 0 | 0 | 0.622951 | 0 | 0 | 0.174863 | 0 | 0 | 0 | 0 | 0 | 0.065574 | 1 | 0.032787 | false | 0 | 0.098361 | 0 | 0.147541 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0 | hits: 5

---

- hexsha: 70e168e7c5cf7f80477df836c59e97185a9b229c | size: 186 | ext: py | lang: Python
- max_stars: `more_or_less/screen_dimensions.py` @ jeroen-dhollander/python-paginator (head 8c60ae6dd64a7440feda4561440117d9bebc3ae7, licenses ["CC0-1.0"]) | count: 3 | events: 2018-09-26T20:07:51.000Z → 2021-07-10T11:59:44.000Z
- max_issues: `more_or_less/screen_dimensions.py` @ jeroen-dhollander/python-paginator (head 8c60ae6dd64a7440feda4561440117d9bebc3ae7, licenses ["CC0-1.0"]) | count: 1 | events: 2020-02-19T10:15:38.000Z → 2020-03-14T21:48:55.000Z
- max_forks: `more_or_less/screen_dimensions.py` @ jeroen-dhollander/python-more-or-less (head 8c60ae6dd64a7440feda4561440117d9bebc3ae7, licenses ["CC0-1.0"]) | count: null | events: null → null

content:

```python
from abc import ABC, abstractmethod


class ScreenDimensions(ABC):

    @abstractmethod
    def get_height(self):
        pass

    @abstractmethod
    def get_width(self):
        pass
```

- avg_line_length: 14.307692 | max_line_length: 35 | alphanum_fraction: 0.66129
- quality signals (41 qsc_*_quality_signal values, schema order): 20 | 186 | 6.05 | 0.6 | 0.280992 | 0.330579 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.274194 | 186 | 12 | 36 | 15.5 | 0.896296 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0.25 | 0.125 | 0 | 0.5 | 0
- filter columns (41 qsc_* values, schema order): 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0
- effective: 0 | hits: 5
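`ScreenDimensions` above is a pure abstract interface: a subclass only becomes instantiable once both abstract methods are implemented. A hypothetical concrete subclass (`FixedDimensions` is invented here for illustration):

```python
class FixedDimensions(ScreenDimensions):
    """Hypothetical subclass that reports fixed terminal dimensions."""

    def __init__(self, height, width):
        self._height = height
        self._width = width

    def get_height(self):
        return self._height

    def get_width(self):
        return self._width


dims = FixedDimensions(height=24, width=80)
assert dims.get_height() == 24  # instantiating ScreenDimensions() itself raises TypeError
```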

---

- hexsha: 70f12ae7d023831303366dc4f345905b18d4d0ac | size: 5,801 | ext: py | lang: Python
- max_stars: `temporary_siren.py` @ AkramjitS/Video-Encoder-Decoder-Hypernetwork (head bdff1f77d988b95cef2298eba8f4d8c3621bfe78, licenses ["MIT"]) | count: null | events: null → null
- max_issues: `temporary_siren.py` @ AkramjitS/Video-Encoder-Decoder-Hypernetwork (head bdff1f77d988b95cef2298eba8f4d8c3621bfe78, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `temporary_siren.py` @ AkramjitS/Video-Encoder-Decoder-Hypernetwork (head bdff1f77d988b95cef2298eba8f4d8c3621bfe78, licenses ["MIT"]) | count: null | events: null → null

content:

```python
import torch
import torch.nn.functional as F
from torch.tensor import Tensor
from torch.nn import Module
from torch.nn.parameter import Parameter

from typing import List, Tuple, Optional
from math import tau


class SineLayer(Module):
    weight:Parameter
    bias:Parameter

    def __init__(self, in_features:int, out_features:int, half_range_weight:float, half_range_bias:float):
        super().__init__()

        self.weight = Parameter(torch.empty((out_features, in_features)).uniform_(-half_range_weight, half_range_weight), True)
        self.bias = Parameter(torch.empty((out_features)).uniform_(-half_range_bias, half_range_bias), True)

    def forward(self, input:Tensor)->Tensor:
        return F.linear(input, self.weight, self.bias).sin()


class JumpNetwork(Module):
    modules:List[Module]
    fourier:Optional[Tensor]

    def __init__(self, modules:List[Module], fourier:Optional[Tensor]):
        super().__init__()

        self.modules = modules
        self.fourier = fourier

        for index, module in enumerate(self.modules):
            self.add_module('module-{}'.format(index), module)

    def forward(self, input:Tensor)->Tuple[Tensor, Tensor]:
        '''
        # temp8Siren
        # 0 -> 1 |-> 3 |-> 2 -> 3 -> 4
        # |-> |
        input = self.modules[0](input)
        input = self.modules[1](input)
        skip = input.clone()
        input = self.modules[3](input)
        input = input + skip
        input = self.modules[2](input)
        input = self.modules[3](input)
        return self.modules[4](input), None
        '''
        '''
        # temp7Siren
        # 0 -> 1 |-> 3 |-> 4 ?
        # |-> 2 -> 3 |
        input = self.modules[0](input)
        input = self.modules[1](input)
        jump = input.clone()
        jump = self.modules[2](jump)
        jump = self.modules[3](jump)
        input = self.modules[3](input)
        input = input + jump
        return self.modules[4](input), None
        '''
        '''
        # temp6Siren
        # 0 -> 1 -> 3 -> 2 -> 3 -> 4
        input = self.modules[0](input)
        input = self.modules[1](input)
        input = self.modules[3](input)
        input = self.modules[2](input)
        input = self.modules[3](input)
        return self.modules[4](input), None
        '''
        '''
        # temp5Siren
        # 0 |-> 1 -> |-> 3 -> 4 ?
        # |-> 2 -> 1 |
        input = self.modules[0](input)
        input_jump = input.clone()
        input_jump = self.modules[2](input_jump)
        input_jump = self.modules[1](input_jump)
        input = self.modules[1](input)
        input = input + input_jump
        input = self.modules[3](input)
        return self.modules[4](input), None
        '''
        '''
        # temp4Siren
        # 0 -> 1 |-> 2 -> 1 |-> 3 -> 4 ?
        # |-> |
        input = self.modules[0](input)
        input = self.modules[1](input)
        skip = input.clone()
        input = self.modules[2](input)
        input = self.modules[1](input)
        input = input + skip
        input = self.modules[3](input)
        return self.modules[4](input), None
        '''
        '''
        # temp3Siren
        # 0 -> 1 -> 2 -> 1 -> 3 -> 4 ?
        input = self.modules[0](input)
        input = self.modules[1](input)
        input = self.modules[2](input)
        input = self.modules[1](input)
        input = self.modules[3](input)
        return self.modules[4](input), None
        '''
        '''
        # temp2Siren
        # 0 |-> 1 -> |-> 3 -> 3 -> 4 ?
        # |-> 2 -> 1 |
        input = self.modules[0](input)
        input_jump = input.clone()
        input_jump = self.modules[2](input_jump)
        input_jump = self.modules[1](input_jump)
        input = self.modules[1](input)
        input = input + input_jump
        input = self.modules[3](input)
        input = self.modules[3](input)
        return self.modules[4](input), None
        '''
        '''# tempSiren
        input = self.modules[0](input)
        input = self.modules[1](input)
        input = self.modules[1](input)
        input = self.modules[2](input)
        input = self.modules[2](input)
        input = self.modules[3](input)
        input = self.modules[3](input)
        return self.modules[4](input), None
        '''
        '''
        # origSiren
        input = self.modules[0](input)
        input = self.modules[1](input)
        input = self.modules[2](input)
        input = self.modules[3](input)
        return self.modules[4](input), None
        '''
        # OrigExpandedSiren
        return self.modules[0](input), None
        '''
        # fourierOrigSiren
        if self.fourier is None:
            raise ValueError("Fourier tensor expected, got None")
        input = tau * torch.mm(input, self.fourier)
        input = torch.cat((input.cos(), input.sin()), dim=1)
        return self.modules[0](input), None
        '''
        '''
        # fourierOrigExpandedSiren
        if self.fourier is None:
            raise ValueError("Fourier tensor expected, got None")
        input = tau * torch.mm(input, self.fourier)
        input = torch.cat((input.cos(), input.sin()), dim=1)
        return self.modules[0](input), None
        '''
        '''
        input = self.modules[0](input)
        #skip1 = input.clone()
        input = self.modules[1](input)
        input = self.modules[2](input)
        #input = input + skip1
        input = self.modules[1](input)
        input = self.modules[2](input)
        #skip2 = input.clone()
        input = self.modules[3](input)
        input = self.modules[2](input)
        #input = input + skip2
        input = self.modules[3](input)
        return self.modules[4](input), None
        '''
```

- avg_line_length: 31.188172 | max_line_length: 127 | alphanum_fraction: 0.535425
- quality signals (41 qsc_*_quality_signal values, schema order): 673 | 5,801 | 4.543834 | 0.118871 | 0.255396 | 0.256377 | 0.178548 | 0.726292 | 0.677567 | 0.632112 | 0.614454 | 0.604317 | 0.604317 | 0 | 0.031377 | 0.318738 | 5,801 | 185 | 128 | 31.356757 | 0.742409 | 0.053956 | 0 | 0.074074 | 0 | 0 | 0.006503 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.148148 | false | 0 | 0.259259 | 0.037037 | 0.703704 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0
- effective: 0 | hits: 5
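`SineLayer` in the record above is the SIREN building block: a linear map followed by a sine activation, with weights and biases drawn uniformly from caller-supplied half-ranges. (Note that `from torch.tensor import Tensor` only works on older PyTorch releases.) A hypothetical smoke test, assuming PyTorch is available; the half-range values below are made up:

```python
import torch

layer = SineLayer(in_features=2, out_features=16,
                  half_range_weight=0.5, half_range_bias=0.1)  # assumed values
coords = torch.rand(8, 2)   # batch of 8 two-dimensional inputs
out = layer(coords)         # computes sin(x @ W.T + b)
print(out.shape)            # torch.Size([8, 16])
```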

---

- hexsha: 70f77c03590fe6af317fc6534b3c08c9dfeda185 | size: 201 | ext: py | lang: Python
- max_stars: `Python/Book Assignments/ad1.py` @ AungWinnHtut/CStutorial (head 4b57721b814e9c2d288af64a979704dd70f14ddb, licenses ["MIT"]) | count: null | events: null → null
- max_issues: `Python/Book Assignments/ad1.py` @ AungWinnHtut/CStutorial (head 4b57721b814e9c2d288af64a979704dd70f14ddb, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `Python/Book Assignments/ad1.py` @ AungWinnHtut/CStutorial (head 4b57721b814e9c2d288af64a979704dd70f14ddb, licenses ["MIT"]) | count: 1 | events: 2022-03-15T12:20:26.000Z → 2022-03-15T12:20:26.000Z

content:

```python
print("You will meet two people - one is the sweet shop owner who wants to steal the password so he can keep the sweets.")
print()
playername=input("What is your name? ")
print("Welcome "+ playername)
```

- avg_line_length: 40.2 | max_line_length: 122 | alphanum_fraction: 0.741294
- quality signals (41 qsc_*_quality_signal values, schema order): 34 | 201 | 4.382353 | 0.823529 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.164179 | 201 | 4 | 123 | 50.25 | 0.886905 | 0 | 0 | 0 | 0 | 0.25 | 0.696517 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.25 | 0 | 0 | 0 | 0.75
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1
- effective: 0 | hits: 5

---

- hexsha: cb1bf41def4346284e875d9c023b49af8af07764 | size: 90 | ext: py | lang: Python
- max_stars: `lib/utils/__init__.py` @ n2westman/CS410_Project (head f8cfd5ab4d07354f3bb5f712e848853fbc9d7f83, licenses ["MIT"]) | count: null | events: null → null
- max_issues: `lib/utils/__init__.py` @ n2westman/CS410_Project (head f8cfd5ab4d07354f3bb5f712e848853fbc9d7f83, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `lib/utils/__init__.py` @ n2westman/CS410_Project (head f8cfd5ab4d07354f3bb5f712e848853fbc9d7f83, licenses ["MIT"]) | count: null | events: null → null

content:

```python
from .utils import *
from .metric import *
from .functions import *
from .seqeval import *
```

- avg_line_length: 22.5 | max_line_length: 24 | alphanum_fraction: 0.744444
- quality signals (41 qsc_*_quality_signal values, schema order): 12 | 90 | 5.583333 | 0.5 | 0.447761 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.166667 | 90 | 4 | 25 | 22.5 | 0.893333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0
- effective: 0 | hits: 5

---

- hexsha: cb809e69b5e21766e52bf899686150edfc3f1649 | size: 63 | ext: py | lang: Python
- max_stars: `regex/re-introduction/matching-digits-non-digit-character.py` @ feliposz/hackerrank-solutions (head fb1d63ca12a0d289362c9b3fb4cb0b79ef73f72f, licenses ["MIT"]) | count: null | events: null → null
- max_issues: `regex/re-introduction/matching-digits-non-digit-character.py` @ feliposz/hackerrank-solutions (head fb1d63ca12a0d289362c9b3fb4cb0b79ef73f72f, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `regex/re-introduction/matching-digits-non-digit-character.py` @ feliposz/hackerrank-solutions (head fb1d63ca12a0d289362c9b3fb4cb0b79ef73f72f, licenses ["MIT"]) | count: null | events: null → null

content:

```python
Regex_Pattern = r"\d\d\D\d\d\D\d\d\d\d"    # Do not delete 'r'.
```

- avg_line_length: 15.75 | max_line_length: 60 | alphanum_fraction: 0.571429
- quality signals (41 qsc_*_quality_signal values, schema order): 17 | 63 | 2.058824 | 0.411765 | 0.514286 | 0.685714 | 0.8 | 0.285714 | 0.285714 | 0.285714 | 0.285714 | 0.285714 | 0 | 0 | 0 | 0.15873 | 63 | 3 | 61 | 21 | 0.660377 | 0.285714 | 0 | 0 | 0 | 0 | 0.487805 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0
- filter columns (41 qsc_* values, schema order): 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0 | hits: 5
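The pattern reads as: two digits, a non-digit, two digits, a non-digit, four digits. A quick check of that reading (the sample strings are invented):

```python
import re

assert re.match(Regex_Pattern, "12-34-5678")      # \d\d \D \d\d \D \d\d\d\d
assert not re.match(Regex_Pattern, "123456789")   # third character must be a non-digit
```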

---

- hexsha: cb889fa02c29739973dad2cb77fea65e227de083 | size: 24,363 | ext: py | lang: Python
- max_stars: `src/sardana/util/test/test_parser.py` @ marc2332/sardana (head 48dc9191baaa63f6c714d8c025e8f3f96548ad26, licenses ["CC-BY-3.0"]) | count: 43 | events: 2016-11-25T15:21:23.000Z → 2021-08-20T06:09:40.000Z
- max_issues: `src/sardana/util/test/test_parser.py` @ marc2332/sardana (head 48dc9191baaa63f6c714d8c025e8f3f96548ad26, licenses ["CC-BY-3.0"]) | count: 1,263 | events: 2016-11-25T15:58:37.000Z → 2021-11-02T22:23:47.000Z
- max_forks: `src/sardana/util/test/test_parser.py` @ marc2332/sardana (head 48dc9191baaa63f6c714d8c025e8f3f96548ad26, licenses ["CC-BY-3.0"]) | count: 58 | events: 2016-11-21T11:33:55.000Z → 2021-09-01T06:21:21.000Z

content:

```python
#!/usr/bin/env python
##############################################################################
##
# This file is part of Sardana
##
# http://www.sardana-controls.org/
##
# Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
# Sardana is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
##
# Sardana is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
##
# You should have received a copy of the GNU Lesser General Public License
# along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
"""Tests for parser utilities."""
import unittest
from taurus.test import insertTest
from sardana.util.parser import ParamParser
pt0_params_def = []
pt1d_params_def = [
{
"default_value": 99,
"description": "some bloody float",
"max": None,
"min": 1,
"name": "value",
"type": "Float"
}
]
pt2_params_def = [
{
"default_value": None,
"description": "some bloody motor",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
}
]
pt3_params_def = [
{
"default_value": None,
"description": "List of values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": None,
"description": "value",
"max": None,
"min": 1,
"name": "position",
"type": "Float"
}
]
}
]
pt3d_params_def = [
{
"default_value": None,
"description": "List of values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": 21,
"description": "value",
"max": None,
"min": 1,
"name": "position",
"type": "Float"
}
]
}
]
pt4_params_def = [
{
"default_value": None,
"description": "List of motors",
"max": None,
"min": 1,
"name": "motor_list",
"type": [
{
"default_value": None,
"description": "motor name",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
}
]
}
]
pt5_params_def = [
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
{
"default_value": None,
"description": "List of values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": None,
"description": "value",
"max": None,
"min": 1,
"name": "pos",
"type": "Float"
}
]
}
]
pt6_params_def = [
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
{
"default_value": None,
"description": "List of values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": None,
"description": "value",
"max": None,
"min": 1,
"name": "pos",
"type": "Float"
}
]
}
]
pt7_params_def = [
{
"default_value": None,
"description": "List of motor/position pairs",
"max": None,
"min": 1,
"name": "m_p_pair",
"type": [
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
{
"default_value": None,
"description": "Position to move to",
"max": None,
"min": 1,
"name": "position",
"type": "Float"
}
]
}
]
pt7d1_params_def = [
{
"default_value": None,
"description": "List of motor/position pairs",
"max": None,
"min": 1,
"name": "m_p_pair",
"type": [
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
{
"default_value": 2,
"description": "Position to move to",
"max": None,
"min": 1,
"name": "pos",
"type": "Float"
}
]
}
]
pt7d2_params_def = [
{
"default_value": None,
"description": "List of motor/position pairs",
"max": None,
"min": 1,
"name": "m_p_pair",
"type": [
{
"default_value": 'mot1',
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
{
"default_value": 2,
"description": "Position to move to",
"max": None,
"min": 1,
"name": "pos",
"type": "Float"
}
]
}
]
pt8_params_def = [
{
"default_value": None,
"description": "List of motor/position pairs",
"max": None,
"min": 1,
"name": "m_p_pair",
"type": [
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
{
"default_value": None,
"description": "Position to move to",
"max": 2,
"min": 1,
"name": "pos",
"type": "Float"
}
]
}
]
pt9_params_def = [
{
"default_value": None,
"description": "List of motor/position pairs",
"max": None,
"min": 1,
"name": "m_p_pair",
"type": [
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
{
"default_value": None,
"description": "Position to move to",
"max": 2,
"min": 1,
"name": "pos",
"type": "Float"
}
]
}
]
pt10_params_def = [
{
"default_value": None,
"description": "List of values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": None,
"description": "value",
"max": None,
"min": 1,
"name": "pos",
"type": "Float"
}
]
},
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
]
pt11_params_def = [
{
"default_value": None,
"description": "Counter to count",
"max": None,
"min": 1,
"name": "counter",
"type": "ExpChannel"
},
{
"default_value": None,
"description": "List of values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": None,
"description": "value",
"max": None,
"min": 1,
"name": "pos",
"type": "Float"
},
]
},
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
]
pt12_params_def = [
{
"default_value": None,
"description": "List of values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": None,
"description": "value",
"max": None,
"min": 1,
"name": "pos",
"type": "Float"
},
]
},
{
"default_value": None,
"description": "List of Motors",
"max": None,
"min": 1,
"name": "motor_list",
"type": [
{
"default_value": 'mot1',
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
},
]
},
]
pt13_params_def = [
{
"default_value": None,
"description": "Motor groups",
"max": None,
"min": 1,
"name": "motor_group_list",
"type": [
{
"default_value": None,
"description": "List of motors",
"max": None,
"min": 1,
"name": "motor list",
"type": [
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
}
]
}
]
}
]
pt14_params_def = [
{
"default_value": None,
"description": "Motor groups",
"max": None,
"min": 1,
"name": "motor_group_list",
"type": [
{
"default_value": None,
"description": "List of motors",
"max": None,
"min": 1,
"name": "motor_list",
"type": [
{
"default_value": None,
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
}
]
},
{
"default_value": None,
"description": "Number",
"max": None,
"min": 1,
"name": "float",
"type": "Float"
}
]
}
]
pt14d_params_def = [
{
"default_value": None,
"description": "Motor groups",
"max": None,
"min": 1,
"name": "motor_group_list",
"type": [
{
"default_value": None,
"description": "List of motors",
"max": None,
"min": 1,
"name": "motor_list",
"type": [
{
"default_value": "mot1",
"description": "Motor to move",
"max": None,
"min": 1,
"name": "motor",
"type": "Motor"
}
]
},
{
"default_value": 33,
"description": "Number",
"max": None,
"min": 1,
"name": "float",
"type": "Float"
}
]
}
]
extra1_params_def = [
{
"default_value": None,
"description": "Parameter",
"max": None,
"min": 1,
"name": "param",
"type": "String"
},
{
"default_value": None,
"description": "List of Scan files",
"max": None,
"min": 1,
"name": "ScanFiles List",
"type": [
{
"default_value": None,
"description": "ScanFile",
"max": None,
"min": 1,
"name": "ScanFile",
"type": "String",
}
]
}
]
extra2_params_def = [
{
"default_value": None,
"description": "Value 1",
"max": None,
"min": 1,
"name": "value1",
"type": "Float"
},
{
"default_value": None,
"description": "Value 2",
"max": None,
"min": 1,
"name": "value2",
"type": "float"
},
{
"default_value": None,
"description": "List of Strings",
"max": None,
"min": 1,
"name": "string_list",
"type": [
{
"default_value": None,
"description": "string",
"max": None,
"min": 1,
"name": "string",
"type": "String"
},
]
},
]
extra3_params_def = [
{
"default_value": None,
"description": "param",
"max": None,
"min": 1,
"name": "param",
"type": "String"
},
{
"default_value": None,
"description": "Value",
"max": None,
"min": 1,
"name": "value",
"type": "String"
},
]
extra4_params_def = [
{
"default_value": None,
"description": "value 1",
"max": None,
"min": 1,
"name": "value1",
"type": "Float"
},
{
"default_value": None,
"description": "Value 2",
"max": None,
"min": 1,
"name": "value2",
"type": "Float"
},
]
extra5_params_def = [
{
"default_value": None,
"description": "List of Motor and Values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": None,
"description": "Motor",
"max": None,
"min": 1,
"name": "pos",
"type": "Motor"
},
{
"default_value": None,
"description": "Position to move to",
"max": 2,
"min": 1,
"name": "pos",
"type": "Float"
}
]
},
{
"default_value": None,
"description": "Counter to use",
"max": None,
"min": 1,
"name": "counter",
"type": "ExpChan"
},
{
"default_value": None,
"description": "Value",
"max": None,
"min": 1,
"name": "Value",
"type": "Float"
}
]
extra6_params_def = [
{
"default_value": None,
"description": "List of Values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": None,
"description": "Value 1",
"max": None,
"min": 1,
"name": "value1",
"type": "Float"
},
{
"default_value": None,
"description": "Value 2",
"max": None,
"min": 1,
"name": "value2",
"type": "Float"
}
]
}
]
extra7_params_def = [
{
"default_value": None,
"description": "value 1",
"max": None,
"min": 1,
"name": "value1",
"type": "Float"
},
{
"default_value": None,
"description": "List of Values",
"max": None,
"min": 1,
"name": "numb_list",
"type": [
{
"default_value": None,
"description": "Value 2.1",
"max": None,
"min": 1,
"name": "value21",
"type": "Float"
},
{
"default_value": None,
"description": "Value 2.2",
"max": None,
"min": 1,
"name": "value22",
"type": "Float"
}
]
}
]
extra8_params_def = [
{
"default_value": None,
"description": "value 1",
"max": None,
"min": 1,
"name": "value1",
"type": "Float"
},
{
"default_value": None,
"description": "Value 2",
"max": None,
"min": 1,
"name": "value2",
"type": "Float"
},
]
# parameters examples tests
@insertTest(helper_name="parse", params_def=pt0_params_def,
params_str="", params=[])
@insertTest(helper_name="parse", params_def=pt1d_params_def,
params_str="1", params=["1"])
@insertTest(helper_name="parse", params_def=pt1d_params_def,
params_str="", params=[])
@insertTest(helper_name="parse", params_def=pt2_params_def,
params_str="mot1", params=["mot1"])
@insertTest(helper_name="parse", params_def=pt3_params_def,
params_str="1 34 15", params=[["1", "34", "15"]])
@insertTest(helper_name="parse", params_def=pt3_params_def,
params_str="[1 34 15]", params=[["1", "34", "15"]])
@insertTest(helper_name="parse", params_def=pt3d_params_def,
params_str="1 34 15", params=[["1", "34", "15"]])
@insertTest(helper_name="parse", params_def=pt3d_params_def,
params_str="[1 34 15]", params=[["1", "34", "15"]])
@insertTest(helper_name="parse", params_def=pt3d_params_def,
params_str="[1 [] 15]", params=[["1", [], "15"]])
@insertTest(helper_name="parse", params_def=pt4_params_def,
params_str="[mot1 mot2 mot3]", params=[["mot1", "mot2", "mot3"]])
@insertTest(helper_name="parse", params_def=pt4_params_def,
params_str="mot1 mot2 mot3", params=[["mot1", "mot2", "mot3"]])
@insertTest(helper_name="parse", params_def=pt5_params_def,
params_str="mot1 1 3", params=["mot1", ["1", "3"]])
@insertTest(helper_name="parse", params_def=pt5_params_def,
params_str="mot1 [1 3]", params=["mot1", ["1", "3"]])
@insertTest(helper_name="parse", params_def=pt6_params_def,
params_str="mot1 [1 34 1]", params=["mot1", ["1", "34", "1"]])
@insertTest(helper_name="parse", params_def=pt6_params_def,
params_str="mot1 1 34 1", params=["mot1", ["1", "34", "1"]])
@insertTest(helper_name="parse", params_def=pt7_params_def,
params_str="mot1 1 mot2 3",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt7_params_def,
params_str="[[mot1 1] [mot2 3]]",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt7d1_params_def,
params_str="[[mot1 1] [mot2 3]]",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt7d1_params_def,
params_str="mot1 1 mot2 3",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt7d1_params_def,
params_str="[[mot1] [mot2 3]]",
params=[[["mot1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt7d2_params_def,
params_str="[[mot1 1] [mot2 3]]",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt7d2_params_def,
params_str="mot1 1 mot2 3",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt7d2_params_def,
params_str="[[] [mot2 3] []]",
params=[[[], ["mot2", "3"], []]])
@insertTest(helper_name="parse", params_def=pt8_params_def,
params_str="[[mot1 1] [mot2 3]]",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt8_params_def,
params_str="mot1 1 mot2 3",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt9_params_def,
params_str="[[mot1 1] [mot2 3]]",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt9_params_def,
params_str="mot1 1 mot2 3",
params=[[["mot1", "1"], ["mot2", "3"]]])
@insertTest(helper_name="parse", params_def=pt10_params_def,
params_str="[1 3] mot1", params=[["1", "3"], "mot1"])
@insertTest(helper_name="parse", params_def=pt10_params_def,
params_str="1 mot1", params=[["1"], "mot1"])
@insertTest(helper_name="parse", params_def=pt11_params_def,
params_str="ct1 [1 3] mot1", params=["ct1", ["1", "3"], "mot1"])
@insertTest(helper_name="parse", params_def=pt12_params_def,
params_str="[1 3 4] [mot1 mot2]",
params=[["1", "3", "4"], ["mot1", "mot2"]])
@insertTest(helper_name="parse", params_def=pt13_params_def,
params_str="[[mot1 mot2] [mot3 mot4]]",
params=[[["mot1", "mot2"], ["mot3", "mot4"]]])
@insertTest(helper_name="parse", params_def=pt14_params_def,
params_str="[[[mot1 mot2] 3] [[mot3] 5]]",
params=[[[["mot1", "mot2"], "3"], [["mot3"], "5"]]])
@insertTest(helper_name="parse", params_def=pt14d_params_def,
params_str="[[[mot1 mot2] 3] [[mot3] []]]",
params=[[[["mot1", "mot2"], "3"], [["mot3"], []]]])
@insertTest(helper_name="parse", params_def=pt14d_params_def,
params_str="[[[mot1 []] 3] [[mot3] []]]",
params=[[[["mot1", []], "3"], [["mot3"], []]]])
@insertTest(helper_name="parse", params_def=pt14d_params_def,
params_str="[[[[]] 3] [[mot3] []]]",
params=[[[[[]], "3"], [["mot3"], []]]])
# extra tests for complex parameter values
@insertTest(helper_name="parse", params_def=extra1_params_def,
params_str="ScanFile ['file.nxs' 'file.dat']",
params=["ScanFile", ["file.nxs", "file.dat"]])
@insertTest(helper_name="parse", params_def=extra2_params_def,
params_str="2 3 ['Hello world!' 'How are you?']",
params=["2", "3", ["Hello world!", "How are you?"]])
@insertTest(helper_name="parse", params_def=extra3_params_def,
params_str="ScanFile file.dat",
params=["ScanFile", "file.dat"])
@insertTest(helper_name="parse", params_def=extra4_params_def,
params_str="'2 3'", params=["2 3"])
@insertTest(helper_name="parse", params_def=extra5_params_def,
params_str="[[mot01 3][mot02 5]] ct01 999",
params=[[["mot01", "3"], ["mot02", "5"]], "ct01", "999"])
@insertTest(helper_name="parse", params_def=extra6_params_def,
params_str="[[2 3][4 5]]",
params=[[["2", "3"], ["4", "5"]]])
@insertTest(helper_name="parse", params_def=extra7_params_def,
params_str="1 [2 3]",
params=["1", ["2", "3"]])
@insertTest(helper_name="parse", params_def=extra8_params_def,
params_str="2 3", params=["2", "3"])
class ParamParserTestCase(unittest.TestCase):
    """Unit tests for ParamParser class. Mainly based on macro examples for
    parameters definition.
    """

    def parse(self, params_def, params_str, params):
        """Helper method to test parameters parsing. To be used with
        insertTest decorator.
        """
        p = ParamParser(params_def)
        result = p.parse(params_str)
        msg = "Parsing failed (result: %r; expected: %r)" % \
            (result, params)
        self.assertListEqual(result, params, msg)
```

- avg_line_length: 27.685227 | max_line_length: 78 | alphanum_fraction: 0.42622
- quality signals (41 qsc_*_quality_signal values, schema order): 2,233 | 24,363 | 4.477833 | 0.096283 | 0.105311 | 0.060806 | 0.080308 | 0.842684 | 0.820682 | 0.749575 | 0.719472 | 0.680968 | 0.666067 | 0 | 0.035041 | 0.401428 | 24,363 | 879 | 79 | 27.716724 | 0.650621 | 0.042031 | 0 | 0.594059 | 0 | 0 | 0.255755 | 0 | 0 | 0 | 0 | 0 | 0.001238 | 1 | 0.001238 | false | 0 | 0.003713 | 0 | 0.006188 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0 | hits: 5
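The high duplicate-n-gram fractions in this record (qsc_code_frac_chars_dupe_5grams_quality_signal ≈ 0.84) reflect the many near-identical parameter-definition dicts above. A rough sketch of how a duplicated-n-gram character fraction could be measured — tokenization and overlap handling are assumptions, and real implementations usually avoid double-counting overlapping matches:

```python
from collections import Counter

def frac_chars_dupe_ngrams(content: str, n: int = 5) -> float:
    """Fraction of word characters covered by word n-grams occurring more than once."""
    words = content.split()
    if len(words) < n:
        return 0.0
    grams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
    counts = Counter(grams)
    dupe_chars = sum(sum(len(w) for w in g) for g in grams if counts[g] > 1)
    total_chars = sum(len(w) for w in words)
    return min(1.0, dupe_chars / total_chars) if total_chars else 0.0
```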

---

- hexsha: cb949fd978b8e52ee0d6c74254b2870a453e7ef5 | size: 7,970 | ext: py | lang: Python
- max_stars: `Database.py` @ Warthog710/Rankie (head 42378f5e3ab6165298c7bb86e1bbb040bdc36611, licenses ["MIT"]) | count: null | events: null → null
- max_issues: `Database.py` @ Warthog710/Rankie (head 42378f5e3ab6165298c7bb86e1bbb040bdc36611, licenses ["MIT"]) | count: 10 | events: 2021-07-01T08:14:55.000Z → 2021-08-09T21:55:24.000Z
- max_forks: `Database.py` @ Warthog710/Rankie (head 42378f5e3ab6165298c7bb86e1bbb040bdc36611, licenses ["MIT"]) | count: null | events: null → null

content:

```python
import os
import psycopg2


class rankie_db:
    def __init__(self):
        self.__db = psycopg2.connect(os.environ.get('DATABASE_URL'), sslmode='require')

    def get_prefix(self, guild_id):
        query = 'SELECT prefix FROM prefixes WHERE guild_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id,))
        # Get result(s)
        result = db_handle.fetchone()
        # Close connection
        db_handle.close()
        # Return result
        return result

    def get_roles(self, guild_id):
        query = 'SELECT rank_id, range FROM roles WHERE guild_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id,))
        # Get result(s)
        result = db_handle.fetchall()
        # Close connection
        db_handle.close()
        # Return result
        return result

    def get_season(self, guild_id):
        query = 'SELECT season FROM season WHERE guild_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id,))
        # Get result(s)
        result = db_handle.fetchone()
        # Close connection
        db_handle.close()
        # Return result
        return result

    def get_managed_channels_for_guild(self, guild_id):
        query = 'SELECT channel_id, frequency FROM managed_guilds WHERE guild_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id,))
        # Get result(s)
        result = db_handle.fetchall()
        # Close connection
        db_handle.close()
        # Return result
        return result

    def get_all_managed_channels(self):
        query = 'SELECT channel_id, frequency FROM managed_guilds;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query)
        # Get results
        result = db_handle.fetchall()
        # Close connection
        db_handle.close()
        # Return result
        return result

    def get_saved_messages_for_channel(self, channel_id):
        query = 'SELECT message_id FROM managed_channels WHERE channel_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (channel_id,))
        # Get result(s)
        result = db_handle.fetchall()
        # Close connection
        db_handle.close()
        # Return result
        return result

    def set_prefix(self, guild_id, prefix):
        # If none no prefix has been set
        if self.get_prefix(guild_id) == None:
            query = 'INSERT INTO prefixes (guild_id, prefix) VALUES (%s, %s);'
            args = (guild_id, prefix,)
        # An existing prefix is present, update the existing value
        else:
            query = 'UPDATE prefixes SET prefix=%s WHERE guild_id=%s;'
            args = (prefix, guild_id,)
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, args)
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    #? Dumb function, does not detect for dupes, this is done by Rankie before setting a role
    def set_roles(self, guild_id, rank_id, rank_range):
        query = 'INSERT INTO roles (guild_id, rank_id, range) VALUES (%s, %s, %s);'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id, rank_id, rank_range,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def set_season(self, guild_id, season):
        # If none no season has been set
        if self.get_season(guild_id) == None:
            query = 'INSERT INTO season (guild_id, season) VALUES (%s, %s);'
            args = (guild_id, season,)
        # An existing season is present, update the existing value
        else:
            query = 'UPDATE season SET season=%s WHERE guild_id=%s;'
            args = (season, guild_id,)
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, args)
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    #? Dumb function, does not detect for dupes, this is done by Rankie before setting a role
    def set_managed_channel_for_guild(self, guild_id, channel_id, frequency):
        query = 'INSERT INTO managed_guilds (guild_id, channel_id, frequency) VALUES (%s, %s, %s);'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id, channel_id, frequency,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def set_saved_message_for_guild(self, channel_id, message_id):
        query = 'INSERT INTO managed_channels (channel_id, message_id) VALUES (%s, %s);'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (channel_id, message_id,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def del_role(self, guild_id, rank_id):
        query = 'DELETE FROM roles WHERE guild_id=%s AND rank_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id, rank_id,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def del_managed_channel_from_guild(self, guild_id, channel_id):
        query = 'DELETE FROM managed_guilds WHERE guild_id=%s AND channel_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id, channel_id,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def del_all_saved_messages_from_channel(self, channel_id):
        query = 'DELETE FROM managed_channels WHERE channel_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (channel_id,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def del_all_managed_channels_from_guild(self, guild_id):
        query = 'DELETE FROM managed_guilds WHERE guild_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def del_saved_message_from_channel(self, channel_id, message_id):
        query = 'DELETE FROM managed_channels WHERE channel_id=%s AND message_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (channel_id, message_id,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def del_prefix(self, guild_id):
        query = 'DELETE FROM prefixes WHERE guild_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def del_all_roles_for_guild(self, guild_id):
        query = 'DELETE FROM roles WHERE guild_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def del_season(self, guild_id):
        query = 'DELETE FROM season WHERE guild_id=%s;'
        # Perform query
        db_handle = self.__db.cursor()
        db_handle.execute(query, (guild_id,))
        # Commit results
        self.__db.commit()
        # Close connection
        db_handle.close()

    def close_db(self):
        self.__db.close()
```

- avg_line_length: 27.388316 | max_line_length: 99 | alphanum_fraction: 0.606148
- quality signals (41 qsc_*_quality_signal values, schema order): 986 | 7,970 | 4.612576 | 0.086207 | 0.110818 | 0.058487 | 0.083553 | 0.849164 | 0.800792 | 0.720976 | 0.71482 | 0.698329 | 0.667546 | 0 | 0.000357 | 0.297992 | 7,970 | 290 | 100 | 27.482759 | 0.812511 | 0.163237 | 0 | 0.576642 | 0 | 0 | 0.173208 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.153285 | false | 0 | 0.014599 | 0 | 0.218978 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0 | hits: 5
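Every method in `rankie_db` follows the same shape: open a cursor, execute a parameterized query (psycopg2's `%s` placeholders, so values are never interpolated into the SQL string), commit on writes, and close the cursor. A hypothetical usage sketch, assuming a reachable Postgres instance in `DATABASE_URL` and pre-created tables — the DSN and IDs below are made up:

```python
import os

os.environ["DATABASE_URL"] = "postgres://user:pass@localhost:5432/rankie"  # made-up DSN

db = rankie_db()
db.set_prefix("123456789", "!")    # INSERT on first call, UPDATE afterwards
print(db.get_prefix("123456789"))  # fetchone() -> ('!',)
db.close_db()
```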

---

- hexsha: cba27c576ee7271a2b431a17a408670580c694fd | size: 804 | ext: py | lang: Python
- max_stars: `keras/applications/resnet_v2.py` @ PJmouraocs/keras (head 7a39b6c62d43c25472b2c2476bd2a8983ae4f682, licenses ["MIT"]) | count: 259 | events: 2016-02-09T09:06:29.000Z → 2021-07-29T05:27:40.000Z
- max_issues: `keras/applications/resnet_v2.py` @ PJmouraocs/keras (head 7a39b6c62d43c25472b2c2476bd2a8983ae4f682, licenses ["MIT"]) | count: 50 | events: 2016-02-24T14:46:57.000Z → 2020-01-20T07:34:19.000Z
- max_forks: `keras/applications/resnet_v2.py` @ PJmouraocs/keras (head 7a39b6c62d43c25472b2c2476bd2a8983ae4f682, licenses ["MIT"]) | count: 94 | events: 2016-02-17T20:59:27.000Z → 2021-04-19T08:18:16.000Z

content:

```python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

try:
    from keras_applications import resnet_v2
except:
    resnet_v2 = None

from . import keras_modules_injection


@keras_modules_injection
def ResNet50V2(*args, **kwargs):
    return resnet_v2.ResNet50V2(*args, **kwargs)


@keras_modules_injection
def ResNet101V2(*args, **kwargs):
    return resnet_v2.ResNet101V2(*args, **kwargs)


@keras_modules_injection
def ResNet152V2(*args, **kwargs):
    return resnet_v2.ResNet152V2(*args, **kwargs)


@keras_modules_injection
def decode_predictions(*args, **kwargs):
    return resnet_v2.decode_predictions(*args, **kwargs)


@keras_modules_injection
def preprocess_input(*args, **kwargs):
    return resnet_v2.preprocess_input(*args, **kwargs)
```

- avg_line_length: 22.971429 | max_line_length: 56 | alphanum_fraction: 0.778607
- quality signals (41 qsc_*_quality_signal values, schema order): 99 | 804 | 5.939394 | 0.272727 | 0.170068 | 0.214286 | 0.204082 | 0.435374 | 0.231293 | 0 | 0 | 0 | 0 | 0 | 0.041135 | 0.123134 | 804 | 34 | 57 | 23.647059 | 0.792908 | 0 | 0 | 0.217391 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.217391 | false | 0 | 0.217391 | 0.217391 | 0.652174 | 0.043478
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0
- effective: 0 | hits: 5

---

- hexsha: cbccb58ddb85cede1bf2dd6044f33499a1f43b97 | size: 81 | ext: py | lang: Python
- max_stars: `metatag/__init__.py` @ industrydive/django-site-metatags (head 549a84a5ddbfefcc09d59b864fd7f3f4bf871026, licenses ["Unlicense"]) | count: null | events: null → null
- max_issues: `metatag/__init__.py` @ industrydive/django-site-metatags (head 549a84a5ddbfefcc09d59b864fd7f3f4bf871026, licenses ["Unlicense"]) | count: 1 | events: 2019-10-23T14:48:53.000Z → 2019-10-23T14:48:53.000Z
- max_forks: `metatag/__init__.py` @ industrydive/django-site-metatags (head 549a84a5ddbfefcc09d59b864fd7f3f4bf871026, licenses ["Unlicense"]) | count: null | events: null → null

content:

```python
from __future__ import absolute_import
from metatag.special_class import Metatag
```

- avg_line_length: 27 | max_line_length: 41 | alphanum_fraction: 0.888889
- quality signals (41 qsc_*_quality_signal values, schema order): 11 | 81 | 6 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.098765 | 81 | 2 | 42 | 40.5 | 0.90411 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 5

---

- hexsha: 1de15970423f67ea7c0f664c992cee92d3e4387d | size: 114 | ext: py | lang: Python
- max_stars: `simplpy/__init__.py` @ frankhart2018/simplpy (head a9e8781f9cc8ba9578d6ec786d58e349cba9c52a, licenses ["MIT"]) | count: 1 | events: 2021-02-15T11:36:47.000Z → 2021-02-15T11:36:47.000Z
- max_issues: `simplpy/__init__.py` @ frankhart2018/simplpy (head a9e8781f9cc8ba9578d6ec786d58e349cba9c52a, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `simplpy/__init__.py` @ frankhart2018/simplpy (head a9e8781f9cc8ba9578d6ec786d58e349cba9c52a, licenses ["MIT"]) | count: null | events: null → null

content:

```python
import simplpy.pd_
import simplpy.dict_
import simplpy.list_
import simplpy.sklearn_

__version__ = "0.1-alpha-1"
```

- avg_line_length: 16.285714 | max_line_length: 27 | alphanum_fraction: 0.807018
- quality signals (41 qsc_*_quality_signal values, schema order): 17 | 114 | 4.941176 | 0.588235 | 0.619048 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.029412 | 0.105263 | 114 | 6 | 28 | 19 | 0.794118 | 0 | 0 | 0 | 0 | 0 | 0.096491 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.8 | 0 | 0.8 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 5

---

- hexsha: 382cf89827a3a6549b4c0a56eff9541be37b69cb | size: 75 | ext: py | lang: Python
- max_stars: `Extra Soundsets/src/setup_win.py` @ scify/LeapGame (head 25be4e9b5366994b6ae93dfdd6e78d556bae3a4f, licenses ["Apache-2.0"]) | count: null | events: null → null
- max_issues: `Extra Soundsets/src/setup_win.py` @ scify/LeapGame (head 25be4e9b5366994b6ae93dfdd6e78d556bae3a4f, licenses ["Apache-2.0"]) | count: null | events: null → null
- max_forks: `Extra Soundsets/src/setup_win.py` @ scify/LeapGame (head 25be4e9b5366994b6ae93dfdd6e78d556bae3a4f, licenses ["Apache-2.0"]) | count: 1 | events: 2020-06-30T11:21:31.000Z → 2020-06-30T11:21:31.000Z

content:

```python
from distutils.core import setup
import py2exe

setup(console=['test.py'])
```

- avg_line_length: 15 | max_line_length: 32 | alphanum_fraction: 0.773333
- quality signals (41 qsc_*_quality_signal values, schema order): 11 | 75 | 5.272727 | 0.818182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.014925 | 0.106667 | 75 | 4 | 33 | 18.75 | 0.850746 | 0 | 0 | 0 | 0 | 0 | 0.093333 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 5

---

- hexsha: 383b7f0a13ace07c0511c541e31d8a9737429487 | size: 80 | ext: py | lang: Python
- max_stars: `server/dionysos/errors.py` @ pettinen/dionysos (head 43f47c6903ff214270e40aacef5dd21e3f885361, licenses ["MIT"]) | count: 2 | events: 2019-12-11T02:00:44.000Z → 2020-07-16T23:10:42.000Z
- max_issues: `server/dionysos/errors.py` @ pettinen/dionysos (head 43f47c6903ff214270e40aacef5dd21e3f885361, licenses ["MIT"]) | count: 6 | events: 2021-06-10T06:55:00.000Z → 2021-06-10T06:58:26.000Z
- max_forks: `server/dionysos/errors.py` @ pettinen/dionysos (head 43f47c6903ff214270e40aacef5dd21e3f885361, licenses ["MIT"]) | count: null | events: null → null

content:

```python
class DatabaseError(Exception):
    pass


class GameError(Exception):
    pass
```

- avg_line_length: 11.428571 | max_line_length: 31 | alphanum_fraction: 0.725
- quality signals (41 qsc_*_quality_signal values, schema order): 8 | 80 | 7.25 | 0.625 | 0.448276 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.2 | 80 | 6 | 32 | 13.333333 | 0.90625 | 0 | 0 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0.5 | 0 | 0 | 0.5 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0
- effective: 0 | hits: 5

---

- hexsha: 384617449ad7f595690396fac605dc057d2bc488 | size: 122 | ext: py | lang: Python
- max_stars: `Gustavo Guanabara - Python Learning Exercises/ex012 - F String [Exercise] 5.py` @ TiagoPL/Python-Learning (head 20855433a8a050647ee9c5039aac1e50807324f8, licenses ["MIT"]) | count: null | events: null → null
- max_issues: `Gustavo Guanabara - Python Learning Exercises/ex012 - F String [Exercise] 5.py` @ TiagoPL/Python-Learning (head 20855433a8a050647ee9c5039aac1e50807324f8, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `Gustavo Guanabara - Python Learning Exercises/ex012 - F String [Exercise] 5.py` @ TiagoPL/Python-Learning (head 20855433a8a050647ee9c5039aac1e50807324f8, licenses ["MIT"]) | count: null | events: null → null

content:

```python
n1 = float(input('What is the price of the product? '))
print(f'The price with 5% off would be: U${(n1 / 100) *95:.2f}')
```

- avg_line_length: 40.666667 | max_line_length: 65 | alphanum_fraction: 0.631148
- quality signals (41 qsc_*_quality_signal values, schema order): 24 | 122 | 3.208333 | 0.833333 | 0.207792 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.09 | 0.180328 | 122 | 2 | 66 | 61 | 0.68 | 0 | 0 | 0 | 0 | 0 | 0.729508 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.5
- filter columns (41 qsc_* values, schema order): 1 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1
- effective: 0 | hits: 5

---

- hexsha: 6974db30d7d76f9a694f27cd7a0fc99c5bfb5356 | size: 58 | ext: py | lang: Python
- max_stars: `misc/python/mango/ioTest/__init__.py` @ pymango/pymango (head b55f831f0194b214e746b2dfb4d9c6671a1abc38, licenses ["BSD-2-Clause"]) | count: 3 | events: 2020-05-11T03:23:17.000Z → 2021-03-16T09:01:48.000Z
- max_issues: `misc/python/mango/ioTest/__init__.py` @ pymango/pymango (head b55f831f0194b214e746b2dfb4d9c6671a1abc38, licenses ["BSD-2-Clause"]) | count: null | events: null → null
- max_forks: `misc/python/mango/ioTest/__init__.py` @ pymango/pymango (head b55f831f0194b214e746b2dfb4d9c6671a1abc38, licenses ["BSD-2-Clause"]) | count: 2 | events: 2017-03-04T11:03:40.000Z → 2020-08-01T10:01:36.000Z

content:

```python
from ._compressTest import *
from ._ddsioTest import *
```

- avg_line_length: 19.333333 | max_line_length: 28 | alphanum_fraction: 0.741379
- quality signals (41 qsc_*_quality_signal values, schema order): 6 | 58 | 6.833333 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.189655 | 58 | 2 | 29 | 29 | 0.87234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 5

---

- hexsha: 698d79d2b92f9b7468fd2b20cee6e6779a85e8fd | size: 199 | ext: py | lang: Python
- max_stars: `cyflash/__init__.py` @ StrainMeasurementDevices/cyflash (head 7fec1990d89d4e17f74d463874ad32168c4cb1e0, licenses ["BSD-2-Clause"]) | count: null | events: null → null
- max_issues: `cyflash/__init__.py` @ StrainMeasurementDevices/cyflash (head 7fec1990d89d4e17f74d463874ad32168c4cb1e0, licenses ["BSD-2-Clause"]) | count: null | events: null → null
- max_forks: `cyflash/__init__.py` @ StrainMeasurementDevices/cyflash (head 7fec1990d89d4e17f74d463874ad32168c4cb1e0, licenses ["BSD-2-Clause"]) | count: null | events: null → null

content:

```python
from .bootload import BootloaderHost
from .cyacd import BootloaderData
from .protocol import SerialTransport
from .protocol import CANbusTransport

__version__ = "2.0-alpha"
__name__ = 'smd-cyflash'
```

- avg_line_length: 24.875 | max_line_length: 37 | alphanum_fraction: 0.819095
- quality signals (41 qsc_*_quality_signal values, schema order): 23 | 199 | 6.73913 | 0.695652 | 0.154839 | 0.232258 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.011364 | 0.115578 | 199 | 8 | 38 | 24.875 | 0.869318 | 0 | 0 | 0 | 0 | 0 | 0.1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.666667 | 0 | 0.666667 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0
- effective: 0 | hits: 5

---

- hexsha: 69b4df6565831f61a48cea7d4d30f90faf7298c6 | size: 1,009 | ext: py | lang: Python
- max_stars: `src/templie/exceptions.py` @ sbsmirnoff/templie (head 1fe0e4425bc13c8e97d4b25e235395d1424a21cd, licenses ["MIT"]) | count: 1 | events: 2017-06-23T11:25:18.000Z → 2017-06-23T11:25:18.000Z
- max_issues: `src/templie/exceptions.py` @ sbsmirnoff/templie (head 1fe0e4425bc13c8e97d4b25e235395d1424a21cd, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `src/templie/exceptions.py` @ sbsmirnoff/templie (head 1fe0e4425bc13c8e97d4b25e235395d1424a21cd, licenses ["MIT"]) | count: null | events: null → null

content:

```python
"""
Templie exceptions
"""


class TemplieException(Exception):
    def __init__(self, message):
        super().__init__('ERROR: {}'.format(message))


class DslSyntaxError(TemplieException):
    @classmethod
    def get_error(cls, line):
        return cls('invalid line: {}'.format(line.strip('\n')))


class MissingSection(TemplieException):
    @classmethod
    def get_error(cls, section):
        return cls('Input file does not contain [{}] section'.format(section))


class MissingParameter(TemplieException):
    @classmethod
    def get_error(cls, name, section):
        return cls('Missing {} in section [{}]'.format(name, section))


class WrongValue(TemplieException):
    @classmethod
    def get_error(cls, name, correct_value):
        return cls('Wrong value of the parameter {}: it must be {}'.format(name, correct_value))


class ValidationError(TemplieException):
    @classmethod
    def get_error(cls, message):
        return cls(message)


class ParseException(Exception):
    pass
```

- avg_line_length: 22.931818 | max_line_length: 96 | alphanum_fraction: 0.685828
- quality signals (41 qsc_*_quality_signal values, schema order): 109 | 1,009 | 6.211009 | 0.412844 | 0.199409 | 0.221566 | 0.243722 | 0.314623 | 0.314623 | 0.132939 | 0 | 0 | 0 | 0 | 0 | 0.188305 | 1,009 | 43 | 97 | 23.465116 | 0.826618 | 0.017839 | 0 | 0.2 | 0 | 0 | 0.141404 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.24 | false | 0.04 | 0 | 0.2 | 0.72 | 0
- filter columns (41 qsc_* values, schema order): 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 0
- effective: 0 | hits: 5
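Each subclass above builds its message through a `get_error` classmethod, so callers raise the returned instance instead of formatting strings inline; `TemplieException.__init__` then prefixes every message with `ERROR:`. A small usage example:

```python
try:
    raise MissingSection.get_error("template")
except TemplieException as exc:
    print(exc)  # ERROR: Input file does not contain [template] section
```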

---

- hexsha: 69d147476a4b180b13f6365296d22eebaae9e695 | size: 90 | ext: py | lang: Python
- max_stars: `tests/__init__.py` @ danagle/boggled (head 13fea4c31b5dff72093c38d1ad368dec9d44f4d0, licenses ["MIT"]) | count: null | events: null → null
- max_issues: `tests/__init__.py` @ danagle/boggled (head 13fea4c31b5dff72093c38d1ad368dec9d44f4d0, licenses ["MIT"]) | count: null | events: null → null
- max_forks: `tests/__init__.py` @ danagle/boggled (head 13fea4c31b5dff72093c38d1ad368dec9d44f4d0, licenses ["MIT"]) | count: null | events: null → null

content:

```python
all = ['test_dice', 'test_boggle_dice',
       'test_boggle_board', 'test_boggle_solver']
```

- avg_line_length: 30 | max_line_length: 49 | alphanum_fraction: 0.7
- quality signals (41 qsc_*_quality_signal values, schema order): 12 | 90 | 4.666667 | 0.5 | 0.535714 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.144444 | 90 | 2 | 50 | 45 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0.666667 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0
- filter columns (41 qsc_* values, schema order): 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0
- effective: 0 | hits: 5
69d5d8babf7a01c25da3c309ee02cc84cbe5b57d
| 526
|
py
|
Python
|
J_Mathematical_Modeling/Section 3/solutionExercise9.py
|
sylvain2002/CBM101
|
4d9dc4264ce81cc2af58ceaff96fd0ed7a570af5
|
[
"MIT"
] | null | null | null |
J_Mathematical_Modeling/Section 3/solutionExercise9.py
|
sylvain2002/CBM101
|
4d9dc4264ce81cc2af58ceaff96fd0ed7a570af5
|
[
"MIT"
] | null | null | null |
J_Mathematical_Modeling/Section 3/solutionExercise9.py
|
sylvain2002/CBM101
|
4d9dc4264ce81cc2af58ceaff96fd0ed7a570af5
|
[
"MIT"
] | null | null | null |
print("The elementary reactions are:")
print(" Reaction 1: the birth of susceptible individuals with propensity b")
print(" Reaction 2: their death with propensity mu_S*S(t)")
print(" Reaction 3: their infection with propensity beta * S(t)*I(t)")
print(" Reaction 4: the death of infected individuals with propensity mu_I*I(t) with mu_I>mu_S")
print(" Reaction 5: the recovery of infected individuals with rate alpha * I(t)")
print(" Reaction 6: the death of recovered individuals with propensity mu_R*R(t), with mu_R<mu_I.")
| 65.75
| 99
| 0.754753
| 90
| 526
| 4.333333
| 0.366667
| 0.2
| 0.192308
| 0.076923
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013216
| 0.136882
| 526
| 7
| 100
| 75.142857
| 0.845815
| 0
| 0
| 0
| 0
| 0.142857
| 0.86692
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
0e0366960a59b6a05662084f64fb0d2bc1e56d73
| 7,073
|
py
|
Python
|
providers/forms.py
|
EDario333/minegocito
|
5dd0869fa2510bb8152f4a117f33b2a30bb6d69c
|
[
"MIT"
] | null | null | null |
providers/forms.py
|
EDario333/minegocito
|
5dd0869fa2510bb8152f4a117f33b2a30bb6d69c
|
[
"MIT"
] | null | null | null |
providers/forms.py
|
EDario333/minegocito
|
5dd0869fa2510bb8152f4a117f33b2a30bb6d69c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms import ModelForm
from django.forms.widgets import TextInput
from django.utils.translation import gettext as _
from .models import Providers
from catalogues.models import Person
class FrmProviders(ModelForm):
title = None
action = None
btn_label = None
icon_btn_submit=None
def __init__(self, title=None, action=None, btn_label=None, icon_btn_submit=None, *args, **kwargs):
super(FrmProviders, self).__init__(*args, **kwargs)
self.title = title
self.action = action
self.btn_label = btn_label
self.icon_btn_submit = icon_btn_submit
self.fields['name'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['rfc'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['city'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['address_line1'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['address_line2'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['email'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['cell_phone'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['home_phone'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['other_phone'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
#self.fields['contact_person'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
title_frm = title.upper()
if 'AGREGAR' in title_frm:
self.fields['name'].label = '* ' + self.fields['name'].label
self.fields['rfc'].label = '* ' + self.fields['rfc'].label
self.fields['city'].label = '* ' + self.fields['city'].label
self.fields['address_line1'].label = '* ' + self.fields['address_line1'].label
self.fields['email'].label = '* ' + self.fields['email'].label
self.fields['cell_phone'].label = '* ' + self.fields['cell_phone'].label
#elif 'BUSCAR' in title_frm:
else:
self.fields['name'].required = False
self.fields['rfc'].required = False
self.fields['city'].required = False
self.fields['address_line1'].required = False
self.fields['address_line2'].required = False
self.fields['email'].required = False
self.fields['cell_phone'].required = False
self.fields['home_phone'].required = False
self.fields['other_phone'].required = False
#self.fields['contact_person'].required = False
class Meta:
model = Providers
fields = [
'name', 'rfc', 'city', 'address_line1',
'address_line2', 'email', 'cell_phone',
'home_phone', 'other_phone'#, 'contact_person'
]
exclude = [
'created_at', 'created_when', 'disabled',
'disabled_at', 'disabled_when', 'disabled_reason',
'dropped', 'dropped_at', 'dropped_when',
'created_by_user', 'dropped_reason'
]
widgets = {
'city': TextInput(),
#'contact_person': TextInput()
}
class FrmContactPerson(ModelForm):
title = None
btn_label = None
def __init__(self, title=None, btn_label=None, *args, **kwargs):
super(FrmContactPerson, self).__init__(*args, **kwargs)
self.title = title
self.btn_label = btn_label
self.fields['last_name'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['mothers_last_name'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['first_name'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['middle_name'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['gender'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
self.fields['dob'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0', 'placeholder': _('Date format')}
self.fields['email'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0', 'id': 'id_email_contact_person'}
self.fields['city'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0', 'id': 'id_city_contact_person'}
self.fields['address_line1'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0', 'id': 'id_address_line1_contact_person'}
self.fields['address_line2'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0', 'id': 'id_address_line2_contact_person'}
self.fields['cell_phone'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0', 'id': 'id_cell_phone_contact_person'}
self.fields['home_phone'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0', 'id': 'id_home_phone_contact_person'}
self.fields['other_phone'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0', 'id': 'id_other_phone_contact_person'}
#self.fields['contact_person'].widget.attrs = {'class': 'form-control validate', 'style': 'margin: 0 0 10px 0'}
title_frm = title.upper()
if _('New female').upper() in title_frm:
self.fields['last_name'].label = '* ' + self.fields['last_name'].label
self.fields['first_name'].label = '* ' + self.fields['first_name'].label
self.fields['gender'].label = '* ' + self.fields['gender'].label
self.fields['dob'].label = '* ' + self.fields['dob'].label
self.fields['email'].label = '* ' + self.fields['email'].label
self.fields['cell_phone'].label = '* ' + self.fields['cell_phone'].label
# self.fields['dob'].required = False
#elif 'BUSCAR' in title_frm:
else:
self.fields['last_name'].required = False
self.fields['mothers_last_name'].required = False
self.fields['first_name'].required = False
self.fields['middle_name'].required = False
self.fields['gender'].required = False
self.fields['dob'].required = False
self.fields['email'].required = False
self.fields['city'].required = False
self.fields['address_line1'].required = False
self.fields['address_line2'].required = False
self.fields['cell_phone'].required = False
self.fields['home_phone'].required = False
self.fields['other_phone'].required = False
#self.fields['contact_person'].required = False
class Meta:
model = Person
fields = [
'last_name', 'mothers_last_name', 'first_name',
'middle_name', 'gender', 'dob', 'email',
'city', 'address_line1', 'address_line2',
'cell_phone', 'home_phone', 'other_phone'
]
exclude = [
'created_at', 'created_when', 'disabled',
'disabled_at', 'disabled_when', 'disabled_reason',
'dropped', 'dropped_at', 'dropped_when',
'created_by_user', 'dropped_reason'
]
widgets = {
'city': TextInput(),
#'contact_person': TextInput()
}
| 49.118056
| 153
| 0.665913
| 926
| 7,073
| 4.920086
| 0.099352
| 0.160228
| 0.084284
| 0.105356
| 0.861282
| 0.792142
| 0.741659
| 0.703468
| 0.658692
| 0.636743
| 0
| 0.022868
| 0.152976
| 7,073
| 144
| 154
| 49.118056
| 0.737606
| 0.07055
| 0
| 0.403361
| 0
| 0
| 0.36729
| 0.029907
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016807
| false
| 0
| 0.05042
| 0
| 0.151261
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
0e117e7015cb670f322b20dd8db2aff11a263c0e
| 611
|
py
|
Python
|
Cython/cython1/ex2/main.py
|
hoppfull/Legacy-Python
|
43f465bfdb76c91f2ac16aabb0783fdf5f459adb
|
[
"MIT"
] | null | null | null |
Cython/cython1/ex2/main.py
|
hoppfull/Legacy-Python
|
43f465bfdb76c91f2ac16aabb0783fdf5f459adb
|
[
"MIT"
] | null | null | null |
Cython/cython1/ex2/main.py
|
hoppfull/Legacy-Python
|
43f465bfdb76c91f2ac16aabb0783fdf5f459adb
|
[
"MIT"
] | null | null | null |
import pprimes  # Here we import a pure Python version of my prime function
import cprimes  # Here we import a file with two Cython-compiled versions of my prime function
# The first Cython-compiled function is identical in code to the Python one; the second contains three int declarations
# print(pprimes.primes_py(12000))  # The pure Python version takes 13s to execute with 12000 as value
# print(cprimes.primes_cy(12000))  # The Cython-compiled pure Python version takes 9s to execute with 12000 as value
# print(cprimes.primes(12000))  # The Cython-compiled version with declarations takes less than a second to execute
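# For reference, a hedged sketch of what the benchmarked function might look
# like (the real pprimes/cprimes sources are not shown here); in the Cython
# variant it is declaring the loop variables as C ints that removes most of
# the interpreter overhead:
#
# def primes_py(kmax):
#     result = []
#     n = 2
#     while len(result) < kmax:
#         if all(n % p for p in result):
#             result.append(n)
#         n += 1
#     return result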
| 87.285714
| 114
| 0.808511
| 100
| 611
| 4.92
| 0.44
| 0.113821
| 0.103659
| 0.052846
| 0.174797
| 0.174797
| 0.174797
| 0.174797
| 0.174797
| 0
| 0
| 0.053846
| 0.148936
| 611
| 7
| 115
| 87.285714
| 0.892308
| 0.92635
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3875b49eadc83fa03cf31a85bd13eb43f62c17b4
| 150
|
py
|
Python
|
solver/actions/append.py
|
Levivb/CalculatorSolver
|
27e1b38881522628397a9f8ab1347ce50ad16004
|
[
"MIT"
] | null | null | null |
solver/actions/append.py
|
Levivb/CalculatorSolver
|
27e1b38881522628397a9f8ab1347ce50ad16004
|
[
"MIT"
] | null | null | null |
solver/actions/append.py
|
Levivb/CalculatorSolver
|
27e1b38881522628397a9f8ab1347ce50ad16004
|
[
"MIT"
] | null | null | null |
from ..input_action import InputAction
class Append(InputAction):
def run(self, total, game):
return int(str(total) + str(self._value))
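# A minimal standalone sketch of the digit-append trick used in run(), assuming
# InputAction simply stores its constructor argument on self._value (the base
# class is not shown here):
def append_digit(total: int, value: int) -> int:
    # Concatenate the decimal representations, then parse back to an int.
    return int(str(total) + str(value))

assert append_digit(12, 3) == 123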
| 21.428571
| 49
| 0.693333
| 20
| 150
| 5.1
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.186667
| 150
| 6
| 50
| 25
| 0.836066
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.25
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
3876b9516aa70a1915a24ba36d827daea8e93733
| 125
|
py
|
Python
|
data/db/table/__init__.py
|
dingyuanhong/Collection
|
43bfae16c93b643d1b09d84f27bfdbe5ef8ed497
|
[
"MIT"
] | 1
|
2021-07-03T13:39:42.000Z
|
2021-07-03T13:39:42.000Z
|
data/db/table/__init__.py
|
dingyuanhong/Collection
|
43bfae16c93b643d1b09d84f27bfdbe5ef8ed497
|
[
"MIT"
] | null | null | null |
data/db/table/__init__.py
|
dingyuanhong/Collection
|
43bfae16c93b643d1b09d84f27bfdbe5ef8ed497
|
[
"MIT"
] | 1
|
2020-05-29T23:38:22.000Z
|
2020-05-29T23:38:22.000Z
|
from .day import *
from .minute import *
from .month import *
from .week import *
from .company import *
from .stock import *
| 20.833333
| 22
| 0.72
| 18
| 125
| 5
| 0.444444
| 0.555556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.184
| 125
| 6
| 23
| 20.833333
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
388b50902f0a566b288a52aa3ce195039e85a519
| 245
|
py
|
Python
|
amadeus/media/_files.py
|
akshitsingla/amadeus-python
|
d8f3595e556b674998156f98d8a318045bb4c21c
|
[
"MIT"
] | 125
|
2018-04-09T07:27:24.000Z
|
2022-02-22T11:45:20.000Z
|
amadeus/media/_files.py
|
akshitsingla/amadeus-python
|
d8f3595e556b674998156f98d8a318045bb4c21c
|
[
"MIT"
] | 58
|
2018-03-29T14:58:01.000Z
|
2022-03-17T10:18:07.000Z
|
amadeus/media/_files.py
|
akshitsingla/amadeus-python
|
d8f3595e556b674998156f98d8a318045bb4c21c
|
[
"MIT"
] | 58
|
2018-04-06T10:56:20.000Z
|
2022-03-04T01:23:24.000Z
|
from amadeus.client.decorator import Decorator
from .files import GeneratedPhotos
class Files(Decorator, object):
def __init__(self, client):
Decorator.__init__(self, client)
self.generated_photos = GeneratedPhotos(client)
| 27.222222
| 55
| 0.755102
| 27
| 245
| 6.518519
| 0.518519
| 0.170455
| 0.159091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.167347
| 245
| 8
| 56
| 30.625
| 0.862745
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
38d866e2d7f47ed638e33caa25bb8c9798961f89
| 263
|
py
|
Python
|
src/koeda/utils/space.py
|
toriving/KoEDA
|
5dfbb0e88ede13da2e5e72ac94fe7cb12c0b7cd1
|
[
"MIT"
] | 48
|
2021-04-23T16:13:41.000Z
|
2022-03-24T09:03:26.000Z
|
src/koeda/utils/space.py
|
toriving/KoEDA
|
5dfbb0e88ede13da2e5e72ac94fe7cb12c0b7cd1
|
[
"MIT"
] | 6
|
2020-11-19T13:56:29.000Z
|
2021-09-26T12:13:23.000Z
|
src/koeda/utils/space.py
|
toriving/KoEDA
|
5dfbb0e88ede13da2e5e72ac94fe7cb12c0b7cd1
|
[
"MIT"
] | 3
|
2021-09-13T07:14:29.000Z
|
2021-12-29T09:52:36.000Z
|
SPACE_TOKEN = "\u241F"
def replace_space(text: str) -> str:
return text.replace(" ", SPACE_TOKEN)
def revert_space(text: list) -> str:
clean = (
" ".join("".join(text).replace(SPACE_TOKEN, " ").split())
.strip()
)
return clean
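# A quick round-trip sketch: replace_space swaps spaces for the sentinel token
# so a tokenizer cannot split on them, and revert_space restores them after the
# tokens are re-joined.
encoded = replace_space("hello world")        # 'hello\u241fworld'
assert revert_space(list(encoded)) == "hello world"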
| 18.785714
| 65
| 0.577947
| 31
| 263
| 4.741935
| 0.451613
| 0.204082
| 0.217687
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015075
| 0.243346
| 263
| 13
| 66
| 20.230769
| 0.723618
| 0
| 0
| 0
| 0
| 0
| 0.034221
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0
| 0.111111
| 0.444444
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 5
|
2a1d6f89b15c29b924f7235a132078d0dbc8b653
| 144
|
py
|
Python
|
Lib/site-packages/grasp-1.0-py3.5.egg/grasp/models/grippers/__init__.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | 1
|
2021-11-25T02:14:23.000Z
|
2021-11-25T02:14:23.000Z
|
grasp/models/grippers/__init__.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | null | null | null |
grasp/models/grippers/__init__.py
|
shfkdroal/Robot-Learning-in-Mixed-Adversarial-and-Collaborative-Settings
|
1fa4cd6a566c8745f455fc3d2273208f21f88ced
|
[
"bzip2-1.0.6"
] | null | null | null |
from .gripper import Gripper
from .gripper_factory import gripper_factory
from .two_finger_gripper import TwoFingerGripper, LeftTwoFingerGripper
| 48
| 70
| 0.888889
| 17
| 144
| 7.294118
| 0.470588
| 0.177419
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 144
| 3
| 70
| 48
| 0.939394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2a22d661aad89afc53a07f2c0c6c4d3e567695d1
| 101
|
py
|
Python
|
blog/views.py
|
joegasewicz/joes-tech-blog
|
35f63aa92d47641e19b82d4a50f56833e47b9911
|
[
"MIT"
] | null | null | null |
blog/views.py
|
joegasewicz/joes-tech-blog
|
35f63aa92d47641e19b82d4a50f56833e47b9911
|
[
"MIT"
] | null | null | null |
blog/views.py
|
joegasewicz/joes-tech-blog
|
35f63aa92d47641e19b82d4a50f56833e47b9911
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
def blog(request):
return render(request, "blog.html", {})
| 16.833333
| 43
| 0.712871
| 13
| 101
| 5.538462
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.158416
| 101
| 5
| 44
| 20.2
| 0.847059
| 0
| 0
| 0
| 0
| 0
| 0.089109
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
aa6e7de340509e00ead939b373fa7a6ab2a055bb
| 266
|
py
|
Python
|
services/risks/risk_by_vehicle_year.py
|
rfukui/orign
|
7d0c22d5f006727ec33fa57efec75c7e762decc5
|
[
"Unlicense"
] | null | null | null |
services/risks/risk_by_vehicle_year.py
|
rfukui/orign
|
7d0c22d5f006727ec33fa57efec75c7e762decc5
|
[
"Unlicense"
] | null | null | null |
services/risks/risk_by_vehicle_year.py
|
rfukui/orign
|
7d0c22d5f006727ec33fa57efec75c7e762decc5
|
[
"Unlicense"
] | 1
|
2020-11-09T15:21:51.000Z
|
2020-11-09T15:21:51.000Z
|
from datetime import datetime
from config import VEHICLE_AGE_TO_INCREASE_RISK, RISK_INCREASE_BY_VEHICLE_AGE
def risk_by_vehicle_year(year):
if datetime.now().year - year <= VEHICLE_AGE_TO_INCREASE_RISK:
return RISK_INCREASE_BY_VEHICLE_AGE
return 0
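# A minimal usage sketch (the two config constants are illustrative stand-ins,
# e.g. VEHICLE_AGE_TO_INCREASE_RISK = 5 and RISK_INCREASE_BY_VEHICLE_AGE = 1):
# a vehicle produced within the threshold number of years adds the configured
# risk points, anything older adds nothing.
#
# risk_by_vehicle_year(datetime.now().year)       # -> RISK_INCREASE_BY_VEHICLE_AGE
# risk_by_vehicle_year(datetime.now().year - 30)  # -> 0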
| 29.555556
| 77
| 0.808271
| 41
| 266
| 4.780488
| 0.390244
| 0.204082
| 0.122449
| 0.204082
| 0.489796
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004405
| 0.146617
| 266
| 8
| 78
| 33.25
| 0.859031
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.333333
| 0
| 0.833333
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aa95983ebdc594b021caac1ddcd452d090b226e2
| 56
|
py
|
Python
|
python/ql/test/3/library-tests/modules/package_members/test_package/__init__.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 4,036
|
2020-04-29T00:09:57.000Z
|
2022-03-31T14:16:38.000Z
|
python/ql/test/3/library-tests/modules/package_members/test_package/__init__.py
|
vadi2/codeql
|
a806a4f08696d241ab295a286999251b56a6860c
|
[
"MIT"
] | 2,970
|
2020-04-28T17:24:18.000Z
|
2022-03-31T22:40:46.000Z
|
python/ql/test/3/library-tests/modules/package_members/test_package/__init__.py
|
ScriptBox99/github-codeql
|
2ecf0d3264db8fb4904b2056964da469372a235c
|
[
"MIT"
] | 794
|
2020-04-29T00:28:25.000Z
|
2022-03-30T08:21:46.000Z
|
from .module1 import *
from .module4 import *
import sys
| 18.666667
| 22
| 0.767857
| 8
| 56
| 5.375
| 0.625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.042553
| 0.160714
| 56
| 3
| 23
| 18.666667
| 0.87234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
aaa4cd0c794e6fd3cb94a0ca6787134af48f1f22
| 7,801
|
py
|
Python
|
services/friend_service/friend_request_services.py
|
hnguyenworkstation/hoocons_backend
|
725461812a172ca0a88181e3399e6e2294953273
|
[
"MIT"
] | null | null | null |
services/friend_service/friend_request_services.py
|
hnguyenworkstation/hoocons_backend
|
725461812a172ca0a88181e3399e6e2294953273
|
[
"MIT"
] | null | null | null |
services/friend_service/friend_request_services.py
|
hnguyenworkstation/hoocons_backend
|
725461812a172ca0a88181e3399e6e2294953273
|
[
"MIT"
] | null | null | null |
from flask_jwt import jwt_required, current_identity
from flask_restful import reqparse, Resource
from static import status
from models.user import User
from models.relationship import *
from static import app_constant
parser = reqparse.RequestParser()
parser.add_argument("username", type=str, location="json")
class SendFriendRequest(Resource):
@jwt_required()
def post(self):
try:
# Parsing JSON
body = parser.parse_args()
to_username = body.username
user = current_identity.user()
to_user = User.objects(username=to_username).first()
if user is None or to_user is None:
return {"message": "failed to find user"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
'''
**********
            If this user already sent you a request -- sending one back means accepting the friendship
**********
'''
if any(to_username in sublist for sublist in user.friends_request_from) is True:
# Getting the relationship object
relationship = Relationship.objects(between_users=[user.username, to_username]).first()
if relationship is None:
relationship = Relationship.objects(between_users=[to_username, user.username]).first()
if relationship is None:
return {"message": "unable to find friend request"}, status.HTTP_204_NO_CONTENT
# If the friend request found
user.update(pull__friends_request_from=relationship)
to_user.update(pull__friends_request_to=relationship)
relationship.update(time_of_action=datetime.utcnow(), status=app_constant.is_friend)
user.update(add_to_set__friends=relationship)
                to_user.update(add_to_set__friends=relationship)
                return {"message": "success"}, status.HTTP_200_OK  # accept and stop; do not fall through and create a new request
'''
**********
If you already sent this user a friend request before
**********
'''
if any(to_username in sublist for sublist in user.friends_request_to) is True:
return {"message": "request already sent"}, status.HTTP_201_CREATED
'''
**********
            Nothing happened before; send a brand new request to the user
**********
'''
friend_request = Relationship(between_users=[user.username, to_username],
status=app_constant.friend_requesting).save()
user.update(add_to_set__friends_request_to=friend_request)
to_user.update(add_to_set__friends_request_from=friend_request)
return {"message": "success"}, status.HTTP_200_OK
except Exception as e:
return {"message": str(e)}, status.HTTP_400_BAD_REQUEST
class AcceptFriendRequest(Resource):
@jwt_required()
def post(self):
try:
# Parsing JSON
body = parser.parse_args()
from_username = body.username
user = current_identity.user()
from_user = User.objects(username=from_username).first()
if user is None or from_user is None:
return {"message": "failed to find user"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
# Checking if this user requested friend yet
if any(from_username in sublist for sublist in user.friends_request_from) is True:
# Getting the relationship object
relationship = Relationship.objects(between_users=[user.username, from_username]).first()
if relationship is None:
relationship = Relationship.objects(between_users=[from_username, user.username]).first()
if relationship is None:
return {"message": "unable to find friend request"}, status.HTTP_204_NO_CONTENT
# If the friend request found
user.update(pull__friends_request_from=relationship)
from_user.update(pull__friends_request_to=relationship)
relationship.update(time_of_action=datetime.utcnow(), status=app_constant.is_friend)
user.update(add_to_set__friends=relationship)
from_user.update(add_to_set__friends=relationship)
return {"message": "success"}, status.HTTP_200_OK
else:
return {"message": "unable to find user request"}, status.HTTP_204_NO_CONTENT
except Exception as e:
return {"message": str(e)}, status.HTTP_400_BAD_REQUEST
class DeclineFriendRequest(Resource):
@jwt_required()
def post(self):
try:
# Parsing JSON
body = parser.parse_args()
from_username = body.username
user = current_identity.user()
blocked_user = User.objects(username=from_username).first()
if user is None or blocked_user is None:
return {"message": "failed to find user"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
# Checking if this user requested friend yet
if any(from_username in sublist for sublist in user.friends_request_from) is True:
# Getting the relationship object
relationship = Relationship.objects(between_users=[user.username, from_username]).first()
if relationship is None:
relationship = Relationship.objects(between_users=[from_username, user.username]).first()
if relationship is None:
return {"message": "unable to find friend request"}, status.HTTP_204_NO_CONTENT
# If the friend request found
user.update(pull__friends_request_from=relationship)
blocked_user.update(pull__friends_request_to=relationship)
relationship.delete()
return {"message": "success"}, status.HTTP_200_OK
else:
return {"message": "unable to find user request"}, status.HTTP_204_NO_CONTENT
except Exception as e:
return {"message": str(e)}, status.HTTP_400_BAD_REQUEST
class UnfriendRequest(Resource):
@jwt_required()
def delete(self):
try:
# Parsing JSON
body = parser.parse_args()
friend_username = body.username
user = current_identity.user()
friend = User.objects(username=friend_username).first()
if user is None or friend is None:
return {"message": "failed to find user"}, status.HTTP_203_NON_AUTHORITATIVE_INFORMATION
'''
Checking if the requesting user is in the friend list
'''
if any(friend_username in sublist for sublist in user.friends) is True:
# Getting the relationship object
relationship = Relationship.objects(between_users=[user.username, friend_username]).first()
if relationship is None:
relationship = Relationship.objects(between_users=[friend_username, user.username]).first()
if relationship is None:
return {"message": "unable to find relationship"}, status.HTTP_204_NO_CONTENT
# If the friend request found
user.update(pull__friends=relationship)
friend.update(pull__friends=relationship)
relationship.delete()
return {"message": "success"}, status.HTTP_200_OK
else:
return {"message": "not active friend"}
except Exception as e:
return {"message": str(e)}, status.HTTP_400_BAD_REQUEST
| 44.073446
| 111
| 0.610306
| 848
| 7,801
| 5.385613
| 0.14033
| 0.05693
| 0.039413
| 0.033282
| 0.806656
| 0.800088
| 0.792205
| 0.722794
| 0.699365
| 0.699365
| 0
| 0.01055
| 0.307396
| 7,801
| 176
| 112
| 44.323864
| 0.834721
| 0.048327
| 0
| 0.607143
| 0
| 0
| 0.067152
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.053571
| 0
| 0.303571
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2ab8da4aa0c07a443695f4464280c6b8fdb4ac6d
| 424
|
py
|
Python
|
hw/maria_saganovich/lesson6_hw/test_lvl7_str_duplicate_char.py
|
alexander-sidorov/qap-05
|
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
|
[
"MIT"
] | 9
|
2021-12-10T21:30:07.000Z
|
2022-02-25T21:32:34.000Z
|
hw/maria_saganovich/lesson6_hw/test_lvl7_str_duplicate_char.py
|
alexander-sidorov/qap-05
|
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
|
[
"MIT"
] | 22
|
2021-12-11T08:46:58.000Z
|
2022-02-02T15:56:37.000Z
|
hw/maria_saganovich/lesson6_hw/test_lvl7_str_duplicate_char.py
|
alexander-sidorov/qap-05
|
6db7c0a1eeadd15f7d3f826e7f0ac4be3949ec8c
|
[
"MIT"
] | 8
|
2021-12-11T09:15:45.000Z
|
2022-02-02T08:09:09.000Z
|
from hw.maria_saganovich.lesson6_hw.lvl7_str_duplicate_char import (
func7_str_duplicate_char,
)
def test_func7_str_duplicate_char() -> None:
assert func7_str_duplicate_char("a3b2c1") == {"data": "aaabbc"}
assert func7_str_duplicate_char("aaa3b2c1") == {"data": "aaaaaaaaabbc"}
assert func7_str_duplicate_char("") == {"data": ""}
assert func7_str_duplicate_char([]) == {"errors": ["Invalid argument"]}
| 38.545455
| 75
| 0.721698
| 52
| 424
| 5.423077
| 0.442308
| 0.297872
| 0.397163
| 0.446809
| 0.382979
| 0
| 0
| 0
| 0
| 0
| 0
| 0.037736
| 0.125
| 424
| 10
| 76
| 42.4
| 0.722372
| 0
| 0
| 0
| 0
| 0
| 0.15566
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.125
| true
| 0
| 0.125
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2ae08adaae5d95085ed9c1a3c47ddd83fa78bcc6
| 52
|
py
|
Python
|
Problems/areal.py
|
ramonrwx/kattis
|
f1de45dc3afd0c5d5eeb9fe40a7a54b768202d33
|
[
"Unlicense"
] | 1
|
2021-11-30T06:47:24.000Z
|
2021-11-30T06:47:24.000Z
|
Problems/areal.py
|
ramonrwx/kattis
|
f1de45dc3afd0c5d5eeb9fe40a7a54b768202d33
|
[
"Unlicense"
] | null | null | null |
Problems/areal.py
|
ramonrwx/kattis
|
f1de45dc3afd0c5d5eeb9fe40a7a54b768202d33
|
[
"Unlicense"
] | null | null | null |
from math import sqrt
print(sqrt(int(input())) * 4)
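# The one-liner above reads a square's area from stdin and prints its
# perimeter: each side is sqrt(area) and a square has four sides, so an
# input of 36 prints 24.0.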
| 17.333333
| 29
| 0.692308
| 9
| 52
| 4
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022222
| 0.134615
| 52
| 2
| 30
| 26
| 0.777778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
2af39730cbb1252b5c32782bb9c0aa158df813cc
| 77
|
py
|
Python
|
plugins/wfp/admin.py
|
BLSQ/iaso-copy
|
85fb17f408c15e8c2d730416d1312f58f8db39b7
|
[
"MIT"
] | null | null | null |
plugins/wfp/admin.py
|
BLSQ/iaso-copy
|
85fb17f408c15e8c2d730416d1312f58f8db39b7
|
[
"MIT"
] | null | null | null |
plugins/wfp/admin.py
|
BLSQ/iaso-copy
|
85fb17f408c15e8c2d730416d1312f58f8db39b7
|
[
"MIT"
] | 1
|
2022-03-23T16:44:12.000Z
|
2022-03-23T16:44:12.000Z
|
from django.contrib import admin
from iaso.models import Entity, EntityType
| 19.25
| 42
| 0.831169
| 11
| 77
| 5.818182
| 0.818182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12987
| 77
| 3
| 43
| 25.666667
| 0.955224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2d47f9030fd3a942f3a6aec629981ba363f85291
| 119
|
py
|
Python
|
AI2/AI2/accounts/views.py
|
badrabbit96/Django_Python_App
|
7876b02f5c9fee09d8fb1cb158f8310ed12a7faa
|
[
"MIT"
] | null | null | null |
AI2/AI2/accounts/views.py
|
badrabbit96/Django_Python_App
|
7876b02f5c9fee09d8fb1cb158f8310ed12a7faa
|
[
"MIT"
] | null | null | null |
AI2/AI2/accounts/views.py
|
badrabbit96/Django_Python_App
|
7876b02f5c9fee09d8fb1cb158f8310ed12a7faa
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, HttpResponse
def home(request):
return render(request, 'accounts/login.html')
| 29.75
| 49
| 0.781513
| 15
| 119
| 6.2
| 0.866667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 119
| 4
| 50
| 29.75
| 0.885714
| 0
| 0
| 0
| 0
| 0
| 0.158333
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
|
0
| 5
|
2d5ef8dbe78cced309375b97d59e7f6268a1c03b
| 6,721
|
py
|
Python
|
plot/plot_train.py
|
ShaoboYang-USTC/DisperPicker
|
d9365d981d896a9ace4f0298c7c888fd2da699cb
|
[
"MIT"
] | null | null | null |
plot/plot_train.py
|
ShaoboYang-USTC/DisperPicker
|
d9365d981d896a9ace4f0298c7c888fd2da699cb
|
[
"MIT"
] | null | null | null |
plot/plot_train.py
|
ShaoboYang-USTC/DisperPicker
|
d9365d981d896a9ace4f0298c7c888fd2da699cb
|
[
"MIT"
] | 1
|
2022-03-30T20:57:46.000Z
|
2022-03-30T20:57:46.000Z
|
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from config.config import Config
# np.set_printoptions(threshold=np.nan)
def plot_train(fig, curve1, curve2, data_area, name):
""" Plot the figures of the training process.
Args:
fig: Group and phase dispersion images.
curve1: Predicted probability images.
curve2: Label probability images.
data_area: Data area.
name: Image storage name.
"""
data_T_range = {'Suqian':[0.5, 8, 76], 'Changning':[0.1, 7.6, 76],
'Weifang':[0.6, 8.1, 76]}
range_V = [1, 5, 201] # velocity range
range_T = data_T_range[data_area] # period range
fontsize = 18
figformat = '.png'
plt.figure(figsize=(12, 16), clear=True)
plt.tick_params(labelsize=15)
# plt.subplots_adjust(wspace=0.3, hspace=0.3)
plt.subplot(421)
image = fig[0]
z_max = np.array(image).max()
z_min = np.array(image).min()
x1 = np.linspace(range_T[0],range_T[1],range_T[2])
y1 = np.linspace(range_V[0],range_V[1],range_V[2])
plt.pcolor(x1, y1, image, cmap='jet', vmin=z_min, vmax=z_max+0.05)
plt.colorbar()
    true_G = []
curve2[0] = np.array(curve2[0])
max = np.max(curve2[0], axis=0)
curve2[0] = curve2[0].T
for i in range(len(max)):
index = list(curve2[0][i]).index(max[i])
        true_G.append(index*Config().dV+range_V[0])
    b, e = line_interval(true_G, range_T, range_V)
    plt.plot(x1[b:e],true_G[b:e],'-wo', linewidth=2, markersize=3, label='label')
curve2[0] = curve2[0].T
plt.xlabel('Period (s)',fontsize=fontsize)
plt.ylabel('Group Velocity (km/s)',fontsize=fontsize)
plt.title('G disp spectrogram',fontsize=fontsize)
plt.tick_params(labelsize=15)
plt.subplot(422)
x2 = x1
y2 = y1
plt.pcolor(x2, y2, curve1[0], cmap='jet', vmin=0, vmax=1.05)
plt.colorbar()
plt.xlabel('Period (s)',fontsize=fontsize)
plt.ylabel('Group Velocity (km/s)',fontsize=fontsize)
plt.title('Predicted G',fontsize=fontsize)
plt.tick_params(labelsize=15)
plt.subplot(423)
x3 = x1
y3 = y1
plt.pcolor(x3, y3, curve2[0], cmap='jet', vmin=0, vmax=1.05)
plt.colorbar()
plt.xlabel('Period (s)',fontsize=fontsize)
plt.ylabel('Group Velocity (km/s)',fontsize=fontsize)
plt.title('Label G',fontsize=fontsize)
plt.tick_params(labelsize=15)
plt.subplot(424)
x4 = x1
curve1[0] = np.array(curve1[0])
max = np.max(curve1[0], axis=0)
curve1[0] = curve1[0].T
y4=[]
for i in range(len(max)):
index = list(curve1[0][i]).index(max[i])
y4.append(index*Config().dV+range_V[0])
plt.plot(x4,y4,'-ko', linewidth=2, markersize=3, label='Predicted')
x4 = x1
#curve2[0] = np.array(curve2[0])
#max = np.max(curve2[0], axis=0)
#curve2[0] = curve2[0].T
#y4=[]
#for i in range(len(max)):
# index = list(curve2[0][i]).index(max[i])
# y4.append(index/500)
plt.pcolor(x1, y1, image, cmap='jet', vmin=z_min, vmax=z_max+0.05)
plt.colorbar()
    b, e = line_interval(true_G, range_T, range_V)
    plt.plot(x4[b:e],true_G[b:e],'-wo', linewidth=2, markersize=3, label='Label')
    xrefer = x4[b:e]
    yrefer = true_G[b:e]
plt.ylim((range_V[0],range_V[1]))
plt.legend(loc=0,fontsize=14)
plt.xlabel('Period (s)',fontsize=fontsize)
plt.ylabel('Group Velocity (km/s)',fontsize=fontsize)
plt.title('Group velocity',fontsize=fontsize)
plt.tick_params(labelsize=15)
plt.subplot(425)
image = fig[1]
z_max = np.array(image).max()
z_min = np.array(image).min()
x1 = np.linspace(range_T[0],range_T[1],range_T[2])
y1 = np.linspace(range_V[0],range_V[1],range_V[2])
plt.pcolor(x1, y1, image, cmap='jet', vmin=z_min, vmax=z_max+0.05)
plt.colorbar()
    true_C = []
curve2[1] = np.array(curve2[1])
max = np.max(curve2[1], axis=0)
curve2[1] = curve2[1].T
for i in range(len(max)):
index = list(curve2[1][i]).index(max[i])
        true_C.append(index*Config().dV+range_V[0])
    b, e = line_interval(true_C, range_T, range_V)
    plt.plot(x1[b:e],true_C[b:e],'-wo', linewidth=2, markersize=3, label='Label')
curve2[1] = curve2[1].T
plt.xlabel('Period (s)',fontsize=fontsize)
plt.ylabel('Phase Velocity (km/s)',fontsize=fontsize)
plt.title('C disp spectrogram',fontsize=fontsize)
plt.tick_params(labelsize=15)
plt.subplot(426)
x2 = x1
y2 = y1
plt.pcolor(x2, y2, curve1[1], cmap='jet', vmin=0, vmax=1.05)
plt.colorbar()
plt.xlabel('Period (s)',fontsize=fontsize)
plt.ylabel('Phase Velocity (km/s)',fontsize=fontsize)
plt.title('Predicted C',fontsize=fontsize)
plt.tick_params(labelsize=15)
plt.subplot(427)
x3 = x1
y3 = y1
plt.pcolor(x3, y3, curve2[1], cmap='jet', vmin=0, vmax=1.05)
plt.colorbar()
plt.xlabel('Period (s)',fontsize=fontsize)
plt.ylabel('Phase Velocity (km/s)',fontsize=fontsize)
plt.title('Label C',fontsize=fontsize)
plt.tick_params(labelsize=15)
plt.subplot(428)
x4 = x1
curve1[1] = np.array(curve1[1])
max = np.max(curve1[1], axis=0)
curve1[1] = curve1[1].T
y4=[]
for i in range(len(max)):
index = list(curve1[1][i]).index(max[i])
y4.append(index*Config().dV+range_V[0])
plt.plot(x4,y4,'-ko', linewidth=2, markersize=3, label='Predicted')
x4 = x1
#curve2[1] = np.array(curve2[1])
#max = np.max(curve2[1], axis=0)
#curve2[1] = curve2[1].T
#y4=[]
#for i in range(len(max)):
# index = list(curve2[1][i]).index(max[i])
# y4.append(index/500)
plt.pcolor(x1, y1, image, cmap='jet', vmin=z_min, vmax=z_max+0.05)
plt.colorbar()
    b, e = line_interval(true_C, range_T, range_V)
    plt.plot(x4[b:e],true_C[b:e],'-wo', linewidth=2, markersize=3, label='Label')
# plt.plot(xrefer,yrefer,'-co', linewidth=1.5, markersize=2, label='G velocity')
plt.ylim((range_V[0],range_V[1]))
plt.legend(loc=0,fontsize=14)
plt.xlabel('Period (s)',fontsize=fontsize)
plt.ylabel('Phase Velocity (km/s)',fontsize=fontsize)
plt.title('Phase velocity',fontsize=fontsize)
plt.tick_params(labelsize=15)
plt.tight_layout()
plt.savefig(name+figformat, bbox_inches='tight', dpi=300)
plt.close()
def line_interval(curve, range_T, range_V):
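    # Added note: trims the flat leading/trailing portions of a picked curve.
    # 'start' advances past initial samples pinned at the velocity floor
    # range_V[0]; 'end' walks back past trailing floor samples, so callers
    # plot only the informative interval curve[start:end].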
start = 0
end = range_T[-1]
for each in curve:
if each != range_V[0]:
break
start += 1
reverse = list(curve)
reverse.reverse()
for each in reverse:
if each != range_V[0]:
break
end -= 1
return start, end
if __name__ == '__main__':
pass
| 31.115741
| 84
| 0.615385
| 1,073
| 6,721
| 3.767008
| 0.147251
| 0.095002
| 0.112815
| 0.079169
| 0.752598
| 0.74666
| 0.731074
| 0.731074
| 0.723652
| 0.669471
| 0
| 0.05985
| 0.206963
| 6,721
| 215
| 85
| 31.260465
| 0.698499
| 0.116649
| 0
| 0.528662
| 0
| 0
| 0.079891
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012739
| false
| 0.006369
| 0.025478
| 0
| 0.044586
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
2db06a2ad5cfce215a90b61ec129c1b07aca52a0
| 922
|
py
|
Python
|
06.Polymorphism_and_abstraction/Lab/image_area.py
|
nmoskova/Python-OOP
|
07327bcb93eee3a7db5d7c0bbdd1b54eb9e8b864
|
[
"MIT"
] | null | null | null |
06.Polymorphism_and_abstraction/Lab/image_area.py
|
nmoskova/Python-OOP
|
07327bcb93eee3a7db5d7c0bbdd1b54eb9e8b864
|
[
"MIT"
] | null | null | null |
06.Polymorphism_and_abstraction/Lab/image_area.py
|
nmoskova/Python-OOP
|
07327bcb93eee3a7db5d7c0bbdd1b54eb9e8b864
|
[
"MIT"
] | null | null | null |
class ImageArea:
def __init__(self, width, height):
self.width = width
self.height = height
def get_area(self):
area = self.width * self.height
return area
def __gt__(self, other):
return self.get_area() > other.get_area()
def __ge__(self, other):
return self.get_area() >= other.get_area()
def __lt__(self, other):
return self.get_area() < other.get_area()
def __le__(self, other):
return self.get_area() <= other.get_area()
def __eq__(self, other):
return self.get_area() == other.get_area()
def __ne__(self, other):
return self.get_area() != other.get_area()
image_one = ImageArea(10, 10)
image_two = ImageArea(10, 10)
print(image_one < image_two)
print(image_one <= image_two)
print(image_one > image_two)
print(image_one >= image_two)
print(image_one == image_two)
print(image_one != image_two)
| 24.263158
| 50
| 0.644252
| 130
| 922
| 4.146154
| 0.169231
| 0.168831
| 0.166976
| 0.211503
| 0.684601
| 0.684601
| 0.684601
| 0.684601
| 0.684601
| 0.6141
| 0
| 0.011252
| 0.22885
| 922
| 37
| 51
| 24.918919
| 0.746835
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.296296
| false
| 0
| 0
| 0.222222
| 0.592593
| 0.222222
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
2dcaabe40920abbb3e912e0a14a77ad4369c7b5d
| 61
|
py
|
Python
|
tests/tasks/firebolt/__init__.py
|
suryatmodulus/prefect
|
e4ac9f6aa831140c7fba0397f3e5e0884b1b9e42
|
[
"Apache-2.0"
] | 3
|
2021-11-09T10:46:58.000Z
|
2022-03-11T04:22:35.000Z
|
tests/tasks/firebolt/__init__.py
|
suryatmodulus/prefect
|
e4ac9f6aa831140c7fba0397f3e5e0884b1b9e42
|
[
"Apache-2.0"
] | 10
|
2021-06-26T08:04:45.000Z
|
2022-03-26T08:04:52.000Z
|
tests/tasks/firebolt/__init__.py
|
suryatmodulus/prefect
|
e4ac9f6aa831140c7fba0397f3e5e0884b1b9e42
|
[
"Apache-2.0"
] | 1
|
2022-03-11T04:22:40.000Z
|
2022-03-11T04:22:40.000Z
|
import pytest
pytest.importorskip("firebolt.db.connection")
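# importorskip makes every test collected from this package skip, rather than
# error, when the optional 'firebolt' dependency is not importable.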
| 15.25
| 45
| 0.819672
| 7
| 61
| 7.142857
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.065574
| 61
| 3
| 46
| 20.333333
| 0.877193
| 0
| 0
| 0
| 0
| 0
| 0.360656
| 0.360656
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
2dda30e35eefe43ca7a6507a1123ca279b44e73c
| 156
|
py
|
Python
|
Notebooks/SentinelUtilities/SentinelExceptions/__init__.py
|
ytognder/Azure-Sentinel
|
7345560f178e731d7ba5a5541fd3383bca285311
|
[
"MIT"
] | 266
|
2019-10-18T00:41:39.000Z
|
2022-03-18T05:44:01.000Z
|
Notebooks/SentinelUtilities/SentinelExceptions/__init__.py
|
ytognder/Azure-Sentinel
|
7345560f178e731d7ba5a5541fd3383bca285311
|
[
"MIT"
] | 113
|
2020-03-10T16:56:10.000Z
|
2022-03-28T21:54:26.000Z
|
Notebooks/SentinelUtilities/SentinelExceptions/__init__.py
|
ytognder/Azure-Sentinel
|
7345560f178e731d7ba5a5541fd3383bca285311
|
[
"MIT"
] | 93
|
2020-01-07T20:28:43.000Z
|
2022-03-23T04:09:39.000Z
|
# pylint: disable-msg=C0103
"""
SentinelExceptions: This package is developed for custom exceptions.
"""
# __init__.py
from .input_error import InputError
| 19.5
| 68
| 0.775641
| 19
| 156
| 6.105263
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029412
| 0.128205
| 156
| 7
| 69
| 22.285714
| 0.823529
| 0.685897
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
930457b86ff736f0ab28bdf05dc3e7edac060778
| 79
|
py
|
Python
|
passenger_wsgi.py
|
perqu/BookShelf
|
7880586b91c2c7d0fb52bc8c08628ad775cba8cc
|
[
"MIT"
] | null | null | null |
passenger_wsgi.py
|
perqu/BookShelf
|
7880586b91c2c7d0fb52bc8c08628ad775cba8cc
|
[
"MIT"
] | null | null | null |
passenger_wsgi.py
|
perqu/BookShelf
|
7880586b91c2c7d0fb52bc8c08628ad775cba8cc
|
[
"MIT"
] | null | null | null |
import sys, os
sys.path.append(os.getcwd())
from App import app as application
| 19.75
| 34
| 0.772152
| 14
| 79
| 4.357143
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.126582
| 79
| 3
| 35
| 26.333333
| 0.884058
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9306447e59d43ad304fd5f96e3a183b0992b95b1
| 75
|
py
|
Python
|
src/evaluation/__init__.py
|
iserh/data-augmentation
|
1e1e99177ff4256c68cafe043bd7e50d52bf669d
|
[
"MIT"
] | null | null | null |
src/evaluation/__init__.py
|
iserh/data-augmentation
|
1e1e99177ff4256c68cafe043bd7e50d52bf669d
|
[
"MIT"
] | null | null | null |
src/evaluation/__init__.py
|
iserh/data-augmentation
|
1e1e99177ff4256c68cafe043bd7e50d52bf669d
|
[
"MIT"
] | null | null | null |
"""Evaluation."""
from .models import CNNMNIST, ModelProben1 # noqa: F401
| 25
| 56
| 0.72
| 8
| 75
| 6.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.061538
| 0.133333
| 75
| 2
| 57
| 37.5
| 0.769231
| 0.306667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
9341d0f223840ced13ba056ee6d6007afa0195ea
| 145
|
py
|
Python
|
exceptions.py
|
t3m8ch/WeatherTelegramBot
|
170f9096ce08a0138c238e10b9ffb91e6bf6c7fc
|
[
"MIT"
] | 1
|
2021-02-01T15:01:11.000Z
|
2021-02-01T15:01:11.000Z
|
exceptions.py
|
t3m8ch/WeatherTelegramBot
|
170f9096ce08a0138c238e10b9ffb91e6bf6c7fc
|
[
"MIT"
] | null | null | null |
exceptions.py
|
t3m8ch/WeatherTelegramBot
|
170f9096ce08a0138c238e10b9ffb91e6bf6c7fc
|
[
"MIT"
] | 1
|
2022-03-24T09:27:40.000Z
|
2022-03-24T09:27:40.000Z
|
class CityNotFoundError(Exception):
pass
class ServerError(Exception):
pass
class OWMApiKeyIsNotCorrectError(ServerError):
pass
| 12.083333
| 46
| 0.758621
| 12
| 145
| 9.166667
| 0.5
| 0.236364
| 0.327273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.17931
| 145
| 11
| 47
| 13.181818
| 0.92437
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.5
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
935b2ddb634c08e3060555857bb19a2dfb94531b
| 78,736
|
py
|
Python
|
biosys/apps/main/tests/api/test_observation.py
|
florianm/biosys
|
934d06ed805b0734f3cb9a00feec6cd81a94e512
|
[
"Apache-2.0"
] | 1
|
2020-08-24T02:44:36.000Z
|
2020-08-24T02:44:36.000Z
|
biosys/apps/main/tests/api/test_observation.py
|
florianm/biosys
|
934d06ed805b0734f3cb9a00feec6cd81a94e512
|
[
"Apache-2.0"
] | 19
|
2016-09-29T01:03:18.000Z
|
2021-07-02T06:54:05.000Z
|
biosys/apps/main/tests/api/test_observation.py
|
florianm/biosys
|
934d06ed805b0734f3cb9a00feec6cd81a94e512
|
[
"Apache-2.0"
] | 5
|
2018-12-20T05:36:28.000Z
|
2021-09-29T00:44:31.000Z
|
import datetime
import io
import json
import re
from os import path
from django.contrib.gis.geos import Point
from django.urls import reverse
from django.utils import timezone
from openpyxl import load_workbook
from rest_framework import status
from main import constants
from main.models import Site, Dataset, Record
from main.tests import factories
from main.tests.api import helpers
from main.tests.test_data_package import clone
class TestPermissions(helpers.BaseUserTestCase):
"""
Test Permissions
Get: authenticated
Update: admin, data_engineer, custodians
Create: admin, data_engineer, custodians
Delete: admin, data_engineer, custodians
"""
def setUp(self):
super(TestPermissions, self).setUp()
rows = [
['What', 'When', 'Latitude', 'Longitude', 'Comments'],
['Chubby bat', '2018-06-01', -32, 115.75, 'It is huge!']
]
self.ds_1 = self._create_dataset_and_records_from_rows(rows)
self.assertEqual(self.ds_1.type, Dataset.TYPE_OBSERVATION)
self.record_1 = self.ds_1.record_set.first()
self.assertIsNotNone(self.record_1)
def test_get(self):
urls = [
reverse('api:record-list'),
reverse('api:record-detail', kwargs={'pk': self.record_1.pk})
]
access = {
"forbidden": [self.anonymous_client],
"allowed": [
self.readonly_client,
self.custodian_1_client,
self.custodian_2_client,
self.admin_client,
self.data_engineer_1_client,
self.data_engineer_2_client
]
}
for client in access['forbidden']:
for url in urls:
self.assertIn(
client.get(url).status_code,
[status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]
)
for client in access['allowed']:
for url in urls:
self.assertEqual(
client.get(url).status_code,
status.HTTP_200_OK
)
def test_create(self):
"""
Admin, custodians and data engineers
:return:
"""
urls = [reverse('api:record-list')]
ds = self.ds_1
rec = self.record_1
data = {
"dataset": rec.dataset.pk,
"data": rec.data,
"datetime": rec.datetime,
"geometry": rec.geometry.geojson
}
access = {
"forbidden": [
self.anonymous_client,
self.readonly_client,
self.custodian_2_client,
self.data_engineer_2_client
],
"allowed": [
self.admin_client,
self.custodian_1_client
]
}
for client in access['forbidden']:
for url in urls:
self.assertIn(
client.post(url, data, format='json').status_code,
[status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]
)
for client in access['allowed']:
for url in urls:
count = ds.record_queryset.count()
self.assertEqual(
client.post(url, data, format='json').status_code,
status.HTTP_201_CREATED
)
self.assertEqual(ds.record_queryset.count(), count + 1)
def test_bulk_create(self):
"""
Cannot create bulk with this end point
:return:
"""
urls = [reverse('api:record-list')]
rec = self.record_1
ds = self.ds_1
data = [
{
"dataset": rec.dataset.pk,
"data": rec.data
},
{
"dataset": rec.dataset.pk,
"data": rec.data
}
]
access = {
"forbidden": [
self.anonymous_client,
self.readonly_client,
self.custodian_2_client,
self.admin_client,
self.custodian_1_client,
self.data_engineer_1_client,
self.data_engineer_2_client
],
"allowed": []
}
for client in access['forbidden']:
for url in urls:
self.assertIn(
client.post(url, data, format='json').status_code,
[status.HTTP_400_BAD_REQUEST, status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]
)
for client in access['allowed']:
for url in urls:
count = ds.record_queryset.count()
self.assertEqual(
client.post(url, data, format='json').status_code,
status.HTTP_201_CREATED
)
self.assertEqual(ds.record_queryset.count(), count + len(data))
def test_update(self):
"""
admin + custodian of project for site 1
:return:
"""
rec = self.record_1
previous_data = clone(rec.data)
updated_data = clone(previous_data)
updated_data['Longitude'] = '118.78'
urls = [reverse('api:record-detail', kwargs={'pk': rec.pk})]
data = {
"data": updated_data,
}
access = {
"forbidden": [
self.anonymous_client,
self.readonly_client,
self.custodian_2_client,
self.data_engineer_2_client
],
"allowed": [self.admin_client, self.custodian_1_client, self.data_engineer_1_client]
}
for client in access['forbidden']:
for url in urls:
self.assertIn(
client.patch(url, data, format='json').status_code,
[status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]
)
for client in access['allowed']:
for url in urls:
rec.data = previous_data
rec.save()
self.assertEqual(
client.patch(url, data, format='json').status_code,
status.HTTP_200_OK
)
rec.refresh_from_db()
self.assertEqual(rec.data, updated_data)
def test_delete(self):
"""
Currently admin, custodians and data engineers
:return:
"""
rec = self.record_1
urls = [reverse('api:record-detail', kwargs={'pk': rec.pk})]
data = None
access = {
"forbidden": [
self.anonymous_client,
self.readonly_client,
self.custodian_2_client,
self.data_engineer_2_client
],
"allowed": [
self.admin_client,
self.custodian_1_client,
self.data_engineer_1_client
]
}
for client in access['forbidden']:
for url in urls:
self.assertIn(
client.delete(url, data, format='json').status_code,
[status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]
)
for client in access['allowed']:
for url in urls:
rec.save()
count = Dataset.objects.count()
self.assertEqual(
client.delete(url, data, format='json').status_code,
status.HTTP_204_NO_CONTENT
)
self.assertTrue(Dataset.objects.count(), count - 1)
def test_options(self):
urls = [
reverse('api:record-list'),
reverse('api:record-detail', kwargs={'pk': 1})
]
access = {
"forbidden": [self.anonymous_client],
"allowed": [
self.readonly_client,
self.custodian_1_client,
self.custodian_2_client,
self.admin_client,
self.data_engineer_1_client,
self.data_engineer_2_client
]
}
for client in access['forbidden']:
for url in urls:
self.assertIn(
client.options(url).status_code,
[status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]
)
# authenticated
for client in access['allowed']:
for url in urls:
self.assertEqual(
client.options(url).status_code,
status.HTTP_200_OK
)
class TestDataValidation(helpers.BaseUserTestCase):
def setUp(self):
super(TestDataValidation, self).setUp()
self.ds_1 = self._create_dataset_with_schema(
self.project_1,
self.data_engineer_1_client,
self.observation_schema_with_with_all_possible_geometry_fields(),
dataset_type=Dataset.TYPE_OBSERVATION
)
# set the date
self.record_1 = self._create_record(
self.custodian_1_client,
self.ds_1,
{
'What': 'Chubby bat',
'When': '2018-06-30',
'Latitude': -32.0,
'Longitude': 115.75
}
)
self.assertIsNotNone(self.record_1)
def test_create_one_happy_path(self):
"""
Test the create of one record
:return:
"""
        # grab an existing record and re-inject it
record = self.record_1
ds = self.ds_1
data = {
"dataset": record.dataset.pk,
"data": record.data
}
url = reverse('api:record-list')
client = self.custodian_1_client
count = ds.record_queryset.count()
self.assertEqual(
client.post(url, data, format='json').status_code,
status.HTTP_201_CREATED
)
self.assertEqual(ds.record_queryset.count(), count + 1)
def test_empty_not_allowed(self):
ds = self.ds_1
record = self.record_1
data = {
"dataset": record.dataset.pk,
"data": {}
}
url = reverse('api:record-list')
client = self.custodian_1_client
count = ds.record_queryset.count()
self.assertEqual(
client.post(url, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(ds.record_queryset.count(), count)
def test_create_column_not_in_schema(self):
"""
        Test that if we introduce a column not in the dataset schema it will not validate
:return:
"""
ds = self.ds_1
record = self.record_1
incorrect_data = clone(record.data)
incorrect_data['Extra Column'] = "Extra Value"
data = {
"dataset": record.dataset.pk,
"data": incorrect_data
}
url = reverse('api:record-list')
# set strict mode
url = helpers.set_strict_mode(url)
client = self.custodian_1_client
count = ds.record_queryset.count()
self.assertEqual(
client.post(url, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(ds.record_queryset.count(), count)
def test_update_column_not_in_schema(self):
"""
        Test that if we introduce a column not in the dataset schema it will not validate
:return:
"""
ds = self.ds_1
record = self.record_1
incorrect_data = clone(record.data)
incorrect_data['Extra Column'] = "Extra Value"
data = {
"dataset": record.dataset.pk,
"data": incorrect_data
}
url = reverse('api:record-detail', kwargs={"pk": record.pk})
client = self.custodian_1_client
count = ds.record_queryset.count()
# set strict mode
url = helpers.set_strict_mode(url)
self.assertEqual(
client.put(url, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(ds.record_queryset.count(), count)
self.assertEqual(
client.patch(url, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(ds.record_queryset.count(), count)
def test_date_error(self):
"""
Test date values
:return:
"""
ds = self.ds_1
record = self.record_1
date_column = ds.schema.observation_date_field.name
# ensure the date field is set as required
self.assertTrue(ds.schema.observation_date_field.required)
new_data = clone(record.data)
url_post = reverse('api:record-list')
url_update = reverse('api:record-detail', kwargs={'pk': record.pk})
valid_values = ['15/08/2008']
for value in valid_values:
new_data[date_column] = value
data = {
"dataset": record.dataset.pk,
"data": new_data
}
client = self.custodian_1_client
count = ds.record_queryset.count()
self.assertEqual(
client.post(url_post, data, format='json').status_code,
status.HTTP_201_CREATED
)
self.assertEqual(ds.record_queryset.count(), count + 1)
invalid_values = [None, '', 'abcd']
for value in invalid_values:
new_data[date_column] = value
data = {
"dataset": record.dataset.pk,
"data": new_data
}
client = self.custodian_1_client
count = ds.record_queryset.count()
self.assertEqual(
client.post(url_post, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(
client.put(url_update, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(
client.patch(url_update, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(ds.record_queryset.count(), count)
def test_geometry_error(self):
"""
An observation must have a valid geometry
:return:
"""
ds = self.ds_1
record = self.record_1
lat_column = ds.schema.latitude_field.name
new_data = clone(record.data)
url_post = reverse('api:record-list')
url_update = reverse('api:record-detail', kwargs={'pk': record.pk})
valid_values = [-34.125]
for value in valid_values:
new_data[lat_column] = value
data = {
"dataset": record.dataset.pk,
"data": new_data
}
client = self.custodian_1_client
count = ds.record_queryset.count()
self.assertEqual(
client.post(url_post, data, format='json').status_code,
status.HTTP_201_CREATED
)
self.assertEqual(ds.record_queryset.count(), count + 1)
invalid_values = [None, '', 'abcd']
for value in invalid_values:
new_data[lat_column] = value
data = {
"dataset": record.dataset.pk,
"data": new_data
}
client = self.custodian_1_client
count = ds.record_queryset.count()
self.assertEqual(
client.post(url_post, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(
client.put(url_update, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(
client.patch(url_update, data, format='json').status_code,
status.HTTP_400_BAD_REQUEST
)
self.assertEqual(ds.record_queryset.count(), count)
class TestSiteExtraction(helpers.BaseUserTestCase):
def setUp(self):
super(TestSiteExtraction, self).setUp()
self.site_1 = factories.SiteFactory.create(
project=self.project_1,
code='COTT',
geometry="SRID=4326;"
"LINESTRING (124.18701171875 -17.6484375, 126.38427734375 -18.615234375, 123.35205078125 "
"-20.65869140625, 124.1650390625 -17.71435546875)",)
self.ds_1 = self._create_dataset_with_schema(
self.project_1,
self.data_engineer_1_client,
self.observation_schema_with_with_all_possible_geometry_fields(),
dataset_type=Dataset.TYPE_OBSERVATION
)
# create a record referencing the site by its code
self.record_1 = self._create_record(
self.custodian_1_client,
self.ds_1,
{
'What': 'Chubby bat',
'When': '2018-06-30',
'Site Code': 'COTT',
}
)
self.assertIsNotNone(self.record_1)
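# The observation_schema_with_with_all_possible_geometry_fields helper is assumed
# to include a 'Site Code' foreign key to the Site model, so the record created
# above should end up linked to the COTT site (see test_create_with_site below).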
def test_create_with_site(self):
"""
The descriptor contains a foreign key to the site.
Test that the site is extracted from the data
:return:
"""
# clear all records
ds = self.ds_1
ds.record_queryset.delete()
self.assertEqual(ds.record_queryset.count(), 0)
record = self.record_1
data = {
"dataset": record.dataset.pk,
"data": record.data
}
schema = ds.schema
self.assertTrue(schema.has_fk_for_model('Site'))
expected_site = record.site
url = reverse('api:record-list')
client = self.custodian_1_client
self.assertEqual(
client.post(url, data, format='json').status_code,
status.HTTP_201_CREATED
)
self.assertEqual(ds.record_queryset.count(), 1)
self.assertEqual(ds.record_queryset.first().site, expected_site)
def test_update_site(self):
ds = self.ds_1
record = self.record_1
site = factories.SiteFactory.create(code='NEW-SITE', project=self.project_1, geometry=Point(117, 33))
# the site must belong to the dataset's project or the update won't happen
self.assertIsNotNone(site)
self.assertEqual(site.project, record.dataset.project)
self.assertNotEqual(record.site, site)
# update site value
schema = record.dataset.schema
site_column = schema.get_fk_for_model('Site').data_field
self.assertIsNotNone(site_column)
r_data = record.data
r_data[site_column] = site.code
data = {
"data": r_data
}
url = reverse('api:record-detail', kwargs={"pk": record.pk})
client = self.custodian_1_client
resp = client.patch(url, data, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
record.refresh_from_db()
self.assertEqual(record.site, site)
class TestDateTimeAndGeometryExtraction(helpers.BaseUserTestCase):
@staticmethod
def schema_with_lat_long_and_date():
schema_fields = [
{
"name": "What",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
{
"name": "When",
"type": "date",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS,
"format": "any",
"biosys": {
'type': 'observationDate'
}
},
{
"name": "Latitude",
"type": "number",
"biosys": {
"type": "latitude"
},
"constraints": {
"required": True,
"minimum": -90.0,
"maximum": 90.0,
}
},
{
"name": "Longitude",
"type": "number",
"biosys": {
"type": "longitude"
},
"constraints": {
"required": True,
"minimum": -180.0,
"maximum": 180.0,
}
},
]
schema = helpers.create_schema_from_fields(schema_fields)
return schema
@staticmethod
def schema_with_no_date():
schema_fields = [
{
"name": "What",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
{
"name": "Latitude",
"type": "number",
"biosys": {
"type": "latitude"
},
"constraints": {
"required": True,
"minimum": -90.0,
"maximum": 90.0,
}
},
{
"name": "Longitude",
"type": "number",
"biosys": {
"type": "longitude"
},
"constraints": {
"required": True,
"minimum": -180.0,
"maximum": 180.0,
}
},
]
schema = helpers.create_schema_from_fields(schema_fields)
return schema
def test_create(self):
"""
Test that the date and geometry are extracted from the data
and saved in DB
:return:
"""
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_lat_long_and_date()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema,
dataset_type=Dataset.TYPE_OBSERVATION)
self.assertEqual(dataset.record_queryset.count(), 0)
record_data = {
'What': 'A test',
'When': '01/06/2017',
'Latitude': -32.0,
'Longitude': 116.0
}
payload = {
"dataset": dataset.pk,
"data": record_data
}
expected_date = datetime.date(2017, 6, 1)
url = reverse('api:record-list')
self.assertEqual(
client.post(url, payload, format='json').status_code,
status.HTTP_201_CREATED
)
self.assertEqual(dataset.record_queryset.count(), 1)
record = dataset.record_queryset.first()
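# timezone.localtime converts the stored timezone-aware datetime to the active
# timezone before the date is taken, avoiding an off-by-one-day error for records
# saved near midnight UTC.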
self.assertEqual(timezone.localtime(record.datetime).date(), expected_date)
geometry = record.geometry
self.assertIsInstance(geometry, Point)
self.assertEqual(geometry.x, 116.0)
self.assertEqual(geometry.y, -32.0)
def test_update(self):
"""
Test that the date and geometry are extracted from the data
and saved in DB
:return:
"""
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_lat_long_and_date()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertEqual(dataset.record_queryset.count(), 0)
record_data = {
'What': 'A test',
'When': '01/06/2017',
'Latitude': -32.0,
'Longitude': 116.0
}
payload = {
"dataset": dataset.pk,
"data": record_data
}
url = reverse('api:record-list')
self.assertEqual(
client.post(url, payload, format='json').status_code,
status.HTTP_201_CREATED
)
record = dataset.record_queryset.first()
# change date
new_date = '20/4/2016'
# change lat/lon
new_long = 111.111
new_lat = 22.222
record_data = {
'When': new_date,
'Latitude': new_lat,
'Longitude': new_long
}
payload = {
"dataset": dataset.pk,
"data": record_data
}
url = reverse('api:record-detail', kwargs={"pk": record.pk})
self.assertEqual(
client.patch(url, data=payload, format='json').status_code,
status.HTTP_200_OK
)
self.assertEqual(dataset.record_queryset.count(), 1)
record.refresh_from_db()
expected_date = datetime.date(2016, 4, 20)
self.assertEqual(timezone.localtime(record.datetime).date(), expected_date)
geometry = record.geometry
self.assertIsInstance(geometry, Point)
self.assertEqual(geometry.x, new_long)
self.assertEqual(geometry.y, new_lat)
def test_create_without_date(self):
"""
As of 29/06/2017, a date is not mandatory to create an Observation record
"""
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_no_date()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION)
self.assertEqual(dataset.record_queryset.count(), 0)
record_data = {
'What': 'A test',
'Latitude': -32.0,
'Longitude': 116.0
}
payload = {
"dataset": dataset.pk,
"data": record_data
}
url = reverse('api:record-list')
self.assertEqual(
client.post(url, payload, format='json').status_code,
status.HTTP_201_CREATED
)
self.assertEqual(dataset.record_queryset.count(), 1)
record = dataset.record_queryset.first()
self.assertIsNone(record.datetime)
geometry = record.geometry
self.assertIsInstance(geometry, Point)
self.assertEqual(geometry.x, 116.0)
self.assertEqual(geometry.y, -32.0)
def test_update_without_date(self):
"""
As of 29/06/2017, a date is not mandatory when updating an Observation record
"""
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_no_date()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertEqual(dataset.record_queryset.count(), 0)
record_data = {
'What': 'A test',
'Latitude': -32.0,
'Longitude': 116.0
}
payload = {
"dataset": dataset.pk,
"data": record_data
}
url = reverse('api:record-list')
self.assertEqual(
client.post(url, payload, format='json').status_code,
status.HTTP_201_CREATED
)
record = dataset.record_queryset.first()
new_long = 111.111
new_lat = 22.222
record_data = {
'Latitude': new_lat,
'Longitude': new_long
}
payload = {
"dataset": dataset.pk,
"data": record_data
}
url = reverse('api:record-detail', kwargs={"pk": record.pk})
self.assertEqual(
client.patch(url, data=payload, format='json').status_code,
status.HTTP_200_OK
)
self.assertEqual(dataset.record_queryset.count(), 1)
record.refresh_from_db()
self.assertIsNone(record.datetime)
geometry = record.geometry
self.assertIsInstance(geometry, Point)
self.assertEqual(geometry.x, new_long)
self.assertEqual(geometry.y, new_lat)
class TestEastingNorthing(helpers.BaseUserTestCase):
"""
Use case: the schema contains a datum and a zone field and easting/northing.
"""
@staticmethod
def schema_with_easting_northing():
schema_fields = [
{
"name": "What",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
{
"name": "When",
"type": "date",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"format": "any",
"biosys": {
'type': 'observationDate'
}
},
{
"name": "Northing",
"type": "number",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"biosys": {
"type": "northing"
}
},
{
"name": "Easting",
"type": "number",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"biosys": {
"type": "easting"
}
},
{
"name": "Datum",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
{
"name": "Zone",
"type": "integer",
"constraints": helpers.REQUIRED_CONSTRAINTS
}
]
return helpers.create_schema_from_fields(schema_fields)
def test_create_happy_path(self):
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_easting_northing()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertIsNotNone(dataset.schema.datum_field)
self.assertIsNotNone(dataset.schema.zone_field)
easting = 405542.537
northing = 6459127.469
datum = 'GDA94'
zone = 50
record_data = {
'What': 'Chubby Bat',
'When': '12/12/2017',
'Easting': easting,
'Northing': northing,
'Datum': datum,
'Zone': zone
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
qs = dataset.record_queryset
self.assertEqual(qs.count(), 1)
record = qs.first()
geom = record.geometry
# should be in WGS84 -> srid = 4326
self.assertEqual(geom.srid, 4326)
# convert it back to GDA94 / MGA zone 50 -> srid = 28350
geom.transform(28350)
# compare with 2 decimal place precision
self.assertAlmostEqual(geom.x, easting, places=2)
self.assertAlmostEqual(geom.y, northing, places=2)
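# Note: for GDA94, MGA zone z maps to EPSG srid 28300 + z, which is why zone 50
# corresponds to srid 28350 here (and zone 56 to 28356 in TestGeometryConversion).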
def test_update_happy_path(self):
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_easting_northing()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertIsNotNone(dataset.schema.datum_field)
self.assertIsNotNone(dataset.schema.zone_field)
# first create record with wrong zone
easting = 405542.537
northing = 6459127.469
datum = 'GDA94'
zone = 58
record_data = {
'What': 'Chubby Bat',
'When': '12/12/2017',
'Easting': easting,
'Northing': northing,
'Datum': datum,
'Zone': zone
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
qs = dataset.record_queryset
self.assertEqual(qs.count(), 1)
record = qs.first()
geom = record.geometry
# should be in WGS84 -> srid = 4326
self.assertEqual(geom.srid, 4326)
# convert it back to GDA94 / MGA zone 50 -> srid = 28350
geom.transform(28350)
# compare with 2 decimal place precision; the values should differ from the expected ones because the zone is wrong
self.assertNotAlmostEqual(geom.x, easting, places=2)
self.assertNotAlmostEqual(geom.y, northing, places=2)
# send a patch to update the zone
record_data = {
'What': 'Chubby Bat',
'When': '12/12/2017',
'Easting': easting,
'Northing': northing,
'Datum': datum,
'Zone': 50
}
payload = {
'data': record_data
}
url = reverse('api:record-detail', kwargs={'pk': record.pk})
resp = client.patch(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
record.refresh_from_db()
geom = record.geometry
# should be in WGS84 -> srid = 4326
self.assertEqual(geom.srid, 4326)
# convert it back to GDA94 / MGA zone 50 -> srid = 28350
geom.transform(28350)
self.assertAlmostEqual(geom.x, easting, places=2)
self.assertAlmostEqual(geom.y, northing, places=2)
def test_default_datum(self):
"""
If only easting and northing are provided the project's datum/zone should be used
"""
project = self.project_1
srid = constants.get_datum_srid('GDA94 / MGA zone 50')
self.assertEqual(srid, 28350)
project.datum = srid
project.save()
client = self.custodian_1_client
# schema with datum and zone not required
schema_fields = [
{
"name": "What",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
{
"name": "When",
"type": "date",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"format": "any",
"biosys": {
'type': 'observationDate'
}
},
{
"name": "Northing",
"type": "number",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"biosys": {
"type": "northing"
}
},
{
"name": "Easting",
"type": "number",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"biosys": {
"type": "easting"
}
},
{
"name": "Datum",
"type": "string",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS
},
{
"name": "Zone",
"type": "integer",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS
}
]
schema = helpers.create_schema_from_fields(schema_fields)
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertIsNotNone(dataset.schema.datum_field)
self.assertIsNotNone(dataset.schema.zone_field)
easting = 405542.537
northing = 6459127.469
record_data = {
'What': 'Chubby Bat',
'When': '12/12/2017',
'Easting': easting,
'Northing': northing,
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list') + '?strict=true'
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
qs = dataset.record_queryset
self.assertEqual(qs.count(), 1)
record = qs.first()
geom = record.geometry
# should be in WGS84 -> srid = 4326
self.assertEqual(geom.srid, 4326)
self.assertIsInstance(geom, Point)
self.assertAlmostEqual(geom.x, 116, places=2)
self.assertAlmostEqual(geom.y, -32, places=2)
# convert it back to GDA94 / MGA zone 50 -> srid = 28350
geom.transform(srid)
# compare with 2 decimal place precision
self.assertAlmostEqual(geom.x, easting, places=2)
self.assertAlmostEqual(geom.y, northing, places=2)
class TestGeometryFromSite(helpers.BaseUserTestCase):
"""
Use case: the observation dataset doesn't contain any geometry columns/fields,
only a reference (foreign key) to the site code. In this case, when the user uploads observations with a site
reference only, the observation geometry should be copied (not referenced) from the site geometry.
"""
@staticmethod
def schema_with_site_code_fk():
schema_fields = [
{
"name": "What",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
{
"name": "When",
"type": "date",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"format": "any",
"biosys": {
'type': 'observationDate'
}
},
{
"name": "Site Code",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
]
schema = helpers.create_schema_from_fields(schema_fields)
schema = helpers.add_model_field_foreign_key_to_schema(schema, {
'schema_field': 'Site Code',
'model': 'Site',
'model_field': 'code'
})
return schema
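# add_model_field_foreign_key_to_schema is assumed to attach a foreign-key
# descriptor mapping the 'Site Code' column onto Site.code; the server can then
# resolve a record's site and copy its geometry, as the tests below verify.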
@staticmethod
def schema_with_latlong_and_site_code_fk():
schema_fields = [
{
"name": "What",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
{
"name": "When",
"type": "date",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"format": "any",
"biosys": {
'type': 'observationDate'
}
},
{
"name": "Latitude",
"type": "number",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS,
"biosys": {
"type": 'latitude'
}
},
{
"name": "Longitude",
"type": "number",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS,
"biosys": {
"type": 'longitude'
}
},
{
"name": "Site Code",
"type": "string",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS
},
]
schema = helpers.create_schema_from_fields(schema_fields)
schema = helpers.add_model_field_foreign_key_to_schema(schema, {
'schema_field': 'Site Code',
'model': 'Site',
'model_field': 'code'
})
return schema
def test_observation_schema_valid_with_site_foreign_key(self):
"""
An observation schema should be valid without geometry fields as long as it has a foreign key to a site.
"""
schema_fields = [
{
"name": "What",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
{
"name": "When",
"type": "date",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"biosys": {
'type': 'observationDate'
}
},
{
"name": "Site Code",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
]
schema = helpers.create_schema_from_fields(schema_fields)
schema = helpers.add_model_field_foreign_key_to_schema(schema, {
'schema_field': 'Site Code',
'model': 'Site',
'model_field': 'code'
})
data_package = helpers.create_data_package_from_schema(schema)
# create data set
url = reverse('api:dataset-list')
project = self.project_1
client = self.data_engineer_1_client
dataset_name = "Observation with site foreign key and no geometry"
payload = {
"name": dataset_name,
"type": Dataset.TYPE_OBSERVATION,
"project": project.pk,
'data_package': data_package
}
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
# double check
self.assertIsNotNone(Dataset.objects.filter(project=project, name=dataset_name).first())
def test_observation_schema_not_valid_with_other_foreign_key(self):
"""
Only a foreign key to the site code is accepted.
"""
schema_fields = [
{
"name": "What",
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
{
"name": "When",
"type": "date",
"constraints": helpers.REQUIRED_CONSTRAINTS,
"biosys": {
'type': 'observationDate'
}
},
{
"name": "Project", # project not site
"type": "string",
"constraints": helpers.REQUIRED_CONSTRAINTS
},
]
schema = helpers.create_schema_from_fields(schema_fields)
schema = helpers.add_model_field_foreign_key_to_schema(schema, {
'schema_field': 'Project', # project not site
'model': 'Project',
'model_field': 'title'
})
data_package = helpers.create_data_package_from_schema(schema)
# create data set
url = reverse('api:dataset-list')
project = self.project_1
client = self.data_engineer_1_client
dataset_name = "Observation with project foreign key and no geometry"
payload = {
"name": dataset_name,
"type": Dataset.TYPE_OBSERVATION,
"project": project.pk,
'data_package': data_package
}
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
def test_geometry_extracted_create(self):
"""
Test that the record geometry is properly copied from the site when posting through api
"""
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_site_code_fk()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
site_code = 'Cottesloe'
site_geometry = Point(115.76, -32.0)
# create the site
site = factories.SiteFactory(code=site_code, geometry=site_geometry, project=project)
record_data = {
'What': 'Hello! This is a test.',
'When': '12/12/2017',
'Site Code': site_code
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
record = Record.objects.filter(id=resp.json().get('id')).first()
self.assertIsNotNone(record)
self.assertEqual(record.site, site)
self.assertEqual(record.geometry, site_geometry)
def test_geometry_extracted_update(self):
"""
Test that the record geometry is properly copied from the site when updating/patching
"""
# create the record
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_site_code_fk()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
site_code = 'Cottesloe'
site_geometry = Point(115.76, -32.0)
# create the site
factories.SiteFactory(code=site_code, geometry=site_geometry, project=project)
record_data = {
'What': 'Hello! This is a test.',
'When': '12/12/2017',
'Site Code': site_code
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
record = Record.objects.filter(id=resp.json().get('id')).first()
self.assertIsNotNone(record)
self.assertEqual(record.geometry, site_geometry)
# update record with new site
site_code = 'Somewhere'
site_geometry = Point(116.0, -30.0)
# create the site
factories.SiteFactory(code=site_code, geometry=site_geometry, project=project)
record_data = {
'What': 'Yellow!',
'When': '01/01/2017',
'Site Code': site_code
}
payload = {
'data': record_data
}
url = reverse('api:record-detail', kwargs={'pk': record.pk})
resp = client.patch(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_200_OK)
record.refresh_from_db()
self.assertIsNotNone(record)
self.assertEqual(timezone.make_naive(record.datetime), datetime.datetime(2017, 1, 1, 0, 0))
self.assertEqual(record.geometry, site_geometry)
def test_record_rejected_if_site_has_no_geometry_api(self):
"""
When using the API, if the referenced site has no geometry the record should be rejected.
"""
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_site_code_fk()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
site_code = 'Cottesloe'
# create the site
site = factories.SiteFactory(code=site_code, geometry=None, project=project)
self.assertIsNone(site.geometry)
record_data = {
'What': 'Hello! This is a test.',
'When': '12/12/2017',
'Site Code': site_code
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
# check error
errors = resp.json().get('data')
# each error is a string of the form 'field_name::message'
self.assertIsNotNone(errors)
self.assertTrue(isinstance(errors, list))
self.assertEqual(len(errors), 1)
field_name, message = errors[0].split('::')
self.assertEqual(field_name, 'Site Code')
# message should be something like:
expected_message = 'The site Cottesloe has no geometry'
self.assertEqual(message, expected_message)
def test_schema_with_lat_long_and_site_fk(self):
"""
Use case:
The schema contains classic lat/long fields and a site_code foreign key.
Test that:
1 - the lat/long provided takes precedence over the site geometry
2 - if lat/long not provided the site geometry is used
"""
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_latlong_and_site_code_fk()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertIsNotNone(dataset.schema.latitude_field)
self.assertIsNotNone(dataset.schema.longitude_field)
site_code = 'Cottesloe'
site_geometry = Point(115.76, -32.0)
# create the site
site = factories.SiteFactory(code=site_code, geometry=site_geometry, project=project)
# make the observation geometry different from the site geometry
observation_geometry = Point(site_geometry.x + 2, site_geometry.y + 2)
self.assertNotEqual(site.geometry, observation_geometry)
# lat/long + site
record_data = {
'What': 'Hello! This is a test.',
'When': '12/12/2017',
'Longitude': observation_geometry.x,
'Latitude': observation_geometry.y,
'Site Code': site_code
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
record = Record.objects.filter(id=resp.json().get('id')).first()
self.assertIsNotNone(record)
self.assertEqual(record.site, site)
self.assertEqual(record.geometry.geojson, observation_geometry.geojson)
# lat/long no site
record_data = {
'What': 'Hello! This is a test.',
'When': '12/12/2017',
'Longitude': observation_geometry.x,
'Latitude': observation_geometry.y,
'Site Code': None
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
record = Record.objects.filter(id=resp.json().get('id')).first()
self.assertIsNotNone(record)
self.assertIsNone(record.site)
self.assertEqual(record.geometry.geojson, observation_geometry.geojson)
# site without lat/long
record_data = {
'What': 'Hello! This is a test.',
'When': '12/12/2017',
'Longitude': None,
'Latitude': None,
'Site Code': site_code
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
record = Record.objects.filter(id=resp.json().get('id')).first()
self.assertIsNotNone(record)
self.assertEqual(record.site, site)
self.assertEqual(record.geometry.geojson, site_geometry.geojson)
# no lat/long no site -> error
record_data = {
'What': 'Hello! This is a test.',
'When': '12/12/2017',
'Longitude': None,
'Latitude': None,
'Site Code': None
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
def test_geometry_extracted_upload(self):
"""
Test that the record geometry is properly copied from the site when using an xlsx upload
"""
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_site_code_fk()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
# create two sites
site_1_code = 'Cottesloe'
site_1_geometry = Point(115.76, -32.0)
site_1 = factories.SiteFactory(code=site_1_code, geometry=site_1_geometry, project=project)
site_2_code = 'Somewhere'
site_2_geometry = Point(116.0, -30.0)
# create the site
site_2 = factories.SiteFactory(code=site_2_code, geometry=site_2_geometry, project=project)
# data
csv_data = [
['What', 'When', 'Site Code'],
['what_1', '01/01/2017', site_1_code],
['what_2', '02/02/2017', site_2_code]
]
file_ = helpers.rows_to_xlsx_file(csv_data)
self.assertEqual(0, Record.objects.filter(dataset=dataset).count())
url = reverse('api:dataset-upload', kwargs={'pk': dataset.pk})
with open(file_, 'rb') as fp:
payload = {
'file': fp
}
resp = client.post(url, data=payload, format='multipart')
self.assertEqual(status.HTTP_200_OK, resp.status_code)
records = Record.objects.filter(dataset=dataset)
self.assertEqual(records.count(), len(csv_data) - 1)
r = [r for r in records if r.data['What'] == 'what_1'][0]
self.assertEqual(r.site, site_1)
self.assertEqual(r.geometry, site_1_geometry)
r = [r for r in records if r.data['What'] == 'what_2'][0]
self.assertEqual(r.site, site_2)
self.assertEqual(r.geometry, site_2_geometry)
def test_record_rejected_if_site_has_no_geometry_upload(self):
"""
When uploading with Excel, if the referenced site has no geometry the record should be rejected.
"""
# same as above but site_2 has no geometry
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_site_code_fk()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
# create two sites, the second one without a geometry
site_1_code = 'Cottesloe'
site_1_geometry = Point(115.76, -32.0)
site_1 = factories.SiteFactory(code=site_1_code, geometry=site_1_geometry, project=project)
site_2_code = 'Somewhere'
site_2_geometry = None
factories.SiteFactory(code=site_2_code, geometry=site_2_geometry, project=project)
csv_data = [
['What', 'When', 'Site Code'],
['what_1', '01/01/2017', site_1_code],
['what_2', '02/02/2017', site_2_code]
]
file_ = helpers.rows_to_xlsx_file(csv_data)
self.assertEqual(0, Record.objects.filter(dataset=dataset).count())
url = reverse('api:dataset-upload', kwargs={'pk': dataset.pk})
with open(file_, 'rb') as fp:
payload = {
'file': fp
}
resp = client.post(url, data=payload, format='multipart')
self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST)
# Check that the good record is there.
records = Record.objects.filter(dataset=dataset)
self.assertEqual(records.count(), 1)
r = records.first()
self.assertEqual(r.site, site_1)
self.assertEqual(r.geometry, site_1_geometry)
def test_site_geometry_updated(self):
"""
Use case:
Observations have been created with a site geometry, then the user updates the site location.
The user expects the associated observations to have their geometry updated.
This can happen only if the observation has the site as a FK (of course) and exactly the same geometry.
"""
project = self.project_1
client = self.custodian_1_client
schema = self.schema_with_site_code_fk()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
# create two sites
site_1_code = 'Cottesloe'
site_1_geometry = Point(115.76, -32.0)
site_1 = factories.SiteFactory(code=site_1_code, geometry=site_1_geometry, project=project)
site_2_code = 'Somewhere'
site_2_geometry = Point(116.0, -30.0)
# create the site
site_2 = factories.SiteFactory(code=site_2_code, geometry=site_2_geometry, project=project)
# data
csv_data = [
['What', 'When', 'Site Code'],
['what_1', '01/01/2017', site_1_code],
['what_2', '02/02/2017', site_2_code]
]
file_ = helpers.rows_to_xlsx_file(csv_data)
self.assertEqual(0, Record.objects.filter(dataset=dataset).count())
url = reverse('api:dataset-upload', kwargs={'pk': dataset.pk})
with open(file_, 'rb') as fp:
payload = {
'file': fp
}
resp = client.post(url, data=payload, format='multipart')
self.assertEqual(status.HTTP_200_OK, resp.status_code)
records = Record.objects.filter(dataset=dataset)
self.assertEqual(records.count(), len(csv_data) - 1)
record_1 = [r for r in records if r.data['What'] == 'what_1'][0]
self.assertEqual(record_1.site, site_1)
self.assertEqual(record_1.geometry, site_1_geometry)
record_2 = [r for r in records if r.data['What'] == 'what_2'][0]
self.assertEqual(record_2.site, site_2)
self.assertEqual(record_2.geometry, site_2_geometry)
# Change the site_1 geometry and expect the record_1 to have its geometry updated
previous_geometry = site_1_geometry
new_geometry = Point(previous_geometry.x + 2, previous_geometry.y + 2)
self.assertNotEqual(previous_geometry, new_geometry)
url = reverse('api:site-detail', kwargs={'pk': site_1.pk})
payload = {
"geometry": new_geometry.wkt
}
resp = client.patch(url, data=payload, format='json')
self.assertEqual(resp.status_code, 200)
# check that the record has been updated
record_1.refresh_from_db()
self.assertEqual(record_1.geometry.geojson, new_geometry.geojson)
# site_2 record should be untouched
record_2.refresh_from_db()
self.assertEqual(record_2.geometry.geojson, site_2_geometry.geojson)
# Use case: the record geometry should be updated ONLY if it matches exactly the site geometry
# new geometry for record_1
new_site_geometry = Point(179, -30)
self.assertNotEqual(new_site_geometry, site_1.geometry)
new_record_geometry = Point(180, -35)
self.assertNotEqual(new_record_geometry, new_site_geometry)
record_1.geometry = new_record_geometry
record_1.save()
self.assertNotEqual(record_1.geometry.geojson, record_1.site.geometry.geojson)
site = record_1.site
site.geometry = new_site_geometry
site.save()
# check record not changed
record_1.refresh_from_db()
self.assertEqual(record_1.geometry, new_record_geometry)
self.assertNotEqual(record_1.geometry.geojson, record_1.site.geometry.geojson)
class TestMultipleGeometrySource(helpers.BaseUserTestCase):
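# Precedence exercised by the tests below: easting/northing > latitude/longitude > site code.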
def test_geometry_easting_northing_precedence(self):
"""
If all fields are provided easting and northing have precedence over lat/long and site code.
"""
project = self.project_1
client = self.custodian_1_client
schema = self.observation_schema_with_with_all_possible_geometry_fields()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertIsNotNone(dataset.schema.datum_field)
self.assertIsNotNone(dataset.schema.zone_field)
# site geometry
site_code = 'Cottesloe'
site_geometry = Point(115.76, -32.0)
# create the site
site = factories.SiteFactory(code=site_code, geometry=site_geometry, project=project)
# easting/northing: nearly (116.0, -32.0)
easting = 405542.537
northing = 6459127.469
east_north_datum = 'GDA94'
zone = 50
east_north_srid = 28350
# lat/long
longitude = 117.0
latitude = -33.0
lat_long_datum = 'WGS84'
lat_long_srid = 4326
record_data = {
'What': 'A record with all geometry fields populated',
'When': '12/12/2017',
'Site Code': site_code,
'Easting': easting,
'Northing': northing,
'Datum': east_north_datum,
'Zone': zone,
'Latitude': latitude,
'Longitude': longitude
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
record = Record.objects.filter(id=resp.json().get('id')).first()
self.assertIsNotNone(record)
geometry = record.geometry
self.assertIsNotNone(geometry)
self.assertIsInstance(geometry, Point)
# it should be the easting/northing geometry
geometry.transform(east_north_srid)
self.assertAlmostEqual(geometry.x, easting, places=2)
self.assertAlmostEqual(geometry.y, northing, places=2)
def test_geometry_lat_long_precedence(self):
"""
Lat/long takes precedence over site code
"""
project = self.project_1
client = self.custodian_1_client
schema = self.observation_schema_with_with_all_possible_geometry_fields()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertIsNotNone(dataset.schema.datum_field)
self.assertIsNotNone(dataset.schema.zone_field)
# site geometry
site_code = 'Cottesloe'
site_geometry = Point(115.76, -32.0)
# create the site
site = factories.SiteFactory(code=site_code, geometry=site_geometry, project=project)
# lat/long
longitude = 117.0
latitude = -33.0
lat_long_datum = 'WGS84'
lat_long_srid = 4326
record_data = {
'What': 'A record with all geometry fields populated',
'When': '12/12/2017',
'Site Code': site_code,
'Easting': None,
'Northing': None,
'Datum': lat_long_datum,
'Zone': None,
'Latitude': latitude,
'Longitude': longitude
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
record = Record.objects.filter(id=resp.json().get('id')).first()
self.assertIsNotNone(record)
geometry = record.geometry
self.assertIsNotNone(geometry)
self.assertIsInstance(geometry, Point)
# it should be the lat/long geometry
geometry.transform(lat_long_srid)
self.assertAlmostEqual(geometry.x, longitude, places=4)
self.assertAlmostEqual(geometry.y, latitude, places=4)
# and not the site
self.assertNotAlmostEqual(geometry.x, site_geometry.x, places=4)
self.assertNotAlmostEqual(geometry.y, site_geometry.y, places=4)
def test_easting_northing_and_site(self):
"""
Easting/Northing > site_code
"""
project = self.project_1
client = self.custodian_1_client
schema = self.observation_schema_with_with_all_possible_geometry_fields()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertIsNotNone(dataset.schema.datum_field)
self.assertIsNotNone(dataset.schema.zone_field)
# site geometry
site_code = 'Cottesloe'
site_geometry = Point(115.76, -32.0)
# create the site
site = factories.SiteFactory(code=site_code, geometry=site_geometry, project=project)
# easting/northing: nearly (116.0, -32.0)
easting = 405542.537
northing = 6459127.469
east_north_datum = 'GDA94'
zone = 50
east_north_srid = 28350
record_data = {
'What': 'A record with all geometry fields populated',
'When': '12/12/2017',
'Site Code': site_code,
'Easting': easting,
'Northing': northing,
'Datum': east_north_datum,
'Zone': zone,
'Latitude': None,
'Longitude': None
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
record = Record.objects.filter(id=resp.json().get('id')).first()
self.assertIsNotNone(record)
geometry = record.geometry
self.assertIsNotNone(geometry)
self.assertIsInstance(geometry, Point)
# it should be the easting/northing geometry
geometry.transform(east_north_srid)
self.assertAlmostEqual(geometry.x, easting, places=2)
self.assertAlmostEqual(geometry.y, northing, places=2)
def test_site_only(self):
project = self.project_1
client = self.custodian_1_client
schema = self.observation_schema_with_with_all_possible_geometry_fields()
dataset = self._create_dataset_with_schema(
project, self.data_engineer_1_client, schema, dataset_type=Dataset.TYPE_OBSERVATION
)
self.assertIsNotNone(dataset.schema.datum_field)
self.assertIsNotNone(dataset.schema.zone_field)
# site geometry
site_code = 'Cottesloe'
site_geometry = Point(115.76, -32.0)
# create the site
site = factories.SiteFactory(code=site_code, geometry=site_geometry, project=project)
record_data = {
'What': 'A record with all geometry fields populated',
'When': '12/12/2017',
'Site Code': site_code,
'Easting': None,
'Northing': None,
'Datum': None,
'Zone': None,
'Latitude': None,
'Longitude': None
}
payload = {
'dataset': dataset.pk,
'data': record_data
}
url = reverse('api:record-list')
resp = client.post(url, data=payload, format='json')
self.assertEqual(resp.status_code, status.HTTP_201_CREATED)
record = Record.objects.filter(id=resp.json().get('id')).first()
self.assertIsNotNone(record)
geometry = record.geometry
self.assertIsNotNone(geometry)
self.assertIsInstance(geometry, Point)
# it should be the site geometry
self.assertAlmostEqual(geometry.x, site_geometry.x, places=4)
self.assertAlmostEqual(geometry.y, site_geometry.y, places=4)
class TestGeometryConversion(helpers.BaseUserTestCase):
def test_lat_long_with_projected_project_datum(self):
"""
see: https://youtrack.gaiaresources.com.au/youtrack/issue/BIOSYS-152
Use case:
- Project datum set to a projected one, e.g. GDA94 / MGA zone 56.
- Schema has a latitude, longitude, datum, zone, easting and northing field (the whole shebang)
- Post a record with lat=-32.0 long=115.75 and Datum=WGS84
Success if record.geometry is Point(115.75, -32.0)
"""
# Create project with projected datum
program = factories.ProgramFactory.create()
program.data_engineers.add(self.data_engineer_1_user)
datum_srid = constants.get_datum_srid('GDA94 / MGA zone 56')
project = factories.ProjectFactory.create(
program=program,
datum=datum_srid
)
self.assertTrue(constants.is_projected_srid(project.datum))
project.custodians.add(self.custodian_1_user)
self.assertTrue(project.is_custodian(self.custodian_1_user))
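# 'GDA94 / MGA zone 56' is EPSG:28356, a projected CRS. Even with a projected
# project datum, records are expected to be stored in WGS84 (srid 4326), so the
# lat/long posted below should come back unchanged.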
# Dataset and records in lat/long
schema = self.observation_schema_with_with_all_possible_geometry_fields()
client = self.custodian_1_client
dataset = self._create_dataset_with_schema(
project,
self.data_engineer_1_client,
schema,
dataset_type=Dataset.TYPE_OBSERVATION
)
# post record
record_data = {
'When': "2018-05-25",
'Datum': 'WGS84',
'Latitude': -32.0,
'Longitude': 115.75
}
record = self._create_record(
client,
dataset,
record_data
)
self.assertIsNotNone(record)
self.assertIsNotNone(record.geometry)
self.assertEqual(record.geometry.x, 115.75)
self.assertEqual(record.geometry.y, -32.0)
# try with the upload end-point
dataset.record_set.all().delete()
rows = [
['When', 'Datum', 'Latitude', 'Longitude'],
['2018-05-25', 'WGS84', -32.0, 115.75]
]
response = self._upload_records_from_rows(
rows,
dataset.pk,
strict=True
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
record = dataset.record_set.last()
self.assertIsNotNone(record)
self.assertIsNotNone(record.geometry)
self.assertEqual(record.geometry.x, 115.75)
self.assertEqual(record.geometry.y, -32.0)
class TestSerialization(helpers.BaseUserTestCase):
def test_date_serialization_uses_project_timezone(self):
# TODO: implement this
pass
class TestExport(helpers.BaseUserTestCase):
def setUp(self):
super(TestExport, self).setUp()
rows = [
['When', 'Species', 'How Many', 'Latitude', 'Longitude', 'Comments'],
['2018-02-07', 'Canis lupus', 1, -32.0, 115.75, ''],
['2018-01-12', 'Chubby bat', 10, -32.0, 115.75, 'Awesome'],
['2018-02-02', 'Canis dingo', 2, -32.0, 115.75, 'Watch out kids'],
['2018-02-10', 'Unknown', 3, -32.0, 115.75, 'Canis?'],
]
self.ds_1 = self._create_dataset_and_records_from_rows(rows)
self.assertEqual(self.ds_1.type, Dataset.TYPE_OBSERVATION)
def test_happy_path_no_filter(self):
client = self.custodian_1_client
dataset = self.ds_1
all_records = Record.objects.filter(dataset=dataset)
self.assertTrue(all_records.count() > 0)
url = reverse('api:record-list')
query = {
'dataset__id': dataset.pk,
'output': 'xlsx'
}
try:
resp = client.get(url, query)
except Exception as e:
self.fail("Export should not raise an exception: {}".format(e))
self.assertEqual(resp.status_code, status.HTTP_200_OK)
# check headers
self.assertEqual(resp.get('content-type'),
'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
content_disposition = resp.get('content-disposition')
# should be something like:
# 'attachment; filename=DatasetName_YYYY_MM_DD-HHMMSS.xlsx'
match = re.match('attachment; filename=(.+)', content_disposition)
self.assertIsNotNone(match)
filename, ext = path.splitext(match.group(1))
self.assertEqual(ext, '.xlsx')
self.assertTrue(filename.startswith(dataset.name))
# read content
wb = load_workbook(io.BytesIO(resp.content), read_only=True)
# one datasheet named from dataset
sheet_names = wb.sheetnames
self.assertEqual(1, len(sheet_names))
self.assertEqual(dataset.name, sheet_names[0])
ws = wb[dataset.name]
rows = list(ws.rows)
expected_records = Record.objects.filter(dataset=dataset)
self.assertEqual(len(rows), expected_records.count() + 1)
headers = [c.value for c in rows[0]]
schema = dataset.schema
# all the columns of the schema should be in the excel
self.assertEqual(schema.headers, headers)
def test_permission_ok_for_not_custodian(self):
"""
Export is a read action. Should be authorised for every logged-in user.
"""
client = self.custodian_2_client
dataset = self.ds_1
url = reverse('api:record-list')
query = {
'dataset__id': dataset.pk,
'output': 'xlsx'
}
try:
resp = client.get(url, query)
except Exception as e:
self.fail("Export should not raise an exception: {}".format(e))
self.assertEqual(resp.status_code, status.HTTP_200_OK)
def test_permission_denied_if_not_logged_in(self):
"""Must be logged-in."""
client = self.anonymous_client
dataset = self.ds_1
url = reverse('api:record-list')
query = {
'dataset__id': dataset.pk,
'output': 'xlsx'
}
try:
resp = client.get(url, query)
except Exception as e:
self.fail("Export should not raise an exception: {}".format(e))
self.assertEqual(resp.status_code, status.HTTP_401_UNAUTHORIZED)
class TestDateNotMandatory(helpers.BaseUserTestCase):
date_easting_northing_site_nothing_required_schema = [
{
"name": "What",
"type": "string",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS
},
{
"name": "When",
"type": "date",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS,
"format": "any",
"biosys": {
'type': 'observationDate'
}
},
{
"name": 'Site',
"type": "string",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS,
"biosys": {
'type': 'siteCode'
}
},
{
"name": "Northing",
"type": "number",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS,
"biosys": {
"type": "northing"
}
},
{
"name": "Easting",
"type": "number",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS,
"biosys": {
"type": "easting"
}
},
{
"name": "Datum",
"type": "string",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS
},
{
"name": "Zone",
"type": "integer",
"constraints": helpers.NOT_REQUIRED_CONSTRAINTS
}
]
def test_record_without_date(self):
project = self.project_1
client = self.custodian_1_client
dataset = self._create_dataset_with_schema(
project,
self.data_engineer_1_client,
self.date_easting_northing_site_nothing_required_schema,
Dataset.TYPE_OBSERVATION
)
# easting/northing: nearly (116.0, -32.0)
easting = 405542.537
northing = 6459127.469
datum = 'GDA94'
zone = 50
east_north_srid = 28350
record_data = {
'What': 'Whaaat?',
'Easting': easting,
'Northing': northing,
'Datum': datum,
'Zone': zone
}
record = self._create_record(client, dataset, record_data)
self.assertIsNone(record.datetime)
geometry = record.geometry
self.assertIsNotNone(geometry)
self.assertIsInstance(geometry, Point)
geometry.transform(east_north_srid)
self.assertAlmostEqual(geometry.x, easting, places=2)
self.assertAlmostEqual(geometry.y, northing, places=2)
class TestPatch(helpers.BaseUserTestCase):
def test_patch_validated(self):
"""
Test that we can patch just the 'validated' flag
:return:
"""
rows = [
['What', 'When', 'Latitude', 'Longitude', 'Comments'],
['Chubby bat', '2018-06-01', -32, 115.75, 'It is huge!']
]
dataset = self._create_dataset_and_records_from_rows(rows)
self.assertEqual(dataset.type, Dataset.TYPE_OBSERVATION)
records = dataset.record_set.all()
record = records.last()
self.assertIsNotNone(record)
self.assertFalse(record.validated)
previous_data = json.dumps(record.data)
# patch
url = reverse('api:record-detail', kwargs={"pk": record.pk})
client = self.custodian_1_client
payload = {
'validated': True
}
resp = client.patch(url, payload)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
record.refresh_from_db()
self.assertTrue(record.validated)
self.assertEqual(json.dumps(record.data), previous_data)
def test_patch_locked(self):
"""
Test that we can patch just the 'locked' flag
:return:
"""
rows = [
['What', 'When', 'Latitude', 'Longitude', 'Comments'],
['Chubby bat', '2018-06-01', -32, 115.75, 'It is huge!']
]
dataset = self._create_dataset_and_records_from_rows(rows)
self.assertEqual(dataset.type, Dataset.TYPE_OBSERVATION)
records = dataset.record_set.all()
record = records.last()
self.assertIsNotNone(record)
self.assertFalse(record.locked)
previous_data = json.dumps(record.data)
# patch
url = reverse('api:record-detail', kwargs={"pk": record.pk})
client = self.custodian_1_client
payload = {
'locked': True
}
resp = client.patch(url, payload)
self.assertEqual(resp.status_code, status.HTTP_200_OK)
record.refresh_from_db()
self.assertTrue(record.locked)
self.assertEqual(json.dumps(record.data), previous_data)
| avg_line_length: 36.350877 | max_line_length: 116 | alphanum_fraction: 0.56264 | [remaining quality-signal columns omitted]
fa8a0631b393860229d2ba0c84d1d68ee4710fdb | 78 | py | Python | maddux/objects/__init__.py | emielke12/maddux | fed7770c54124c14935523e6bcdba81ad9538cbc | ["MIT"] | stars: 23 (2016-04-23T18:13:24.000Z to 2021-12-14T23:37:09.000Z) | issues: 12 (2016-04-14T23:59:51.000Z to 2019-05-25T10:00:14.000Z) | forks: 8 (2018-10-19T13:52:27.000Z to 2020-10-31T23:30:24.000Z)
from ball import Ball
from target import Target
from obstacle import Obstacle
| avg_line_length: 19.5 | max_line_length: 29 | alphanum_fraction: 0.846154 | [remaining quality-signal columns omitted]
faec51972a4689cb202e308168f31075c0782f3b | 205 | py | Python | apps.py | anthill-gaming/anthill-admin | e3c29a9bd7c04d2c6ce29528578a93395adf59e0 | ["MIT"] | stars: 1 (2018-11-30T21:56:14.000Z to 2018-11-30T21:56:14.000Z) | issues: null | forks: null
from anthill.platform.apps import BaseAnthillApplication
import logging
logger = logging.getLogger('anthill.application')
class AnthillApplication(BaseAnthillApplication):
"""Anthill application"""
| avg_line_length: 22.777778 | max_line_length: 56 | alphanum_fraction: 0.814634 | [remaining quality-signal columns omitted]
877d4d8cba6bceb022be9f354ccfa0f24e98fa24 | 13 | py | Python | word-scanner.py | Mazdaywik/diplom | 1f01b7ef6df08c971c3e5286a2639e1a400c3d8f | ["MIT"] | stars: null | issues: null | forks: null
print('Bye!')
| avg_line_length: 13 | max_line_length: 13 | alphanum_fraction: 0.615385 | [remaining quality-signal columns omitted]
8782785b5cbe36fd4a3951d7630dbe9496180470 | 178 | py | Python | tcex/bin/__init__.py | kdeltared/tcex | 818c0d09256764f871e42d9ca5916f92d941d882 | ["Apache-2.0"] | stars: 18 (2017-01-09T22:17:49.000Z to 2022-01-24T20:46:42.000Z) | issues: 84 (2017-04-11T13:47:49.000Z to 2022-03-21T20:12:57.000Z) | forks: 43 (2017-01-05T20:40:26.000Z to 2022-03-31T19:18:02.000Z)
"""Bin module for TcEx Framework"""
# flake8: noqa
from .init import Init
from .lib import Lib
from .package import Package
from .test import Test
from .validate import Validate
| avg_line_length: 22.25 | max_line_length: 35 | alphanum_fraction: 0.769663 | [remaining quality-signal columns omitted]
87c52b3194e192d0797d5b91a3809cb3cd6eb95f | 132 | py | Python | app/models/__init__.py | zhiyong-lv/flask-login | d8bf0719bae19ba8f7f44ea6d6a8ca65ba22aa63 | ["MIT"] | stars: null | issues: null | forks: null
from .users import User
from .documents import Document
from .files import File
from .tags import Tag
from .file_tags import FileTag
| avg_line_length: 26.4 | max_line_length: 31 | alphanum_fraction: 0.818182 | [remaining quality-signal columns omitted]
87d2ac50934cd57dda9488dc2299a7c4c3017660 | 488 | py | Python | tests/unittest/test_get_tripcounts.py | MPSLab-ASU/dMazeRunner | b8b0a77729a6ec31b7952fd3c209f09781819d0a | ["MIT"] | stars: 23 (2020-06-11T00:51:16.000Z to 2022-03-21T06:11:41.000Z) | issues: 4 (2020-03-12T03:32:16.000Z to 2021-03-10T05:39:34.000Z) | forks: 10 (2020-04-28T06:19:43.000Z to 2022-03-29T21:26:23.000Z)
from test_common import get_sample_layer
def test_get_TripCounts():
layer = get_sample_layer()
assert layer.get_TripCounts("C", "loop") == 256
assert layer.get_TripCounts("Ox", "Spatial") == 7
def test_get_TripCounts_all_IVs():
layer = get_sample_layer()
assert layer.get_TripCounts_all_IVs("loop") == 462422016
assert layer.get_TripCounts_all_IVs("Spatial") == 196
"""
if __name__ == "__main__":
test_get_TripCounts()
test_get_TripCounts_all_IVs()
"""
| avg_line_length: 27.111111 | max_line_length: 60 | alphanum_fraction: 0.721311 | [remaining quality-signal columns omitted]
87dfa73d803ea0d685f28a07b27054972d43e371 | 47 | py | Python | tests/correct_scripts/ipython_ext_test.py | shiba6v/shape_commentator | e57c4d7e0781fa02fdc49dbc5c88236df5948cf1 | ["MIT"] | stars: 65 (2018-12-06T15:34:50.000Z to 2021-09-10T12:38:43.000Z) | issues: 5 (2018-12-14T06:32:54.000Z to 2019-08-04T09:36:45.000Z) | forks: 1 (2019-01-02T00:05:38.000Z to 2019-01-02T00:05:38.000Z)
a = np.array([1,2,3]) #_ (3,)
b = a #_ (3,)
| avg_line_length: 11.75 | max_line_length: 30 | alphanum_fraction: 0.361702 | [remaining quality-signal columns omitted]
35646b49b82e67d79f5c73c49bbcd6371629292f | 942 | py | Python | src/py-opentimelineio/opentimelineio/exceptions.py | michdolan/OpenTimelineIO | 1ec6f07f1af525ba4ca0aa91e01e5939d6237f01 | ["Apache-2.0"] | stars: null | issues: 4 (2022-03-09T22:28:42.000Z to 2022-03-14T15:16:50.000Z) | forks: null
# SPDX-License-Identifier: Apache-2.0
# Copyright Contributors to the OpenTimelineIO project
"""Exception classes for OpenTimelineIO"""
from . _otio import ( # noqa
OTIOError,
NotAChildError,
UnsupportedSchemaError,
CannotComputeAvailableRangeError
)
class CouldNotReadFileError(OTIOError):
pass
class NoKnownAdapterForExtensionError(OTIOError):
pass
class ReadingNotSupportedError(OTIOError):
pass
class WritingNotSupportedError(OTIOError):
pass
class NotSupportedError(OTIOError):
pass
class InvalidSerializableLabelError(OTIOError):
pass
class AdapterDoesntSupportFunctionError(OTIOError):
pass
class InstancingNotAllowedError(OTIOError):
pass
class TransitionFollowingATransitionError(OTIOError):
pass
class MisconfiguredPluginError(OTIOError):
pass
class CannotTrimTransitionsError(OTIOError):
pass
class NoDefaultMediaLinkerError(OTIOError):
pass
| avg_line_length: 15.966102 | max_line_length: 54 | alphanum_fraction: 0.780255 | [remaining quality-signal columns omitted]
3568b6319174e4c04befe03812b698097ee680ef | 218 | py | Python | plotly/validators/frame/__init__.py | ["MIT"] | stars: 12 on gnestor/plotly.py (a8ae062795ddbf9867b8578fe6d9e244948c15ff; 2020-04-18T18:10:22.000Z to 2021-12-06T10:11:15.000Z) | issues: 27 on Vesauza/plotly.py (e53e626d59495d440341751f60aeff73ff365c28; 2020-04-28T21:23:12.000Z to 2021-06-25T15:36:38.000Z) | forks: 6 on Vesauza/plotly.py (2020-04-18T23:07:08.000Z to 2021-11-18T07:53:06.000Z)
from ._traces import TracesValidator
from ._name import NameValidator
from ._layout import LayoutValidator
from ._group import GroupValidator
from ._data import DataValidator
from ._baseframe import BaseframeValidator
| 31.142857
| 42
| 0.862385
| 24
| 218
| 7.583333
| 0.583333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.110092
| 218
| 6
| 43
| 36.333333
| 0.938144
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
3573c129a667ab6ae887c12d86ee745fd22025df
| 82
|
py
|
Python
|
1stSemester_PythonCourse/work1/E01_1827406005.py
|
chenyz2000/schoolCourses
|
cca7f25b0f44186e0c248b26b5d7ed2bcb23c630
|
[
"MIT"
] | null | null | null |
1stSemester_PythonCourse/work1/E01_1827406005.py
|
chenyz2000/schoolCourses
|
cca7f25b0f44186e0c248b26b5d7ed2bcb23c630
|
[
"MIT"
] | null | null | null |
1stSemester_PythonCourse/work1/E01_1827406005.py
|
chenyz2000/schoolCourses
|
cca7f25b0f44186e0c248b26b5d7ed2bcb23c630
|
[
"MIT"
] | null | null | null |
a=eval(input("please input a"))
b=eval(input("please input b"))
print(divmod(a,b))
| 27.333333
| 31
| 0.695122
| 16
| 82
| 3.5625
| 0.4375
| 0.315789
| 0.526316
| 0.701754
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.073171
| 82
| 3
| 32
| 27.333333
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0.337349
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
359ca349c152ae13d7932920e261d355c8a5a897
| 85
|
py
|
Python
|
stdlib_tests/test_floor.py
|
egoelm/voc
|
9e6c545ac9d7825230d397dfff96da81cd089faf
|
[
"BSD-3-Clause"
] | null | null | null |
stdlib_tests/test_floor.py
|
egoelm/voc
|
9e6c545ac9d7825230d397dfff96da81cd089faf
|
[
"BSD-3-Clause"
] | 1
|
2019-09-24T08:06:49.000Z
|
2019-09-24T08:06:49.000Z
|
stdlib_tests/test_floor.py
|
egoelm/voc
|
9e6c545ac9d7825230d397dfff96da81cd089faf
|
[
"BSD-3-Clause"
] | null | null | null |
# to run it: voc stdlib_tests/test_floor.py
from math import floor
print(floor(3.4))
| 21.25
| 43
| 0.764706
| 17
| 85
| 3.705882
| 0.882353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027027
| 0.129412
| 85
| 3
| 44
| 28.333333
| 0.824324
| 0.482353
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0.5
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 5
|
35bb09785952b5711a539de5318e28c73119f523
| 454
|
py
|
Python
|
28-Packages.py
|
Ahm3d-Salama/Python-apps
|
fce528ed62ae76cbd856af05209ffc28dac38480
|
[
"MIT"
] | 2
|
2021-06-08T17:41:30.000Z
|
2022-01-13T10:44:23.000Z
|
28-Packages.py
|
Ahm3d-Salama/Python-apps
|
fce528ed62ae76cbd856af05209ffc28dac38480
|
[
"MIT"
] | null | null | null |
28-Packages.py
|
Ahm3d-Salama/Python-apps
|
fce528ed62ae76cbd856af05209ffc28dac38480
|
[
"MIT"
] | null | null | null |
# import entire module
# import ecommerce.shipping
# ecommerce.shipping.calc_shipping()
# start from the package and import specific module
# start from package.module and import specific function
# from ecommerce.shipping import calc_shipping, calc_tax
# calc_shipping()
# calc_shipping()
# calc_shipping()
# calc_shipping()
# calc_tax()
# import entire module
from ecommerce import shipping
shipping.calc_shipping()
| 17.461538
| 57
| 0.735683
| 54
| 454
| 6.018519
| 0.259259
| 0.258462
| 0.307692
| 0.221538
| 0.16
| 0.16
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0.19163
| 454
| 25
| 58
| 18.16
| 0.885559
| 0.742291
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ea04193fd216c224f3cd60cc438149a56159a538
| 38
|
py
|
Python
|
way/python/exercises/solo_learn/basics/printing_text.py
|
only-romano/junkyard
|
b60a25b2643f429cdafee438d20f9966178d6f36
|
[
"MIT"
] | null | null | null |
way/python/exercises/solo_learn/basics/printing_text.py
|
only-romano/junkyard
|
b60a25b2643f429cdafee438d20f9966178d6f36
|
[
"MIT"
] | null | null | null |
way/python/exercises/solo_learn/basics/printing_text.py
|
only-romano/junkyard
|
b60a25b2643f429cdafee438d20f9966178d6f36
|
[
"MIT"
] | null | null | null |
print("Hi!")
print("Spam and Eggs")
| 12.666667
| 23
| 0.605263
| 6
| 38
| 3.833333
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.157895
| 38
| 2
| 24
| 19
| 0.71875
| 0
| 0
| 0
| 0
| 0
| 0.444444
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
ea09315ab6f08ff217f16e31cd2b88cc834bcd08
| 13
|
py
|
Python
|
ExpertPythonProgramming/ThirdEdition/5/meta-hooks/sub.py
|
t2y/python-study
|
52a132ea600d4696164e540d8a8f8f5fc58e097a
|
[
"Apache-2.0"
] | 18
|
2016-08-15T00:24:44.000Z
|
2020-11-30T15:11:52.000Z
|
ExpertPythonProgramming/ThirdEdition/5/meta-hooks/sub.py
|
t2y/python-study
|
52a132ea600d4696164e540d8a8f8f5fc58e097a
|
[
"Apache-2.0"
] | null | null | null |
ExpertPythonProgramming/ThirdEdition/5/meta-hooks/sub.py
|
t2y/python-study
|
52a132ea600d4696164e540d8a8f8f5fc58e097a
|
[
"Apache-2.0"
] | 6
|
2016-09-28T10:47:03.000Z
|
2020-10-14T10:20:06.000Z
|
print('sub')
| 6.5
| 12
| 0.615385
| 2
| 13
| 4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 13
| 1
| 13
| 13
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0.230769
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
ea1d2b6c5d9f706738cf5e297fc34fd84f4de2d0
| 96
|
py
|
Python
|
net_task/core/__init__.py
|
W84TheSun/user_input_prompt_task
|
32590b45ab44ee8da6b0f6178a2e35cd4366c14e
|
[
"MIT"
] | null | null | null |
net_task/core/__init__.py
|
W84TheSun/user_input_prompt_task
|
32590b45ab44ee8da6b0f6178a2e35cd4366c14e
|
[
"MIT"
] | null | null | null |
net_task/core/__init__.py
|
W84TheSun/user_input_prompt_task
|
32590b45ab44ee8da6b0f6178a2e35cd4366c14e
|
[
"MIT"
] | null | null | null |
from .common_functions import create_words_dictionary, find_suitable_words
from .errors import *
| 48
| 74
| 0.875
| 13
| 96
| 6.076923
| 0.769231
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 96
| 2
| 75
| 48
| 0.897727
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
ea3c309a239f0eec90a8450d4bd0b52ee3971ed2
| 199
|
py
|
Python
|
mpcontribs-users/mpcontribs/users/dilute_solute_diffusion/explorer/urls.py
|
josuav1/MPContribs
|
3cbf0e83ba6cd749dd4fc988c9f6ad076b05f935
|
[
"MIT"
] | 1
|
2019-07-03T04:38:58.000Z
|
2019-07-03T04:38:58.000Z
|
mpcontribs-users/mpcontribs/users/dilute_solute_diffusion/explorer/urls.py
|
josuav1/MPContribs
|
3cbf0e83ba6cd749dd4fc988c9f6ad076b05f935
|
[
"MIT"
] | null | null | null |
mpcontribs-users/mpcontribs/users/dilute_solute_diffusion/explorer/urls.py
|
josuav1/MPContribs
|
3cbf0e83ba6cd749dd4fc988c9f6ad076b05f935
|
[
"MIT"
] | 1
|
2019-07-03T04:39:04.000Z
|
2019-07-03T04:39:04.000Z
|
from django.conf.urls import url
from . import views
from mpcontribs.users_modules import get_user_explorer_name
urlpatterns = [
url(r'^$', views.index, name=get_user_explorer_name(__file__))
]
| 24.875
| 66
| 0.78392
| 29
| 199
| 5
| 0.62069
| 0.096552
| 0.206897
| 0.262069
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.120603
| 199
| 7
| 67
| 28.428571
| 0.828571
| 0
| 0
| 0
| 0
| 0
| 0.01005
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ea3cadb377ccdd641cec3dfa7dc2da9a1d1b0815
| 41
|
pyw
|
Python
|
SeriesMgr/SeriesMgr.pyw
|
esitarski/CrossMgr
|
de33b5ed662556ec659e6e2910f5fd0f88f25fa0
|
[
"MIT"
] | 25
|
2015-02-26T01:26:10.000Z
|
2022-03-25T15:46:55.000Z
|
SeriesMgr/SeriesMgr.pyw
|
mbuckaway/CrossMgr
|
4c64e429eb3215fda1b685c5e684c56f5d0c02cf
|
[
"MIT"
] | 76
|
2015-12-09T04:24:30.000Z
|
2022-02-18T16:39:28.000Z
|
SeriesMgr/SeriesMgr.pyw
|
mbuckaway/CrossMgr
|
4c64e429eb3215fda1b685c5e684c56f5d0c02cf
|
[
"MIT"
] | 17
|
2015-04-23T07:37:13.000Z
|
2020-01-22T17:47:16.000Z
|
from MainWin import MainLoop
MainLoop()
| 10.25
| 28
| 0.804878
| 5
| 41
| 6.6
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 41
| 3
| 29
| 13.666667
| 0.942857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ea3de340dfbe7e22cd3ca8ca36236c4213a3b2b9
| 37
|
py
|
Python
|
src/nostradamus/preprocessing/__init__.py
|
Orlogskapten/tsNostradamus
|
707cbc23fac3e0f92875d89550046e5c3b7b17d2
|
[
"MIT"
] | 3
|
2020-07-06T10:58:40.000Z
|
2020-07-23T21:39:51.000Z
|
src/nostradamus/preprocessing/__init__.py
|
wenceslas-sanchez/tsNostradamus
|
707cbc23fac3e0f92875d89550046e5c3b7b17d2
|
[
"MIT"
] | null | null | null |
src/nostradamus/preprocessing/__init__.py
|
wenceslas-sanchez/tsNostradamus
|
707cbc23fac3e0f92875d89550046e5c3b7b17d2
|
[
"MIT"
] | null | null | null |
from .tunnel_snake import tunnelSnake
| 37
| 37
| 0.891892
| 5
| 37
| 6.4
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.081081
| 37
| 1
| 37
| 37
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ea53713a32a66faa73bb9a7d1378fabb90f206bd
| 79
|
py
|
Python
|
web2py-appliances-master/EStore/models/plugin_shipping.py
|
wantsomechocolate/WantsomeBeanstalk
|
8c8a0a80490d04ea52661a3114fd3db8de65a01e
|
[
"BSD-3-Clause"
] | null | null | null |
web2py-appliances-master/EStore/models/plugin_shipping.py
|
wantsomechocolate/WantsomeBeanstalk
|
8c8a0a80490d04ea52661a3114fd3db8de65a01e
|
[
"BSD-3-Clause"
] | null | null | null |
web2py-appliances-master/EStore/models/plugin_shipping.py
|
wantsomechocolate/WantsomeBeanstalk
|
8c8a0a80490d04ea52661a3114fd3db8de65a01e
|
[
"BSD-3-Clause"
] | null | null | null |
def compute_shipping(type, volume, weight, address1, address2):
return 0.0
| 26.333333
| 63
| 0.746835
| 11
| 79
| 5.272727
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059701
| 0.151899
| 79
| 2
| 64
| 39.5
| 0.80597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|
ea5a9d6016940136e337dfce9f4b12bc3b3248bf
| 96
|
py
|
Python
|
venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/hmac.py
|
GiulianaPola/select_repeats
|
17a0d053d4f874e42cf654dd142168c2ec8fbd11
|
[
"MIT"
] | 2
|
2022-03-13T01:58:52.000Z
|
2022-03-31T06:07:54.000Z
|
venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/hmac.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | 19
|
2021-11-20T04:09:18.000Z
|
2022-03-23T15:05:55.000Z
|
venv/lib/python3.8/site-packages/cryptography/hazmat/backends/openssl/hmac.py
|
DesmoSearch/Desmobot
|
b70b45df3485351f471080deb5c785c4bc5c4beb
|
[
"MIT"
] | null | null | null |
/home/runner/.cache/pip/pool/8d/d8/8d/aebb8123c2c6ff5c6b4d6fe9ccfdd5e047e6bd620653d995f7032dac46
| 96
| 96
| 0.895833
| 9
| 96
| 9.555556
| 0.888889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.354167
| 0
| 96
| 1
| 96
| 96
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ea6f62b5bc4f45cefd9fb009c9276ba663bedfca
| 10517
|
py
|
Python
|
HW4 - 95542247/q4/gen/Assignment4q4Lexer.py
|
SadraGoudarzdashti/IUSTCompiler
|
7aa24df7de10030c313ad2e8f3830d9e2b182ce1
|
[
"MIT"
] | null | null | null |
HW4 - 95542247/q4/gen/Assignment4q4Lexer.py
|
SadraGoudarzdashti/IUSTCompiler
|
7aa24df7de10030c313ad2e8f3830d9e2b182ce1
|
[
"MIT"
] | null | null | null |
HW4 - 95542247/q4/gen/Assignment4q4Lexer.py
|
SadraGoudarzdashti/IUSTCompiler
|
7aa24df7de10030c313ad2e8f3830d9e2b182ce1
|
[
"MIT"
] | null | null | null |
# Generated from C:/Users/novin/PycharmProjects/tamrin-compiler\Assignment4q4.g4 by ANTLR 4.9.1
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2)")
buf.write("\u010c\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7")
buf.write("\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r")
buf.write("\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\4\23")
buf.write("\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30")
buf.write("\4\31\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36")
buf.write("\t\36\4\37\t\37\4 \t \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%")
buf.write("\4&\t&\4\'\t\'\4(\t(\3\2\3\2\3\2\3\2\3\2\3\2\3\3\3\3\3")
buf.write("\4\3\4\3\4\3\4\3\4\3\4\3\4\3\5\3\5\3\5\3\5\3\5\3\5\3\5")
buf.write("\3\6\3\6\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\t\3")
buf.write("\t\3\t\3\t\3\t\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3")
buf.write("\r\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3")
buf.write("\20\3\20\3\21\3\21\3\21\3\21\3\21\3\21\3\21\3\22\3\22")
buf.write("\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\23\3\24")
buf.write("\3\24\3\24\3\25\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26")
buf.write("\3\26\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27")
buf.write("\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\30")
buf.write("\3\30\3\31\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3\34\3\35")
buf.write("\3\35\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3\37\3\37\3 ")
buf.write("\3 \3 \3 \3 \3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3#")
buf.write("\3#\3#\3#\3$\3$\3%\3%\7%\u00f1\n%\f%\16%\u00f4\13%\3&")
buf.write("\6&\u00f7\n&\r&\16&\u00f8\3\'\6\'\u00fc\n\'\r\'\16\'\u00fd")
buf.write("\3\'\3\'\3(\3(\3(\3(\7(\u0106\n(\f(\16(\u0109\13(\3(\3")
buf.write("(\2\2)\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27")
buf.write("\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30")
buf.write("/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'")
buf.write("M(O)\3\2\7\4\2C\\c|\6\2\62;C\\aac|\3\2\62;\5\2\13\f\17")
buf.write("\17\"\"\4\2\f\f\17\17\2\u010f\2\3\3\2\2\2\2\5\3\2\2\2")
buf.write("\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17")
buf.write("\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3")
buf.write("\2\2\2\2\31\3\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2")
buf.write("\2\2\2!\3\2\2\2\2#\3\2\2\2\2%\3\2\2\2\2\'\3\2\2\2\2)\3")
buf.write("\2\2\2\2+\3\2\2\2\2-\3\2\2\2\2/\3\2\2\2\2\61\3\2\2\2\2")
buf.write("\63\3\2\2\2\2\65\3\2\2\2\2\67\3\2\2\2\29\3\2\2\2\2;\3")
buf.write("\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E")
buf.write("\3\2\2\2\2G\3\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2")
buf.write("O\3\2\2\2\3Q\3\2\2\2\5W\3\2\2\2\7Y\3\2\2\2\t`\3\2\2\2")
buf.write("\13g\3\2\2\2\rl\3\2\2\2\17q\3\2\2\2\21s\3\2\2\2\23z\3")
buf.write("\2\2\2\25|\3\2\2\2\27~\3\2\2\2\31\u0080\3\2\2\2\33\u0082")
buf.write("\3\2\2\2\35\u008a\3\2\2\2\37\u008c\3\2\2\2!\u008e\3\2")
buf.write("\2\2#\u0095\3\2\2\2%\u0099\3\2\2\2\'\u00a1\3\2\2\2)\u00a4")
buf.write("\3\2\2\2+\u00a9\3\2\2\2-\u00af\3\2\2\2/\u00c2\3\2\2\2")
buf.write("\61\u00c4\3\2\2\2\63\u00c7\3\2\2\2\65\u00c9\3\2\2\2\67")
buf.write("\u00cb\3\2\2\29\u00cd\3\2\2\2;\u00cf\3\2\2\2=\u00d1\3")
buf.write("\2\2\2?\u00d8\3\2\2\2A\u00dd\3\2\2\2C\u00e3\3\2\2\2E\u00e8")
buf.write("\3\2\2\2G\u00ec\3\2\2\2I\u00ee\3\2\2\2K\u00f6\3\2\2\2")
buf.write("M\u00fb\3\2\2\2O\u0101\3\2\2\2QR\7e\2\2RS\7n\2\2ST\7c")
buf.write("\2\2TU\7u\2\2UV\7u\2\2V\4\3\2\2\2WX\7}\2\2X\6\3\2\2\2")
buf.write("YZ\7r\2\2Z[\7w\2\2[\\\7d\2\2\\]\7n\2\2]^\7k\2\2^_\7e\2")
buf.write("\2_\b\3\2\2\2`a\7u\2\2ab\7v\2\2bc\7c\2\2cd\7v\2\2de\7")
buf.write("k\2\2ef\7e\2\2f\n\3\2\2\2gh\7x\2\2hi\7q\2\2ij\7k\2\2j")
buf.write("k\7f\2\2k\f\3\2\2\2lm\7o\2\2mn\7c\2\2no\7k\2\2op\7p\2")
buf.write("\2p\16\3\2\2\2qr\7*\2\2r\20\3\2\2\2st\7U\2\2tu\7v\2\2")
buf.write("uv\7t\2\2vw\7k\2\2wx\7p\2\2xy\7i\2\2y\22\3\2\2\2z{\7]")
buf.write("\2\2{\24\3\2\2\2|}\7_\2\2}\26\3\2\2\2~\177\7+\2\2\177")
buf.write("\30\3\2\2\2\u0080\u0081\7\177\2\2\u0081\32\3\2\2\2\u0082")
buf.write("\u0083\7g\2\2\u0083\u0084\7z\2\2\u0084\u0085\7v\2\2\u0085")
buf.write("\u0086\7g\2\2\u0086\u0087\7p\2\2\u0087\u0088\7f\2\2\u0088")
buf.write("\u0089\7u\2\2\u0089\34\3\2\2\2\u008a\u008b\7=\2\2\u008b")
buf.write("\36\3\2\2\2\u008c\u008d\7.\2\2\u008d \3\2\2\2\u008e\u008f")
buf.write("\7t\2\2\u008f\u0090\7g\2\2\u0090\u0091\7v\2\2\u0091\u0092")
buf.write("\7w\2\2\u0092\u0093\7t\2\2\u0093\u0094\7p\2\2\u0094\"")
buf.write("\3\2\2\2\u0095\u0096\7k\2\2\u0096\u0097\7p\2\2\u0097\u0098")
buf.write("\7v\2\2\u0098$\3\2\2\2\u0099\u009a\7d\2\2\u009a\u009b")
buf.write("\7q\2\2\u009b\u009c\7q\2\2\u009c\u009d\7n\2\2\u009d\u009e")
buf.write("\7g\2\2\u009e\u009f\7c\2\2\u009f\u00a0\7p\2\2\u00a0&\3")
buf.write("\2\2\2\u00a1\u00a2\7k\2\2\u00a2\u00a3\7h\2\2\u00a3(\3")
buf.write("\2\2\2\u00a4\u00a5\7g\2\2\u00a5\u00a6\7n\2\2\u00a6\u00a7")
buf.write("\7u\2\2\u00a7\u00a8\7g\2\2\u00a8*\3\2\2\2\u00a9\u00aa")
buf.write("\7y\2\2\u00aa\u00ab\7j\2\2\u00ab\u00ac\7k\2\2\u00ac\u00ad")
buf.write("\7n\2\2\u00ad\u00ae\7g\2\2\u00ae,\3\2\2\2\u00af\u00b0")
buf.write("\7U\2\2\u00b0\u00b1\7{\2\2\u00b1\u00b2\7u\2\2\u00b2\u00b3")
buf.write("\7v\2\2\u00b3\u00b4\7g\2\2\u00b4\u00b5\7o\2\2\u00b5\u00b6")
buf.write("\7\60\2\2\u00b6\u00b7\7q\2\2\u00b7\u00b8\7w\2\2\u00b8")
buf.write("\u00b9\7v\2\2\u00b9\u00ba\7\60\2\2\u00ba\u00bb\7r\2\2")
buf.write("\u00bb\u00bc\7t\2\2\u00bc\u00bd\7k\2\2\u00bd\u00be\7p")
buf.write("\2\2\u00be\u00bf\7v\2\2\u00bf\u00c0\7n\2\2\u00c0\u00c1")
buf.write("\7p\2\2\u00c1.\3\2\2\2\u00c2\u00c3\7?\2\2\u00c3\60\3\2")
buf.write("\2\2\u00c4\u00c5\7(\2\2\u00c5\u00c6\7(\2\2\u00c6\62\3")
buf.write("\2\2\2\u00c7\u00c8\7>\2\2\u00c8\64\3\2\2\2\u00c9\u00ca")
buf.write("\7-\2\2\u00ca\66\3\2\2\2\u00cb\u00cc\7/\2\2\u00cc8\3\2")
buf.write("\2\2\u00cd\u00ce\7,\2\2\u00ce:\3\2\2\2\u00cf\u00d0\7\60")
buf.write("\2\2\u00d0<\3\2\2\2\u00d1\u00d2\7n\2\2\u00d2\u00d3\7g")
buf.write("\2\2\u00d3\u00d4\7p\2\2\u00d4\u00d5\7i\2\2\u00d5\u00d6")
buf.write("\7v\2\2\u00d6\u00d7\7j\2\2\u00d7>\3\2\2\2\u00d8\u00d9")
buf.write("\7v\2\2\u00d9\u00da\7t\2\2\u00da\u00db\7w\2\2\u00db\u00dc")
buf.write("\7g\2\2\u00dc@\3\2\2\2\u00dd\u00de\7h\2\2\u00de\u00df")
buf.write("\7c\2\2\u00df\u00e0\7n\2\2\u00e0\u00e1\7u\2\2\u00e1\u00e2")
buf.write("\7g\2\2\u00e2B\3\2\2\2\u00e3\u00e4\7v\2\2\u00e4\u00e5")
buf.write("\7j\2\2\u00e5\u00e6\7k\2\2\u00e6\u00e7\7u\2\2\u00e7D\3")
buf.write("\2\2\2\u00e8\u00e9\7p\2\2\u00e9\u00ea\7g\2\2\u00ea\u00eb")
buf.write("\7y\2\2\u00ebF\3\2\2\2\u00ec\u00ed\7#\2\2\u00edH\3\2\2")
buf.write("\2\u00ee\u00f2\t\2\2\2\u00ef\u00f1\t\3\2\2\u00f0\u00ef")
buf.write("\3\2\2\2\u00f1\u00f4\3\2\2\2\u00f2\u00f0\3\2\2\2\u00f2")
buf.write("\u00f3\3\2\2\2\u00f3J\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f5")
buf.write("\u00f7\t\4\2\2\u00f6\u00f5\3\2\2\2\u00f7\u00f8\3\2\2\2")
buf.write("\u00f8\u00f6\3\2\2\2\u00f8\u00f9\3\2\2\2\u00f9L\3\2\2")
buf.write("\2\u00fa\u00fc\t\5\2\2\u00fb\u00fa\3\2\2\2\u00fc\u00fd")
buf.write("\3\2\2\2\u00fd\u00fb\3\2\2\2\u00fd\u00fe\3\2\2\2\u00fe")
buf.write("\u00ff\3\2\2\2\u00ff\u0100\b\'\2\2\u0100N\3\2\2\2\u0101")
buf.write("\u0102\7\61\2\2\u0102\u0103\7\61\2\2\u0103\u0107\3\2\2")
buf.write("\2\u0104\u0106\n\6\2\2\u0105\u0104\3\2\2\2\u0106\u0109")
buf.write("\3\2\2\2\u0107\u0105\3\2\2\2\u0107\u0108\3\2\2\2\u0108")
buf.write("\u010a\3\2\2\2\u0109\u0107\3\2\2\2\u010a\u010b\b(\2\2")
buf.write("\u010bP\3\2\2\2\7\2\u00f2\u00f8\u00fd\u0107\3\b\2\2")
return buf.getvalue()
class Assignment4q4Lexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
T__0 = 1
T__1 = 2
T__2 = 3
T__3 = 4
T__4 = 5
T__5 = 6
T__6 = 7
T__7 = 8
T__8 = 9
T__9 = 10
T__10 = 11
T__11 = 12
T__12 = 13
T__13 = 14
T__14 = 15
T__15 = 16
T__16 = 17
T__17 = 18
T__18 = 19
T__19 = 20
T__20 = 21
T__21 = 22
T__22 = 23
T__23 = 24
T__24 = 25
T__25 = 26
T__26 = 27
T__27 = 28
T__28 = 29
T__29 = 30
T__30 = 31
T__31 = 32
T__32 = 33
T__33 = 34
T__34 = 35
IDENTIFIER = 36
INTEGER_LITERAL = 37
WS = 38
LINE_COMMENT = 39
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'class'", "'{'", "'public'", "'static'", "'void'", "'main'",
"'('", "'String'", "'['", "']'", "')'", "'}'", "'extends'",
"';'", "','", "'return'", "'int'", "'boolean'", "'if'", "'else'",
"'while'", "'System.out.println'", "'='", "'&&'", "'<'", "'+'",
"'-'", "'*'", "'.'", "'length'", "'true'", "'false'", "'this'",
"'new'", "'!'" ]
symbolicNames = [ "<INVALID>",
"IDENTIFIER", "INTEGER_LITERAL", "WS", "LINE_COMMENT" ]
ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
"T__7", "T__8", "T__9", "T__10", "T__11", "T__12", "T__13",
"T__14", "T__15", "T__16", "T__17", "T__18", "T__19",
"T__20", "T__21", "T__22", "T__23", "T__24", "T__25",
"T__26", "T__27", "T__28", "T__29", "T__30", "T__31",
"T__32", "T__33", "T__34", "IDENTIFIER", "INTEGER_LITERAL",
"WS", "LINE_COMMENT" ]
grammarFileName = "Assignment4q4.g4"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.9.1")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
| 53.116162
| 103
| 0.543216
| 2460
| 10517
| 2.256098
| 0.154065
| 0.14018
| 0.078919
| 0.077117
| 0.234054
| 0.16018
| 0.084865
| 0.075315
| 0.066667
| 0.06
| 0
| 0.323269
| 0.162309
| 10517
| 197
| 104
| 53.385787
| 0.306697
| 0.008843
| 0
| 0
| 1
| 0.363128
| 0.592514
| 0.535988
| 0
| 0
| 0
| 0
| 0
| 1
| 0.011173
| false
| 0
| 0.022346
| 0
| 0.307263
| 0.005587
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
ea84d4da3104665d3ccf691022c68d1657a221a5
| 150
|
py
|
Python
|
build/sources/hooker.py
|
PlayerG9/AppStalker
|
576bb9b3d4cd199cf762839e913905d60c9b88ff
|
[
"MIT"
] | null | null | null |
build/sources/hooker.py
|
PlayerG9/AppStalker
|
576bb9b3d4cd199cf762839e913905d60c9b88ff
|
[
"MIT"
] | null | null | null |
build/sources/hooker.py
|
PlayerG9/AppStalker
|
576bb9b3d4cd199cf762839e913905d60c9b88ff
|
[
"MIT"
] | null | null | null |
import sys
import os
BASE = os.path.dirname(__file__)
sys.path.append(os.path.join(BASE, 'libs'))
os.add_dll_directory(os.path.join(BASE, 'windll'))
| 21.428571
| 50
| 0.746667
| 26
| 150
| 4.076923
| 0.538462
| 0.169811
| 0.188679
| 0.264151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08
| 150
| 6
| 51
| 25
| 0.768116
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 0.4
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ea892fd5041b2a4bf409c325bb4ab66b8fd99588
| 48
|
py
|
Python
|
stable_baselines_custom/a2c/__init__.py
|
iamlab-cmu/stable-baselines
|
6e9a8b2ad1d690bd9a9611405e4f319a52101540
|
[
"MIT"
] | null | null | null |
stable_baselines_custom/a2c/__init__.py
|
iamlab-cmu/stable-baselines
|
6e9a8b2ad1d690bd9a9611405e4f319a52101540
|
[
"MIT"
] | null | null | null |
stable_baselines_custom/a2c/__init__.py
|
iamlab-cmu/stable-baselines
|
6e9a8b2ad1d690bd9a9611405e4f319a52101540
|
[
"MIT"
] | null | null | null |
from stable_baselines_custom.a2c.a2c import A2C
| 24
| 47
| 0.875
| 8
| 48
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068182
| 0.083333
| 48
| 1
| 48
| 48
| 0.840909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
ea9cb324e332d0615ae95ad4535278240caac80c
| 58
|
py
|
Python
|
tests/roots/test_pymat/pysrc/func.py
|
mhbl3/matlabdomain
|
b210f5b5bd9276dba27f411cce0398538dbe7d9c
|
[
"BSD-2-Clause"
] | 38
|
2017-11-23T19:27:05.000Z
|
2022-03-28T11:34:08.000Z
|
tests/roots/test_pymat/pysrc/func.py
|
mhbl3/matlabdomain
|
b210f5b5bd9276dba27f411cce0398538dbe7d9c
|
[
"BSD-2-Clause"
] | 83
|
2017-11-22T19:08:49.000Z
|
2022-02-14T09:06:23.000Z
|
tests/roots/test_pymat/pysrc/func.py
|
mhbl3/matlabdomain
|
b210f5b5bd9276dba27f411cce0398538dbe7d9c
|
[
"BSD-2-Clause"
] | 31
|
2017-11-21T13:38:50.000Z
|
2022-02-10T19:34:58.000Z
|
def main():
""" Returns the answer. """
return 42
| 14.5
| 31
| 0.534483
| 7
| 58
| 4.428571
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 0.293103
| 58
| 3
| 32
| 19.333333
| 0.707317
| 0.327586
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
|
0
| 5
|
576e56a525807d1655faa1eb619e22f99be2dbc3
| 217
|
py
|
Python
|
backend/app/extensions/__init__.py
|
JHowell45/financial-application
|
9be70692651658f9b0c7e73cd93522e0a579cd98
|
[
"MIT"
] | null | null | null |
backend/app/extensions/__init__.py
|
JHowell45/financial-application
|
9be70692651658f9b0c7e73cd93522e0a579cd98
|
[
"MIT"
] | null | null | null |
backend/app/extensions/__init__.py
|
JHowell45/financial-application
|
9be70692651658f9b0c7e73cd93522e0a579cd98
|
[
"MIT"
] | null | null | null |
"""Use this file for importing and creating instances for all of the extensions.
This file contains the instances of all of the extensions required for this Flask
application.
"""
from .restplus_extension import api
| 31
| 81
| 0.801843
| 33
| 217
| 5.242424
| 0.636364
| 0.092486
| 0.092486
| 0.208092
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.156682
| 217
| 6
| 82
| 36.166667
| 0.945355
| 0.797235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5774ac2a2ae1095d3ad56c8b5bf1e093abc19dea
| 134
|
py
|
Python
|
image_server/admin.py
|
spralja/portfolio_website
|
732d851771b59b8588783eeb53bec5bc788df5d6
|
[
"MIT"
] | null | null | null |
image_server/admin.py
|
spralja/portfolio_website
|
732d851771b59b8588783eeb53bec5bc788df5d6
|
[
"MIT"
] | 1
|
2022-03-08T21:55:33.000Z
|
2022-03-08T21:55:33.000Z
|
image_server/admin.py
|
spralja/portfolio_website
|
732d851771b59b8588783eeb53bec5bc788df5d6
|
[
"MIT"
] | null | null | null |
from django.contrib import admin
from .models import Image, ImageBinary
admin.site.register(Image)
admin.site.register(ImageBinary)
| 19.142857
| 38
| 0.820896
| 18
| 134
| 6.111111
| 0.555556
| 0.163636
| 0.309091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.097015
| 134
| 6
| 39
| 22.333333
| 0.909091
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
57c3fbebd3ae8512ae17e05a5a068013f416bdda
| 187
|
py
|
Python
|
aioredis_benchmarks/bench_config.py
|
m-novikov/aioredis-benchmarks
|
9259d9a323821a7423eadaa37957d212fb85731d
|
[
"MIT"
] | null | null | null |
aioredis_benchmarks/bench_config.py
|
m-novikov/aioredis-benchmarks
|
9259d9a323821a7423eadaa37957d212fb85731d
|
[
"MIT"
] | null | null | null |
aioredis_benchmarks/bench_config.py
|
m-novikov/aioredis-benchmarks
|
9259d9a323821a7423eadaa37957d212fb85731d
|
[
"MIT"
] | null | null | null |
import os
max_conn = int(os.environ.get('MAX_CONNECTIONS', 64))
num_iterations = int(os.environ.get('NUM_ITERATIONS', 10000))
url = os.environ.get('REDIS_URL', 'redis://localhost:6379')
| 31.166667
| 61
| 0.737968
| 29
| 187
| 4.586207
| 0.551724
| 0.203008
| 0.270677
| 0.225564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063953
| 0.080214
| 187
| 5
| 62
| 37.4
| 0.709302
| 0
| 0
| 0
| 0
| 0
| 0.320856
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
57dba3f3d63acd380adc47f78f7145a118c3a41d
| 24223
|
py
|
Python
|
pyTaguchi/taguchi.py
|
rbngpp/pyTaguchi
|
2370cfa0388604f9a3ebda4896a12c22200212d9
|
[
"MIT"
] | null | null | null |
pyTaguchi/taguchi.py
|
rbngpp/pyTaguchi
|
2370cfa0388604f9a3ebda4896a12c22200212d9
|
[
"MIT"
] | null | null | null |
pyTaguchi/taguchi.py
|
rbngpp/pyTaguchi
|
2370cfa0388604f9a3ebda4896a12c22200212d9
|
[
"MIT"
] | null | null | null |
from numpy import size, zeros, random
import pandas as pd
import matplotlib.pyplot as plt
class Variable():
def __init__(self, name, values):
self.name = str(name)
self.values = values
self.dof = size(values)
# Ref. https://www.itl.nist.gov/div898/handbook/pri/section5/pri56.htm
class Taguchi():
def __init__(self):
self.variables = []
def add(self, v):
name = v["name"]
value = v["values"]
var = Variable(name,value)
self.variables.append(var)
def run(self, randomize=False):
assert size(self.variables) > 0, "Empty vector"
self.FACTORS = size(self.variables)
self.LEVELS = self.variables[0].dof
self.check_dof()
self.generate_design()
if randomize == True:
self.randomize_runs()
self.generate_df()
self.generate_plot()
def check_dof(self):
for variable in self.variables:
if variable.dof != self.LEVELS:
raise ValueError("Degrees of freedom should be the same!")
def generate_design(self):
if self.FACTORS == 3 and self.LEVELS == 2:
self.design = 'L4'
self.OBSERVATIONS = 4
self.design_L4()
elif self.FACTORS == 4 and self.LEVELS == 3:
self.design = 'L9'
self.OBSERVATIONS = 9
self.design_L9()
elif self.FACTORS == 5 and self.LEVELS == 4:
self.design ='L16b'
self.OBSERVATIONS = 16
self.design_L16b()
elif self.FACTORS == 7 and self.LEVELS == 2:
self.design = 'L8'
self.OBSERVATIONS = 8
self.design_L8()
elif self.FACTORS == 11 and self.LEVELS == 2:
self.design = "L12"
self.OBSERVATIONS = 12
self.design_L12()
else:
raise Exception("Taguchi design not available.")
def randomize_runs(self):
self.matrix = self.matrix[random.choice(self.matrix.shape[0], self.matrix.shape[0], replace=False)]
def generate_df(self):
self.columns = []
self.rows = []
index = 0
for variable in self.variables:
self.columns.append(variable.name)
while index < self.OBSERVATIONS:
row = "RUN " + str(index+1)
self.rows.append(row)
index += 1
self.df = pd.DataFrame(self.matrix, columns = self.columns, index = self.rows)
def generate_plot(self):
self.fig = plt.figure()
ax = self.fig.add_subplot(111)
ax.table(cellText = self.df.values,
rowLabels = self.df.index,
colLabels = self.df.columns,
loc = "center",
cellLoc="center"
)
plot_title = "Taguchi table " + self.design
ax.set_title(plot_title)
ax.axis("off")
plt.show()
def design_L4(self):
# L4: https://www.itl.nist.gov/div898/software/dataplot/dex/L4.DAT
self.matrix = zeros((4,3))
# Row 1
self.matrix[[0],[0]] = self.variables[0].values[0]
self.matrix[[0],[1]] = self.variables[1].values[0]
self.matrix[[0],[2]] = self.variables[2].values[0]
# Row 2
self.matrix[[1],[0]] = self.variables[0].values[0]
self.matrix[[1],[1]] = self.variables[1].values[1]
self.matrix[[1],[2]] = self.variables[2].values[1]
# Row 3
self.matrix[[2],[0]] = self.variables[0].values[1]
self.matrix[[2],[1]] = self.variables[1].values[0]
self.matrix[[2],[2]] = self.variables[2].values[1]
# Row 4
self.matrix[[3],[0]] = self.variables[0].values[1]
self.matrix[[3],[1]] = self.variables[1].values[1]
self.matrix[[3],[2]] = self.variables[2].values[0]
def design_L9(self):
# L9: https://www.itl.nist.gov/div898/software/dataplot/dex/L9.DAT
self.matrix = zeros((9,4))
# Row 1 : 1,1,1,1
self.matrix[[0],[0]] = self.variables[0].values[0]
self.matrix[[0],[1]] = self.variables[1].values[0]
self.matrix[[0],[2]] = self.variables[2].values[0]
self.matrix[[0],[3]] = self.variables[3].values[0]
# Row 2 : 1,2,2,2
self.matrix[[1],[0]] = self.variables[0].values[0]
self.matrix[[1],[1]] = self.variables[1].values[1]
self.matrix[[1],[2]] = self.variables[2].values[1]
self.matrix[[1],[3]] = self.variables[3].values[1]
# Row 3 : 1,3,3,3
self.matrix[[2],[0]] = self.variables[0].values[0]
self.matrix[[2],[1]] = self.variables[1].values[2]
self.matrix[[2],[2]] = self.variables[2].values[2]
self.matrix[[2],[3]] = self.variables[3].values[2]
# Row 4 : 2,1,2,3
self.matrix[[3],[0]] = self.variables[0].values[1]
self.matrix[[3],[1]] = self.variables[1].values[0]
self.matrix[[3],[2]] = self.variables[2].values[1]
self.matrix[[3],[3]] = self.variables[3].values[2]
# Row 5 : 2,2,3,1
self.matrix[[4],[0]] = self.variables[0].values[1]
self.matrix[[4],[1]] = self.variables[1].values[1]
self.matrix[[4],[2]] = self.variables[2].values[2]
self.matrix[[4],[3]] = self.variables[3].values[0]
# Row 6 : 2,3,1,2
self.matrix[[5],[0]] = self.variables[0].values[1]
self.matrix[[5],[1]] = self.variables[1].values[2]
self.matrix[[5],[2]] = self.variables[2].values[0]
self.matrix[[5],[3]] = self.variables[3].values[1]
# Row 7 : 3,1,3,2
self.matrix[[6],[0]] = self.variables[0].values[2]
self.matrix[[6],[1]] = self.variables[1].values[0]
self.matrix[[6],[2]] = self.variables[2].values[2]
self.matrix[[6],[3]] = self.variables[3].values[1]
# Row 8 : 3,2,1,3
self.matrix[[7],[0]] = self.variables[0].values[2]
self.matrix[[7],[1]] = self.variables[1].values[1]
self.matrix[[7],[2]] = self.variables[2].values[0]
self.matrix[[7],[3]] = self.variables[3].values[2]
# Row 9 : 3,3,2,1
self.matrix[[8],[0]] = self.variables[0].values[2]
self.matrix[[8],[1]] = self.variables[1].values[2]
self.matrix[[8],[2]] = self.variables[2].values[1]
self.matrix[[8],[3]] = self.variables[3].values[0]
def design_L16b(self):
# L16b (1): https://www.york.ac.uk/depts/maths/tables/l16b.htm
# L16b (2): https://www.itl.nist.gov/div898/software/dataplot/dex/L16B.DAT
self.matrix = zeros((16,5))
# Row 1 : 1,1,1,1,1
self.matrix[[0],[0]] = self.variables[0].values[0]
self.matrix[[0],[1]] = self.variables[1].values[0]
self.matrix[[0],[2]] = self.variables[2].values[0]
self.matrix[[0],[3]] = self.variables[3].values[0]
self.matrix[[0],[4]] = self.variables[4].values[0]
# Row 2 : 1,2,2,2,2
self.matrix[[1],[0]] = self.variables[0].values[0]
self.matrix[[1],[1]] = self.variables[1].values[1]
self.matrix[[1],[2]] = self.variables[2].values[1]
self.matrix[[1],[3]] = self.variables[3].values[1]
self.matrix[[1],[4]] = self.variables[4].values[1]
# Row 3 : 1,3,3,3,3
self.matrix[[2],[0]] = self.variables[0].values[0]
self.matrix[[2],[1]] = self.variables[1].values[2]
self.matrix[[2],[2]] = self.variables[2].values[2]
self.matrix[[2],[3]] = self.variables[3].values[2]
self.matrix[[2],[4]] = self.variables[4].values[2]
# Row 4 : 1,4,4,4,4
self.matrix[[3],[0]] = self.variables[0].values[0]
self.matrix[[3],[1]] = self.variables[1].values[3]
self.matrix[[3],[2]] = self.variables[2].values[3]
self.matrix[[3],[3]] = self.variables[3].values[3]
self.matrix[[3],[4]] = self.variables[4].values[3]
# Row 5 : 2,1,2,3,4
self.matrix[[4],[0]] = self.variables[0].values[1]
self.matrix[[4],[1]] = self.variables[1].values[0]
self.matrix[[4],[2]] = self.variables[2].values[1]
self.matrix[[4],[3]] = self.variables[3].values[2]
self.matrix[[4],[4]] = self.variables[4].values[3]
# Row 6 : 2,2,1,4,3
self.matrix[[5],[0]] = self.variables[0].values[1]
self.matrix[[5],[1]] = self.variables[1].values[1]
self.matrix[[5],[2]] = self.variables[2].values[0]
self.matrix[[5],[3]] = self.variables[3].values[3]
self.matrix[[5],[4]] = self.variables[4].values[2]
# Row 7 : 2,3,4,1,2
self.matrix[[6],[0]] = self.variables[0].values[1]
self.matrix[[6],[1]] = self.variables[1].values[2]
self.matrix[[6],[2]] = self.variables[2].values[3]
self.matrix[[6],[3]] = self.variables[3].values[0]
self.matrix[[6],[4]] = self.variables[4].values[1]
# Row 8 : 2,4,3,2,1
self.matrix[[7],[0]] = self.variables[0].values[1]
self.matrix[[7],[1]] = self.variables[1].values[3]
self.matrix[[7],[2]] = self.variables[2].values[2]
self.matrix[[7],[3]] = self.variables[3].values[1]
self.matrix[[7],[4]] = self.variables[4].values[0]
# Row 9 : 3,1,3,4,2
self.matrix[[8],[0]] = self.variables[0].values[2]
self.matrix[[8],[1]] = self.variables[1].values[0]
self.matrix[[8],[2]] = self.variables[2].values[2]
self.matrix[[8],[3]] = self.variables[3].values[3]
self.matrix[[8],[4]] = self.variables[4].values[1]
# Row 10 : 3,2,4,3,1
self.matrix[[9],[0]] = self.variables[0].values[2]
self.matrix[[9],[1]] = self.variables[1].values[1]
self.matrix[[9],[2]] = self.variables[2].values[3]
self.matrix[[9],[3]] = self.variables[3].values[2]
self.matrix[[9],[4]] = self.variables[4].values[0]
# Row 11 : 3,3,1,2,4
self.matrix[[10],[0]] = self.variables[0].values[2]
self.matrix[[10],[1]] = self.variables[1].values[2]
self.matrix[[10],[2]] = self.variables[2].values[0]
self.matrix[[10],[3]] = self.variables[3].values[1]
self.matrix[[10],[4]] = self.variables[4].values[3]
# Row 12 : 3,4,2,1,3
self.matrix[[11],[0]] = self.variables[0].values[2]
self.matrix[[11],[1]] = self.variables[1].values[3]
self.matrix[[11],[2]] = self.variables[2].values[1]
self.matrix[[11],[3]] = self.variables[3].values[0]
self.matrix[[11],[4]] = self.variables[4].values[2]
# Row 13 : 4,1,4,2,3
self.matrix[[12],[0]] = self.variables[0].values[3]
self.matrix[[12],[1]] = self.variables[1].values[0]
self.matrix[[12],[2]] = self.variables[2].values[3]
self.matrix[[12],[3]] = self.variables[3].values[1]
self.matrix[[12],[4]] = self.variables[4].values[2]
# Row 14 : 4,2,3,1,4
self.matrix[[13],[0]] = self.variables[0].values[3]
self.matrix[[13],[1]] = self.variables[1].values[1]
self.matrix[[13],[2]] = self.variables[2].values[2]
self.matrix[[13],[3]] = self.variables[3].values[0]
self.matrix[[13],[4]] = self.variables[4].values[3]
# Row 15 : 4,3,2,4,1
self.matrix[[14],[0]] = self.variables[0].values[3]
self.matrix[[14],[1]] = self.variables[1].values[2]
self.matrix[[14],[2]] = self.variables[2].values[1]
self.matrix[[14],[3]] = self.variables[3].values[3]
self.matrix[[14],[4]] = self.variables[4].values[0]
# Row 16 : 4,4,1,3,2
self.matrix[[15],[0]] = self.variables[0].values[3]
self.matrix[[15],[1]] = self.variables[1].values[3]
self.matrix[[15],[2]] = self.variables[2].values[0]
self.matrix[[15],[3]] = self.variables[3].values[2]
self.matrix[[15],[4]] = self.variables[4].values[1]
def design_L8(self):
# L8: https://www.itl.nist.gov/div898/software/dataplot/dex/L8.DAT
self.matrix = zeros((8,7))
# Row 1 : 1,1,1,1,1,1,1
self.matrix[[0],[0]] = self.variables[0].values[0]
self.matrix[[0],[1]] = self.variables[1].values[0]
self.matrix[[0],[2]] = self.variables[2].values[0]
self.matrix[[0],[3]] = self.variables[3].values[0]
self.matrix[[0],[4]] = self.variables[4].values[0]
self.matrix[[0],[5]] = self.variables[5].values[0]
self.matrix[[0],[6]] = self.variables[6].values[0]
# Row 2 : 1,1,1,2,2,2,2
self.matrix[[1],[0]] = self.variables[0].values[0]
self.matrix[[1],[1]] = self.variables[1].values[0]
self.matrix[[1],[2]] = self.variables[2].values[0]
self.matrix[[1],[3]] = self.variables[3].values[1]
self.matrix[[1],[4]] = self.variables[4].values[1]
self.matrix[[1],[5]] = self.variables[5].values[1]
self.matrix[[1],[6]] = self.variables[6].values[1]
# Row 3 : 1,2,2,1,1,2,2
self.matrix[[2],[0]] = self.variables[0].values[0]
self.matrix[[2],[1]] = self.variables[1].values[1]
self.matrix[[2],[2]] = self.variables[2].values[1]
self.matrix[[2],[3]] = self.variables[3].values[0]
self.matrix[[2],[4]] = self.variables[4].values[0]
self.matrix[[2],[5]] = self.variables[5].values[1]
self.matrix[[2],[6]] = self.variables[6].values[1]
# Row 4 : 1,2,2,2,2,1,1
self.matrix[[3],[0]] = self.variables[0].values[0]
self.matrix[[3],[1]] = self.variables[1].values[1]
self.matrix[[3],[2]] = self.variables[2].values[1]
self.matrix[[3],[3]] = self.variables[3].values[1]
self.matrix[[3],[4]] = self.variables[4].values[1]
self.matrix[[3],[5]] = self.variables[5].values[0]
self.matrix[[3],[6]] = self.variables[6].values[0]
# Row 5 : 2,1,2,1,2,1,2
self.matrix[[4],[0]] = self.variables[0].values[1]
self.matrix[[4],[1]] = self.variables[1].values[0]
self.matrix[[4],[2]] = self.variables[2].values[1]
self.matrix[[4],[3]] = self.variables[3].values[0]
self.matrix[[4],[4]] = self.variables[4].values[1]
self.matrix[[4],[5]] = self.variables[5].values[0]
self.matrix[[4],[6]] = self.variables[6].values[1]
# Row 6 : 2,1,2,2,1,2,1
self.matrix[[5],[0]] = self.variables[0].values[1]
self.matrix[[5],[1]] = self.variables[1].values[0]
self.matrix[[5],[2]] = self.variables[2].values[1]
self.matrix[[5],[3]] = self.variables[3].values[1]
self.matrix[[5],[4]] = self.variables[4].values[0]
self.matrix[[5],[5]] = self.variables[5].values[1]
self.matrix[[5],[6]] = self.variables[6].values[0]
# Row 7 : 2,2,1,1,2,2,1
self.matrix[[6],[0]] = self.variables[0].values[1]
self.matrix[[6],[1]] = self.variables[1].values[1]
self.matrix[[6],[2]] = self.variables[2].values[0]
self.matrix[[6],[3]] = self.variables[3].values[0]
self.matrix[[6],[4]] = self.variables[4].values[1]
self.matrix[[6],[5]] = self.variables[5].values[1]
self.matrix[[6],[6]] = self.variables[6].values[0]
# Row 8 : 2,2,1,2,1,1,2
self.matrix[[7],[0]] = self.variables[0].values[1]
self.matrix[[7],[1]] = self.variables[1].values[1]
self.matrix[[7],[2]] = self.variables[2].values[0]
self.matrix[[7],[3]] = self.variables[3].values[1]
self.matrix[[7],[4]] = self.variables[4].values[0]
self.matrix[[7],[5]] = self.variables[5].values[0]
self.matrix[[7],[6]] = self.variables[6].values[1]
def design_L12(self):
# L12: https://www.york.ac.uk/depts/maths/tables/l12.gif
self.matrix = zeros((12,11))
# Row 1 : 1,1,1 1,1,1 1,1,1 1,1
self.matrix[[0],[0]] = self.variables[0].values[0]
self.matrix[[0],[1]] = self.variables[1].values[0]
self.matrix[[0],[2]] = self.variables[2].values[0]
self.matrix[[0],[3]] = self.variables[3].values[0]
self.matrix[[0],[4]] = self.variables[4].values[0]
self.matrix[[0],[5]] = self.variables[5].values[0]
self.matrix[[0],[6]] = self.variables[6].values[0]
self.matrix[[0],[7]] = self.variables[7].values[0]
self.matrix[[0],[8]] = self.variables[8].values[0]
self.matrix[[0],[9]] = self.variables[9].values[0]
self.matrix[[0],[10]] = self.variables[10].values[0]
# Row 2 : 1,1,1 1,1,2 2,2,2 2,2
self.matrix[[1],[0]] = self.variables[0].values[0]
self.matrix[[1],[1]] = self.variables[1].values[0]
self.matrix[[1],[2]] = self.variables[2].values[0]
self.matrix[[1],[3]] = self.variables[3].values[0]
self.matrix[[1],[4]] = self.variables[4].values[0]
self.matrix[[1],[5]] = self.variables[5].values[1]
self.matrix[[1],[6]] = self.variables[6].values[1]
self.matrix[[1],[7]] = self.variables[7].values[1]
self.matrix[[1],[8]] = self.variables[8].values[1]
self.matrix[[1],[9]] = self.variables[9].values[1]
self.matrix[[1],[10]] = self.variables[10].values[1]
# Row 3 : 1,1,2 2,2,1 1,1,2 2,2
self.matrix[[2],[0]] = self.variables[0].values[0]
self.matrix[[2],[1]] = self.variables[1].values[0]
self.matrix[[2],[2]] = self.variables[2].values[1]
self.matrix[[2],[3]] = self.variables[3].values[1]
self.matrix[[2],[4]] = self.variables[4].values[1]
self.matrix[[2],[5]] = self.variables[5].values[0]
self.matrix[[2],[6]] = self.variables[6].values[0]
self.matrix[[2],[7]] = self.variables[7].values[0]
self.matrix[[2],[8]] = self.variables[8].values[1]
self.matrix[[2],[9]] = self.variables[9].values[1]
self.matrix[[2],[10]] = self.variables[10].values[1]
# Row 4 : 1,2,1 2,2,1 2,2,1 1,2
self.matrix[[3],[0]] = self.variables[0].values[0]
self.matrix[[3],[1]] = self.variables[1].values[1]
self.matrix[[3],[2]] = self.variables[2].values[0]
self.matrix[[3],[3]] = self.variables[3].values[1]
self.matrix[[3],[4]] = self.variables[4].values[1]
self.matrix[[3],[5]] = self.variables[5].values[0]
self.matrix[[3],[6]] = self.variables[6].values[1]
self.matrix[[3],[7]] = self.variables[7].values[1]
self.matrix[[3],[8]] = self.variables[8].values[0]
self.matrix[[3],[9]] = self.variables[9].values[0]
self.matrix[[3],[10]] = self.variables[10].values[1]
# Row 5 : 1,2,2 1,2,2 1,2,1 2,1
self.matrix[[4],[0]] = self.variables[0].values[0]
self.matrix[[4],[1]] = self.variables[1].values[1]
self.matrix[[4],[2]] = self.variables[2].values[1]
self.matrix[[4],[3]] = self.variables[3].values[0]
self.matrix[[4],[4]] = self.variables[4].values[1]
self.matrix[[4],[5]] = self.variables[5].values[1]
self.matrix[[4],[6]] = self.variables[6].values[0]
self.matrix[[4],[7]] = self.variables[7].values[1]
self.matrix[[4],[8]] = self.variables[8].values[0]
self.matrix[[4],[9]] = self.variables[9].values[1]
self.matrix[[4],[10]] = self.variables[10].values[0]
# Row 6 : 1,2,2 2,1,2 2,1,2 1,1
self.matrix[[5],[0]] = self.variables[0].values[0]
self.matrix[[5],[1]] = self.variables[1].values[1]
self.matrix[[5],[2]] = self.variables[2].values[1]
self.matrix[[5],[3]] = self.variables[3].values[1]
self.matrix[[5],[4]] = self.variables[4].values[0]
self.matrix[[5],[5]] = self.variables[5].values[1]
self.matrix[[5],[6]] = self.variables[6].values[1]
self.matrix[[5],[7]] = self.variables[7].values[0]
self.matrix[[5],[8]] = self.variables[8].values[1]
self.matrix[[5],[9]] = self.variables[9].values[0]
self.matrix[[5],[10]] = self.variables[10].values[0]
# Row 7 : 2,1,2 2,1,1 2,2,1 2,1
self.matrix[[6],[0]] = self.variables[0].values[1]
self.matrix[[6],[1]] = self.variables[1].values[0]
self.matrix[[6],[2]] = self.variables[2].values[1]
self.matrix[[6],[3]] = self.variables[3].values[1]
self.matrix[[6],[4]] = self.variables[4].values[0]
self.matrix[[6],[5]] = self.variables[5].values[0]
self.matrix[[6],[6]] = self.variables[6].values[1]
self.matrix[[6],[7]] = self.variables[7].values[1]
self.matrix[[6],[8]] = self.variables[8].values[0]
self.matrix[[6],[9]] = self.variables[9].values[1]
self.matrix[[6],[10]] = self.variables[10].values[0]
# Row 8 : 2,1,2 1,2,2 2,1,1 1,2
self.matrix[[7],[0]] = self.variables[0].values[1]
self.matrix[[7],[1]] = self.variables[1].values[0]
self.matrix[[7],[2]] = self.variables[2].values[1]
self.matrix[[7],[3]] = self.variables[3].values[0]
self.matrix[[7],[4]] = self.variables[4].values[1]
self.matrix[[7],[5]] = self.variables[5].values[1]
self.matrix[[7],[6]] = self.variables[6].values[1]
self.matrix[[7],[7]] = self.variables[7].values[0]
self.matrix[[7],[8]] = self.variables[8].values[0]
self.matrix[[7],[9]] = self.variables[9].values[0]
self.matrix[[7],[10]] = self.variables[10].values[1]
# Row 9 : 2,1,1 2,2,2 1,2,2 1,1
self.matrix[[8],[0]] = self.variables[0].values[1]
self.matrix[[8],[1]] = self.variables[1].values[0]
self.matrix[[8],[2]] = self.variables[2].values[0]
self.matrix[[8],[3]] = self.variables[3].values[1]
self.matrix[[8],[4]] = self.variables[4].values[1]
self.matrix[[8],[5]] = self.variables[5].values[1]
self.matrix[[8],[6]] = self.variables[6].values[0]
self.matrix[[8],[7]] = self.variables[7].values[1]
self.matrix[[8],[8]] = self.variables[8].values[1]
self.matrix[[8],[9]] = self.variables[9].values[0]
self.matrix[[8],[10]] = self.variables[10].values[0]
# Row 10 : 2,2,2 1,1,1 1,2,2 1,2
self.matrix[[9],[0]] = self.variables[0].values[1]
self.matrix[[9],[1]] = self.variables[1].values[1]
self.matrix[[9],[2]] = self.variables[2].values[1]
self.matrix[[9],[3]] = self.variables[3].values[0]
self.matrix[[9],[4]] = self.variables[4].values[0]
self.matrix[[9],[5]] = self.variables[5].values[0]
self.matrix[[9],[6]] = self.variables[6].values[0]
self.matrix[[9],[7]] = self.variables[7].values[1]
self.matrix[[9],[8]] = self.variables[8].values[1]
self.matrix[[9],[9]] = self.variables[9].values[0]
self.matrix[[9],[10]] = self.variables[10].values[1]
# Row 11 : 2,2,1 2,1,2 1,1,1 2,2
self.matrix[[10],[0]] = self.variables[0].values[1]
self.matrix[[10],[1]] = self.variables[1].values[1]
self.matrix[[10],[2]] = self.variables[2].values[0]
self.matrix[[10],[3]] = self.variables[3].values[1]
self.matrix[[10],[4]] = self.variables[4].values[0]
self.matrix[[10],[5]] = self.variables[5].values[1]
self.matrix[[10],[6]] = self.variables[6].values[0]
self.matrix[[10],[7]] = self.variables[7].values[0]
self.matrix[[10],[8]] = self.variables[8].values[0]
self.matrix[[10],[9]] = self.variables[9].values[1]
self.matrix[[10],[10]] = self.variables[10].values[1]
# Row 12 : 2,2,1 1,2,1 2,1,2 2,1
self.matrix[[11],[0]] = self.variables[0].values[1]
self.matrix[[11],[1]] = self.variables[1].values[1]
self.matrix[[11],[2]] = self.variables[2].values[0]
self.matrix[[11],[3]] = self.variables[3].values[0]
self.matrix[[11],[4]] = self.variables[4].values[1]
self.matrix[[11],[5]] = self.variables[5].values[0]
self.matrix[[11],[6]] = self.variables[6].values[1]
self.matrix[[11],[7]] = self.variables[7].values[0]
self.matrix[[11],[8]] = self.variables[8].values[1]
self.matrix[[11],[9]] = self.variables[9].values[1]
self.matrix[[11],[10]] = self.variables[10].values[0]
| 50.995789
| 107
| 0.540478
| 3708
| 24223
| 3.522114
| 0.036138
| 0.249617
| 0.110337
| 0.14709
| 0.860184
| 0.846861
| 0.83193
| 0.793032
| 0.505283
| 0.455743
| 0
| 0.096847
| 0.231433
| 24223
| 475
| 108
| 50.995789
| 0.604662
| 0.065021
| 0
| 0.385185
| 0
| 0
| 0.005976
| 0
| 0
| 0
| 0
| 0
| 0.002469
| 1
| 0.034568
| false
| 0
| 0.007407
| 0
| 0.046914
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
aa45e83b68d1edaf352a6f1afd2efbd32cd58ee7
| 146
|
py
|
Python
|
interview_kernel/__init__.py
|
MathHubInfo/MoSIS_Jupyter_Kernel
|
a8764c648a2242edf2d88a7558966d4f3edba360
|
[
"MIT"
] | 1
|
2019-03-17T19:01:25.000Z
|
2019-03-17T19:01:25.000Z
|
interview_kernel/__init__.py
|
MathHubInfo/MoSIS_Jupyter_Kernel
|
a8764c648a2242edf2d88a7558966d4f3edba360
|
[
"MIT"
] | 2
|
2018-05-14T11:05:14.000Z
|
2018-07-09T17:20:14.000Z
|
interview_kernel/__init__.py
|
MathHubInfo/MoSIS_Jupyter_Kernel
|
a8764c648a2242edf2d88a7558966d4f3edba360
|
[
"MIT"
] | null | null | null |
from . import string_handling, mmtinterface, exaoutput, pde_state_machine
#from .interview_kernel import Interview
from . import interview_kernel
| 36.5
| 73
| 0.849315
| 18
| 146
| 6.611111
| 0.611111
| 0.168067
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10274
| 146
| 3
| 74
| 48.666667
| 0.908397
| 0.267123
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
a4bab93e456e1075ceae1972e3dcf56cac2059fb
| 328630
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_wireless_controller_vap.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_wireless_controller_vap.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/fortinet/fortios/plugins/modules/fortios_wireless_controller_vap.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019-2020 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_wireless_controller_vap
short_description: Configure Virtual Access Points (VAPs) in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify wireless_controller feature and vap category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.0
version_added: "2.10"
author:
- Link Zheng (@chillancezen)
- Jie Xue (@JieX19)
- Hongbin Lu (@fgtdev-hblu)
- Frank Shen (@frankshen01)
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Legacy fortiosapi has been deprecated, httpapi is the preferred way to run playbooks
requirements:
- ansible>=2.9.0
options:
access_token:
description:
- Token-based authentication.
Generated from GUI of Fortigate.
type: str
required: false
enable_log:
description:
- Enable/Disable logging for task.
type: bool
required: false
default: false
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
member_path:
type: str
description:
- Member attribute path to operate on.
- Delimited by a slash character if there are more than one attribute.
- Parameter marked with member_path is legitimate for doing member operation.
member_state:
type: str
description:
- Add or delete a member under specified attribute path.
- When member_state is specified, the state option is ignored.
choices:
- present
- absent
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
wireless_controller_vap:
description:
- Configure Virtual Access Points (VAPs).
default: null
type: dict
suboptions:
access_control_list:
description:
- access-control-list profile name. Source wireless-controller.access-control-list.name.
type: str
acct_interim_interval:
description:
- WiFi RADIUS accounting interim interval (60 - 86400 sec).
type: int
additional_akms:
description:
- Additional AKMs.
type: list
choices:
- akm6
address_group:
description:
- Address group ID. Source wireless-controller.addrgrp.id.
type: str
alias:
description:
- Alias.
type: str
antivirus_profile:
description:
- AntiVirus profile name. Source antivirus.profile.name.
type: str
application_list:
description:
- Application control list name. Source application.list.name.
type: str
atf_weight:
description:
- Airtime weight in percentage .
type: int
auth:
description:
- Authentication protocol.
type: str
choices:
- psk
- radius
- usergroup
broadcast_ssid:
description:
- Enable/disable broadcasting the SSID .
type: str
choices:
- enable
- disable
broadcast_suppression:
description:
- Optional suppression of broadcast messages. For example, you can keep DHCP messages, ARP broadcasts, and so on off of the wireless
network.
type: list
choices:
- dhcp-up
- dhcp-down
- dhcp-starvation
- arp-known
- arp-unknown
- arp-reply
- arp-poison
- arp-proxy
- netbios-ns
- netbios-ds
- ipv6
- all-other-mc
- all-other-bc
- dhcp-ucast
bss_color_partial:
description:
- Enable/disable 802.11ax partial BSS color .
type: str
choices:
- enable
- disable
bstm_disassociation_imminent:
description:
- Enable/disable forcing of disassociation after the BSTM request timer has been reached .
type: str
choices:
- enable
- disable
bstm_load_balancing_disassoc_timer:
description:
- Time interval for client to voluntarily leave AP before forcing a disassociation due to AP load-balancing (0 to 30).
type: int
bstm_rssi_disassoc_timer:
description:
- Time interval for client to voluntarily leave AP before forcing a disassociation due to low RSSI (0 to 2000).
type: int
captive_portal_ac_name:
description:
- Local-bridging captive portal ac-name.
type: str
captive_portal_auth_timeout:
description:
- Hard timeout - AP will always clear the session after timeout regardless of traffic (0 - 864000 sec).
type: int
captive_portal_macauth_radius_secret:
description:
- Secret key to access the macauth RADIUS server.
type: str
captive_portal_macauth_radius_server:
description:
- Captive portal external RADIUS server domain name or IP address.
type: str
captive_portal_radius_secret:
description:
- Secret key to access the RADIUS server.
type: str
captive_portal_radius_server:
description:
- Captive portal RADIUS server domain name or IP address.
type: str
captive_portal_session_timeout_interval:
description:
- Session timeout interval (0 - 864000 sec).
type: int
dhcp_address_enforcement:
description:
- Enable/disable DHCP address enforcement.
type: str
choices:
- enable
- disable
dhcp_lease_time:
description:
- DHCP lease time in seconds for NAT IP address.
type: int
dhcp_option43_insertion:
description:
- Enable/disable insertion of DHCP option 43.
type: str
choices:
- enable
- disable
dhcp_option82_circuit_id_insertion:
description:
- Enable/disable DHCP option 82 circuit-id insert.
type: str
choices:
- style-1
- style-2
- disable
- style-3
dhcp_option82_insertion:
description:
- Enable/disable DHCP option 82 insert.
type: str
choices:
- enable
- disable
dhcp_option82_remote_id_insertion:
description:
- Enable/disable DHCP option 82 remote-id insert.
type: str
choices:
- style-1
- disable
dynamic_vlan:
description:
- Enable/disable dynamic VLAN assignment.
type: str
choices:
- enable
- disable
eap_reauth:
description:
- Enable/disable EAP re-authentication for WPA-Enterprise security.
type: str
choices:
- enable
- disable
eap_reauth_intv:
description:
- EAP re-authentication interval (1800 - 864000 sec).
type: int
eapol_key_retries:
description:
- Enable/disable retransmission of EAPOL-Key frames (message 3/4 and group message 1/2).
type: str
choices:
- disable
- enable
encrypt:
description:
- Encryption protocol to use (only available when security is set to a WPA type).
type: str
choices:
- TKIP
- AES
- TKIP-AES
external_fast_roaming:
description:
- Enable/disable fast roaming or pre-authentication with external APs not managed by the FortiGate.
type: str
choices:
- enable
- disable
external_logout:
description:
- URL of external authentication logout server.
type: str
external_web:
description:
- URL of external authentication web server.
type: str
external_web_format:
description:
- URL query parameter detection.
type: str
choices:
- auto-detect
- no-query-string
- partial-query-string
fast_bss_transition:
description:
- Enable/disable 802.11r Fast BSS Transition (FT).
type: str
choices:
- disable
- enable
fast_roaming:
description:
- Enable/disable fast-roaming, or pre-authentication, where supported by clients.
type: str
choices:
- enable
- disable
ft_mobility_domain:
description:
- Mobility domain identifier in FT (1 - 65535).
type: int
ft_over_ds:
description:
- Enable/disable FT over the Distribution System (DS).
type: str
choices:
- disable
- enable
ft_r0_key_lifetime:
description:
- Lifetime of the PMK-R0 key in FT, 1-65535 minutes.
type: int
gas_comeback_delay:
description:
- GAS comeback delay (0 or 100 - 10000 milliseconds).
type: int
gas_fragmentation_limit:
description:
- GAS fragmentation limit (512 - 4096).
type: int
gtk_rekey:
description:
- Enable/disable GTK rekey for WPA security.
type: str
choices:
- enable
- disable
gtk_rekey_intv:
description:
- GTK rekey interval (1800 - 864000 sec).
type: int
high_efficiency:
description:
- Enable/disable 802.11ax high efficiency.
type: str
choices:
- enable
- disable
hotspot20_profile:
description:
- Hotspot 2.0 profile name. Source wireless-controller.hotspot20.hs-profile.name.
type: str
igmp_snooping:
description:
- Enable/disable IGMP snooping.
type: str
choices:
- enable
- disable
intra_vap_privacy:
description:
- Enable/disable blocking communication between clients on the same SSID (called intra-SSID privacy).
type: str
choices:
- enable
- disable
ip:
description:
- IP address and subnet mask for the local standalone NAT subnet.
type: str
ips_sensor:
description:
- IPS sensor name. Source ips.sensor.name.
type: str
ipv6_rules:
description:
- Optional rules for IPv6 packets. For example, you can keep RA, RS, and so on off the wireless network.
type: list
choices:
- drop-icmp6ra
- drop-icmp6rs
- drop-llmnr6
- drop-icmp6mld2
- drop-dhcp6s
- drop-dhcp6c
- ndp-proxy
- drop-ns-dad
- drop-ns-nondad
key:
description:
- WEP Key.
type: str
keyindex:
description:
- WEP key index (1 - 4).
type: int
ldpc:
description:
- VAP low-density parity-check (LDPC) coding configuration.
type: str
choices:
- disable
- rx
- tx
- rxtx
local_authentication:
description:
- Enable/disable AP local authentication.
type: str
choices:
- enable
- disable
local_bridging:
description:
- Enable/disable bridging of wireless and Ethernet interfaces on the FortiAP.
type: str
choices:
- enable
- disable
local_lan:
description:
- Allow/deny traffic destined for a Class A, B, or C private IP address.
type: str
choices:
- allow
- deny
local_standalone:
description:
- Enable/disable AP local standalone.
type: str
choices:
- enable
- disable
local_standalone_dns:
description:
- Enable/disable AP local standalone DNS.
type: str
choices:
- enable
- disable
local_standalone_dns_ip:
description:
- IPv4 addresses for the local standalone DNS.
type: str
local_standalone_nat:
description:
- Enable/disable AP local standalone NAT mode.
type: str
choices:
- enable
- disable
mac_auth_bypass:
description:
- Enable/disable MAC authentication bypass.
type: str
choices:
- enable
- disable
mac_called_station_delimiter:
description:
- MAC called station delimiter.
type: str
choices:
- hyphen
- single-hyphen
- colon
- none
mac_calling_station_delimiter:
description:
- MAC calling station delimiter.
type: str
choices:
- hyphen
- single-hyphen
- colon
- none
mac_case:
description:
- MAC case.
type: str
choices:
- uppercase
- lowercase
mac_filter:
description:
- Enable/disable MAC filtering to block wireless clients by MAC address.
type: str
choices:
- enable
- disable
mac_filter_list:
description:
- Create a list of MAC addresses for MAC address filtering.
type: list
suboptions:
id:
description:
- ID.
required: true
type: int
mac:
description:
- MAC address.
type: str
mac_filter_policy:
description:
- Deny or allow the client with this MAC address.
type: str
choices:
- allow
- deny
mac_filter_policy_other:
description:
- Allow or block clients with MAC addresses that are not in the filter list.
type: str
choices:
- allow
- deny
mac_password_delimiter:
description:
- MAC authentication password delimiter.
type: str
choices:
- hyphen
- single-hyphen
- colon
- none
mac_username_delimiter:
description:
- MAC authentication username delimiter.
type: str
choices:
- hyphen
- single-hyphen
- colon
- none
max_clients:
description:
- Maximum number of clients that can connect simultaneously to the VAP.
type: int
max_clients_ap:
description:
- Maximum number of clients that can connect simultaneously to each radio.
type: int
mbo:
description:
- Enable/disable Multiband Operation.
type: str
choices:
- disable
- enable
mbo_cell_data_conn_pref:
description:
- MBO cell data connection preference (0, 1, or 255).
type: str
choices:
- excluded
- prefer-not
- prefer-use
me_disable_thresh:
description:
- Disable multicast enhancement when this many clients are receiving multicast traffic.
type: int
mesh_backhaul:
description:
- Enable/disable using this VAP as a WiFi mesh backhaul. This entry is only available when security is set to a WPA type or open.
type: str
choices:
- enable
- disable
mpsk:
description:
- Enable/disable multiple pre-shared keys (PSKs).
type: str
choices:
- enable
- disable
mpsk_concurrent_clients:
description:
- Number of pre-shared keys (PSKs) to allow if multiple pre-shared keys are enabled.
type: int
mpsk_key:
description:
- Pre-shared keys that can be used to connect to this virtual access point.
type: list
suboptions:
comment:
description:
- Comment.
type: str
concurrent_clients:
description:
- Number of clients that can connect using this pre-shared key.
type: str
key_name:
description:
- Pre-shared key name.
type: str
mpsk_schedules:
description:
- Firewall schedule for MPSK passphrase. The passphrase will be effective only when at least one schedule is valid.
type: list
suboptions:
name:
description:
- Schedule name. Source firewall.schedule.group.name firewall.schedule.recurring.name firewall.schedule.onetime.name.
required: true
type: str
passphrase:
description:
- WPA Pre-shared key.
type: str
mpsk_profile:
description:
- MPSK profile name. Source wireless-controller.mpsk-profile.name.
type: str
mu_mimo:
description:
- Enable/disable Multi-user MIMO.
type: str
choices:
- enable
- disable
multicast_enhance:
description:
- Enable/disable converting multicast to unicast to improve performance.
type: str
choices:
- enable
- disable
multicast_rate:
description:
- Multicast rate (0, 6000, 12000, or 24000 kbps).
type: str
choices:
- 0
- 6000
- 12000
- 24000
nac:
description:
- Enable/disable network access control.
type: str
choices:
- enable
- disable
nac_profile:
description:
- NAC profile name. Source wireless-controller.nac-profile.name.
type: str
name:
description:
- Virtual AP name.
required: true
type: str
neighbor_report_dual_band:
description:
- Enable/disable dual-band neighbor report.
type: str
choices:
- disable
- enable
okc:
description:
- Enable/disable Opportunistic Key Caching (OKC).
type: str
choices:
- disable
- enable
owe_groups:
description:
- OWE-Groups.
type: list
choices:
- 19
- 20
- 21
owe_transition:
description:
- Enable/disable OWE transition mode support.
type: str
choices:
- disable
- enable
owe_transition_ssid:
description:
- OWE transition mode peer SSID.
type: str
passphrase:
description:
- WPA pre-shared key (PSK) to be used to authenticate WiFi users.
type: str
pmf:
description:
- Protected Management Frames (PMF) support.
type: str
choices:
- disable
- enable
- optional
pmf_assoc_comeback_timeout:
description:
- Protected Management Frames (PMF) comeback maximum timeout (1-20 sec).
type: int
pmf_sa_query_retry_timeout:
description:
- Protected Management Frames (PMF) SA query retry timeout interval (1 - 5, in units of 100 msec).
type: int
port_macauth:
description:
- Enable/disable LAN port MAC authentication.
type: str
choices:
- disable
- radius
- address-group
port_macauth_reauth_timeout:
description:
- LAN port MAC authentication re-authentication timeout value.
type: int
port_macauth_timeout:
description:
- LAN port MAC authentication idle timeout value.
type: int
portal_message_override_group:
description:
- Replacement message group for this VAP (only available when security is set to a captive portal type). Source system.replacemsg-group.name.
type: str
portal_message_overrides:
description:
- Individual message overrides.
type: dict
suboptions:
auth_disclaimer_page:
description:
- Override auth-disclaimer-page message with message from portal-message-overrides group.
type: str
auth_login_failed_page:
description:
- Override auth-login-failed-page message with message from portal-message-overrides group.
type: str
auth_login_page:
description:
- Override auth-login-page message with message from portal-message-overrides group.
type: str
auth_reject_page:
description:
- Override auth-reject-page message with message from portal-message-overrides group.
type: str
portal_type:
description:
- Captive portal functionality. Configure how the captive portal authenticates users and whether it includes a disclaimer.
type: str
choices:
- auth
- auth+disclaimer
- disclaimer
- email-collect
- cmcc
- cmcc-macauth
- auth-mac
- external-auth
- external-macauth
primary_wag_profile:
description:
- Primary wireless access gateway profile name. Source wireless-controller.wag-profile.name.
type: str
probe_resp_suppression:
description:
- Enable/disable probe response suppression (to ignore weak signals).
type: str
choices:
- enable
- disable
probe_resp_threshold:
description:
- Minimum signal level/threshold in dBm required for the AP response to probe requests (-95 to -20).
type: str
ptk_rekey:
description:
- Enable/disable PTK rekey for WPA-Enterprise security.
type: str
choices:
- enable
- disable
ptk_rekey_intv:
description:
- PTK rekey interval (1800 - 864000 sec).
type: int
qos_profile:
description:
- Quality of service profile name. Source wireless-controller.qos-profile.name.
type: str
quarantine:
description:
- Enable/disable station quarantine.
type: str
choices:
- enable
- disable
radio_2g_threshold:
description:
- Minimum signal level/threshold in dBm required for the AP response to receive a packet in the 2.4G band (-95 to -20).
type: str
radio_5g_threshold:
description:
- Minimum signal level/threshold in dBm required for the AP response to receive a packet in the 5G band (-95 to -20).
type: str
radio_sensitivity:
description:
- Enable/disable software radio sensitivity (to ignore weak signals).
type: str
choices:
- enable
- disable
radius_mac_auth:
description:
- Enable/disable RADIUS-based MAC authentication of clients.
type: str
choices:
- enable
- disable
radius_mac_auth_server:
description:
- RADIUS-based MAC authentication server. Source user.radius.name.
type: str
radius_mac_auth_usergroups:
description:
- Selective user groups that are permitted for RADIUS MAC authentication.
type: list
suboptions:
name:
description:
- User group name.
required: true
type: str
radius_server:
description:
- RADIUS server to be used to authenticate WiFi users. Source user.radius.name.
type: str
rates_11a:
description:
- Allowed data rates for 802.11a.
type: list
choices:
- 1
- 1-basic
- 2
- 2-basic
- 5.5
- 5.5-basic
- 11
- 11-basic
- 6
- 6-basic
- 9
- 9-basic
- 12
- 12-basic
- 18
- 18-basic
- 24
- 24-basic
- 36
- 36-basic
- 48
- 48-basic
- 54
- 54-basic
rates_11ac_ss12:
description:
- Allowed data rates for 802.11ac with 1 or 2 spatial streams.
type: list
choices:
- mcs0/1
- mcs1/1
- mcs2/1
- mcs3/1
- mcs4/1
- mcs5/1
- mcs6/1
- mcs7/1
- mcs8/1
- mcs9/1
- mcs10/1
- mcs11/1
- mcs0/2
- mcs1/2
- mcs2/2
- mcs3/2
- mcs4/2
- mcs5/2
- mcs6/2
- mcs7/2
- mcs8/2
- mcs9/2
- mcs10/2
- mcs11/2
rates_11ac_ss34:
description:
- Allowed data rates for 802.11ac with 3 or 4 spatial streams.
type: list
choices:
- mcs0/3
- mcs1/3
- mcs2/3
- mcs3/3
- mcs4/3
- mcs5/3
- mcs6/3
- mcs7/3
- mcs8/3
- mcs9/3
- mcs10/3
- mcs11/3
- mcs0/4
- mcs1/4
- mcs2/4
- mcs3/4
- mcs4/4
- mcs5/4
- mcs6/4
- mcs7/4
- mcs8/4
- mcs9/4
- mcs10/4
- mcs11/4
rates_11bg:
description:
- Allowed data rates for 802.11b/g.
type: list
choices:
- 1
- 1-basic
- 2
- 2-basic
- 5.5
- 5.5-basic
- 11
- 11-basic
- 6
- 6-basic
- 9
- 9-basic
- 12
- 12-basic
- 18
- 18-basic
- 24
- 24-basic
- 36
- 36-basic
- 48
- 48-basic
- 54
- 54-basic
rates_11n_ss12:
description:
- Allowed data rates for 802.11n with 1 or 2 spatial streams.
type: list
choices:
- mcs0/1
- mcs1/1
- mcs2/1
- mcs3/1
- mcs4/1
- mcs5/1
- mcs6/1
- mcs7/1
- mcs8/2
- mcs9/2
- mcs10/2
- mcs11/2
- mcs12/2
- mcs13/2
- mcs14/2
- mcs15/2
rates_11n_ss34:
description:
- Allowed data rates for 802.11n with 3 or 4 spatial streams.
type: list
choices:
- mcs16/3
- mcs17/3
- mcs18/3
- mcs19/3
- mcs20/3
- mcs21/3
- mcs22/3
- mcs23/3
- mcs24/4
- mcs25/4
- mcs26/4
- mcs27/4
- mcs28/4
- mcs29/4
- mcs30/4
- mcs31/4
sae_groups:
description:
- SAE-Groups.
type: list
choices:
- 19
- 20
- 21
- 1
- 2
- 5
- 14
- 15
- 16
- 17
- 18
- 27
- 28
- 29
- 30
- 31
sae_password:
description:
- WPA3 SAE password to be used to authenticate WiFi users.
type: str
scan_botnet_connections:
description:
- Block or monitor connections to Botnet servers or disable Botnet scanning.
type: str
choices:
- disable
- monitor
- block
schedule:
description:
- VAP schedule name.
type: str
secondary_wag_profile:
description:
- Secondary wireless access gateway profile name. Source wireless-controller.wag-profile.name.
type: str
security:
description:
- Security mode for the wireless interface.
type: str
choices:
- open
- captive-portal
- wep64
- wep128
- wpa-personal
- wpa-personal+captive-portal
- wpa-enterprise
- wpa-only-personal
- wpa-only-personal+captive-portal
- wpa-only-enterprise
- wpa2-only-personal
- wpa2-only-personal+captive-portal
- wpa2-only-enterprise
- osen
- wpa3-enterprise
- wpa3-sae
- wpa3-sae-transition
- owe
- wpa3-only-enterprise
- wpa3-enterprise-transition
security_exempt_list:
description:
- Optional security exempt list for captive portal authentication. Source user.security-exempt-list.name.
type: str
security_obsolete_option:
description:
- Enable/disable obsolete security options.
type: str
choices:
- enable
- disable
security_redirect_url:
description:
- Optional URL for redirecting users after they pass captive portal authentication.
type: str
selected_usergroups:
description:
- Selective user groups that are permitted to authenticate.
type: list
suboptions:
name:
description:
- User group name. Source user.group.name.
required: true
type: str
split_tunneling:
description:
- Enable/disable split tunneling.
type: str
choices:
- enable
- disable
ssid:
description:
- IEEE 802.11 service set identifier (SSID) for the wireless interface. Users who wish to use the wireless network must configure their
computers to access this SSID name.
type: str
sticky_client_remove:
description:
- Enable/disable sticky client remove to maintain good signal level clients in the SSID.
type: str
choices:
- enable
- disable
sticky_client_threshold_2g:
description:
- Minimum signal level/threshold in dBm required for the 2G client to be serviced by the AP (-95 to -20).
type: str
sticky_client_threshold_5g:
description:
- Minimum signal level/threshold in dBm required for the 5G client to be serviced by the AP (-95 to -20).
type: str
target_wake_time:
description:
- Enable/disable 802.11ax target wake time.
type: str
choices:
- enable
- disable
tkip_counter_measure:
description:
- Enable/disable TKIP counter measure.
type: str
choices:
- enable
- disable
tunnel_echo_interval:
description:
- The time interval to send echo to both primary and secondary tunnel peers (1 - 65535 sec).
type: int
tunnel_fallback_interval:
description:
- The time interval for secondary tunnel to fall back to primary tunnel (0 - 65535 sec).
type: int
usergroup:
description:
- Firewall user group to be used to authenticate WiFi users.
type: list
suboptions:
name:
description:
- User group name. Source user.group.name.
required: true
type: str
utm_log:
description:
- Enable/disable UTM logging.
type: str
choices:
- enable
- disable
utm_profile:
description:
- UTM profile name. Source wireless-controller.utm-profile.name.
type: str
utm_status:
description:
- Enable to add one or more security profiles (AV, IPS, etc.) to the VAP.
type: str
choices:
- enable
- disable
vdom:
description:
- Name of the VDOM that the Virtual AP has been added to. Source system.vdom.name.
type: str
vlan_auto:
description:
- Enable/disable automatic management of SSID VLAN interface.
type: str
choices:
- enable
- disable
vlan_pool:
description:
- VLAN pool.
type: list
suboptions:
id:
description:
- ID.
required: true
type: int
wtp_group:
description:
- WTP group name. Source wireless-controller.wtp-group.name.
type: str
vlan_pooling:
description:
- Enable/disable VLAN pooling, to allow grouping of multiple wireless controller VLANs into VLAN pools. When set to wtp-group, VLAN
pooling occurs with VLAN assignment by wtp-group.
type: str
choices:
- wtp-group
- round-robin
- hash
- disable
vlanid:
description:
- Optional VLAN ID.
type: int
voice_enterprise:
description:
- Enable/disable 802.11k and 802.11v assisted Voice-Enterprise roaming.
type: str
choices:
- disable
- enable
webfilter_profile:
description:
- WebFilter profile name. Source webfilter.profile.name.
type: str
'''
EXAMPLES = '''
- collections:
- fortinet.fortios
connection: httpapi
hosts: fortigate01
vars:
ansible_httpapi_port: 443
ansible_httpapi_use_ssl: true
ansible_httpapi_validate_certs: false
vdom: root
tasks:
- name: Configure Virtual Access Points (VAPs)
fortios_wireless_controller_vap:
vdom: root
state: present
wireless_controller_vap:
acct_interim_interval: 0
atf_weight: 20
auth: psk
broadcast_ssid: enable
broadcast_suppression: dhcp-up
bss_color_partial: enable
captive_portal_auth_timeout: 0
captive_portal_session_timeout_interval: 0
dhcp_lease_time: 2400
dhcp_option43_insertion: enable
dhcp_option82_circuit_id_insertion: disable
dhcp_option82_insertion: disable
dhcp_option82_remote_id_insertion: disable
dynamic_vlan: disable
eap_reauth: disable
eap_reauth_intv: 86400
eapol_key_retries: enable
encrypt: AES
external_fast_roaming: disable
external_web_format: auto-detect
fast_bss_transition: disable
fast_roaming: enable
ft_mobility_domain: 1000
ft_over_ds: enable
ft_r0_key_lifetime: 480
gtk_rekey: disable
gtk_rekey_intv: 86400
high_efficiency: enable
igmp_snooping: disable
intra_vap_privacy: disable
ip: 0.0.0.0 0.0.0.0
ipv6_rules: drop-icmp6ra
keyindex: 1
ldpc: rxtx
local_authentication: disable
local_bridging: disable
local_lan: allow
local_standalone: disable
local_standalone_nat: disable
mac_auth_bypass: disable
mac_filter: disable
mac_filter_policy_other: allow
max_clients: 0
max_clients_ap: 0
me_disable_thresh: 32
mesh_backhaul: disable
mpsk_concurrent_clients: 0
mu_mimo: enable
multicast_enhance: disable
multicast_rate: '0'
name: terr-test
okc: enable
owe_transition: disable
passphrase: fortinet-pass
pmf: disable
pmf_assoc_comeback_timeout: 1
pmf_sa_query_retry_timeout: 2
port_macauth: disable
port_macauth_reauth_timeout: 7200
port_macauth_timeout: 600
portal_type: auth
probe_resp_suppression: disable
probe_resp_threshold: '-80'
ptk_rekey: disable
ptk_rekey_intv: 86400
quarantine: enable
radio_2g_threshold: '-79'
radio_5g_threshold: '-76'
radio_sensitivity: disable
radius_mac_auth: disable
security: wpa2-only-personal
security_obsolete_option: disable
split_tunneling: disable
ssid: fortinet
sticky_client_remove: disable
sticky_client_threshold_2g: '-79'
sticky_client_threshold_5g: '-76'
target_wake_time: enable
tkip_counter_measure: enable
tunnel_echo_interval: 300
tunnel_fallback_interval: 7200
vdom: root
vlan_auto: disable
vlan_pooling: disable
vlanid: 0
voice_enterprise: disable
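# A minimal companion sketch: the same object can be removed by switching
# state to absent; only the mkey (the VAP name) is required then, since
# fos.delete() below keys the object off wireless_controller_vap.name.
- name: Delete the Virtual Access Point (VAP) configured above
fortios_wireless_controller_vap:
vdom: root
state: absent
wireless_controller_vap:
name: terr-test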
'''
RETURN = '''
build:
description: Build number of the FortiGate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import FortiOSHandler
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import check_legacy_fortiosapi
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import schema_to_module_spec
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.fortios import check_schema_versioning
from ansible_collections.fortinet.fortios.plugins.module_utils.fortimanager.common import FAIL_SOCKET_MSG
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.comparison import is_same_comparison
from ansible_collections.fortinet.fortios.plugins.module_utils.fortios.comparison import serialize
def filter_wireless_controller_vap_data(json):
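"""Return a new dictionary containing only the keys from option_list
that are present and not None in the task's json payload."""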
option_list = ['access_control_list', 'acct_interim_interval', 'additional_akms',
'address_group', 'alias', 'antivirus_profile',
'application_list', 'atf_weight', 'auth',
'broadcast_ssid', 'broadcast_suppression', 'bss_color_partial',
'bstm_disassociation_imminent', 'bstm_load_balancing_disassoc_timer', 'bstm_rssi_disassoc_timer',
'captive_portal_ac_name', 'captive_portal_auth_timeout', 'captive_portal_macauth_radius_secret',
'captive_portal_macauth_radius_server', 'captive_portal_radius_secret', 'captive_portal_radius_server',
'captive_portal_session_timeout_interval', 'dhcp_address_enforcement', 'dhcp_lease_time',
'dhcp_option43_insertion', 'dhcp_option82_circuit_id_insertion', 'dhcp_option82_insertion',
'dhcp_option82_remote_id_insertion', 'dynamic_vlan', 'eap_reauth',
'eap_reauth_intv', 'eapol_key_retries', 'encrypt',
'external_fast_roaming', 'external_logout', 'external_web',
'external_web_format', 'fast_bss_transition', 'fast_roaming',
'ft_mobility_domain', 'ft_over_ds', 'ft_r0_key_lifetime',
'gas_comeback_delay', 'gas_fragmentation_limit', 'gtk_rekey',
'gtk_rekey_intv', 'high_efficiency', 'hotspot20_profile',
'igmp_snooping', 'intra_vap_privacy', 'ip',
'ips_sensor', 'ipv6_rules', 'key',
'keyindex', 'ldpc', 'local_authentication',
'local_bridging', 'local_lan', 'local_standalone',
'local_standalone_dns', 'local_standalone_dns_ip', 'local_standalone_nat',
'mac_auth_bypass', 'mac_called_station_delimiter', 'mac_calling_station_delimiter',
'mac_case', 'mac_filter', 'mac_filter_list',
'mac_filter_policy_other', 'mac_password_delimiter', 'mac_username_delimiter',
'max_clients', 'max_clients_ap', 'mbo',
'mbo_cell_data_conn_pref', 'me_disable_thresh', 'mesh_backhaul',
'mpsk', 'mpsk_concurrent_clients', 'mpsk_key',
'mpsk_profile', 'mu_mimo', 'multicast_enhance',
'multicast_rate', 'nac', 'nac_profile',
'name', 'neighbor_report_dual_band', 'okc',
'owe_groups', 'owe_transition', 'owe_transition_ssid',
'passphrase', 'pmf', 'pmf_assoc_comeback_timeout',
'pmf_sa_query_retry_timeout', 'port_macauth', 'port_macauth_reauth_timeout',
'port_macauth_timeout', 'portal_message_override_group', 'portal_message_overrides',
'portal_type', 'primary_wag_profile', 'probe_resp_suppression',
'probe_resp_threshold', 'ptk_rekey', 'ptk_rekey_intv',
'qos_profile', 'quarantine', 'radio_2g_threshold',
'radio_5g_threshold', 'radio_sensitivity', 'radius_mac_auth',
'radius_mac_auth_server', 'radius_mac_auth_usergroups', 'radius_server',
'rates_11a', 'rates_11ac_ss12', 'rates_11ac_ss34',
'rates_11bg', 'rates_11n_ss12', 'rates_11n_ss34',
'sae_groups', 'sae_password', 'scan_botnet_connections',
'schedule', 'secondary_wag_profile', 'security',
'security_exempt_list', 'security_obsolete_option', 'security_redirect_url',
'selected_usergroups', 'split_tunneling', 'ssid',
'sticky_client_remove', 'sticky_client_threshold_2g', 'sticky_client_threshold_5g',
'target_wake_time', 'tkip_counter_measure', 'tunnel_echo_interval',
'tunnel_fallback_interval', 'usergroup', 'utm_log',
'utm_profile', 'utm_status', 'vdom',
'vlan_auto', 'vlan_pool', 'vlan_pooling',
'vlanid', 'voice_enterprise', 'webfilter_profile']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def flatten_single_path(data, path, index):
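"""Recursively follow `path` into `data` and, at the final key, join the
list found there into a single space-separated string (the form FortiOS
expects for multi-value attributes)."""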
if not data or index == len(path) or path[index] not in data or not data[path[index]]:
return
if index == len(path) - 1:
data[path[index]] = ' '.join(str(elem) for elem in data[path[index]])
elif isinstance(data[path[index]], list):
for value in data[path[index]]:
flatten_single_path(value, path, index + 1)
else:
flatten_single_path(data[path[index]], path, index + 1)
def flatten_multilists_attributes(data):
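"""Apply flatten_single_path to every known multi-value list attribute
of the VAP payload."""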
multilist_attrs = [
[u'rates_11ac_ss34'],
[u'rates_11a'],
[u'ipv6_rules'],
[u'owe_groups'],
[u'rates_11n_ss12'],
[u'broadcast_suppression'],
[u'sae_groups'],
[u'rates_11n_ss34'],
[u'additional_akms'],
[u'rates_11ac_ss12'],
[u'rates_11bg'],
]
for attr in multilist_attrs:
flatten_single_path(data, attr, 0)
return data
def underscore_to_hyphen(data):
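"""Recursively rewrite dictionary keys from underscore style to the
hyphenated style used by the FortiOS API, e.g. 'dhcp_lease_time'
becomes 'dhcp-lease-time'. Lists are converted element by element."""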
if isinstance(data, list):
for i, elem in enumerate(data):
data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def wireless_controller_vap(data, fos, check_mode=False):
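"""Create, update, or delete a VAP object.

In check mode this returns a (failed, changed, data) tuple computed by
comparing the desired configuration with the device's current record;
otherwise it returns the raw response of fos.set() or fos.delete()."""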
vdom = data['vdom']
state = data['state']
wireless_controller_vap_data = data['wireless_controller_vap']
wireless_controller_vap_data = flatten_multilists_attributes(wireless_controller_vap_data)
filtered_data = underscore_to_hyphen(filter_wireless_controller_vap_data(wireless_controller_vap_data))
# check_mode starts from here
if check_mode:
mkey = fos.get_mkey('wireless-controller', 'vap', filtered_data, vdom=vdom)
current_data = fos.get('wireless-controller', 'vap', vdom=vdom, mkey=mkey)
is_existed = current_data and current_data.get('http_status') == 200 \
and isinstance(current_data.get('results'), list) \
and len(current_data['results']) > 0
# if the object exists and state is 'present', compare current settings with the desired ones
if state == 'present' or state is True:
if mkey is None:
return False, True, filtered_data
# mkey exists: if the record also exists, report a change only when it
# differs from the desired configuration
if is_existed:
is_same = is_same_comparison(
serialize(current_data['results'][0]), serialize(filtered_data))
return False, not is_same, filtered_data
# record does not exist
return False, True, filtered_data
if state == 'absent':
if mkey is None:
return False, False, filtered_data
if is_existed:
return False, True, filtered_data
return False, False, filtered_data
return True, False, {'reason': 'Must provide state parameter'}
if state == "present" or state is True:
return fos.set('wireless-controller',
'vap',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('wireless-controller',
'vap',
mkey=filtered_data['name'],
vdom=vdom)
else:
fos._module.fail_json(msg='state must be present or absent!')
def is_successful_status(resp):
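"""Treat a response as successful if its status is 'success', its HTTP
status is 200, or it is a DELETE that came back 404 (the object was
already absent)."""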
return ('status' in resp and resp['status'] == 'success') or \
('http_status' in resp and resp['http_status'] == 200) or \
('http_method' in resp and resp['http_method'] == "DELETE" and resp.get('http_status') == 404)
def fortios_wireless_controller(data, fos, check_mode):
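"""Run any requested member operation, dispatch the task body to
wireless_controller_vap(), and normalize the result to a
(failed, changed, response) tuple; in check mode the handler's
tuple is returned unchanged."""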
fos.do_member_operation('wireless_controller_vap')
if data['wireless_controller_vap']:
resp = wireless_controller_vap(data, fos, check_mode)
else:
fos._module.fail_json(msg='missing task body: %s' % ('wireless_controller_vap'))
if check_mode:
return resp
return not is_successful_status(resp), \
is_successful_status(resp) and \
(resp['revision_changed'] if 'revision_changed' in resp else True), \
resp
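# Generated schema: for each option (and each choice value) the
# "revisions" map records which FortiOS releases support it, so that
# check_schema_versioning() can warn when a playbook uses a parameter
# the target firmware does not understand.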
versioned_schema = {
"type": "list",
"children": {
"dhcp_option43_insertion": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"captive_portal_radius_secret": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ft_mobility_domain": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"captive_portal_macauth_radius_secret": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mesh_backhaul": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"rates_11ac_ss34": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "mcs0/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs1/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs2/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs3/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs4/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs5/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs6/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs7/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs8/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs9/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs10/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs11/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs0/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs1/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs2/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs3/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs4/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs5/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs6/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs7/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs8/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs9/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs10/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs11/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radio_5g_threshold": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radius_mac_auth_usergroups": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port_macauth_timeout": {
"type": "integer",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"nac_profile": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"owe_transition_ssid": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"keyindex": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"local_bridging": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mu_mimo": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"mac_password_delimiter": {
"type": "string",
"options": [
{
"value": "hyphen",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "single-hyphen",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "colon",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "none",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"auth": {
"type": "string",
"options": [
{
"value": "psk",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "radius",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "usergroup",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"voice_enterprise": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"okc": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ldpc": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "rx",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "tx",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "rxtx",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dhcp_lease_time": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"secondary_wag_profile": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"mac_auth_bypass": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"local_standalone_nat": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"atf_weight": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"eapol_key_retries": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"quarantine": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mac_filter_policy_other": {
"type": "string",
"options": [
{
"value": "allow",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "deny",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radio_sensitivity": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mpsk_profile": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
"sticky_client_remove": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"radio_2g_threshold": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"portal_message_override_group": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"tunnel_fallback_interval": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"access_control_list": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"security": {
"type": "string",
"options": [
{
"value": "open",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "captive-portal",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wep64",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wep128",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa-personal",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa-personal+captive-portal",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa-enterprise",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa-only-personal",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa-only-personal+captive-portal",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa-only-enterprise",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa2-only-personal",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa2-only-personal+captive-portal",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa2-only-enterprise",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "osen",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "wpa3-enterprise",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "wpa3-sae",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "wpa3-sae-transition",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "owe",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "wpa3-only-enterprise",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "wpa3-enterprise-transition",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"hotspot20_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radius_server": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"address_group": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
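# Data-rate fields such as rates_11a below list 802.11 rates in Mbit/s; a
# "-basic" suffix appears to mark the rate as part of the advertised basic
# (mandatory) rate set rather than a merely supported rate.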
"rates_11a": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "1-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "2-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "5.5",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "5.5-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "11",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "11-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "6",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "6-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "9",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "9-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "12",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "12-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "18",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "18-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "24",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "24-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "36",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "36-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "48",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "48-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "54",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "54-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ptk_rekey": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ptk_rekey_intv": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"usergroup": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mac_username_delimiter": {
"type": "string",
"options": [
{
"value": "hyphen",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "single-hyphen",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "colon",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "none",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"vlan_pool": {
"type": "list",
"children": {
"id": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"wtp_group": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
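# ipv6_rules enumerates IPv6 frame-filtering behaviours (dropping RA/RS,
# DHCPv6, LLMNR, MLDv2 and NS frames, or proxying NDP); per the revision
# maps below, the field first appears in the v6.4.0 release line.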
"ipv6_rules": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "drop-icmp6ra",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "drop-icmp6rs",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "drop-llmnr6",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "drop-icmp6mld2",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "drop-dhcp6s",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "drop-dhcp6c",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "ndp-proxy",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "drop-ns-dad",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
{
"value": "drop-ns-nondad",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
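# Convention used throughout these revision maps: an explicit False marks a
# release where an attribute that existed earlier is no longer supported,
# e.g. acct_interim_interval below drops out after the v6.2.x line.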
"acct_interim_interval": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"tunnel_echo_interval": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"passphrase": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ssid": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"vlan_pooling": {
"type": "string",
"options": [
{
"value": "wtp-group",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "round-robin",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "hash",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"schedule": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"tkip_counter_measure": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"vlanid": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"neighbor_report_dual_band": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"max_clients_ap": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"local_lan": {
"type": "string",
"options": [
{
"value": "allow",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "deny",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"gas_fragmentation_limit": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"mac_filter_list": {
"type": "list",
"children": {
"mac": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"id": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mac_filter_policy": {
"type": "string",
"options": [
{
"value": "allow",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "deny",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"external_web_format": {
"type": "string",
"options": [
{
"value": "auto-detect",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "no-query-string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "partial-query-string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"gtk_rekey": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"security_obsolete_option": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.11": True,
"v6.0.0": True,
"v6.2.3": True,
"v6.0.5": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.11": True,
"v6.0.0": True,
"v6.2.3": True,
"v6.0.5": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
"local_standalone_dns": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"key": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mac_called_station_delimiter": {
"type": "string",
"options": [
{
"value": "hyphen",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "single-hyphen",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "colon",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "none",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"dhcp_address_enforcement": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"port_macauth_reauth_timeout": {
"type": "integer",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"vdom": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": False,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
"bstm_rssi_disassoc_timer": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"scan_botnet_connections": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v7.0.1": True
}
},
{
"value": "monitor",
"revisions": {
"v7.0.1": True
}
},
{
"value": "block",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"utm_log": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"gtk_rekey_intv": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
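# The False entries for v6.4.0/v6.4.4/v7.0.x suggest mpsk was retired in
# later releases (presumably superseded by dedicated MPSK profile
# configuration), while v6.4.1 still reports True in this schema.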
"mpsk": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"security_exempt_list": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mac_filter": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sticky_client_threshold_2g": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"antivirus_profile": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"mbo": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"application_list": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"target_wake_time": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"bstm_load_balancing_disassoc_timer": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"probe_resp_suppression": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"captive_portal_auth_timeout": {
"type": "integer",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
"mbo_cell_data_conn_pref": {
"type": "string",
"options": [
{
"value": "excluded",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "prefer-not",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "prefer-use",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"mpsk_concurrent_clients": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"external_fast_roaming": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"bstm_disassociation_imminent": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"external_logout": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"vlan_auto": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sticky_client_threshold_5g": {
"type": "string",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
},
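# owe_groups lists the Diffie-Hellman groups negotiable for Opportunistic
# Wireless Encryption (RFC 8110); 19, 20 and 21 are the NIST P-256, P-384
# and P-521 elliptic-curve groups from the IANA group registry.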
"owe_groups": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "19",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "20",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "21",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ip": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dhcp_option82_remote_id_insertion": {
"type": "string",
"options": [
{
"value": "style-1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"pmf_assoc_comeback_timeout": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"local_standalone": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"portal_type": {
"type": "string",
"options": [
{
"value": "auth",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "auth+disclaimer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disclaimer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "email-collect",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "cmcc",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "cmcc-macauth",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "auth-mac",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "external-auth",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "external-macauth",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"port_macauth": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "radius",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "address-group",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"local_standalone_dns_ip": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"high_efficiency": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"local_authentication": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"external_web": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"qos_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"utm_profile": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
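# Rate options below use the form "mcs<index>/<streams>": the 802.11n MCS
# index followed by the spatial-stream count, so rates_11n_ss12 covers the
# one- and two-stream indices MCS 0-15.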
"rates_11n_ss12": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "mcs0/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs1/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs2/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs3/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs4/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs5/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs6/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs7/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs8/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs9/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs10/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs11/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs12/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs13/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs14/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs15/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"primary_wag_profile": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"security_redirect_url": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"pmf_sa_query_retry_timeout": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"me_disable_thresh": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"fast_bss_transition": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
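# broadcast_suppression selects which broadcast/multicast frame classes the
# AP suppresses on the SSID (DHCP, ARP, NetBIOS, IPv6 and catch-all
# classes); "dhcp-ucast" only appears from v6.2.0 onward per its revision map.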
"broadcast_suppression": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "dhcp-up",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "dhcp-down",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "dhcp-starvation",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "arp-known",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "arp-unknown",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "arp-reply",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "arp-poison",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "arp-proxy",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "netbios-ns",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "netbios-ds",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "ipv6",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "all-other-mc",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "all-other-bc",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "dhcp-ucast",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"pmf": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "optional",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
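# sae_groups lists Diffie-Hellman group IDs for WPA3-SAE. Per the revision
# maps, only the ECC groups 19-21 remain selectable in current releases; the
# remaining groups (1, 2, 5, 14-18, 27-31) are flagged False everywhere
# except v6.2.3.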
"sae_groups": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "19",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "20",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "21",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "1",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "2",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "5",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "14",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "15",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "16",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "17",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "18",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "27",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "28",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "29",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "30",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
},
{
"value": "31",
"revisions": {
"v7.0.1": False,
"v7.0.0": False,
"v6.4.4": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"broadcast_ssid": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"multicast_enhance": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"rates_11n_ss34": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "mcs16/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs17/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs18/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs19/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs20/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs21/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs22/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs23/3",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs24/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs25/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs26/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs27/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs28/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs29/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs30/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs31/4",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"bss_color_partial": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True
}
},
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": False
}
},
"multicast_rate": {
"type": "string",
"options": [
{
"value": "0",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "6000",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "12000",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "24000",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ft_over_ds": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radius_mac_auth": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"additional_akms": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "akm6",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"mac_case": {
"type": "string",
"options": [
{
"value": "uppercase",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "lowercase",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"fast_roaming": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"sae_password": {
"type": "string",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"ips_sensor": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"rates_11ac_ss12": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "mcs0/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs1/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs2/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs3/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs4/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs5/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs6/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs7/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs8/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs9/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs10/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs11/1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs0/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs1/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs2/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs3/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs4/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs5/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs6/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs7/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs8/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs9/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs10/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "mcs11/2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"captive_portal_radius_server": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"nac": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"encrypt": {
"type": "string",
"options": [
{
"value": "TKIP",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "AES",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "TKIP-AES",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"eap_reauth_intv": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"split_tunneling": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"ft_r0_key_lifetime": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"igmp_snooping": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
{
"value": "disable",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
}
],
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True
}
},
"gas_comeback_delay": {
"type": "integer",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
"captive_portal_ac_name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"owe_transition": {
"type": "string",
"options": [
{
"value": "disable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
{
"value": "enable",
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True,
"v6.4.4": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"captive_portal_session_timeout_interval": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dynamic_vlan": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"utm_status": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v7.0.1": True
}
},
{
"value": "disable",
"revisions": {
"v7.0.1": True
}
}
],
"revisions": {
"v7.0.1": True
}
},
"selected_usergroups": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"captive_portal_macauth_radius_server": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"dhcp_option82_circuit_id_insertion": {
"type": "string",
"options": [
{
"value": "style-1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "style-2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "style-3",
"revisions": {
"v6.4.4": True,
"v7.0.0": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"max_clients": {
"type": "integer",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mpsk_key": {
"type": "list",
"children": {
"comment": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"concurrent_clients": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mpsk_schedules": {
"type": "list",
"children": {
"name": {
"type": "string",
"revisions": {
"v6.2.0": True,
"v6.2.3": True,
"v6.4.1": True,
"v6.2.5": True,
"v6.2.7": True
}
}
},
"revisions": {
"v6.2.0": True,
"v6.2.3": True,
"v6.4.1": True,
"v6.2.5": True,
"v6.2.7": True
}
},
"passphrase": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"key_name": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v6.0.5": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"webfilter_profile": {
"type": "string",
"revisions": {
"v7.0.1": True
}
},
"portal_message_overrides": {
"type": "dict",
"children": {
"auth_login_failed_page": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"auth_disclaimer_page": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"auth_login_page": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"auth_reject_page": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"eap_reauth": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"probe_resp_threshold": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"alias": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": False,
"v6.0.5": True,
"v6.4.4": False,
"v7.0.1": False,
"v6.4.0": False,
"v6.4.1": False,
"v6.2.0": False,
"v6.2.3": True,
"v6.2.5": False,
"v6.2.7": False,
"v6.0.11": True
}
},
"dhcp_option82_insertion": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"rates_11bg": {
"multiple_values": True,
"type": "list",
"options": [
{
"value": "1",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "1-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "2",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "2-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "5.5",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "5.5-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "11",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "11-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "6",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "6-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "9",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "9-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "12",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "12-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "18",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "18-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "24",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "24-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "36",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "36-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "48",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "48-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "54",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "54-basic",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"intra_vap_privacy": {
"type": "string",
"options": [
{
"value": "enable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
{
"value": "disable",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
],
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"radius_mac_auth_server": {
"type": "string",
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
},
"mac_calling_station_delimiter": {
"type": "string",
"options": [
{
"value": "hyphen",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "single-hyphen",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "colon",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
},
{
"value": "none",
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
],
"revisions": {
"v7.0.1": True,
"v7.0.0": True
}
}
},
"revisions": {
"v6.0.0": True,
"v7.0.0": True,
"v6.0.5": True,
"v6.4.4": True,
"v7.0.1": True,
"v6.4.0": True,
"v6.4.1": True,
"v6.2.0": True,
"v6.2.3": True,
"v6.2.5": True,
"v6.2.7": True,
"v6.0.11": True
}
}
def main():
module_spec = schema_to_module_spec(versioned_schema)
mkeyname = 'name'
fields = {
"access_token": {"required": False, "type": "str", "no_log": True},
"enable_log": {"required": False, "type": bool},
"vdom": {"required": False, "type": "str", "default": "root"},
"member_path": {"required": False, "type": "str"},
"member_state": {
"type": "str",
"required": False,
"choices": ["present", "absent"]
},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"wireless_controller_vap": {
"required": False, "type": "dict", "default": None,
"options": {
}
}
}
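    # Graft every option derived from the versioned schema onto the nested
    # "wireless_controller_vap" argument spec, and mark the mkey ("name")
    # as required so each VAP entry is uniquely addressable.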
for attribute_name in module_spec['options']:
fields["wireless_controller_vap"]['options'][attribute_name] = module_spec['options'][attribute_name]
if mkeyname and mkeyname == attribute_name:
fields["wireless_controller_vap"]['options'][attribute_name]['required'] = True
check_legacy_fortiosapi()
module = AnsibleModule(argument_spec=fields,
supports_check_mode=True)
versions_check_result = None
if module._socket_path:
connection = Connection(module._socket_path)
if 'access_token' in module.params:
connection.set_option('access_token', module.params['access_token'])
if 'enable_log' in module.params:
connection.set_option('enable_log', module.params['enable_log'])
else:
connection.set_option('enable_log', False)
fos = FortiOSHandler(connection, module, mkeyname)
versions_check_result = check_schema_versioning(fos, versioned_schema, "wireless_controller_vap")
is_error, has_changed, result = fortios_wireless_controller(module.params, fos, module.check_mode)
else:
module.fail_json(**FAIL_SOCKET_MSG)
if versions_check_result and versions_check_result['matched'] is False:
        module.warn("Ansible has detected a version mismatch between the FortiOS system and your playbook; see more details by specifying option -vvv")
if not is_error:
if versions_check_result and versions_check_result['matched'] is False:
module.exit_json(changed=has_changed, version_check_warning=versions_check_result, meta=result)
else:
module.exit_json(changed=has_changed, meta=result)
else:
if versions_check_result and versions_check_result['matched'] is False:
module.fail_json(msg="Error in repo", version_check_warning=versions_check_result, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
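# A minimal illustrative sketch (not part of the upstream module): each
# option's "revisions" map above gates it per FortiOS release, so support
# can be probed with a plain dict lookup like the hypothetical helper below.
def _option_supported(schema_node, version):
    # True only when the revision map explicitly marks the release as supported.
    return schema_node.get("revisions", {}).get(version, False)
# e.g. _option_supported({"revisions": {"v7.0.0": True}}, "v7.0.0")  # -> True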
34.054922 | 155 | 0.260804 | [quality-signal columns omitted]
a4f4d9876fe895ced7cee323894a0caf35800f5e | 94 | py | Python | tests/__init__.py | dem4ply/chibi_lxc | 159acfacf829d0fd182382323905ee3f699dabe0 | ["WTFPL"] | null | null | null
# -*- coding: utf-8 -*-
from chibi.config import basic_config
basic_config(level='DEBUG')
15.666667 | 37 | 0.691489 | [quality-signal columns omitted]
1040e930c4b4775dc7db28ae6582642196586e71 | 3,656 | py | Python | src/path.py | JohnWes7/Auto-AzurLane | 3914414745f76d7d50b8508e4cc0823b0cb6aff1 | ["MIT"] | null | null | null
import os
# Path helper class: provides the folder paths the project needs, plus file-related utilities; all paths are absolute.
class Path:
__cwd = None
@classmethod
def getcwd(cls):
if cls.__cwd:
return cls.__cwd
srcdir = os.path.dirname(os.path.abspath(__file__))
cls.__cwd = cwd = os.path.dirname(srcdir).replace('\\', '/')
return cwd
@classmethod
def get_adb_path(cls):
return Path.ensure_exisit(cls.getcwd() + '/adb/adb.exe')
@classmethod
def get_screenshots_dir(cls):
Path.checkdir(cls.getcwd() + '/src/image/screenshots')
return cls.getcwd() + '/src/image/screenshots'
@classmethod
def get_configini_path(cls):
return Path.ensure_exisit(cls.getcwd() + '/config.ini')
@classmethod
def get_ui_dir(cls):
Path.checkdir(cls.getcwd() + '/src/image/ui')
return cls.getcwd() + '/src/image/ui'
@classmethod
def get_ui_tryagain_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/tryagain.png')
@classmethod
def get_ui_pause_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/pause.png')
@classmethod
def get_ui_fulldock_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/fulldock.png')
@classmethod
def get_ui_clean_up_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/clean_up.png')
@classmethod
def get_ui_retired_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/retired.png')
@classmethod
def get_ui_confirm_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/confirm.png')
@classmethod
def get_ui_tap_to_continue_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/tap_to_continue.png')
@classmethod
def get_ui_weigh_anchor_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/weigh_anchor.png')
@classmethod
def get_ui_daily_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/daily.png')
@classmethod
def get_ui_delegate_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/delegate.png')
@classmethod
def get_ui_delegate_done_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/delegate_done.png')
@classmethod
def get_ui_done_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/done.png')
@classmethod
def get_ui_delegate_success_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/delegate_success.png')
@classmethod
def get_ui_delegate_page_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/delegate_page.png')
@classmethod
def get_ui_delegate_free_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/delegate_free.png')
@classmethod
def get_ui_delegate_advice_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/delegate_advice.png')
@classmethod
def get_ui_delegate_start_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/delegate_start.png')
@classmethod
def get_ui_home_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/home.png')
@classmethod
def get_ui_delegate_0_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/delegate_0.png')
@classmethod
def get_1_path(cls):
return Path.ensure_exisit(cls.get_ui_dir() + '/!.png')
    @staticmethod
    def checkdir(dirpath):
        if os.path.exists(dirpath) and os.path.isdir(dirpath):
            return
        os.makedirs(dirpath)
    @staticmethod
    def ensure_exisit(path):  # name kept as spelled in the original source
        if os.path.exists(path):
            return path
        # input() takes a single prompt string, so build the message explicitly;
        # the pause lets the user read the error before the process exits.
        input('Specified path not found: {0}'.format(path))
        exit(1)
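# Minimal usage sketch (paths are hypothetical; assumes the project layout
# above with adb/ and src/image/ directories actually present):
# print(Path.getcwd())               # project root as an absolute path
# print(Path.get_screenshots_dir())  # created on demand via checkdir()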
28.787402 | 77 | 0.662199 | [quality-signal columns omitted]
1071548d863701cf1d0f3f2ac2b167d430d1f1c1 | 183 | py | Python | assignment que 3.py | priyalbhatewara123/Python-programs | 90b84310101b76c14b89f256ee9206711908a4ae | ["bzip2-1.0.6"] | null | null | null
# Write a Python class to reverse a string word by word.
class py_solution:
    def reverse_words(self, s):
        # reversed(s) alone yields a lazy iterator over characters;
        # split into words, reverse their order, and rejoin instead.
        return ' '.join(reversed(s.split()))
print(py_solution().reverse_words('rewangi'))
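# Example: reverse_words('hello world') returns 'world hello'; a single-word
# input such as 'rewangi' comes back unchanged.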
26.142857 | 55 | 0.715847 | [quality-signal columns omitted]
10895f033d59061f7c5af5308441d1cc076c5400 | 28 | py | Python | very_scratch/old_core/__init__.py | ibrahemesam/Fos | f2f284a2c7bdc24dafafebb8aa3141ebf225e451 | ["BSD-3-Clause"] | 2 | 2016-08-03T10:33:08.000Z | 2021-06-23T18:50:14.000Z
scratch/very_scratch/old_core/__init__.py | fos/fos-legacy | db6047668781a0615abcebc7d55a7164f3105047 | ["BSD-3-Clause"] | 1 | 2021-07-11T00:16:46.000Z | 2021-07-11T00:16:46.000Z
# Init for core fos objects
14 | 27 | 0.75 | [quality-signal columns omitted]
10954015d1738afe0a7bb7477aa5926e412f1d18 | 103 | py | Python | c__74.py | fhansmann/coding-challenges | eebb37565c72e05b77383c24e8273a1e4019b58e | ["MIT"] | null | null | null
import random
print(random.randint(5, 95))
# for a random float in a range use: random.uniform(start, stop)
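# e.g. random.uniform(0.5, 9.5) would return a float x with 0.5 <= x <= 9.5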
20.6 | 60 | 0.76699 | [quality-signal columns omitted]
52a0567871c6da2e778d0f29f244ac848a650808 | 305 | py | Python | mitmirror/main/composers/users/__init__.py | Claayton/mitmirror-api | a78ec3aa84aa3685a26bfaf5e1ba2a3f0f8405d1 | ["MIT"] | null | null | null
"""Inicializacao do modulo users"""
from .get_user_composer import get_user_composer
from .get_users_composer import get_users_composer
from .register_user_composer import register_user_composer
from .update_user_composer import update_user_composer
from .delete_user_composer import delete_user_composer
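# Re-exporting the composers here lets callers import them directly from the
# package, e.g.: from mitmirror.main.composers.users import get_user_composer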
43.571429 | 58 | 0.885246 | [quality-signal columns omitted]
52abc50d5ff3af847c67058fc13809532728405d | 205,234 | py | Python | pytests/fts/stable_topology_fts.py | couchbase/testrunner | a23e4885fdf86d91b490e91e992e7445e61e9690 | ["Apache-2.0"] | 14 | 2015-02-06T02:47:57.000Z | 2020-03-14T15:06:05.000Z
# coding=utf-8
import copy
import json
import random
import time
from threading import Thread
import Geohash
from membase.helper.cluster_helper import ClusterOperationHelper
from remote.remote_util import RemoteMachineShellConnection
from TestInput import TestInputSingleton
from tasks.task import ESRunQueryCompare
from tasks.taskmanager import TaskManager
from lib.testconstants import FUZZY_FTS_SMALL_DATASET, FUZZY_FTS_LARGE_DATASET
from .fts_base import FTSBaseTest, INDEX_DEFAULTS, QUERY, download_from_s3
from lib.membase.api.exception import FTSException, ServerUnavailableException
from lib.membase.api.rest_client import RestConnection
from couchbase_helper.documentgenerator import SDKDataLoader
import threading
class StableTopFTS(FTSBaseTest):
def setUp(self):
super(StableTopFTS, self).setUp()
def tearDown(self):
super(StableTopFTS, self).tearDown()
def suite_setUp(self):
pass
def suite_tearDown(self):
pass
def check_fts_service_started(self):
try:
rest = RestConnection(self._cb_cluster.get_random_fts_node())
rest.get_fts_index_definition("invalid_index")
except ServerUnavailableException as e:
            raise FTSException("FTS service has not started: %s" % e)
def create_simple_default_index(self, data_loader_output=False):
plan_params = self.construct_plan_params()
self.load_data(generator=None, data_loader_output=data_loader_output)
self.wait_till_items_in_bucket_equal(self._num_items//2)
self.create_fts_indexes_all_buckets(plan_params=plan_params)
if self._update or self._delete:
self.wait_for_indexing_complete()
self.validate_index_count(equal_bucket_doc_count=True,
zero_rows_ok=False)
self.async_perform_update_delete(self.upd_del_fields)
if self._update:
self.sleep(60, "Waiting for updates to get indexed...")
self.wait_for_indexing_complete()
self.validate_index_count(equal_bucket_doc_count=True)
def test_index_docvalues_option(self):
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
bucket=self._cb_cluster.get_bucket_by_name('default'),
index_name="custom_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.load_data()
self.wait_for_indexing_complete()
for node in self._cb_cluster.get_fts_nodes():
ds = self.get_zap_docvalue_disksize(node)
if ds:
if float(ds) != float(0):
self.fail("zap files size with docvalue not empty with docValues = False")
else:
self.log.info(" zap files size found to be : {0}".format(ds))
if self.container_type == "collection":
type = "scope1.collection1.emp"
else:
type = "emp"
index.update_docvalues_email_custom_index(True, type)
self.wait_for_indexing_complete()
for node in self._cb_cluster.get_fts_nodes():
ds = self.get_zap_docvalue_disksize(node)
if ds:
if float(ds) == float(0):
self.fail("zap files size with docvalue found to be empty with docValues = True")
else:
self.log.info(" zap files size found to be : {0}".format(ds))
def test_maxttl_setting(self):
self.create_simple_default_index()
maxttl = int(self._input.param("maxttl", None))
self.sleep(maxttl,
"Waiting for expiration at the elapse of bucket maxttl")
self._cb_cluster.run_expiry_pager()
self.wait_for_indexing_complete(item_count=0)
self.validate_index_count(must_equal=0)
for index in self._cb_cluster.get_indexes():
query = eval(self._input.param("query", str(self.sample_query)))
hits, _, _, _ = index.execute_query(query,
zero_results_ok=True,
expected_hits=0)
self.log.info("Hits: %s" % hits)
def query_in_dgm(self):
self.create_simple_default_index()
for index in self._cb_cluster.get_indexes():
self.generate_random_queries(index, self.num_queries, self.query_types)
self.run_query_and_compare(index)
def run_default_index_query(self, query=None, expected_hits=None, expected_no_of_results=None):
self.create_simple_default_index()
zero_results_ok = True
if not expected_hits:
expected_hits = int(self._input.param("expected_hits", 0))
if expected_hits:
zero_results_ok = False
if not query:
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
if expected_no_of_results is None:
expected_no_of_results = self._input.param("expected_no_of_results", None)
for index in self._cb_cluster.get_indexes():
hits, matches, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
expected_no_of_results=expected_no_of_results)
self.log.info("Hits: %s" % hits)
self.log.info("Matches: %s" % matches)
def test_query_type(self):
"""
uses RQG
"""
self.load_data()
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
if self._update or self._delete:
self.async_perform_update_delete(self.upd_del_fields)
if self._update:
self.sleep(60, "Waiting for updates to get indexed...")
self.wait_for_indexing_complete()
self.generate_random_queries(index, self.num_queries, self.query_types)
if self.run_via_n1ql:
n1ql_executor = self._cb_cluster
else:
n1ql_executor = None
self.run_query_and_compare(index, n1ql_executor=n1ql_executor)
def test_query_type_on_alias(self):
"""
uses RQG
"""
self.load_data()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
if self._update or self._delete:
self.async_perform_update_delete(self.upd_del_fields)
if self._update:
self.sleep(60, "Waiting for updates to get indexed...")
self.wait_for_indexing_complete()
alias = self.create_alias([index])
self.generate_random_queries(alias, self.num_queries, self.query_types)
self.run_query_and_compare(alias)
def test_match_all(self):
self.run_default_index_query(query={"match_all": {}},
expected_hits=self._num_items)
def test_match_none(self):
self.run_default_index_query(query={"match_none": {}},
expected_hits=0)
def test_match_consistency(self):
query = {"match_all": {}}
expected_hits = int(self._input.param("expected_hits_num", self._num_items))
self.create_simple_default_index(data_loader_output=True)
if self.container_type == "collection":
scan_vectors_before_mutations = self._get_mutation_vectors()
zero_results_ok = True
for index in self._cb_cluster.get_indexes():
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=0,
consistency_level=self.consistency_level,
consistency_vectors=self.consistency_vectors
)
self.log.info("Hits: %s" % hits)
for i in range(list(list(self.consistency_vectors.values())[0].values())[0]):
self.async_perform_update_delete(self.upd_del_fields)
if self.container_type == "collection":
scan_vectors_after_mutations = self._get_mutation_vectors()
new_scan_vectors = scan_vectors_after_mutations - scan_vectors_before_mutations
self.consistency_vectors = {}
self.consistency_vectors[self._cb_cluster.get_indexes()[0].name] = self._convert_mutation_vector_to_scan_vector(new_scan_vectors)
self.log.info(self.consistency_vectors)
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
consistency_level=self.consistency_level,
consistency_vectors=self.consistency_vectors)
self.log.info("Hits: %s" % hits)
def test_match_consistency_error(self):
query = {"match_all": {}}
fts_node = self._cb_cluster.get_random_fts_node()
service_map = RestConnection(self._cb_cluster.get_master_node()).get_nodes_services()
# select FTS node to shutdown
for node_ip, services in list(service_map.items()):
ip = node_ip.split(':')[0]
node = self._cb_cluster.get_node(ip, node_ip.split(':')[1])
if node and 'fts' in services and 'kv' not in services:
fts_node = node
break
self.create_simple_default_index()
zero_results_ok = True
for index in self._cb_cluster.get_indexes():
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=0,
consistency_level=self.consistency_level,
consistency_vectors=self.consistency_vectors)
self.log.info("Hits: %s" % hits)
try:
from .fts_base import NodeHelper
NodeHelper.stop_couchbase(fts_node)
for i in range(list(list(self.consistency_vectors.values())[0].values())[0]):
self.async_perform_update_delete(self.upd_del_fields)
finally:
NodeHelper.start_couchbase(fts_node)
NodeHelper.wait_service_started(fts_node)
self.sleep(10)
# "status":"remote consistency error" => expected_hits=-1
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=-1,
consistency_level=self.consistency_level,
consistency_vectors=self.consistency_vectors)
ClusterOperationHelper.wait_for_ns_servers_or_assert([fts_node], self, wait_if_warmup=True)
self.wait_for_indexing_complete()
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=self._num_items,
consistency_level=self.consistency_level,
consistency_vectors=self.consistency_vectors)
self.log.info("Hits: %s" % hits)
def test_match_consistency_long_timeout(self):
timeout = self._input.param("timeout", None)
query = {"match_all": {}}
self.create_simple_default_index(data_loader_output=True)
if self.container_type == "collection":
scan_vectors_before_mutations = self._get_mutation_vectors()
zero_results_ok = True
self.sleep(10)
for index in self._cb_cluster.get_indexes():
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=0,
consistency_level=self.consistency_level,
consistency_vectors=self.consistency_vectors)
self.log.info("Hits: %s" % hits)
tasks = []
tasks.append(Thread(target=self.async_perform_update_delete, args=(self.upd_del_fields,)))
for i in range(list(list(self.consistency_vectors.values())[0].values())[0]):
tasks.append(Thread(target=self.async_perform_update_delete, args=(self.upd_del_fields,)))
for task in tasks:
task.start()
num_items = self._num_items
if self.container_type == "collection":
num_items = index.get_src_collections_doc_count()
if timeout is None or timeout <= 60000:
# Here we assume that the update takes more than 60 seconds
# when we use timeout <= 60 sec we get timeout error
# with None we have 60s by default
num_items = 0
try:
if self.container_type == "collection":
self.sleep(20)
scan_vectors_after_mutations = self._get_mutation_vectors()
self.log.info(scan_vectors_after_mutations)
new_scan_vectors = scan_vectors_after_mutations - scan_vectors_before_mutations
self.consistency_vectors = {}
self.consistency_vectors[self._cb_cluster.get_indexes()[0].name] = self._convert_mutation_vector_to_scan_vector(new_scan_vectors)
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=num_items,
consistency_level=self.consistency_level,
consistency_vectors=self.consistency_vectors,
timeout=timeout)
finally:
for task in tasks:
task.join()
self.log.info("Hits: %s" % hits)
def index_utf16_dataset(self):
self.load_utf16_data()
try:
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, "default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
# an exception will most likely be thrown from waiting
self.wait_for_indexing_complete()
self.validate_index_count(
equal_bucket_doc_count=False,
zero_rows_ok=True,
must_equal=0)
except Exception as e:
raise FTSException("Exception thrown in utf-16 test :{0}".format(e))
def create_simple_alias(self):
self.load_data()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, "default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
self.validate_index_count(equal_bucket_doc_count=True)
hits, _, _, _ = index.execute_query(self.sample_query,
zero_results_ok=False)
alias = self.create_alias([index])
hits2, _, _, _ = alias.execute_query(self.sample_query,
zero_results_ok=False)
if hits != hits2:
self.fail("Index query yields {0} hits while alias on same index "
"yields only {1} hits".format(hits, hits2))
return index, alias
def create_query_alias_on_multiple_indexes(self):
#delete default bucket
self._cb_cluster.delete_bucket("default")
# create "emp" bucket
self._cb_cluster.create_standard_buckets(bucket_size=1000,
name="emp",
num_replicas=0)
emp = self._cb_cluster.get_bucket_by_name('emp')
# create "wiki" bucket
self._cb_cluster.create_standard_buckets(bucket_size=1000,
name="wiki",
num_replicas=0)
wiki = self._cb_cluster.get_bucket_by_name('wiki')
#load emp dataset into emp bucket
emp_gen = self.get_generator(dataset="emp", num_items=self._num_items)
wiki_gen = self.get_generator(dataset="wiki", num_items=self._num_items)
if self.es:
# make deep copies of the generators
import copy
emp_gen_copy = copy.deepcopy(emp_gen)
wiki_gen_copy = copy.deepcopy(wiki_gen)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(
bucket=emp,
kv_gen=emp_gen)
load_tasks += self._cb_cluster.async_load_bucket_from_generator(
bucket=wiki,
kv_gen=wiki_gen)
if self.es:
# create empty ES indexes
self.es.create_empty_index("emp_es_index")
self.es.create_empty_index("wiki_es_index")
load_tasks.append(self.es.async_bulk_load_ES(index_name='emp_es_index',
gen=emp_gen_copy,
op_type='create'))
load_tasks.append(self.es.async_bulk_load_ES(index_name='wiki_es_index',
gen=wiki_gen_copy,
op_type='create'))
for task in load_tasks:
task.result()
# create indexes on both buckets
emp_index = self.create_index(emp, "emp_index")
wiki_index = self.create_index(wiki, "wiki_index")
self.wait_for_indexing_complete(es_index="emp_es_index")
self.wait_for_indexing_complete(es_index="wiki_es_index")
# create compound alias
alias = self.create_alias(target_indexes=[emp_index, wiki_index],
name="emp_wiki_alias")
if self.es:
self.es.create_alias(name="emp_wiki_es_alias",
indexes=["emp_es_index", "wiki_es_index"])
# run rqg on the alias
self.generate_random_queries(alias, self.num_queries, self.query_types)
self.run_query_and_compare(alias, es_index_name="emp_wiki_es_alias")
def index_wiki(self):
self.load_wiki(lang=self.lang)
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, "wiki_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
self.validate_index_count(equal_bucket_doc_count=True,
zero_rows_ok=False)
def delete_index_then_query(self):
self.load_data()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, "default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
self._cb_cluster.delete_fts_index(index.name)
try:
hits2, _, _, _ = index.execute_query(self.sample_query)
except Exception as e:
# expected, pass test
self.log.info("Expected exception: {0}".format(e))
def drop_bucket_check_index(self):
count = 0
self.load_data()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, _type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, "default_index", collection_index=collection_index, _type=_type,
scope=index_scope, collections=index_collections)
if self.container_type == "bucket":
self._cb_cluster.delete_bucket("default")
else:
if type(self.collection) is list:
for c in self.collection:
self._cb_cluster._drop_collection(bucket=bucket, scope=self.scope, collection=c, cli_client=self.cli_client)
else:
self._cb_cluster._drop_collection(bucket=bucket, scope=self.scope, collection=self.collection, cli_client=self.cli_client)
self.sleep(20, "waiting for bucket deletion to be known by fts")
try:
count = index.get_indexed_doc_count()
except Exception as e:
self.log.info("Expected exception: {0}".format(e))
# at this point, index has been deleted,
# remove index from list of indexes
self._cb_cluster.get_indexes().remove(index)
if count:
self.fail("Able to retrieve index json from index "
"built on bucket that was deleted")
def delete_index_having_alias(self):
index, alias = self.create_simple_alias()
self._cb_cluster.delete_fts_index(index.name)
hits, _, _, _ = alias.execute_query(self.sample_query)
self.log.info("Hits: {0}".format(hits))
if hits >= 0:
self.fail("Query alias with deleted target returns query results!")
def delete_index_having_alias_recreate_index_query(self):
index, alias = self.create_simple_alias()
hits1, _, _, _ = alias.execute_query(self.sample_query)
self.log.info("Hits: {0}".format(hits1))
index.delete()
self.log.info("Recreating deleted index ...")
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
self.create_index(bucket, "default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
hits2, _, _, _ = alias.execute_query(self.sample_query)
self.log.info("Hits: {0}".format(hits2))
if hits1 != hits2:
self.fail("Hits from alias before index recreation: %s,"
" after recreation: %s" %(hits1, hits2))
def create_alias_on_deleted_index(self):
self.load_employee_dataset()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, "default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
from .fts_base import INDEX_DEFAULTS
alias_def = INDEX_DEFAULTS.ALIAS_DEFINITION
alias_def['targets'][index.name] = {}
alias_def['targets'][index.name]['indexUUID'] = index.get_uuid()
index.delete()
try:
self.create_alias([index], alias_def)
self.fail("Was able to create alias on deleted target")
except Exception as e:
self.log.info("Expected exception :{0}".format(e))
def edit_index_new_name(self):
self.load_employee_dataset()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, "sample_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
index.name = "new_index"
try:
index.update()
except Exception as e:
self.log.info("Expected exception: {0}".format(e))
def edit_index(self):
self.load_employee_dataset()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, 'sample_index', collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
#hits, _, _, _ = index.execute_query(self.sample_query)
new_plan_param = {"maxPartitionsPerPIndex": 30}
self.partitions_per_pindex = 30
index.index_definition['planParams'] = \
index.build_custom_plan_params(new_plan_param)
index.index_definition['uuid'] = index.get_uuid()
index.update()
_, defn = index.get_index_defn()
self.log.info(defn['indexDef'])
def test_metrics_endpoint_availability(self):
# todo: read the fts port from the [global] config section instead of hardcoding it
fts_port = 8094
fts_node = self._cb_cluster.get_random_fts_node()
endpoint = self._input.param("endpoint", None)
self.load_data()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, tp, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, "default_index", collection_index=collection_index, _type=tp,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete(self._num_items//2)
rest = RestConnection(self._cb_cluster.get_random_fts_node())
status, content = rest.get_rest_endpoint_data(endpoint, ip=fts_node.ip, port=fts_port)
self.assertTrue(status, f"Endpoint {endpoint} is not accessible.")
def update_index_during_large_indexing(self):
"""
MB-22410 - Updating index with a large dirty write queue
items = some millions defined at run_time using items param
"""
rest = RestConnection(self._cb_cluster.get_random_fts_node())
self.load_employee_dataset()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, 'sample_index', collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
# wait till a fifth of the keys are indexed
self.wait_for_indexing_complete(self._num_items//5)
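# num_recs_to_persist reports the size of the index's dirty write queue,
# i.e. docs plus metadata received but not yet persisted to disk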
status, stat_value = rest.get_fts_stats(index_name=index.name,
bucket_name=bucket.name,
stat_name='num_recs_to_persist')
self.log.info("Data(metadata + docs) in write queue is {0}".
format(stat_value))
new_plan_param = self.construct_plan_params()
index.index_definition['planParams'] = \
index.build_custom_plan_params(new_plan_param)
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(10, "Wait for index to get updated...")
self.is_index_partitioned_balanced(index=index)
_, defn = index.get_index_defn()
self.log.info(defn['indexDef'])
# see if the index is still query-able with all data
self.wait_for_indexing_complete()
hits, _, _, _ = index.execute_query(self.sample_query,
zero_results_ok=False)
self.log.info("Hits: %s" % hits)
def delete_index_during_large_indexing(self):
"""
MB-22410 - Deleting index with a large dirty write queue is slow
items = 5M
"""
self.load_employee_dataset()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, 'sample_index', collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
# wait till half the keys are indexed
self.wait_for_indexing_complete(self._num_items//2)
index.delete()
self.sleep(5)
try:
_, defn = index.get_index_defn()
self.log.info(defn)
self.fail("ERROR: Index definition still exists after deletion! "
"%s" %defn['indexDef'])
except Exception as e:
self.log.info("Expected exception caught: %s" % e)
def edit_index_negative(self):
self.load_employee_dataset()
bucket = self._cb_cluster.get_bucket_by_name('default')
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket, 'sample_index', collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
hits, _, _, _ = index.execute_query(self.sample_query)
new_plan_param = {"maxPartitionsPerPIndex": 30}
self.partitions_per_pindex = 30
# update params with plan params values to check for validation
index.index_definition['params'] = \
index.build_custom_index_params(new_plan_param)
index.index_definition['uuid'] = index.get_uuid()
try:
index.update()
except Exception as e:
self.log.info("Expected exception: %s" % e)
def index_query_beer_sample(self):
#delete default bucket
self._cb_cluster.delete_bucket("default")
master = self._cb_cluster.get_master_node()
self.load_sample_buckets(server=master, bucketName="beer-sample")
bucket = self._cb_cluster.get_bucket_by_name("beer-sample")
index = self.create_index(bucket, "beer-index")
self.wait_for_indexing_complete()
self.validate_index_count(equal_bucket_doc_count=True,
zero_rows_ok=False)
query = {"match": "cafe", "field": "name"}
hits, _, _, _ = index.execute_query(query,
zero_results_ok=False,
expected_hits=10)
self.log.info("Hits: %s" % hits)
def index_query_custom_mapping(self):
"""
uses RQG for custom mapping
"""
# create a custom map, disable default map
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
bucket=self._cb_cluster.get_bucket_by_name('default'),
index_name="custom_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
if self.es:
self.create_es_index_mapping(index.es_custom_map,
index.index_definition)
self.load_data()
self.wait_for_indexing_complete()
if self._update or self._delete:
self.async_perform_update_delete(self.upd_del_fields)
if self._update:
self.sleep(60, "Waiting for updates to get indexed...")
self.wait_for_indexing_complete()
self.generate_random_queries(index, self.num_queries, self.query_types)
self.sleep(30, "additional wait time to be sure, fts index is ready")
if self.run_via_n1ql:
n1ql_executor = self._cb_cluster
else:
n1ql_executor = None
self.run_query_and_compare(index, n1ql_executor=n1ql_executor, use_collections=collection_index)
def test_collection_index_data_mutations(self):
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(bucket=self._cb_cluster.get_bucket_by_name('default'),
index_name="custom_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
num_collections = TestInputSingleton.input.param("num_collections", 1)
self.load_data()
self.wait_for_indexing_complete()
query = {"query": "mutated:0"}
hits, _, _, _ = index.execute_query(query)
self.assertEqual(hits, 1000*num_collections, f"Hits do not match after insert. Expected {1000*num_collections}, found {hits}")
self._update = True
self.async_perform_update_delete(self.upd_del_fields)
self.wait_for_indexing_complete()
self.sleep(30, "sleep additional time.")
query = {"query": "mutated:0"}
hits, _, _, _ = index.execute_query(query)
self.assertEqual(hits, 700*num_collections, f"Hits do not match after update. Expected {700*num_collections}, found {hits}")
self._update = False
self._delete = True
self.async_perform_update_delete(self.upd_del_fields)
self.wait_for_indexing_complete()
query = {"query": "type:emp"}
hits, _, _, _ = index.execute_query(query)
self.assertEqual(hits, 700*num_collections, f"Hits do not match after delete. Expected {700*num_collections}, found {hits}")
self._expires = 10
self._update = True
self._delete = False
self.async_perform_update_delete()
self.wait_for_indexing_complete()
self.sleep(30, "Wait for docs expiration")
query = {"query": "type:emp"}
hits, _, _, _ = index.execute_query(query)
self.assertEqual(hits, 400*num_collections, f"Hits do not match after expiration. Expected {400*num_collections}, found {hits}")
def test_collection_mutations_isolation(self):
#delete unnecessary bucket
self._cb_cluster.delete_bucket("default")
#create bucket
bucket_size = 200
bucket_priority = None
bucket_type = TestInputSingleton.input.param("bucket_type", "membase")
maxttl = TestInputSingleton.input.param("maxttl", None)
self._cb_cluster.create_default_bucket(
bucket_size,
self._num_replicas,
eviction_policy='valueOnly',
bucket_priority=bucket_priority,
bucket_type=bucket_type,
maxttl=maxttl,
bucket_storage='couchstore',
bucket_name='bucket1')
#create 2 scopes
self.cli_client.create_scope(bucket='bucket1', scope='scope1')
self.cli_client.create_scope(bucket='bucket1', scope='scope2')
#create collections with same name
self.cli_client.create_collection(bucket='bucket1', scope='scope1', collection='collection1')
self.cli_client.create_collection(bucket='bucket1', scope='scope2', collection='collection1')
# create 2 indexes
index1 = self.create_index(self._cb_cluster.get_bucket_by_name('bucket1'),
"index_scope1", collection_index=True, _type="scope1.collection1",
scope="scope1", collections=["collection1"])
index2 = self.create_index(self._cb_cluster.get_bucket_by_name('bucket1'),
"index_scope2", collection_index=True, _type="scope2.collection1",
scope="scope2", collections=["collection1"])
#load data into collections
bucket = self._cb_cluster.get_bucket_by_name('bucket1')
gen_create = SDKDataLoader(num_ops=1000, percent_create=100, percent_update=0, percent_delete=0,
load_pattern="uniform", start_seq_num=1, key_prefix="doc_", key_suffix="_",
scope="scope1", collection="collection1", json_template="emp", doc_expiry=0,
doc_size=500, get_sdk_logs=False, username="Administrator", password="password", timeout=1000,
start=0, end=0, op_type="create", all_collections=False, es_compare=False, es_host=None, es_port=None,
es_login=None, es_password=None)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(bucket, gen_create)
for task in load_tasks:
task.result()
gen_create1 = SDKDataLoader(num_ops=1000, percent_create=100, percent_update=0, percent_delete=0,
load_pattern="uniform", start_seq_num=1, key_prefix="doc_", key_suffix="_",
scope="scope2", collection="collection1", json_template="emp", doc_expiry=0,
doc_size=500, get_sdk_logs=False, username="Administrator", password="password", timeout=1000,
start=0, end=0, op_type="create", all_collections=False, es_compare=False, es_host=None, es_port=None,
es_login=None, es_password=None)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(bucket, gen_create1)
for task in load_tasks:
task.result()
# run all types of mutations on scope1, check that scope2 results remain the same
query = {"query": "dept:Marketing"}
untouched_hits, _, _, _ = index1.execute_query(query)
gen_update2 = SDKDataLoader(num_ops=100, percent_create=0, percent_update=100, percent_delete=0,
load_pattern="uniform", start_seq_num=1, key_prefix="doc_", key_suffix="_",
scope="scope2", collection="collection1", json_template="emp", doc_expiry=0,
doc_size=500, get_sdk_logs=False, username="Administrator", password="password", timeout=1000,
start=0, end=100, op_type="update", all_collections=False, es_compare=False, es_host=None, es_port=None,
es_login=None, es_password=None)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(bucket, gen_update2)
for task in load_tasks:
task.result()
query = {"query": "dept:Marketing"}
hits, _, _, _ = index1.execute_query(query)
self.assertEqual(hits, untouched_hits, "Update isolation test failed")
gen_delete2 = SDKDataLoader(num_ops=100, percent_create=0, percent_update=0, percent_delete=100,
load_pattern="uniform", start_seq_num=1, key_prefix="doc_", key_suffix="_",
scope="scope2", collection="collection1", json_template="emp", doc_expiry=0,
doc_size=500, get_sdk_logs=False, username="Administrator", password="password", timeout=1000,
start=0, end=100, op_type="delete", all_collections=False, es_compare=False, es_host=None, es_port=None,
es_login=None, es_password=None)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(bucket, gen_delete2)
for task in load_tasks:
task.result()
query = {"query": "dept:Marketing"}
hits, _, _, _ = index1.execute_query(query)
self.assertEqual(hits, untouched_hits, "Delete isolation test failed")
gen_exp2 = SDKDataLoader(num_ops=100, percent_create=0, percent_update=100, percent_delete=0,
load_pattern="uniform", start_seq_num=500, key_prefix="doc_", key_suffix="_",
scope="scope2", collection="collection1", json_template="emp", doc_expiry=10,
doc_size=500, get_sdk_logs=False, username="Administrator", password="password", timeout=1000,
start=500, end=600, op_type="update", all_collections=False, es_compare=False, es_host=None, es_port=None,
es_login=None, es_password=None)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(bucket, kv_gen=gen_exp2, exp=10)
for task in load_tasks:
task.result()
query = {"query": "dept:Marketing"}
hits, _, _, _ = index1.execute_query(query)
self.assertEqual(hits, untouched_hits, "Expiration isolation test failed")
def test_query_string_combinations(self):
"""
uses RQG framework minus randomness for testing query-string combinations of '', '+', '-'
{
mterms := [
[], // none
["+Wikipedia"], // one term
["+Wikipedia", "+English"], // two terms
["+the"], // one term (stop word)
["+the", "+English"], // two terms (one stop)
["+the", "+and"], // two terms (both stop)
]
sterms = [
[], // none
["Category"], // one term
["Category", "United"], // two terms
["of"], // one term (stop word)
["of", "United"], // two terms (one stop)
["of", "at"], // two terms (both stop)
]
nterms = [
[], // none
["-language"], // one term
["-language", "-States"], // two terms
["-for"], // one term (stop word)
["-for", "-States"], // two terms (one stop)
["-for", "-with"], // two terms (both stop)
]
}
"""
self.load_data()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
index.fts_queries = []
mterms = [[],
["+revision.text.#text:\"Wikipedia\""],
["+revision.text.#text:\"Wikipedia\"", "+revision.text.#text:\"English\""],
["+revision.text.#text:\"the\""],
["+revision.text.#text:\"the\"", "+revision.text.#text:\"English\""],
["+revision.text.#text:\"the\"", "+revision.text.#text:\"and\""]]
sterms = [[],
["revision.text.#text:\"Category\""],
["revision.text.#text:\"Category\"", "revision.text.#text:\"United\""],
["revision.text.#text:\"of\""],
["revision.text.#text:\"of\"", "revision.text.#text:\"United\""],
["revision.text.#text:\"of\"", "revision.text.#text:\"at\""]]
nterms = [[],
["-revision.text.#text:\"language\""],
["-revision.text.#text:\"language\"", "-revision.text.#text:\"States\""],
["-revision.text.#text:\"for\""],
["-revision.text.#text:\"for\"", "-revision.text.#text:\"States\""],
["-revision.text.#text:\"for\"", "-revision.text.#text:\"with\""]]
for mterm in mterms:
for sterm in sterms:
for nterm in nterms:
clause = (' '.join(mterm) + ' ' + ' '.join(sterm) + ' ' + ' '.join(nterm)).strip()
query = {"query": clause}
index.fts_queries.append(json.loads(json.dumps(query, ensure_ascii=False)))
if self.compare_es:
self.es.es_queries.append(json.loads(json.dumps({"query": {"query_string": query}},
ensure_ascii=False)))
self.run_query_and_compare(index)
def index_edit_and_query_custom_mapping(self):
"""
Index and query index, update map, query again, uses RQG
"""
fail = False
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
bucket=self._cb_cluster.get_bucket_by_name('default'),
index_name="custom_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.create_es_index_mapping(index.es_custom_map, index.index_definition)
self.load_data()
self.wait_for_indexing_complete()
self.generate_random_queries(index, self.num_queries, self.query_types)
try:
self.run_query_and_compare(index)
except AssertionError as err:
error_msg = str(err)
self.log.error(err)
fail = True
self.log.info("Editing custom index with new map...")
index.generate_new_custom_map(seed=index.cm_id+10, collection_index=collection_index, type_mapping=type)
index.index_definition['uuid'] = index.get_uuid()
index.update()
# updating mapping on ES is not easy, often leading to merge issues
# drop and recreate the index, load again
self.create_es_index_mapping(index.es_custom_map)
self.load_data()
self.wait_for_indexing_complete()
if self.run_via_n1ql:
n1ql_executor = self._cb_cluster
else:
n1ql_executor = None
self.run_query_and_compare(index, n1ql_executor=n1ql_executor)
if fail:
self.fail(error_msg)
def index_query_in_parallel(self):
"""
Run rqg queries before indexing is complete
turn off es validation
goal is to make sure there are no fdb or cbft crashes
"""
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
bucket=self._cb_cluster.get_bucket_by_name('default'),
index_name="default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.load_data()
self.generate_random_queries(index, self.num_queries, self.query_types)
self.run_query_and_compare(index)
def load_index_query_all_in_parallel(self):
"""
Run rqg queries before indexing is complete
turn off es validation
goal is to make sure there are no fdb or cbft crashes
"""
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
bucket=self._cb_cluster.get_bucket_by_name('default'),
index_name="default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.sleep(20)
self.generate_random_queries(index, self.num_queries, self.query_types)
from threading import Thread
threads = []
threads.append(Thread(target=self.load_data,
name="loader thread",
args=()))
threads.append(Thread(target=self.run_query_and_compare,
name="query thread",
args=(index,)))
for thread in threads:
thread.start()
for thread in threads:
thread.join()
def index_edit_and_query_custom_analyzer(self):
"""
Index and query index, update map, query again, uses RQG
"""
fail = False
index = self.create_index(
bucket=self._cb_cluster.get_bucket_by_name('default'),
index_name="custom_index")
self.create_es_index_mapping(index.es_custom_map, index.index_definition)
self.load_data()
self.wait_for_indexing_complete()
self.generate_random_queries(index, self.num_queries, self.query_types)
try:
self.run_query_and_compare(index)
except AssertionError as err:
error_msg = str(err)
self.log.error(err)
fail = True
self.log.info("Editing custom index with new custom analyzer...")
index.update_custom_analyzer(seed=index.cm_id + 10)
index.index_definition['uuid'] = index.get_uuid()
index.update()
# updating mapping on ES is not easy, often leading to merge issues
# drop and recreate the index, load again
self.create_es_index_mapping(index.es_custom_map, index.index_definition)
gen = copy.deepcopy(self.create_gen)
task = self.es.async_bulk_load_ES(index_name='es_index',
gen=gen,
op_type='create')
task.result()
self.wait_for_indexing_complete()
try:
if self.run_via_n1ql:
n1ql_executor = self._cb_cluster
else:
n1ql_executor = None
self.run_query_and_compare(index, n1ql_executor=n1ql_executor)
except AssertionError as err:
error_msg = str(err)
self.log.error(err)
fail = True
if fail:
# 'err' is scoped to the except block in Python 3, so fail via the saved message
self.fail(error_msg)
def index_delete_custom_analyzer(self):
"""
Create Index and then update by deleting custom analyzer in use, or custom filter in use.
"""
error_msg = TestInputSingleton.input.param('error_msg', '')
fail = False
index = self.create_index(
bucket=self._cb_cluster.get_bucket_by_name('default'),
index_name="custom_index")
self.load_data()
self.wait_for_indexing_complete()
self.log.info("Editing custom index by deleting custom analyzer/filter in use...")
index.update_custom_analyzer(seed=index.cm_id + 10)
index.index_definition['uuid'] = index.get_uuid()
try:
index.update()
except Exception as err:
self.log.error(err)
if error_msg in str(err):
self.log.info("Error is expected")
else:
self.log.info("Error is not expected")
raise err
def test_field_name_alias(self):
"""
Test the Searchable As property in field mapping
"""
self.load_data()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
index.add_child_field_to_default_mapping(field_name=self.field_name,
field_type=self.field_type,
field_alias=self.field_alias)
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(5)
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
for index in self._cb_cluster.get_indexes():
hits, matches, time_taken, status = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
consistency_level=self.consistency_level,
consistency_vectors=self.consistency_vectors)
self.log.info("Hits: %s" % hits)
def test_one_field_multiple_analyzer(self):
"""
1. Create a default FTS index on the wiki dataset
2. Update it to add a field mapping for revision.text.#text field with 'en' analyzer
3. Should get 0 search results for a query
4. Update it to add another field mapping for the same field, with 'fr' analyzer
5. Same query should yield more results now.
"""
self.load_data()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
index.add_child_field_to_default_mapping(field_name=self.field_name,
field_type=self.field_type,
field_alias=self.field_alias,
analyzer="en")
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(5)
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits1", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
for index in self._cb_cluster.get_indexes():
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits)
self.log.info("Hits: %s" % hits)
index.add_analyzer_to_existing_field_map(field_name=self.field_name,
field_type=self.field_type,
field_alias=self.field_alias,
analyzer="fr")
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(5)
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits2", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
for index in self._cb_cluster.get_indexes():
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits)
self.log.info("Hits: %s" % hits)
def test_facets(self):
field_indexed = self._input.param("field_indexed", True)
self.load_data()
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
if self.container_type == 'bucket':
index.add_child_field_to_default_mapping(field_name="type",
field_type="text",
field_alias="type",
analyzer="keyword")
if field_indexed:
index.add_child_field_to_default_mapping(field_name="dept",
field_type="text",
field_alias="dept",
analyzer="keyword")
index.add_child_field_to_default_mapping(field_name="salary",
field_type="number",
field_alias="salary")
index.add_child_field_to_default_mapping(field_name="join_date",
field_type="datetime",
field_alias="join_date")
else:
index.add_child_field_to_default_collection_mapping(field_name="type",
field_type="text",
field_alias="type",
analyzer="keyword", scope=self.scope, collection=self.collection)
if field_indexed:
index.add_child_field_to_default_collection_mapping(field_name="dept",
field_type="text",
field_alias="dept",
analyzer="keyword", scope=self.scope, collection=self.collection)
index.add_child_field_to_default_collection_mapping(field_name="salary",
field_type="number",
field_alias="salary", scope=self.scope, collection=self.collection)
index.add_child_field_to_default_collection_mapping(field_name="join_date",
field_type="datetime",
field_alias="join_date", scope=self.scope, collection=self.collection)
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(5)
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
try:
for index in self._cb_cluster.get_indexes():
hits, _, _, _, facets = index.execute_query_with_facets(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits)
self.log.info("Hits: %s" % hits)
self.log.info("Facets: %s" % facets)
index.validate_facets_in_search_results(no_of_hits=hits,
facets_returned=facets)
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: "+ str(err))
def test_facets_during_index(self):
field_indexed = self._input.param("field_indexed", True)
self.load_data()
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.sleep(5)
if self.container_type == 'bucket':
index.add_child_field_to_default_mapping(field_name="type",
field_type="text",
field_alias="type",
analyzer="keyword")
if field_indexed:
index.add_child_field_to_default_mapping(field_name="dept",
field_type="text",
field_alias="dept",
analyzer="keyword")
index.add_child_field_to_default_mapping(field_name="salary",
field_type="number",
field_alias="salary")
index.add_child_field_to_default_mapping(field_name="join_date",
field_type="datetime",
field_alias="join_date")
else:
index.add_child_field_to_default_collection_mapping(field_name="type",
field_type="text",
field_alias="type",
analyzer="keyword", scope=self.scope, collection=self.collection)
if field_indexed:
index.add_child_field_to_default_collection_mapping(field_name="dept",
field_type="text",
field_alias="dept",
analyzer="keyword", scope=self.scope, collection=self.collection)
index.add_child_field_to_default_collection_mapping(field_name="salary",
field_type="number",
field_alias="salary", scope=self.scope, collection=self.collection)
index.add_child_field_to_default_collection_mapping(field_name="join_date",
field_type="datetime",
field_alias="join_date", scope=self.scope, collection=self.collection)
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(5)
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
while not self.is_index_complete(index.name):
zero_results_ok = True
try:
hits, _, _, _, facets = index.execute_query_with_facets(query,
zero_results_ok=zero_results_ok)
self.log.info("Hits: %s" % hits)
self.log.info("Facets: %s" % facets)
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: "+ str(err))
def test_doc_config(self):
# delete default bucket
self._cb_cluster.delete_bucket("default")
master = self._cb_cluster.get_master_node()
# Load Travel Sample bucket and create an index
self.load_sample_buckets(server=master, bucketName="travel-sample")
bucket = self._cb_cluster.get_bucket_by_name("travel-sample")
index = self.create_index(bucket, "travel-index", scope="_default", collections=["_default"])
self.sleep(10)
self.wait_for_indexing_complete()
# Add Type Mapping
index.add_type_mapping_to_index_definition(type="airport",
analyzer="en")
index.add_type_mapping_to_index_definition(type="hotel",
analyzer="en")
mode = self._input.param("mode", "type_field")
index.add_doc_config_to_index_definition(mode=mode)
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(15)
self.wait_for_indexing_complete()
self.validate_index_count(equal_bucket_doc_count=True,
zero_rows_ok=False)
# Run Query
expected_hits = int(self._input.param("expected_hits", 0))
if not expected_hits:
zero_results_ok = True
else:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
try:
for index in self._cb_cluster.get_indexes():
hits, _, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
consistency_level=self.consistency_level,
consistency_vectors=self.consistency_vectors)
self.log.info("Hits: %s" % hits)
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_boost_query_type(self):
# Create bucket, create index
self.load_data()
self.wait_till_items_in_bucket_equal(items=self._num_items//2)
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
index.add_type_mapping_to_index_definition(type="emp",
analyzer="keyword")
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(15)
self.wait_for_indexing_complete()
zero_results_ok = False
expected_hits = 5
# Run Query w/o Boosting and compare the scores for Docs emp10000086 &
# emp10000021. Should be the same
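# In query-string syntax, '^N' boosts a clause's weight in scoring,
# e.g. "dept:Marketing^5" below weighs the dept match 5x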
query = {"query": "dept:Marketing name:Safiya"}
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
try:
for index in self._cb_cluster.get_indexes():
hits, contents, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Contents: %s" % contents)
score_before_boosting_doc1 = index.get_score_from_query_result_content(
contents=contents, doc_id='emp10000045')
score_before_boosting_doc2 = index.get_score_from_query_result_content(
contents=contents, doc_id='emp10000053')
self.log.info("Scores before boosting:")
self.log.info("")
self.log.info("emp10000045: %s", score_before_boosting_doc1)
self.log.info("emp10000053: %s", score_before_boosting_doc2)
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
if score_before_boosting_doc1 != score_before_boosting_doc2:
self.fail("Testcase failed: Scores for emp10000045 & emp10000053 "
"are not equal before boosting")
# Run Query w/o Boosting and compare the scores for Docs emp10000021 &
# emp10000086. emp10000021 score should have improved w.r.t. emp10000086
query = {"query": "dept:Marketing^5 name:Safiya"}
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
for index in self._cb_cluster.get_indexes():
hits, contents, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Contents: %s" % contents)
score_after_boosting_doc1 = index.get_score_from_query_result_content(
contents=contents, doc_id='emp10000045')
score_after_boosting_doc2 = index.get_score_from_query_result_content(
contents=contents, doc_id='emp10000053')
self.log.info("Scores after boosting:")
self.log.info("")
self.log.info("emp10000045: %s", score_after_boosting_doc1)
self.log.info("emp10000053: %s", score_after_boosting_doc2)
assert score_after_boosting_doc1 == score_after_boosting_doc2
assert score_before_boosting_doc1 < score_after_boosting_doc1
assert score_before_boosting_doc2 < score_after_boosting_doc2
def test_doc_id_query_type(self):
# Create bucket, create index
self.load_data()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
index.add_type_mapping_to_index_definition(type="emp",
analyzer="standard")
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(15)
self.wait_for_indexing_complete()
expected_hits = int(self._input.param("expected_hits", 0))
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
# From the Query string, fetch the Doc IDs
doc_ids = copy.deepcopy(query['ids'])
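# A doc-id query has the shape {"ids": ["emp10000001", ...]} and matches
# documents by key rather than by field content (ids here are illustrative)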
# If invalid_doc_id param is passed, add this to the query['ids']
invalid_doc_id = self._input.param("invalid_doc_id", 0)
if invalid_doc_id:
query['ids'].append(invalid_doc_id)
# If disjuncts_query is passed, join query and disjuncts_query
# to form a new query string
disjuncts_query = self._input.param("disjuncts_query", None)
if disjuncts_query:
if isinstance(disjuncts_query, str):
disjuncts_query = json.loads(disjuncts_query)
new_query = {}
new_query['disjuncts'] = []
new_query['disjuncts'].append(disjuncts_query)
new_query['disjuncts'].append(query)
query = new_query
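# The combined query is a disjunction, e.g.
# {"disjuncts": [<disjuncts_query>, {"ids": [...]}]} - a doc matches if
# either clause matches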
# Execute Query
zero_results_ok = False
try:
for index in self._cb_cluster.get_indexes():
n1ql_query = "select d, meta().id from default d where search(d, "+json.dumps(query)+") and type='emp'"
hits, contents, _, _ = index.execute_query(query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Contents: %s" % contents)
# For each doc id passed in the query, validate the
# presence in the search results
for doc_id in doc_ids:
self.assertTrue(index.is_doc_present_in_query_result_content
(contents=contents, doc_id=doc_id), "Doc ID "
"%s is not present in Search results"
% doc_id)
if self.run_via_n1ql:
n1ql_results = self._cb_cluster.run_n1ql_query(query=n1ql_query)
self.assertTrue(index.is_doc_present_in_query_result_content
(contents=n1ql_results['results'], doc_id=doc_id), "Doc ID "
"%s is not present in N1QL Search results"
% doc_id)
score = index.get_score_from_query_result_content\
(contents=contents, doc_id=doc_id)
self.log.info ("Score for Doc ID {0} is {1}".
format(doc_id, score))
if invalid_doc_id:
# Validate if invalid doc id was passed, it should
# not be present in the search results
self.assertFalse(index.is_doc_present_in_query_result_content
(contents=contents, doc_id=invalid_doc_id),
"Doc ID %s is present in Search results"
% invalid_doc_id)
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_sorting_of_results(self):
self.load_data()
self.wait_till_items_in_bucket_equal(self._num_items//2)
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
default_query = {"disjuncts": [{"match": "Safiya", "field": "name"},
{"match": "Palmer", "field": "name"}]}
query = eval(self._input.param("query", str(default_query)))
if expected_hits:
zero_results_ok = False
if isinstance(query, str):
query = json.loads(query)
try:
for index in self._cb_cluster.get_indexes():
sort_params = self.build_sort_params()
hits, raw_hits, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
sort_fields=sort_params,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Doc IDs: %s" % raw_hits)
if hits:
result = index.validate_sorted_results(raw_hits,
self.sort_fields_list)
if not result:
self.fail(
"Testcase failed. Actual results do not match expected.")
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_sorting_of_results_during_indexing(self):
self.load_data()
self.wait_till_items_in_bucket_equal(self._num_items//2)
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
#self.wait_for_indexing_complete()
self.sleep(5)
zero_results_ok = True
#expected_hits = int(self._input.param("expected_hits", 0))
default_query = {"disjuncts": [{"match": "Safiya", "field": "name"},
{"match": "Palmer", "field": "name"}]}
query = eval(self._input.param("query", str(default_query)))
if isinstance(query, str):
query = json.loads(query)
try:
for index in self._cb_cluster.get_indexes():
while not self.is_index_complete(index.name):
sort_params = self.build_sort_params()
hits, raw_hits, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
sort_fields=sort_params,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Doc IDs: %s" % raw_hits)
#self.sleep(5)
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_sorting_of_results_on_non_indexed_fields(self):
self.load_data()
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index", collection_index=collection_index, _type=type,
scope=index_scope, collections=index_collections)
self.wait_for_indexing_complete()
if self.container_type == 'bucket':
index.add_child_field_to_default_mapping(field_name="name",
field_type="text",
field_alias="name",
analyzer="en")
else:
index.add_child_field_to_default_collection_mapping(field_name="name",
field_type="text",
field_alias="name",
analyzer="en", scope=self.scope,
collection=self.collection)
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(5)
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
default_query = {"disjuncts": [{"match": "Safiya", "field": "name"},
{"match": "Palmer", "field": "name"}]}
query = eval(self._input.param("query", str(default_query)))
if expected_hits:
zero_results_ok = False
if isinstance(query, str):
query = json.loads(query)
try:
for index in self._cb_cluster.get_indexes():
hits, raw_hits, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
sort_fields=self.sort_fields_list,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Doc IDs: %s" % raw_hits)
if hits:
result = index.validate_sorted_results(raw_hits,
self.sort_fields_list)
if not result:
self.fail(
"Testcase failed. Actual results do not match expected.")
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_scoring_tf_score(self):
"""
Test if the TF score in the Scoring functionality works fine
"""
test_data = [{"text":"cat - a lazy cat and a brown cat"},
{"text":"a lazy cat and a brown cat"},
{"text":"a lazy cat"}]
self.create_test_dataset(self._master, test_data)
self.wait_till_items_in_bucket_equal(items=len(test_data))
plan_params = self.construct_plan_params()
index = self.create_index(plan_params=plan_params,
bucket=self._cb_cluster.get_bucket_by_name(
'default'),
index_name="default_index")
self.wait_for_indexing_complete()
self.sleep(5)
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
n1ql_query = "select search_score(d) as score, d.text, meta().id from default d where search(d," + json.dumps(query) + ")"
for index in self._cb_cluster.get_indexes():
hits, raw_hits, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True,
explain=True)
tf_score1, _, _, _, _ = index.get_detailed_scores_for_doc(
doc_id='1',
search_results=raw_hits,
weight='fieldWeight',
searchTerm='cat')
self.log.info("TF for Doc ID 1 = %s" % tf_score1)
tf_score2, _, _, _, _ = index.get_detailed_scores_for_doc(
doc_id='2',
search_results=raw_hits,
weight='fieldWeight',
searchTerm='cat')
self.log.info("TF for Doc ID 2 = %s" % tf_score2)
tf_score3, _, _, _, _ = index.get_detailed_scores_for_doc(
doc_id='3',
search_results=raw_hits,
weight='fieldWeight',
searchTerm='cat')
self.log.info("TF for Doc ID 3 = %s" % tf_score3)
self.assertTrue(tf_score1 > tf_score2 > tf_score3,
"Testcase failed. TF score for Doc1 not > Doc2 not > Doc3")
if self.run_via_n1ql:
self.compare_n1ql_fts_scoring(n1ql_query=n1ql_query, raw_hits=raw_hits)
def compare_n1ql_fts_scoring(self, n1ql_query='', raw_hits=[]):
n1ql_results = self._cb_cluster.run_n1ql_query(query=n1ql_query)
self.assertEqual(len(n1ql_results['results']), len(raw_hits),
"Return values are not the same for n1ql query and fts request.")
for res in n1ql_results['results']:
for hit in raw_hits:
if res['id'] == hit['id']:
self.assertEqual(res['score'], hit['score'],
"Scoring is not the same for n1ql result and fts request hit")
def test_scoring_idf_score(self):
"""
Test if the IDF score in the Scoring functionality works fine
"""
test_data = [{"text":"a brown cat"},
{"text":"a lazy cat"},
{"text":"a lazy cat and a brown cat"},
{"text":"a brown dog"},
{"text":"a lazy dog"},
{"text":"a lazy dog and a brown dog"},
{"text":"a lazy fox and a brown fox"}]
self.create_test_dataset(self._master, test_data)
self.wait_till_items_in_bucket_equal(items=len(test_data))
plan_params = self.construct_plan_params()
index = self.create_index(plan_params=plan_params,
bucket=self._cb_cluster.get_bucket_by_name(
'default'),
index_name="default_index")
self.wait_for_indexing_complete()
self.sleep(5)
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
n1ql_query = "select search_score(d) as score, d.text, meta().id from default d where search(d," + json.dumps(query) + ")"
for index in self._cb_cluster.get_indexes():
hits, raw_hits, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True,
explain=True)
_, _, idf1, _, _ = index.get_detailed_scores_for_doc(doc_id='2',
search_results=raw_hits,
weight='fieldWeight',
searchTerm='cat')
self.log.info("IDF score for Doc ID 1 = %s" % idf1)
_, _, idf2, _, _ = index.get_detailed_scores_for_doc(doc_id='2',
search_results=raw_hits,
weight='fieldWeight',
searchTerm='lazy')
self.log.info( "IDF score for Doc ID 2 = %s" % idf2)
self.assertTrue(idf1 > idf2, "Testcase failed. IDF score for Doc2 "
"for search term 'cat' not > that of search term 'lazy'")
if self.run_via_n1ql:
self.compare_n1ql_fts_scoring(n1ql_query=n1ql_query, raw_hits=raw_hits)
def test_scoring_field_norm_score(self):
"""
Test if the Field Normalization score in the Scoring functionality works fine
"""
test_data = [{"text":"a cat"},
{"text":"a lazy cat"},
{"text":"a lazy cat and a brown cat"}]
self.create_test_dataset(self._master, test_data)
self.wait_till_items_in_bucket_equal(items=len(test_data))
plan_params = self.construct_plan_params()
index = self.create_index(plan_params=plan_params,
bucket=self._cb_cluster.get_bucket_by_name(
'default'),
index_name="default_index")
self.wait_for_indexing_complete()
self.sleep(5)
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
n1ql_query = "select search_score(d) as score, d.text, meta().id from default d where search(d," + json.dumps(query) + ")"
for index in self._cb_cluster.get_indexes():
hits, raw_hits, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True,
explain=True)
_, field_norm1, _, _, _ = index.get_detailed_scores_for_doc(
doc_id='1',
search_results=raw_hits,
weight='fieldWeight',
searchTerm='cat')
self.log.info(
"Field Normalization score for Doc ID 1 = %s" % field_norm1)
_, field_norm2, _, _, _ = index.get_detailed_scores_for_doc(
doc_id='2',
search_results=raw_hits,
weight='fieldWeight',
searchTerm='cat')
self.log.info(
"Field Normalization score for Doc ID 2 = %s" % field_norm2)
_, field_norm3, _, _, _ = index.get_detailed_scores_for_doc(
doc_id='3',
search_results=raw_hits,
weight='fieldWeight',
searchTerm='cat')
self.log.info(
"Field Normalization score for Doc ID 3 = %s" % field_norm3)
self.assertTrue(field_norm1 > field_norm2 > field_norm3,
"Testcase failed. Field Normalization score for "
"Doc1 not > Doc2 not > Doc3")
if self.run_via_n1ql:
self.compare_n1ql_fts_scoring(n1ql_query=n1ql_query, raw_hits=raw_hits)
def test_scoring_query_norm_score(self):
"""
Test if the Query Normalization score in the Scoring functionality works fine
"""
test_data = [{"text":"a cat"},
{"text":"a lazy cat"},
{"text":"a lazy cat and a brown cat"}]
self.create_test_dataset(self._master, test_data)
self.wait_till_items_in_bucket_equal(items=len(test_data))
plan_params = self.construct_plan_params()
index = self.create_index(plan_params=plan_params,
bucket=self._cb_cluster.get_bucket_by_name(
'default'),
index_name="default_index")
self.wait_for_indexing_complete()
self.sleep(5)
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
n1ql_query = "select search_score(d) as score, d.text, meta().id from default d where search(d," + json.dumps(query) + ")"
for index in self._cb_cluster.get_indexes():
hits, raw_hits, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True,
explain=True)
_, _, _, query_norm1, _ = index.get_detailed_scores_for_doc(
doc_id='1',
search_results=raw_hits,
weight='queryWeight',
searchTerm='cat')
self.log.info(
"Query Normalization score for Doc ID 1 = %s" % query_norm1)
_, _, _, query_norm2, _ = index.get_detailed_scores_for_doc(
doc_id='2',
search_results=raw_hits,
weight='queryWeight',
searchTerm='cat')
self.log.info(
"Query Normalization score for Doc ID 2 = %s" % query_norm2)
_, _, _, query_norm3, _ = index.get_detailed_scores_for_doc(
doc_id='3',
search_results=raw_hits,
weight='queryWeight',
searchTerm='cat')
self.log.info(
"Query Normalization score for Doc ID 3 = %s" % query_norm3)
self.assertTrue(query_norm1 == query_norm2 == query_norm3,
"Testcase failed. Query Normalization score for "
"Doc1 != Doc2 != Doc3")
if self.run_via_n1ql:
self.compare_n1ql_fts_scoring(n1ql_query=n1ql_query, raw_hits=raw_hits)
def test_scoring_coord_score(self):
"""
Test if the Coord score in the Scoring functionality works fine
"""
test_data = [{"text":"a cat"},
{"text":"a lazy cat"}]
self.create_test_dataset(self._master, test_data)
self.wait_till_items_in_bucket_equal(items=len(test_data))
plan_params = self.construct_plan_params()
index = self.create_index(plan_params=plan_params,
bucket=self._cb_cluster.get_bucket_by_name(
'default'),
index_name="default_index")
self.wait_for_indexing_complete()
self.sleep(5)
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
n1ql_query = "select search_score(d) as score, d.text, meta().id from default d where search(d," + json.dumps(query) + ")"
for index in self._cb_cluster.get_indexes():
hits, raw_hits, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True,
explain=True)
_, _, _, _, coord1 = index.get_detailed_scores_for_doc(
doc_id='1',
search_results=raw_hits,
weight='coord',
searchTerm='')
self.log.info(
"Coord score for Doc ID 1 = %s" % coord1)
_, _, _, _, coord2 = index.get_detailed_scores_for_doc(
doc_id='2',
search_results=raw_hits,
weight='coord',
searchTerm='')
self.log.info(
"Coord score for Doc ID 2 = %s" % coord2)
self.assertTrue(coord1 < coord2,
"Testcase failed. Coord score for Doc1 not < Doc2")
if self.run_via_n1ql:
self.compare_n1ql_fts_scoring(n1ql_query=n1ql_query, raw_hits=raw_hits)
def test_fuzzy_query(self):
"""
Test if fuzzy queries work fine
"""
test_data = [{"text":"simmer"},
{"text":"dimmer"},
{"text":"hammer"},
{"text":"shimmer"},
{"text":"rubber"},
{"text":"jabber"},
{"text":"kilmer"},
{"text":"year"},
{"text":"mumma"},
{"text":"tool stemmer"},
{"text":"he is weak at grammar"},
{"text":"sum of all the rows"}]
self.create_test_dataset(self._master, test_data)
self.wait_till_items_in_bucket_equal(items=len(test_data))
index = self.create_index(bucket=self._cb_cluster.get_bucket_by_name(
'default'),
index_name="default_index")
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
if expected_hits:
zero_results_ok = False
query = eval(self._input.param("query", str(self.sample_query)))
if isinstance(query, str):
query = json.loads(query)
zero_results_ok = True
for index in self._cb_cluster.get_indexes():
hits, content, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True)
self.log.info("Docs in Search results = %s" % content)
self.log.info("Expected Docs = %s" % self.expected_docs)
if hits > 0:
all_expected_docs_present = True
for doc in self.expected_docs_list:
all_expected_docs_present &= index.is_doc_present_in_query_result_content(content, doc)
self.assertTrue(all_expected_docs_present, "Not all expected docs are present in the search results")
def test_pagination_of_search_results(self):
max_matches = self._input.param("query_max_matches", 10000000)
show_results_from_item = self._input.param("show_results_from_item", 0)
self.load_data()
self.wait_till_items_in_bucket_equal(items = self._num_items//2)
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
default_query = {"match_all": "true", "field": "name"}
query = eval(self._input.param("query", str(default_query)))
if expected_hits:
zero_results_ok = False
if isinstance(query, str):
query = json.loads(query)
try:
sort_params = self.build_sort_params()
for index in self._cb_cluster.get_indexes():
hits, doc_ids, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
sort_fields=sort_params,
show_results_from_item=show_results_from_item)
self.log.info("Hits: %s" % hits)
self.log.info("Doc IDs: %s" % doc_ids)
if hits:
self.log.info("Count of docs on page = %s" % len(doc_ids))
if 0 <= show_results_from_item <= self._num_items:
items_on_page = self._num_items - show_results_from_item
elif show_results_from_item < 0:
items_on_page = self._num_items
show_results_from_item = 0
else:
items_on_page = 0
expected_items_on_page = min(items_on_page, max_matches)
self.assertEqual(len(doc_ids), expected_items_on_page, "Items per page are not correct")
doc_id_prefix = 'emp'
first_doc_id = 10000001
i = 0
expected_doc_present = True
while i < expected_items_on_page:
expected_doc_id = doc_id_prefix+str(first_doc_id+i+show_results_from_item)
expected_doc_present &= (expected_doc_id in doc_ids)
if not expected_doc_present:
self.log.info("Doc ID %s not in the search results page" % expected_doc_id)
i += 1
self.assertTrue(expected_doc_present, "Some docs not present in the results page")
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_snippets_highlighting_of_search_term_in_results(self):
self.load_data()
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
index.add_child_field_to_default_mapping("name", "text")
index.add_child_field_to_default_mapping("manages.reports", "text")
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.sleep(10)
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
default_query = {"match": "Safiya", "field": "name"}
query = eval(self._input.param("query", str(default_query)))
if expected_hits:
zero_results_ok = False
if isinstance(query, str):
query = json.loads(query)
try:
for index in self._cb_cluster.get_indexes():
n1ql_results = None
if self.run_via_n1ql:
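# SEARCH() with an "out" option names the FTS result set ("oouutt" here)
# so SEARCH_META() can reference the per-hit metadata, including highlights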
n1ql_query = "select b, search_meta(b.oouutt) as meta from default b where " \
"search(b, {\"query\": " + json.dumps(
query) + ", \"explain\": true, \"highlight\": {}},{\"out\": \"oouutt\"})"
n1ql_results = self._cb_cluster.run_n1ql_query(query=n1ql_query)
hits, contents, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True,
highlight=True,
highlight_style=self.highlight_style,
highlight_fields=self.highlight_fields_list)
self.log.info("Hits: %s" % hits)
self.log.info("Content: %s" % contents)
result = True
self.expected_results = json.loads(self.expected_results)
if hits:
for expected_doc in self.expected_results:
result &= index.validate_snippet_highlighting_in_result_content(
contents, expected_doc['doc_id'],
expected_doc['field_name'], expected_doc['term'],
highlight_style=self.highlight_style)
if self.run_via_n1ql:
result &= index.validate_snippet_highlighting_in_result_content_n1ql(
n1ql_results['results'], expected_doc['doc_id'],
expected_doc['field_name'], expected_doc['term'],
highlight_style=self.highlight_style)
if not result:
self.fail(
"Testcase failed. Actual results do not match expected.")
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_geo_query(self):
"""
Tests both geo location and bounding box queries
compares results against ES
:return: Nothing
"""
geo_index = self.create_geo_index_and_load()
self.generate_random_geo_queries(geo_index, self.num_queries)
if self.run_via_n1ql:
n1ql_executor = self._cb_cluster
else:
n1ql_executor = None
self.run_query_and_compare(geo_index, n1ql_executor=n1ql_executor)
def test_geo_polygon_query(self):
"""
Tests geo polygon queries
compares results against ES
:return: Nothing
"""
geo_index = self.create_geo_index_and_load()
self.generate_random_geo_polygon_queries(geo_index, self.num_queries, self.polygon_feature, self.num_vertices)
if self.run_via_n1ql:
n1ql_executor = self._cb_cluster
else:
n1ql_executor = None
self.run_query_and_compare(geo_index, n1ql_executor=n1ql_executor)
def test_geo_polygon_on_edge_corner_query(self):
expected_hits = int(self._input.param("expected_hits", 0))
expected_doc_ids = self._input.param("expected_doc_ids", None)
polygon_points = str(self._input.param("polygon_points", None))
geo_index = self.create_geo_index_and_load()
query = '{"field": "geo", "polygon_points" : ' + polygon_points + '}'
self.log.info(query)
query = json.loads(query)
contents = ""
for index in self._cb_cluster.get_indexes():
hits, contents, _, _ = index.execute_query(query=query,
zero_results_ok=True,
expected_hits=expected_hits,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Content: %s" % contents)
for doc_id in expected_doc_ids.split(","):
doc_exist = False
for content in contents:
if content['id'] == doc_id:
self.log.info(content)
doc_exist = True
if not doc_exist:
self.fail("expected doc_id : " + str(doc_id) + " does not exist")
def test_geo_polygon_with_holes_must_not(self):
geo_index = self.create_geo_index_and_load()
query = '{"must": {"conjuncts": [{"field": "geo", "polygon_points": ' \
'[[-124.29807832031247, 38.01868304390075], ' \
'[-122.34800507812497, 37.12617594722073], [-120.52976777343747, 38.35114759945404], ' \
'[-120.72752167968747, 39.44978110907268], [-122.90834850139811, 40.22582625155702], ' \
'[-124.24868053264811, 39.61072953444142]]}]}, ' \
'"must_not": {"disjuncts": [{"field": "geo", "polygon_points": ' \
'[[-122.56773164062497, 39.72703407666045], ' \
'[-123.02915742187497, 38.96238669420149], [-122.07334687499997, 38.189396892659744], ' \
'[-120.79893281249997, 38.585519836298694]]}]}}'
self.log.info(query)
query = json.loads(query)
for index in self._cb_cluster.get_indexes():
hits, contents, _, _ = index.execute_query(query=query,
zero_results_ok=False,
expected_hits=18,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Content: %s" % contents)
def test_sort_geo_query(self):
"""
Generate random geo location queries and compare the results against
Elasticsearch
:return: Nothing
"""
geo_index = self.create_geo_index_and_load()
from .random_query_generator.rand_query_gen import FTSESQueryGenerator
testcase_failed = False
for i in range(self.num_queries):
self.log.info("Running Query no --> " + str(i))
fts_query, es_query = FTSESQueryGenerator.construct_geo_location_query()
self.log.info(fts_query)
self.log.info("fts_query location ---> " + str(fts_query["location"]))
# If query has geo co-ordinates in form of an object
if "lon" in fts_query["location"]:
lon = fts_query["location"]["lon"]
lat = fts_query["location"]["lat"]
# If query has geo co-ordinates in form of a list
elif isinstance(fts_query["location"], list):
lon = fts_query["location"][0]
lat = fts_query["location"][1]
# If query has geo co-ordinates in form of a string or geohash
elif isinstance(fts_query["location"], str):
# If the location is in string format
if "," in fts_query["location"]:
lat = float(fts_query["location"].split(",")[0])
lon = float(fts_query["location"].split(",")[1])
else:
lat = float(Geohash.decode(fts_query["location"])[0])
lon = float(Geohash.decode(fts_query["location"])[1])
unit = fts_query["distance"][-2:]
location = None
case = random.randint(0, 3)
# Geo location as an object
if case == 0:
location = {"lon": lon,
"lat": lat}
# Geo Location as array
if case == 1:
location = [lon, lat]
# Geo Location as string
if case == 2:
location = "{0},{1}".format(lat, lon)
# Geo Location as Geohash
if case == 3:
geohash = Geohash.encode(lat, lon, precision=random.randint(3, 8))
location = geohash
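# All four encodings above describe the same point and should be accepted
# interchangeably by the geo_distance sort, e.g. (illustrative values):
#   {"lon": -122.4, "lat": 37.7}  ~  [-122.4, 37.7]
#   "37.7,-122.4"                 ~  Geohash.encode(37.7, -122.4, precision=8)
# Note the array form is [lon, lat] while the string form is "lat,lon".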
self.log.info("sort_fields_location ----> " + str(location))
sort_fields = [
{
"by": "geo_distance",
"field": "geo",
"unit": unit,
"location": location
}
]
hits, doc_ids, _, _ = geo_index.execute_query(
query=fts_query,
sort_fields=sort_fields)
self.log.info("Hits from FTS: {0}".format(hits))
self.log.info("First 50 docIDs: {0}". format(doc_ids[:50]))
sort_fields_es = [
{
"_geo_distance": {
"geo": location,
"order": "asc",
"unit": unit
}
}
]
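# ES analogue of the FTS sort above: _geo_distance on the same location and
# unit, ascending, so both engines should return the same document order.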
es_query["sort"] = sort_fields_es
hits2, doc_ids2, _ = self.es.search(index_name="es_index",
query=es_query)
self.log.info("Hits from ES: {0}".format(hits2))
self.log.info("First 50 doc_ids: {0}".format(doc_ids2[:50]))
if doc_ids == doc_ids2:
self.log.info("PASS: Sort order matches!")
else:
msg = "FAIL: Sort order mismatch!"
self.log.error(msg)
testcase_failed = True
self.log.info("--------------------------------------------------"
"--------------------------------------------------")
if testcase_failed:
self.fail(msg)
def test_xattr_support(self):
"""
Tests if setting includeXAttrs in index definition
breaks anything
:return: Nothing
"""
self.load_data()
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
index = self._cb_cluster.create_fts_index(
name='default_index',
source_name='default',
collection_index=collection_index,
_type=type,
source_params={"includeXAttrs": True},
scope=index_scope,
collections=index_collections)
self.is_index_partitioned_balanced(index)
self.wait_for_indexing_complete()
if self._update or self._delete:
self.async_perform_update_delete(self.upd_del_fields)
if self._update:
self.sleep(60, "Waiting for updates to get indexed...")
self.wait_for_indexing_complete()
self.generate_random_queries(index, self.num_queries, self.query_types)
self.run_query_and_compare(index)
def test_ssl(self):
"""
Tests if we are able to create an index and query over ssl port
:return: Nothing
"""
fts_ssl_port = 18094
import json, subprocess
if self.container_type == 'bucket':
idx = {"sourceName": "default",
"sourceType": "couchbase",
"type": "fulltext-index"}
else:
idx = {'type': 'fulltext-index',
'params': {'mapping': {'default_mapping': {'properties': {}, 'dynamic': False, 'enabled': False},
'types': {self.scope+'.'+self.collection: {'default_analyzer': 'standard', 'dynamic': True, 'enabled': True}}
},
'doc_config': {'mode': 'scope.collection.type_field', 'type_field': 'type'}},
'sourceType': 'couchbase', 'sourceName': 'default'}
qry = {"indexName": "default_index_1",
"query": {"field": "type", "match": "emp"},
"size": 10000000}
self.load_data()
cert = RestConnection(self._master).get_cluster_ceritificate()
with open('cert.pem', 'w') as f:
    f.write(cert)
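# Note: the cluster certificate is saved to cert.pem, but the curl calls
# below pass -k (skip server certificate verification); the file appears to
# be kept only as a debugging aid.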
fts_node = self._cb_cluster.get_random_fts_node()
cmd = "curl -g -k "+\
"-XPUT -H \"Content-Type: application/json\" "+\
"-u Administrator:password "+\
"https://{0}:{1}/api/index/default_idx -d ".\
format(fts_node.ip, fts_ssl_port) +\
"\'{0}\'".format(json.dumps(idx))
self.log.info("Running command : {0}".format(cmd))
output = subprocess.check_output(cmd, shell=True)
if json.loads(output)["status"] == "ok":
query = "curl -g -k " + \
"-XPOST -H \"Content-Type: application/json\" " + \
"-u Administrator:password " + \
"https://{0}:18094/api/index/default_idx/query -d ". \
format(fts_node.ip, fts_ssl_port) + \
"\'{0}\'".format(json.dumps(qry))
self.sleep(20, "wait for indexing to complete")
output = subprocess.check_output(query, shell=True)
self.log.info("Hits: {0}".format(json.loads(output)["total_hits"]))
if int(json.loads(output)["total_hits"]) != 1000:
self.fail("Query over ssl failed!")
else:
self.fail("Index could not be created over ssl")
def test_json_types(self):
import couchbase
self.load_data()
self.create_simple_default_index()
master = self._cb_cluster.get_master_node()
dic = {}
dic['null'] = None
dic['number'] = 12345
dic['date'] = "2018-01-21T18:25:43-05:00"
dic['bool'] = True
dic['string'] = "sample string json"
dic['array'] = ['element1', 1234, True]
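# One entry per JSON type (null, number, date string, boolean, string,
# array) so the index is exercised against every primitive shape.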
try:
from couchbase.cluster import Cluster
from couchbase.cluster import PasswordAuthenticator
cluster = Cluster('couchbase://{0}'.format(master.ip))
authenticator = PasswordAuthenticator('Administrator', 'password')
cluster.authenticate(authenticator)
cb = cluster.open_bucket('default')
if self.container_type == 'bucket':
for key, value in list(dic.items()):
cb.upsert(key, value)
else:
count = 1
for key, value in list(dic.items()):
if value is None:
query = "insert into default:default." + self.scope + "." + self.collection + " (key,value) values ('key_" + str(
count) + "', {'" + str(key) + "' : '" + str(value) + "'})"
else:
if isinstance(value, str):
query = "insert into default:default."+self.scope+"."+self.collection+" (key,value) values ('key_"+str(count)+"', {'"+str(key)+"' : '"+str(value)+"'})"
else:
query = "insert into default:default."+self.scope+"."+self.collection+" (key,value) values ('key_"+str(count)+"', {'"+str(key)+"' : "+str(value)+"})"
count += 1
cb.n1ql_query(query).execute()
except Exception as e:
self.fail(e)
self.wait_for_indexing_complete()
self.validate_index_count(equal_bucket_doc_count=True)
for index in self._cb_cluster.get_indexes():
self.generate_random_queries(index, 5, self.query_types)
self.run_query_and_compare(index)
# This test validates that the score value is 0 for all docs when score=none is specified in the search query.
def test_score_none(self):
# Create bucket, create index
self.load_data()
self.wait_till_items_in_bucket_equal(items=self._num_items // 2)
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
default_query = {"match": "Safiya", "field": "name"}
query = eval(self._input.param("query", str(default_query)))
if expected_hits:
zero_results_ok = False
if isinstance(query, str):
query = json.loads(query)
try:
for index in self._cb_cluster.get_indexes():
hits, contents, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
expected_hits=expected_hits,
return_raw_hits=True,
score="none")
self.log.info("Hits: %s" % hits)
self.log.info("Content: %s" % contents)
if hits == expected_hits:
for doc in contents:
# Check if the score of the doc is 0.
if "score" in doc:
self.assertEqual(doc["score"], 0, "Score is not 0 for doc {0}".format(doc["id"]))
else:
self.fail("Score key not present in search results for doc {0}".format(doc["id"]))
else:
self.fail("No. of hits not matching expected hits. Hits = {0}, Expected Hits = {1}".format(hits,
expected_hits))
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
# This test checks the correctness of search results from queries with score=none and without score=none.
def test_result_correctness_score_none(self):
# Create bucket, create index
self.load_data()
self.wait_till_items_in_bucket_equal(items=self._num_items // 2)
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
zero_results_ok = True
expected_hits = int(self._input.param("expected_hits", 0))
default_query = {"match": "Safiya", "field": "name"}
query = eval(self._input.param("query", str(default_query)))
if expected_hits:
zero_results_ok = False
if isinstance(query, str):
query = json.loads(query)
try:
for index in self._cb_cluster.get_indexes():
hits, doc_ids_with_score_none, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
return_raw_hits=False,
score="none")
self.log.info("Hits: %s" % hits)
self.log.info("Docs: %s" % doc_ids_with_score_none)
doc_ids_with_score_none.sort()
hits, doc_ids_without_score_none, _, _ = index.execute_query(query=query,
zero_results_ok=zero_results_ok,
return_raw_hits=False)
self.log.info("Hits: %s" % hits)
self.log.info("Docs: %s" % doc_ids_without_score_none)
doc_ids_without_score_none.sort()
self.assertListEqual(doc_ids_with_score_none, doc_ids_without_score_none, "Doc Ids not equal")
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
# Tests the ASCII folding filter with different types of accented characters
def test_ascii_folding_filter(self):
# Reference for test data : http://www.jarte.com/help_new/accent_marks_diacriticals_and_special_characters.html
test_data = [
{"text": "Ápple"},
{"text": "Àpple"},
{"text": "Äpple"},
{"text": "Âpple"},
{"text": "Ãpple"},
{"text": "Åpple"},
{"text": "ápple"},
{"text": "àpple"},
{"text": "äpple"},
{"text": "âpple"},
{"text": "ãpple"},
{"text": "åpple"},
{"text": "Ðodge"},
{"text": "ðodge"},
{"text": "Élephant"},
{"text": "élephant"},
{"text": "Èlephant"},
{"text": "èlephant"},
{"text": "Ëlephant"},
{"text": "ëlephant"},
{"text": "Êlephant"},
{"text": "êlephant"},
{"text": "Íceland"},
{"text": "íceland"},
{"text": "Ìceland"},
{"text": "ìceland"},
{"text": "Ïceland"},
{"text": "ïceland"},
{"text": "Îceland"},
{"text": "îceland"},
{"text": "Órange"},
{"text": "órange"},
{"text": "Òrange"},
{"text": "òrange"},
{"text": "Örange"},
{"text": "örange"},
{"text": "Ôrange"},
{"text": "ôrange"},
{"text": "Õrange"},
{"text": "õrange"},
{"text": "Ørange"},
{"text": "ørange"},
{"text": "Únicorn"},
{"text": "únicorn"},
{"text": "Ùnicorn"},
{"text": "ùnicorn"},
{"text": "Ünicorn"},
{"text": "ünicorn"},
{"text": "Ûnicorn"},
{"text": "ûnicorn"},
{"text": "Ýellowstone"},
{"text": "ýellowstone"},
{"text": "Ÿellowstone"},
{"text": "ÿellowstone"},
{"text": "Ñocturnal"},
{"text": "ñocturnal"},
{"text": "Çelcius"},
{"text": "çelcius"},
{"text": "Œlcius"},
{"text": "œlcius"},
{"text": "Šmall"},
{"text": "šmall"},
{"text": "Žebra"},
{"text": "žebra"},
{"text": "Æsthetic"},
{"text": "æsthetic"},
{"text": "Þhonetic"},
{"text": "þhonetic"},
{"text": "Discuß"},
{"text": "ÆꜴ"}
]
search_terms = [
{"term": "apple", "expected_hits": 6},
{"term": "Apple", "expected_hits": 6},
{"term": "dodge", "expected_hits": 1},
{"term": "Dodge", "expected_hits": 1},
{"term": "Elephant", "expected_hits": 4},
{"term": "elephant", "expected_hits": 4},
{"term": "iceland", "expected_hits": 4},
{"term": "Iceland", "expected_hits": 4},
{"term": "orange", "expected_hits": 6},
{"term": "Orange", "expected_hits": 6},
{"term": "unicorn", "expected_hits": 4},
{"term": "Unicorn", "expected_hits": 4},
{"term": "yellowstone", "expected_hits": 2},
{"term": "Yellowstone", "expected_hits": 2},
{"term": "nocturnal", "expected_hits": 1},
{"term": "Nocturnal", "expected_hits": 1},
{"term": "celcius", "expected_hits": 1},
{"term": "Celcius", "expected_hits": 1},
{"term": "oelcius", "expected_hits": 1},
{"term": "OElcius", "expected_hits": 1},
{"term": "small", "expected_hits": 1},
{"term": "Small", "expected_hits": 1},
{"term": "zebra", "expected_hits": 1},
{"term": "Zebra", "expected_hits": 1},
{"term": "aesthetic", "expected_hits": 1},
{"term": "AEsthetic", "expected_hits": 1},
{"term": "thhonetic", "expected_hits": 1},
{"term": "THhonetic", "expected_hits": 1},
{"term": "Discuss", "expected_hits": 1},
{"term": "AEAO", "expected_hits": 1}
]
self.create_test_dataset(self._master, test_data)
self.wait_till_items_in_bucket_equal(items=len(test_data))
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
# Update index to have the child field "text"
index.add_child_field_to_default_mapping("text", "text")
index.index_definition['uuid'] = index.get_uuid()
index.update()
# Update index to have a custom analyzer which uses the ascii folding filter as a char filter
index.index_definition["params"]["mapping"]["analysis"] = {}
index.index_definition["params"]["mapping"]["analysis"] = json.loads(
"{\"analyzers\": {\"asciiff\": {\"char_filters\": [\"asciifolding\"],\"tokenizer\": \"letter\",\"type\": \"custom\" }}}")
index.index_definition["params"]["mapping"]["default_analyzer"] = "asciiff"
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.wait_for_indexing_complete()
# Run queries
try:
for index in self._cb_cluster.get_indexes():
all_queries_passed = True
failed_search_terms = []
for search_term in search_terms:
self.log.info("=============== Querying for term {0} ===============".format(search_term["term"]))
query = {'match': search_term["term"], 'field': 'text'}
expected_hits = search_term["expected_hits"]
hits, contents, _, _ = index.execute_query(query=query,
zero_results_ok=True,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Content: %s" % contents)
if hits != expected_hits:
all_queries_passed = False
failed_search_terms.append(search_term["term"])
self.assertTrue(all_queries_passed,
"Some search terms did not return the expected hits. "
"Failed terms: {0}".format(failed_search_terms))
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_snowball_stemmer_token_filter(self):
# Test data: inflected word forms per language for snowball stemming
all_test_data = {
"generic": [
{"text": "This is something else 1"},
{"text": "This is something else 2"},
{"text": "This is other indtadfgadad"},
{"text": "This is not that"}
],
"test_hu_data": [
{"text": "This is babakocsi"},
{"text": "This is babakocsijáért"},
{"text": "This is babakocsit"},
{"text": "This is babakocsiért"}
],
"test_da_data": [
{"text": "This is indtage"},
{"text": "This is indtagelse"},
{"text": "This is indtager"},
{"text": "This is indtages"},
{"text": "This is indtaget"}
],
"test_fr_data": [
{"text": "This is continu"},
{"text": "This is continua"},
{"text": "This is continuait"},
{"text": "This is continuant"},
{"text": "This is continuation"}
],
"test_en_data": [
{"text": "This is enjoying"},
{"text": "This is enjoys"},
{"text": "This is enjoy"},
{"text": "This is enjoyed"},
{"text": "This is enjoyments"}
],
"test_it_data": [
{"text": "This is abbandonata"},
{"text": "This is abbandonate"},
{"text": "This is abbandonati"},
{"text": "This is abbandonato"},
{"text": "This is abbandonava"}
],
"test_es_data": [
{"text": "This is torá"},
{"text": "This is toreado"},
{"text": "This is toreándolo"},
{"text": "This is toreara"},
{"text": "This is torear"}
],
"test_de_data": [
{"text": "This is aufeinanderfolge"},
{"text": "This is aufeinanderfolgen"},
{"text": "This is aufeinanderfolgend"},
{"text": "This is aufeinanderfolgende"},
{"text": "This is aufeinanderfolgenden"}
]
}
all_search_terms = {
"search_hu_terms": [
{"term": "babakocs", "expected_hits": 4}
],
"search_da_terms": [
{"term": "indtag", "expected_hits": 5}
],
"search_fr_terms": [
{"term": "continu", "expected_hits": 5}
],
"search_en_terms": [
{"term": "enjoy", "expected_hits": 5}
],
"search_it_terms": [
{"term": "abbandon", "expected_hits": 5}
],
"search_es_terms": [
{"term": "tor", "expected_hits": 5}
],
"search_de_terms": [
{"term": "aufeinanderfolg", "expected_hits": 5}
]
}
test_data = all_test_data[self._input.param("test_data", "test_da_data")] + all_test_data["generic"]
search_terms = all_search_terms[self._input.param("search_terms", "search_da_terms")]
token_filter = self._input.param("token_filter", "stemmer_da_snowball")
self.create_test_dataset(self._master, test_data)
self.wait_till_items_in_bucket_equal(items=len(test_data))
index = self.create_index(
self._cb_cluster.get_bucket_by_name('default'),
"default_index")
self.wait_for_indexing_complete()
# Update index to have the child field "text"
index.add_child_field_to_default_mapping("text", "text")
index.index_definition['uuid'] = index.get_uuid()
index.update()
# Update index to have a custom analyzer which uses the snowball stemmer as a token filter
index.index_definition["params"]["mapping"]["analysis"] = {}
index.index_definition["params"]["mapping"]["analysis"] = json.loads(
"{\"analyzers\": {\"customAnalyzer1\": {\"token_filters\": [\"" + token_filter + "\"],\"tokenizer\": \"whitespace\",\"type\": \"custom\" }}}")
index.index_definition["params"]["mapping"]["default_analyzer"] = "customAnalyzer1"
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.wait_for_indexing_complete()
# Run queries
try:
for index in self._cb_cluster.get_indexes():
all_queries_passed = True
failed_search_terms = []
for search_term in search_terms:
self.log.info("=============== Querying for term {0} ===============".format(search_term["term"]))
query = {'match': search_term["term"], 'field': 'text'}
expected_hits = search_term["expected_hits"]
hits, contents, _, _ = index.execute_query(query=query,
zero_results_ok=True,
return_raw_hits=True)
self.log.info("Hits: %s" % hits)
self.log.info("Content: %s" % contents)
if hits != expected_hits:
all_queries_passed = False
failed_search_terms.append(search_term["term"])
self.assertTrue(all_queries_passed,
"Some search terms did not return the expected hits. "
"Failed terms: {0}".format(failed_search_terms))
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_drop_index_container(self):
rest = RestConnection(self._cb_cluster.get_random_fts_node())
drop_container = self._input.param("drop_container")
drop_name = self._input.param("drop_name")
plan_params = self.construct_plan_params()
self.load_data(generator=None)
self.wait_till_items_in_bucket_equal(self._num_items//2)
self.create_fts_indexes_all_buckets(plan_params=plan_params)
self.wait_for_indexing_complete()
self.validate_index_count(equal_bucket_doc_count=True)
if drop_container == 'collection':
self.cli_client.delete_collection(scope=self.scope, collection=drop_name)
elif drop_container == 'scope':
self.cli_client.delete_scope(scope=drop_name)
else:
self._cb_cluster.delete_bucket("default")
self.sleep(20)
for idx in self._cb_cluster.get_indexes():
status, dfn = rest.get_fts_index_definition(idx.name)
self.assertEqual(status, False, "FTS index was not dropped after kv container drop.")
self._cb_cluster.get_indexes().remove(idx)
def test_drop_busy_index_container_building(self):
rest = RestConnection(self._cb_cluster.get_random_fts_node())
drop_container = self._input.param("drop_container")
drop_name = self._input.param("drop_name")
plan_params = self.construct_plan_params()
self.load_data(generator=None)
self.create_fts_indexes_all_buckets(plan_params=plan_params)
if drop_container == 'collection':
self.cli_client.delete_collection(scope=self.scope, collection=drop_name)
elif drop_container == 'scope':
self.cli_client.delete_scope(scope=drop_name)
else:
self._cb_cluster.delete_bucket("default")
self.sleep(20)
for idx in self._cb_cluster.get_indexes():
status, dfn = rest.get_fts_index_definition(idx.name)
self.assertEqual(status, False, "FTS index was not dropped during index build.")
self._cb_cluster.get_indexes().remove(idx)
def test_drop_busy_index_container_scan(self):
rest = RestConnection(self._cb_cluster.get_random_fts_node())
drop_container = self._input.param("drop_container")
drop_name = self._input.param("drop_name")
plan_params = self.construct_plan_params()
self.load_data(generator=None)
self.create_fts_indexes_all_buckets(plan_params=plan_params)
self.wait_for_indexing_complete()
index = self._cb_cluster.get_indexes()[0]
query = self._input.param("query")
import threading
query_thread = threading.Thread(target=self._index_query_wrapper, args=(index, query))
drop_thread = threading.Thread(target=self._drop_container_wrapper, args=(drop_container, drop_name))
query_thread.daemon = True
drop_thread.daemon = True
query_thread.start()
drop_thread.start()
query_thread.join()
drop_thread.join()
self.sleep(30, "wait for index drop")
for idx in self._cb_cluster.get_indexes():
status, dfn = rest.get_fts_index_definition(idx.name)
self.assertEqual(status, False, "FTS index was not dropped during index scan.")
self._cb_cluster.get_indexes().remove(idx)
def test_drop_busy_index_container_mutations(self):
rest = RestConnection(self._cb_cluster.get_random_fts_node())
drop_container = self._input.param("drop_container")
drop_name = self._input.param("drop_name")
plan_params = self.construct_plan_params()
self.load_data(generator=None)
self.create_fts_indexes_all_buckets(plan_params=plan_params)
self.wait_for_indexing_complete()
self._cb_cluster.run_n1ql_query(query=f"create primary index on default:default.{self.scope}.{self.collection}")
index = self._cb_cluster.get_indexes()[0]
query = f"update default:default.{self.scope}.{self.collection} set email='mutated@gmail.com'"
import threading
query_thread = threading.Thread(target=self._n1ql_query_wrapper, args=[query])
drop_thread = threading.Thread(target=self._drop_container_wrapper, args=(drop_container, drop_name))
query_thread.daemon = True
drop_thread.daemon = True
query_thread.start()
drop_thread.start()
query_thread.join()
drop_thread.join()
self.sleep(30, "wait for index drop")
for idx in self._cb_cluster.get_indexes():
status, dfn = rest.get_fts_index_definition(idx.name)
self.assertEqual(status, False, "FTS index was not dropped during data mutations.")
self._cb_cluster.get_indexes().remove(idx)
def test_concurrent_drop_index_and_container(self):
rest = RestConnection(self._cb_cluster.get_random_fts_node())
drop_container = self._input.param("drop_container")
drop_name = self._input.param("drop_name")
plan_params = self.construct_plan_params()
self.load_data(generator=None)
self.create_fts_indexes_all_buckets(plan_params=plan_params)
self.wait_for_indexing_complete()
index = self._cb_cluster.get_indexes()[0]
import threading
query_thread = threading.Thread(target=self._drop_index_wrapper, args=[index])
drop_thread = threading.Thread(target=self._drop_container_wrapper, args=(drop_container, drop_name))
query_thread.daemon = True
drop_thread.daemon = True
query_thread.start()
drop_thread.start()
query_thread.join()
drop_thread.join()
for idx in self._cb_cluster.get_indexes():
status, dfn = rest.get_fts_index_definition(idx.name)
self.assertEqual(status, False, "FTS index was not dropped during concurrent index and container drop.")
self._cb_cluster.get_indexes().remove(idx)
def _drop_index_wrapper(self, index):
index.delete()
def _index_query_wrapper(self, index, query):
index.execute_query(query)
def _drop_container_wrapper(self, drop_container, drop_name):
if drop_container == 'collection':
self.cli_client.delete_collection(scope=self.scope, collection=drop_name)
elif drop_container == 'scope':
self.cli_client.delete_scope(scope=drop_name)
else:
self._cb_cluster.delete_bucket("default")
def _n1ql_query_wrapper(self, n1ql_query):
self._cb_cluster.run_n1ql_query(query=n1ql_query)
def test_create_drop_index(self):
self.load_data(generator=None)
self.wait_till_items_in_bucket_equal(self._num_items//2)
plan_params = self.construct_plan_params()
self.create_fts_indexes_all_buckets(plan_params=plan_params)
self.wait_for_indexing_complete()
self.validate_index_count(equal_bucket_doc_count=True)
for idx in self._cb_cluster.get_indexes():
idx.delete()
self.assertEqual(len(self._cb_cluster.get_indexes()), 0, "FTS index cannot be deleted.")
def test_create_index_multiple_scopes_negative(self):
collection_index, type, index_scope, index_collections = self.define_index_parameters_collection_related()
for bucket in self._cb_cluster.get_buckets():
try:
self.create_index(bucket, "fts_idx", index_params=None,
plan_params=None, collection_index=collection_index, _type=type, analyzer="standard",
scope=index_scope, collections=index_collections)
self.assertTrue(False, "Successfully created FTS index for collections from different buckets!")
except Exception as ex:
self.assertTrue(str(ex).find("Error creating index")>=0 and
str(ex).find("multiple scopes found")>=0 and
str(ex).find("index can only span collections on a single scope")>=0,
"Non-expected error message is found while trying to create FTS index for more than one scope.")
def test_create_index_missed_container_negative(self):
missed_collection = 'missed_collection'
missed_scope = 'missed_scope'
_types = [f"{self.scope}.{missed_collection}",
f"_default.{missed_collection}",
f"{missed_scope}._default",
f"{missed_scope}.{self.collection}",
f"{missed_scope}.{missed_collection}"]
self.load_data()
collection_index = True
for _type in _types:
try:
index = self._cb_cluster.create_fts_index(
name='default_index',
source_name='default',
collection_index=collection_index,
_type=_type,
source_params={"includeXAttrs": True})
self.fail(f"FTS index is successfully created basing on non-existent kv container: {_type}")
except Exception as e:
self.log.info("Expected exception happened during fts index creation on missed kv container.")
def prepare_for_score_none_fuzzy(self):
fuzzy_dataset_size = self._input.param("fuzzy_dataset_size", "small")
index_params = {}
data_json = ""
dataset = ""
if "small" in fuzzy_dataset_size:
data_json = "fuzzy_small_dataset.json"
dataset = FUZZY_FTS_SMALL_DATASET
index_params = INDEX_DEFAULTS.FUZZY_SMALL_INDEX_MAPPING
self.query = QUERY.FUZZY_SMALL_INDEX_QUERY
if "large" in fuzzy_dataset_size:
data_json = "fuzzy_large_dataset.json"
dataset = FUZZY_FTS_LARGE_DATASET
index_params = INDEX_DEFAULTS.FUZZY_LARGE_INDEX_MAPPING
self.query = QUERY.FUZZY_LARGE_INDEX_QUERY
download_from_s3(dataset, "/tmp/" + data_json)
self.cbimport_data(data_json_path="/tmp/" + data_json, server=self._cb_cluster.get_master_node())
self.sleep(10)
self.fts_index = self._cb_cluster.create_fts_index(name="fuzzy_index",
source_name="default",
index_params=index_params)
self.wait_for_indexing_complete()
def test_score_none_fuzzy(self):
self.prepare_for_score_none_fuzzy()
expected_hits = 0
try:
for index in self._cb_cluster.get_indexes():
expected_hits, contents, _, _ = index.execute_query(query=self.query,
zero_results_ok=False,
return_raw_hits=True)
self.log.info("Hits: %s" % expected_hits)
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
try:
for index in self._cb_cluster.get_indexes():
hits, contents, _, _ = index.execute_query(query=self.query,
zero_results_ok=False,
expected_hits=expected_hits,
return_raw_hits=True,
score="none")
self.log.info("Hits: %s" % hits)
if hits == expected_hits:
for doc in contents:
# Check if the score of the doc is 0.
if "score" in doc:
self.assertEqual(doc["score"], 0, "Score is not 0 for doc {0}".format(doc["id"]))
else:
self.fail("Score key not present in search results for doc {0}".format(doc["id"]))
else:
self.fail("No. of hits not matching expected hits. Hits = {0}, Expected Hits = {1}".format(hits,
expected_hits))
except Exception as err:
self.log.error(err)
self.fail("Testcase failed: " + str(err))
def test_mem_utilization_score_none_fuzzy(self):
self.prepare_for_score_none_fuzzy()
self.sleep(60, "Waiting for 1 min before before collecting mem_usage")
fts_nodes_mem_usage = self.get_fts_ram_used()
self.log.info("fts_nodes_mem_usage: {0}".format(fts_nodes_mem_usage))
self.fts_index.fts_queries.append(self.query)
self.start_task_managers(10)
for count in range(2000):
for task_manager in self.task_managers:
task_manager.schedule(ESRunQueryCompare(self.fts_index,
self.es,
query_index=0))
self.sleep(600)
self.shutdown_task_managers()
self.sleep(60, "Waiting for 1 min before checking cpu utilization/mem_high again")
mem_high = False
for node in fts_nodes_mem_usage:
mem_high = mem_high or self.check_if_fts_ram_usage_high(node["nodeip"], 2.0*float(node["mem_usage"]))
if mem_high:
self.fail("CPU utilization or memory usage found to be high")
def test_create_index_same_name_same_scope_negative(self):
scope_name = self._input.param("scope", "_default")
#delete unnecessary bucket
self._cb_cluster.delete_bucket("default")
#create bucket
bucket_size = 200
bucket_priority = None
bucket_type = TestInputSingleton.input.param("bucket_type", "membase")
maxttl = TestInputSingleton.input.param("maxttl", None)
self._cb_cluster.create_default_bucket(
bucket_size,
self._num_replicas,
eviction_policy='valueOnly',
bucket_priority=bucket_priority,
bucket_type=bucket_type,
maxttl=maxttl,
bucket_storage='couchstore',
bucket_name='bucket1')
#create scope
self.cli_client.create_scope(bucket='bucket1', scope=scope_name)
#create collections with same name
self.cli_client.create_collection(bucket='bucket1', scope=scope_name, collection='collection1')
self.cli_client.create_collection(bucket='bucket1', scope=scope_name, collection='collection2')
# create 2 indexes
index1 = self.create_index(self._cb_cluster.get_bucket_by_name('bucket1'),
"index1", collection_index=True, _type=f"{scope_name}.collection1",
scope=scope_name, collections=["collection1"])
try:
index2 = self.create_index(self._cb_cluster.get_bucket_by_name('bucket1'),
"index1", collection_index=True, _type=f"{scope_name}.collection2",
scope=scope_name, collections=["collection2"], no_check=True)
except Exception as e:
self.log.info("Exceptin caught ::"+str(e)+"::")
return
self.fail(f"Successfully created 2 indexes with same name in scope {scope_name}")
def test_create_index_same_name_diff_scope_negative(self):
#delete unnecessary bucket
self._cb_cluster.delete_bucket("default")
#create bucket
bucket_size = 200
bucket_priority = None
bucket_type = TestInputSingleton.input.param("bucket_type", "membase")
maxttl = TestInputSingleton.input.param("maxttl", None)
self._cb_cluster.create_default_bucket(
bucket_size,
self._num_replicas,
eviction_policy='valueOnly',
bucket_priority=bucket_priority,
bucket_type=bucket_type,
maxttl=maxttl,
bucket_storage='couchstore',
bucket_name='bucket1')
#create scopes
self.cli_client.create_scope(bucket='bucket1', scope="scope1")
self.cli_client.create_scope(bucket='bucket1', scope="scope2")
#create collections with same name
self.cli_client.create_collection(bucket='bucket1', scope="scope1", collection='collection1')
self.cli_client.create_collection(bucket='bucket1', scope="scope2", collection='collection1')
# create 2 indexes
index1 = self.create_index(self._cb_cluster.get_bucket_by_name('bucket1'),
"index1", collection_index=True, _type="scope1.collection1",
scope="scope1", collections=["collection1"])
try:
index2 = self.create_index(self._cb_cluster.get_bucket_by_name('bucket1'),
"index1", collection_index=True, _type="scope2.collection1",
scope="scope2", collections=["collection1"], no_check=True)
except Exception as e:
self.log.info("Exception caught ::" + str(e) + "::")
return
self.fail("Failed creating 2 indexes with same name in different scopes ")
test_data = {
"doc_1": {
"num": 1,
"str": "str_1",
"bool": True,
"array": ["array1_1", "array1_2"],
"obj": {"key": "key1", "val": "val1"},
"filler": "filler"
},
"doc_2": {
"num": 2,
"str": "str_2",
"bool": False,
"array": ["array2_1", "array2_2"],
"obj": {"key": "key2", "val": "val2"},
"filler": "filler"
},
"doc_3": {
"num": 3,
"str": "str_3",
"bool": True,
"array": ["array3_1", "array3_2"],
"obj": {"key": "key3", "val": "val3"},
"filler": "filler"
},
"doc_4": {
"num": 4,
"str": "str_4",
"bool": False,
"array": ["array4_1", "array4_2"],
"obj": {"key": "key4", "val": "val4"},
"filler": "filler"
},
"doc_5": {
"num": 5,
"str": "str_5",
"bool": True,
"array": ["array5_1", "array5_2"],
"obj": {"key": "key5", "val": "val5"},
"filler": "filler"
},
"doc_10": {
"num": 10,
"str": "str_10",
"bool": False,
"array": ["array10_1", "array10_2"],
"obj": {"key": "key10", "val": "val10"},
"filler": "filler"
},
}
def test_search_before(self):
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
self.wait_for_indexing_complete(len(self.test_data))
full_size = len(self.test_data)
partial_size = TestInputSingleton.input.param("partial_size", 1)
partial_start_index = TestInputSingleton.input.param("partial_start_index", 3)
sort_mode = eval(TestInputSingleton.input.param("sort_mode", '[_id]'))
cluster = index.get_cluster()
all_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": full_size, "sort": sort_mode}
all_hits, all_matches, _, _ = cluster.run_fts_query(index.name, all_fts_query)
search_before_param = all_matches[partial_start_index]['sort']
for i in range(0, len(search_before_param)):
if search_before_param[i] == "_score":
search_before_param[i] = str(all_matches[partial_start_index]['score'])
search_before_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": partial_size, "sort": sort_mode, "search_before": search_before_param}
_, search_before_matches, _, _ = cluster.run_fts_query(index.name, search_before_fts_query)
all_results_ids = []
search_before_results_ids = []
for match in all_matches:
all_results_ids.append(match['id'])
for match in search_before_matches:
search_before_results_ids.append(match['id'])
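# search_before returns the `size` hits that sort immediately before the
# supplied sort key, so result i is expected to line up with
# all_results_ids[partial_start_index - partial_size + i] (checked below).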
for i in range(0, partial_size-1):
if i in range(0, len(search_before_results_ids) - 1):
if search_before_results_ids[i] != all_results_ids[partial_start_index-partial_size+i]:
self.fail("test is failed")
def test_search_after(self):
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
self.wait_for_indexing_complete(len(self.test_data))
full_size = len(self.test_data)
partial_size = TestInputSingleton.input.param("partial_size", 1)
partial_start_index = TestInputSingleton.input.param("partial_start_index", 3)
sort_mode = eval(TestInputSingleton.input.param("sort_mode", '[_id]'))
cluster = index.get_cluster()
all_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": full_size, "sort": sort_mode}
all_hits, all_matches, _, _ = cluster.run_fts_query(index.name, all_fts_query)
search_before_param = all_matches[partial_start_index]['sort']
for i in range(0, len(search_before_param)):
if search_before_param[i] == "_score":
search_before_param[i] = str(all_matches[partial_start_index]['score'])
search_before_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": partial_size, "sort": sort_mode, "search_after": search_before_param}
_, search_before_matches, _, _ = cluster.run_fts_query(index.name, search_before_fts_query)
all_results_ids = []
search_before_results_ids = []
for match in all_matches:
all_results_ids.append(match['id'])
for match in search_before_matches:
search_before_results_ids.append(match['id'])
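# search_after is the mirror image: the hits that sort immediately after
# the supplied key, so result i should equal
# all_results_ids[partial_start_index + 1 + i] (checked below).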
for i in range(0, partial_size-1):
if i in range(0, len(search_before_results_ids)-1):
if search_before_results_ids[i] != all_results_ids[partial_start_index+1+i]:
self.fail("test is failed")
def test_search_before_multi_fields(self):
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
self.wait_for_indexing_complete(len(self.test_data))
full_size = len(self.test_data)
partial_size = TestInputSingleton.input.param("partial_size", 1)
partial_start_index = TestInputSingleton.input.param("partial_start_index", 3)
sort_modes = ['str', 'num', 'bool', 'array', '_id', '_score']
sort_mode = []
fails = []
cluster = index.get_cluster()
for i in range(0, len(sort_modes)):
for j in range(0, len(sort_modes)):
sort_mode.clear()
if sort_modes[i] == sort_modes[j]:
continue
sort_mode.append(sort_modes[i])
sort_mode.append(sort_modes[j])
all_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": full_size, "sort": sort_mode}
all_hits, all_matches, _, _ = cluster.run_fts_query(index.name, all_fts_query)
search_before_param = all_matches[partial_start_index]['sort']
for i in range(0, len(search_before_param)):
if search_before_param[i] == "_score":
search_before_param[i] = str(all_matches[partial_start_index]['score'])
search_before_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": partial_size, "sort": sort_mode, "search_before": search_before_param}
_, search_before_matches, _, _ = cluster.run_fts_query(index.name, search_before_fts_query)
all_results_ids = []
search_before_results_ids = []
for match in all_matches:
all_results_ids.append(match['id'])
for match in search_before_matches:
search_before_results_ids.append(match['id'])
for i in range(0, partial_size-1):
if search_before_results_ids[i] != all_results_ids[full_size-partial_start_index-partial_size+i]:
fails.append(str(sort_mode))
self.assertEqual(len(fails), 0, "Tests failed for the following sort modes: " + str(fails))
def test_search_after_multi_fields(self):
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
self.wait_for_indexing_complete(len(self.test_data))
full_size = len(self.test_data)
partial_size = TestInputSingleton.input.param("partial_size", 1)
partial_start_index = TestInputSingleton.input.param("partial_start_index", 3)
sort_modes = ['str', 'num', 'bool', 'array', '_id', '_score']
sort_mode = []
fails = []
cluster = index.get_cluster()
for i in range(0, len(sort_modes)):
for j in range(0, len(sort_modes)):
sort_mode.clear()
if sort_modes[i] == sort_modes[j]:
continue
sort_mode.append(sort_modes[i])
sort_mode.append(sort_modes[j])
all_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": full_size, "sort": sort_mode}
all_hits, all_matches, _, _ = cluster.run_fts_query(index.name, all_fts_query)
search_before_param = all_matches[partial_start_index]['sort']
for i in range(0, len(search_before_param)):
if search_before_param[i] == "_score":
search_before_param[i] = str(all_matches[partial_start_index]['score'])
search_before_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": partial_size, "sort": sort_mode, "search_after": search_before_param}
_, search_before_matches, _, _ = cluster.run_fts_query(index.name, search_before_fts_query)
all_results_ids = []
search_before_results_ids = []
for match in all_matches:
all_results_ids.append(match['id'])
for match in search_before_matches:
search_before_results_ids.append(match['id'])
for i in range(0, partial_size-1):
if search_before_results_ids[i] != all_results_ids[partial_start_index+1+i]:
fails.append(str(sort_mode))
self.assertEqual(len(fails), 0, "Tests failed for the following sort modes: " + str(fails))
def test_search_before_search_after_negative(self):
expected_error = "cannot use search after and search before together"
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
self.wait_for_indexing_complete(len(self.test_data))
cluster = index.get_cluster()
search_before_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"query": "filler:filler"},"size": 2, "search_before": ["doc_2"], "search_after": ["doc_4"], "sort": ["_id"]}
response = cluster.run_fts_query_generalized(index.name, search_before_fts_query)
if 'error' in response:
self.assertTrue(expected_error in str(response['error']), "Cannot find expected error message.")
else:
self.fail("Incorrect query was executed successfully.")
query_result = None
def run_fts_query_wrapper(self, index, fts_query):
cluster = index.get_cluster()
_, self.query_result, _, _ = cluster.run_fts_query(index.name, fts_query)
def test_concurrent_search_before_query_index_build(self):
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
self.wait_for_indexing_complete(len(self.test_data))
cluster = index.get_cluster()
search_before_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": 2, "sort": ["str"], "search_before": ["str_4"]}
_, search_before_matches, _, _ = cluster.run_fts_query(index.name, search_before_fts_query)
data_filler_thread = threading.Thread(target=self._load_search_before_search_after_additional_data, args=(bucket.name, 1000 , 20))
query_executor_thread = threading.Thread(target=self.run_fts_query_wrapper, args=(index, search_before_fts_query))
data_filler_thread.daemon = True
data_filler_thread.start()
self.sleep(10)
query_executor_thread.daemon = True
query_executor_thread.start()
data_filler_thread.join()
query_executor_thread.join()
self.log.info("idle results ::"+str(search_before_matches)+"::")
self.log.info("busy results ::"+str(self.query_result)+"::")
idle_ids = []
busy_ids = []
for match in search_before_matches:
idle_ids.append(match['id'])
for match in self.query_result:
busy_ids.append(match['id'])
self.assertEqual(idle_ids, busy_ids, "Results for idle and busy index states are different.")
def test_concurrent_search_after_query_index_build(self):
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
self.wait_for_indexing_complete(len(self.test_data))
cluster = index.get_cluster()
search_after_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": 2, "sort": ["str"], "search_after": ["str_2"]}
_, search_after_matches, _, _ = cluster.run_fts_query(index.name, search_after_fts_query)
data_filler_thread = threading.Thread(target=self._load_search_before_search_after_additional_data, args=(bucket.name, 1000 , 20))
query_executor_thread = threading.Thread(target=self.run_fts_query_wrapper, args=(index, search_after_fts_query))
data_filler_thread.daemon = True
data_filler_thread.start()
self.sleep(10)
query_executor_thread.daemon = True
query_executor_thread.start()
data_filler_thread.join()
query_executor_thread.join()
idle_ids = []
busy_ids = []
for match in search_after_matches:
idle_ids.append(match['id'])
for match in self.query_result:
busy_ids.append(match['id'])
self.assertEqual(idle_ids, busy_ids, "Results for idle and busy index states are different.")
def test_search_before_after_n1ql_function(self):
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
direction = TestInputSingleton.input.param("direction", "search_before")
self.wait_for_indexing_complete(len(self.test_data))
cluster = index.get_cluster()
search_before_after_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"},"size": 2, "sort": ["_id"], direction: ["doc_2"]}
_, fts_matches, _, _ = cluster.run_fts_query(index.name, search_before_after_fts_query)
n1ql_query = 'select meta().id from '+bucket.name+' where search('+bucket.name+', {"explain": false, "fields": ["*"], "highlight": {}, "query": {"match": "filler", "field": "filler"}, "size": 2, "sort": ["_id"], "'+direction+'": ["doc_2"]})'
n1ql_results = self._cb_cluster.run_n1ql_query(query=n1ql_query)['results']
fts_ids = []
n1ql_ids = []
for match in fts_matches:
fts_ids.append(match['id'])
for match in n1ql_results:
n1ql_ids.append(match['id'])
self.assertEqual(fts_ids, n1ql_ids, "Results for fts and n1ql queries are different.")
def test_search_before_not_indexed_field(self):
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
self.wait_for_indexing_complete(len(self.test_data))
full_size = len(self.test_data)
partial_size = 2
partial_start_index = 3
index.index_definition['params']['doc_config'] = {}
doc_config = {}
doc_config['mode'] = 'type_field'
doc_config['type_field'] = 'filler'
index.index_definition['params']['doc_config'] = doc_config
index.add_type_mapping_to_index_definition(type="filler",
analyzer="standard")
index.index_definition['params']['mapping'] = {
"default_analyzer": "standard",
"default_datetime_parser": "dateTimeOptional",
"default_field": "_all",
"default_mapping": {
"dynamic": False,
"enabled": False
},
"default_type": "_default",
"docvalues_dynamic": True,
"index_dynamic": True,
"store_dynamic": False,
"type_field": "_type",
"types": {
"filler": {
"default_analyzer": "standard",
"dynamic": False,
"enabled": True,
"properties": {
"num": {
"enabled": True,
"dynamic": False,
"fields": [
{
"docvalues": True,
"include_term_vectors": True,
"index": True,
"name": "num",
"type": "number"
}
]
}
}
}
}
}
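# Only the "num" field is indexed by this mapping; "str" is deliberately
# left unindexed so the queries below sort (and page with search_before)
# on a non-indexed field.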
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.wait_for_indexing_complete(len(self.test_data))
self.sleep(5)
cluster = index.get_cluster()
all_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"query": "num: >0"},"size": full_size, "sort": ["str"]}
all_hits, all_matches, _, _ = cluster.run_fts_query(index.name, all_fts_query)
search_before_param = all_matches[partial_start_index]['sort']
for i in range(0, len(search_before_param)):
if search_before_param[i] == "_score":
search_before_param[i] = str(all_matches[partial_start_index]['score'])
search_before_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"query": "num: >0"},"size": partial_size, "sort": ["str"], "search_before": search_before_param}
total_hits, hit_list, time_taken, status = cluster.run_fts_query(index.name, search_before_fts_query)
self.assertEqual(total_hits, full_size, "search_before query for non-indexed field is failed.")
def test_search_after_not_indexed_field(self):
bucket = self._cb_cluster.get_bucket_by_name('default')
self._load_search_before_search_after_test_data(bucket.name, self.test_data)
index = self.create_index(bucket, "idx1")
self.wait_for_indexing_complete(len(self.test_data))
full_size = len(self.test_data)
partial_size = 2
partial_start_index = 3
index.index_definition['params']['doc_config'] = {}
doc_config = {}
doc_config['mode'] = 'type_field'
doc_config['type_field'] = 'filler'
index.index_definition['params']['doc_config'] = doc_config
index.add_type_mapping_to_index_definition(type="filler",
analyzer="standard")
index.index_definition['params']['mapping'] = {
"default_analyzer": "standard",
"default_datetime_parser": "dateTimeOptional",
"default_field": "_all",
"default_mapping": {
"dynamic": False,
"enabled": False
},
"default_type": "_default",
"docvalues_dynamic": True,
"index_dynamic": True,
"store_dynamic": False,
"type_field": "_type",
"types": {
"filler": {
"default_analyzer": "standard",
"dynamic": False,
"enabled": True,
"properties": {
"num": {
"enabled": True,
"dynamic": False,
"fields": [
{
"docvalues": True,
"include_term_vectors": True,
"index": True,
"name": "num",
"type": "number"
}
]
}
}
}
}
}
index.index_definition['uuid'] = index.get_uuid()
index.update()
self.wait_for_indexing_complete(len(self.test_data))
self.sleep(5)
cluster = index.get_cluster()
all_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"query": "num: >0"},"size": full_size, "sort": ["str"]}
all_hits, all_matches, _, _ = cluster.run_fts_query(index.name, all_fts_query)
search_after_param = all_matches[partial_start_index]['sort']
for i in range(0, len(search_after_param)):
if search_after_param[i] == "_score":
search_after_param[i] = str(all_matches[partial_start_index]['score'])
search_after_fts_query = {"explain": False, "fields": ["*"], "highlight": {}, "query": {"query": "num: >0"},"size": partial_size, "sort": ["str"], "search_after": search_after_param}
total_hits, hit_list, time_taken, status = cluster.run_fts_query(index.name, search_after_fts_query)
self.assertEqual(total_hits, full_size, "search_after query for non-indexed field is failed.")
def _load_search_before_search_after_test_data(self, bucket, test_data):
for key in test_data:
query = "insert into "+bucket+" (KEY, VALUE) VALUES " \
"('"+str(key)+"', " \
""+str(test_data[key])+")"
self._cb_cluster.run_n1ql_query(query=query)
def _load_search_before_search_after_additional_data(self, bucket, num_records=5000, start_index=20):
for i in range(start_index, num_records):
query = 'insert into '+bucket+' (KEY, VALUE) VALUES ' \
'("doc_'+str(i)+'", ' \
'{' \
'"num": '+str(i)+',' \
'"str": "ystr_'+str(i)+'",' \
'"bool": False,' \
'"array": ["array'+str(i)+'_1", "array'+str(i)+'_2"],' \
'"obj": {"key": "key'+str(i)+'", "val": "val'+str(i)+'"},' \
'"filler": "filler"' \
'})'
self._cb_cluster.run_n1ql_query(query=query)
def _create_oso_containers(self, bucket=None, num_scopes=1, collections_per_scope=1, docs_per_collection=10000):
load_tasks = []
for i in range(1, num_scopes+1):
scope_name = "scope_"+str(i)
self.cli_client.create_scope(bucket="default", scope=scope_name);
self.sleep(10)
for j in range(1, collections_per_scope+1):
collection_name = "collection_"+str(j)
self._cb_cluster._create_collection(bucket="default", scope=scope_name, collection=collection_name, cli_client=self.cli_client)
#load data into collections
gen_create = SDKDataLoader(num_ops=docs_per_collection, percent_create=100, percent_update=0, percent_delete=0,
load_pattern="uniform", start_seq_num=1, key_prefix="doc_", key_suffix="_",
scope=scope_name, collection=collection_name, json_template="emp", doc_expiry=0,
doc_size=500, get_sdk_logs=False, username="Administrator", password="password", timeout=1000,
start=0, end=0, op_type="create", all_collections=False, es_compare=False, es_host=None, es_port=None,
es_login=None, es_password=None)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(bucket, gen_create)
for task in load_tasks:
task.result()
def _fill_collection(self, bucket=None, scope=None, collection=None, num_docs=1000, start_seq_num=1):
gen_create = SDKDataLoader(num_ops=num_docs, percent_create=100, percent_update=0, percent_delete=0,
load_pattern="uniform", start_seq_num=start_seq_num, key_prefix="doc_", key_suffix="_",
scope=scope, collection=collection, json_template="emp", doc_expiry=0,
doc_size=500, get_sdk_logs=False, username="Administrator", password="password", timeout=1000,
start=0, end=0, op_type="create", all_collections=False, es_compare=False, es_host=None, es_port=None,
es_login=None, es_password=None)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(bucket, gen_create)
for task in load_tasks:
task.result()
def _update_collection(self, bucket=None, scope=None, collection=None, num_docs=1000, start=1):
gen_create = SDKDataLoader(num_ops=num_docs, percent_create=0, percent_update=100, percent_delete=0,
load_pattern="uniform", start_seq_num=start, key_prefix="doc_", key_suffix="_",
scope=scope, collection=collection, json_template="emp", doc_expiry=0,
doc_size=500, get_sdk_logs=False, username="Administrator", password="password", timeout=1000,
start=start, end=start+num_docs, op_type="create", all_collections=False, es_compare=False, es_host=None, es_port=None,
es_login=None, es_password=None)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(bucket, gen_create)
for task in load_tasks:
task.result()
def _delete_from_collection(self, bucket=None, scope=None, collection=None, num_docs=1000, start=1):
gen_create = SDKDataLoader(num_ops=num_docs, percent_create=0, percent_update=0, percent_delete=100,
load_pattern="uniform", start_seq_num=start, key_prefix="doc_", key_suffix="_",
scope=scope, collection=collection, json_template="emp", doc_expiry=0,
doc_size=500, get_sdk_logs=False, username="Administrator", password="password", timeout=1000,
start=start, end=start+num_docs, op_type="create", all_collections=False, es_compare=False, es_host=None, es_port=None,
es_login=None, es_password=None)
load_tasks = self._cb_cluster.async_load_bucket_from_generator(bucket, gen_create)
for task in load_tasks:
task.result()
def test_index_creation_oso(self):
rest = RestConnection(self._cb_cluster.get_random_fts_node())
rest.set_node_setting("useOSOBackfill", True)
bucket = self._cb_cluster.get_bucket_by_name('default')
num_scopes = TestInputSingleton.input.param("num_scopes", 5)
collections_per_scope = TestInputSingleton.input.param("collections_per_scope", 20)
docs_per_collection = TestInputSingleton.input.param("docs_per_collection", 10000)
self._create_oso_containers(bucket=bucket, num_scopes=num_scopes, collections_per_scope=collections_per_scope, docs_per_collection=docs_per_collection)
test_scope = "scope_"+str(num_scopes)
test_collection = "test_collection"
self._cb_cluster._create_collection(bucket="default", scope=test_scope, collection=test_collection, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=1000)
test_index = self.create_index(self._cb_cluster.get_bucket_by_name('default'),
"test_index", collection_index=True, _type=f"{test_scope}.{test_collection}",
scope=test_scope, collections=[test_collection])
self.wait_for_indexing_complete_simple(item_count=1000, index=test_index)
multi_collections = []
for i in range(1, 4):
coll_name = test_collection + "_" + str(i)
self._cb_cluster._create_collection(bucket="default", scope=test_scope, collection=coll_name, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=test_scope, collection=coll_name, num_docs=1000, start_seq_num=1000*i+1)
multi_collections.append(coll_name)
_type_multi = []
index_collections = []
for c in multi_collections:
_type_multi.append(f"{test_scope}.{c}")
index_collections.append(c)
test_index_multi = self.create_index(self._cb_cluster.get_bucket_by_name('default'),
"test_index_multi", collection_index=True, _type=_type_multi,
scope=test_scope, collections=index_collections)
self.wait_for_indexing_complete_simple(item_count=3000, index=test_index_multi)
test_query = {"match": "emp", "field": "type"}
hits_before, _, _, _ = test_index.execute_query(test_query)
hits_before_multi, _, _, _ = test_index_multi.execute_query(test_query)
additional_collections_per_scope = TestInputSingleton.input.param("additional_collections_per_scope", 2)
for i in range(1, num_scopes+1):
scope_name = "scope_" + str(i)
# create additional_collections_per_scope new collections after the
# existing ones (hedged fix: the source iterated over a two-element tuple)
for j in range(collections_per_scope + 1, collections_per_scope + additional_collections_per_scope + 1):
collection_name = "collection_" + str(j)
self._cb_cluster._create_collection(bucket="default", scope=scope_name, collection=collection_name, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=scope_name, collection=collection_name)
hits_after, _, _, _ = test_index.execute_query(test_query)
hits_after_multi, _, _, _ = test_index_multi.execute_query(test_query)
errors = []
try:
self.assertEqual(hits_before, hits_after)
except AssertionError as e:
errors.append("Hits before and after additional data load do not match for single collection index.")
try:
self.assertEqual(hits_before_multi, hits_after_multi)
except AssertionError as e:
errors.append("Hits before and after additional data load do not match for multi collection index.")
try:
self.assertEqual(len(errors), 0)
except AssertionError as ex:
for err in errors:
self.log.error(err)
self.fail()
def test_data_mutations_oso(self):
rest = RestConnection(self._cb_cluster.get_random_fts_node())
rest.set_node_setting("useOSOBackfill", True)
bucket = self._cb_cluster.get_bucket_by_name('default')
num_scopes = TestInputSingleton.input.param("num_scopes", 5)
collections_per_scope = TestInputSingleton.input.param("collections_per_scope", 20)
docs_per_collection = TestInputSingleton.input.param("docs_per_collection", 10000)
self._create_oso_containers(bucket=bucket, num_scopes=num_scopes, collections_per_scope=collections_per_scope, docs_per_collection=docs_per_collection)
test_scope = "scope_"+str(num_scopes)
test_collection = "test_collection"
self._cb_cluster._create_collection(bucket="default", scope=test_scope, collection=test_collection, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=1000)
test_index = self.create_index(self._cb_cluster.get_bucket_by_name('default'),
"test_index", collection_index=True, _type=f"{test_scope}.{test_collection}",
scope=test_scope, collections=[test_collection])
self.wait_for_indexing_complete_simple(item_count=1000, index=test_index)
multi_collections = []
for i in range(1, 4):
coll_name = test_collection + "_" + str(i)
self._cb_cluster._create_collection(bucket="default", scope=test_scope, collection=coll_name, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=test_scope, collection=coll_name, num_docs=1000, start_seq_num=1000*i+1)
multi_collections.append(coll_name)
_type_multi = []
index_collections = []
for c in multi_collections:
_type_multi.append(f"{test_scope}.{c}")
index_collections.append(c)
test_index_multi = self.create_index(self._cb_cluster.get_bucket_by_name('default'),
"test_index_multi", collection_index=True, _type=_type_multi,
scope=test_scope, collections=index_collections)
self.wait_for_indexing_complete_simple(item_count=3000, index=test_index_multi)
additional_collections_per_scope = TestInputSingleton.input.param("additional_collections_per_scope", 2)
for i in range(1, num_scopes+1):
scope_name = "scope_" + str(i)
for j in range(collections_per_scope + 1, collections_per_scope + additional_collections_per_scope + 1):
collection_name = "collection_" + str(j)
self._cb_cluster._create_collection(bucket="default", scope=scope_name, collection=collection_name, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=scope_name, collection=collection_name)
for i in range(1, num_scopes+1):
scope_name = "scope_" + str(i)
# hedged fix: range() was presumably intended over the tuple literal
for j in range(1, additional_collections_per_scope + 1):
collection_name = "collection_" + str(j)
self._update_collection(bucket=bucket, scope=scope_name, collection=collection_name)
self._update_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=1000)
for i in range(1, 4):
self._update_collection(bucket=bucket, scope=test_scope, collection=test_collection+"_"+str(i), num_docs=1000, start=1000*i+1)
self.wait_for_indexing_complete_simple(item_count=1000, index=test_index)
self.wait_for_indexing_complete_simple(item_count=3000, index=test_index_multi)
test_query = {"query": "mutated:1"}
hits, _, _, _ = test_index.execute_query(test_query)
hits_multi, _, _, _ = test_index_multi.execute_query(test_query)
errors = []
try:
self.assertEqual(hits, 1000)
except AssertionError as e:
errors.append("Full update of test collection is failed, or fts index produces wrong results for test query.")
try:
self.assertEqual(hits_multi, 3000)
except AssertionError as e:
errors.append("Full update of test collections for multi collection index is failed, or multi collections fts index produces wrong results for test query.")
try:
self.assertEqual(len(errors), 0)
except AssertionError as ex:
for err in errors:
self.log.error(err)
self.fail()
def test_doc_id_oso(self):
rest = RestConnection(self._cb_cluster.get_random_fts_node())
rest.set_node_setting("useOSOBackfill", True)
bucket = self._cb_cluster.get_bucket_by_name('default')
num_scopes = TestInputSingleton.input.param("num_scopes", 5)
collections_per_scope = TestInputSingleton.input.param("collections_per_scope", 20)
docs_per_collection = TestInputSingleton.input.param("docs_per_collection", 10000)
self._create_oso_containers(bucket=bucket, num_scopes=num_scopes, collections_per_scope=collections_per_scope, docs_per_collection=docs_per_collection)
test_scope = "scope_"+str(num_scopes)
test_collection = "test_collection"
self._cb_cluster._create_collection(bucket="default", scope=test_scope, collection=test_collection, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=1000, start_seq_num=1001)
test_index = self.create_index(self._cb_cluster.get_bucket_by_name('default'),
"test_index", collection_index=True, _type=f"{test_scope}.{test_collection}",
scope=test_scope, collections=[test_collection])
additional_collections_per_scope = TestInputSingleton.input.param("additional_collections_per_scope", 2)
for i in range(1, num_scopes+1):
scope_name = "scope_" + str(i)
for j in range(collections_per_scope + 1, collections_per_scope + additional_collections_per_scope + 1):
collection_name = "collection_" + str(j)
self._cb_cluster._create_collection(bucket="default", scope=scope_name, collection=collection_name, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=scope_name, collection=collection_name)
self._fill_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=1000, start_seq_num=1)
test_query = {"match": "emp", "field": "type"}
hits, _, _, _ = test_index.execute_query(test_query)
self.assertEqual(hits, 2000, "Test for doc_id special load order failed.")
def test_partial_rollback_oso(self):
from lib.memcached.helper.data_helper import MemcachedClientHelper
#items = 50000, update = True, upd = 30, upd_del_fields = ['dept']
rest = RestConnection(self._cb_cluster.get_random_fts_node())
rest.set_node_setting("useOSOBackfill", True)
bucket = self._cb_cluster.get_bucket_by_name("default")
self._cb_cluster.flush_buckets([bucket])
num_scopes = TestInputSingleton.input.param("num_scopes", 5)
collections_per_scope = TestInputSingleton.input.param("collections_per_scope", 20)
docs_per_collection = TestInputSingleton.input.param("docs_per_collection", 10000)
self._create_oso_containers(bucket=bucket, num_scopes=num_scopes, collections_per_scope=collections_per_scope, docs_per_collection=docs_per_collection)
test_scope = "scope_"+str(num_scopes)
test_collection = "test_collection"
self._cb_cluster._create_collection(bucket="default", scope=test_scope, collection=test_collection, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=15000)
test_index = self.create_index(self._cb_cluster.get_bucket_by_name('default'),
"test_index", collection_index=True, _type=f"{test_scope}.{test_collection}",
scope=test_scope, collections=[test_collection])
self.wait_for_indexing_complete_simple(item_count=15000, index=test_index)
multi_collections = []
for i in range(1, 4):
coll_name = test_collection + "_" + str(i)
self._cb_cluster._create_collection(bucket="default", scope=test_scope, collection=coll_name, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=test_scope, collection=coll_name, num_docs=15000)
multi_collections.append(coll_name)
_type_multi = []
index_collections = []
for c in multi_collections:
_type_multi.append(f"{test_scope}.{c}")
index_collections.append(c)
test_index_multi = self.create_index(self._cb_cluster.get_bucket_by_name('default'),
"test_index_multi", collection_index=True, _type=_type_multi,
scope=test_scope, collections=index_collections)
self.wait_for_indexing_complete_simple(item_count=45000, index=test_index_multi)
additional_collections_per_scope = TestInputSingleton.input.param("additional_collections_per_scope", 2)
for i in range(1, num_scopes+1):
scope_name = "scope_" + str(i)
for j in range(collections_per_scope + 1, collections_per_scope + additional_collections_per_scope + 1):
collection_name = "collection_" + str(j)
self._cb_cluster._create_collection(bucket="default", scope=scope_name, collection=collection_name, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=scope_name, collection=collection_name)
for i in range(1, num_scopes+1):
scope_name = "scope_" + str(i)
for j in range(1, additional_collections_per_scope + 1):
collection_name = "collection_" + str(j)
self._update_collection(bucket=bucket, scope=scope_name, collection=collection_name)
# Stop Persistence on Node A & Node B
self.log.info("Stopping persistence on {0}".
format(self._input.servers[:2]))
mem_client = MemcachedClientHelper.direct_client(self._input.servers[0],
bucket)
mem_client.stop_persistence()
mem_client = MemcachedClientHelper.direct_client(self._input.servers[1],
bucket)
mem_client.stop_persistence()
# Perform mutations on the bucket
if self._input.param("update", False):
self._update_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=300)
for i in range(1, 4):
self._update_collection(bucket=bucket, scope=test_scope, collection=test_collection + "_" + str(i),
num_docs=300, start=1000 * i + 1)
if self._input.param("delete", False):
self._delete_from_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=5000)
for i in range(1, 4):
self._delete_from_collection(bucket=bucket, scope=test_scope, collection="test_collection_"+str(i), num_docs=5000)
self.wait_for_indexing_complete_simple(item_count=900, index=test_index)
self.wait_for_indexing_complete_simple(item_count=2700, index=test_index_multi)
# Run FTS Query to fetch the initial count of mutated items
query = {"query": "mutated:>0"}
hits1_simple_index, _, _, _ = test_index.execute_query(query)
self.log.info("Hits for simple index before rollback: %s" % hits1_simple_index)
hits1_multi_index, _, _, _ = test_index_multi.execute_query(query)
self.log.info("Hits for multi index before rollback: %s" % hits1_multi_index)
# Fetch count of docs in index and bucket
before_simple_index_doc_count = test_index.get_indexed_doc_count()
before_multi_index_doc_count = test_index_multi.get_indexed_doc_count()
before_bucket_doc_count = test_index.get_src_bucket_doc_count()
self.log.info("Docs in Bucket : %s, Docs in simple Index : %s, Docs in multi Index: %s" % (
before_bucket_doc_count, before_simple_index_doc_count, before_multi_index_doc_count))
# Kill memcached on Node A
self.log.info("Killing memcached on {0}".format(self._master.ip))
shell = RemoteMachineShellConnection(self._master)
shell.kill_memcached()
# Start persistence on Node B
self.log.info("Starting persistence on {0}".
format(self._input.servers[1].ip))
mem_client = MemcachedClientHelper.direct_client(self._input.servers[1],
bucket)
mem_client.start_persistence()
# Failover Node B
failover_task = self._cb_cluster.async_failover(
node=self._input.servers[1])
failover_task.result()
# Wait for Failover & FTS index rollback to complete
self.wait_for_indexing_complete_simple(item_count=900, index=test_index)
self.wait_for_indexing_complete_simple(item_count=2700, index=test_index_multi)
# Run FTS query to fetch count of mutated items post rollback.
hits2_simple_index, _, _, _ = test_index.execute_query(query)
self.log.info("Hits for simple index after rollback: %s" % hits2_simple_index)
hits2_multi_index, _, _, _ = test_index_multi.execute_query(query)
self.log.info("Hits for multi index after rollback: %s" % hits2_multi_index)
# Fetch count of docs in index and bucket
after_simple_index_doc_count = test_index.get_indexed_doc_count()
after_multi_index_doc_count = test_index_multi.get_indexed_doc_count()
after_bucket_doc_count = test_index.get_src_bucket_doc_count()
self.log.info("Docs in Bucket : %s, Docs in simple Index : %s, Docs in multi Index: %s" % (
after_bucket_doc_count, after_simple_index_doc_count, after_multi_index_doc_count))
# Validation : If there are deletes, validate the #docs in index goes
# up post rollback
if self._input.param("delete", False):
self.assertGreater(after_simple_index_doc_count, before_simple_index_doc_count,
"Deletes : Simple index count after rollback not "
"greater than before rollback")
self.assertGreater(after_multi_index_doc_count, before_multi_index_doc_count,
"Deletes : Multi index count after rollback not "
"greater than before rollback")
else:
# For Updates, validate that #hits goes down in the query output
# post rollback
self.assertGreater(hits1_simple_index, hits2_simple_index,
"Mutated items before rollback are not more "
"than after rollback for simple index")
self.assertGreater(hits1_multi_index, hits2_multi_index,
"Mutated items before rollback are not more "
"than after rollback for multi index")
# Failover FTS node
failover_fts_node = self._input.param("failover_fts_node", False)
if failover_fts_node:
failover_task = self._cb_cluster.async_failover(
num_nodes=1)
failover_task.result()
self.sleep(10)
# Run FTS query to fetch count of mutated items post FTS node failover.
hits3_simple_index, _, _, _ = test_index.execute_query(query)
hits3_multi_index, _, _, _ = test_index_multi.execute_query(query)
self.log.info(
"Hits after rollback and failover of primary FTS node for simple index: %s" % hits3_simple_index)
self.log.info(
"Hits after rollback and failover of primary FTS node for multi index: %s" % hits3_multi_index)
self.assertEqual(hits2_simple_index, hits3_simple_index,
"Mutated items after FTS node failover are not equal to that after rollback for simple index")
self.assertEqual(hits2_multi_index, hits3_multi_index,
"Mutated items after FTS node failover are not equal to that after rollback for multi index")
def test_flush_bucket_oso(self):
rest = RestConnection(self._cb_cluster.get_random_fts_node())
rest.set_node_setting("useOSOBackfill", True)
bucket = self._cb_cluster.get_bucket_by_name('default')
num_scopes = TestInputSingleton.input.param("num_scopes", 5)
collections_per_scope = TestInputSingleton.input.param("collections_per_scope", 20)
docs_per_collection = TestInputSingleton.input.param("docs_per_collection", 10000)
self._create_oso_containers(bucket=bucket, num_scopes=num_scopes, collections_per_scope=collections_per_scope, docs_per_collection=docs_per_collection)
test_scope = "scope_"+str(num_scopes)
test_collection = "test_collection"
self._cb_cluster._create_collection(bucket="default", scope=test_scope, collection=test_collection, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=1000)
test_index = self.create_index(self._cb_cluster.get_bucket_by_name('default'),
"test_index", collection_index=True, _type=f"{test_scope}.{test_collection}",
scope=test_scope, collections=[test_collection])
self.wait_for_indexing_complete_simple(item_count=1000, index=test_index)
multi_collections = []
for i in range(1, 4):
coll_name = test_collection + "_" + str(i)
self._cb_cluster._create_collection(bucket="default", scope=test_scope, collection=coll_name, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=test_scope, collection=coll_name, num_docs=1000, start_seq_num=1000*i+1)
multi_collections.append(coll_name)
_type_multi = []
index_collections = []
for c in multi_collections:
_type_multi.append(f"{test_scope}.{c}")
index_collections.append(c)
test_index_multi = self.create_index(self._cb_cluster.get_bucket_by_name('default'),
"test_index_multi", collection_index=True, _type=_type_multi,
scope=test_scope, collections=index_collections)
self.wait_for_indexing_complete_simple(item_count=3000, index=test_index_multi)
additional_collections_per_scope = TestInputSingleton.input.param("additional_collections_per_scope", 2)
for i in range(1, num_scopes+1):
scope_name = "scope_" + str(i)
for j in range(collections_per_scope + 1, collections_per_scope + additional_collections_per_scope + 1):
collection_name = "collection_" + str(j)
self._cb_cluster._create_collection(bucket="default", scope=scope_name, collection=collection_name, cli_client=self.cli_client)
self._fill_collection(bucket=bucket, scope=scope_name, collection=collection_name)
for i in range(1, num_scopes+1):
scope_name = "scope_" + str(i)
for j in range(1, additional_collections_per_scope + 1):
collection_name = "collection_" + str(j)
self._update_collection(bucket=bucket, scope=scope_name, collection=collection_name)
self._update_collection(bucket=bucket, scope=test_scope, collection=test_collection, num_docs=1000)
for i in range(1, 4):
self._update_collection(bucket=bucket, scope=test_scope, collection=test_collection+"_"+str(i), num_docs=1000, start=1000*i+1)
self.wait_for_indexing_complete_simple(item_count=1000, index=test_index)
self.wait_for_indexing_complete_simple(item_count=3000, index=test_index_multi)
self._cb_cluster.flush_buckets([bucket])
self.sleep(60, "Waiting for the flush to complete.")
simple_index_doc_count = test_index.get_indexed_doc_count()
multi_index_doc_count = test_index_multi.get_indexed_doc_count()
bucket_doc_count = test_index.get_src_bucket_doc_count()
errors = []
try:
self.assertEqual(simple_index_doc_count, 0)
except AssertionError as e:
errors.append("Simple index contains documents after source bucket flush")
try:
self.assertEqual(multi_index_doc_count, 0)
except AssertionError as e:
errors.append("Multi index contains documents after source bucket flush")
try:
self.assertEqual(bucket_doc_count, 0)
except AssertionError as e:
errors.append("Source bucket contains documents after source bucket flush")
try:
self.assertEqual(len(errors), 0)
except AssertionError as ex:
for err in errors:
self.log.error(err)
self.fail()
| 50.069285
| 249
| 0.572478
| 22,555
| 205,234
| 4.878431
| 0.045178
| 0.018404
| 0.026465
| 0.02283
| 0.794643
| 0.769406
| 0.750529
| 0.730999
| 0.712032
| 0.693565
| 0
| 0.013677
| 0.325243
| 205,234
| 4,098
| 250
| 50.081503
| 0.780883
| 0.035915
| 0
| 0.643948
| 0
| 0.002938
| 0.127276
| 0.010253
| 0
| 0
| 0
| 0.000244
| 0.022327
| 1
| 0.032902
| false
| 0.009988
| 0.009107
| 0
| 0.043772
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
52ac2f7a93134c5d53333dca08137a98d467fe3c
| 9,995
|
py
|
Python
|
py_casim/casim.py
|
Sergeileduc/py_casim
|
a464c99c2acea003061dde3f49245a3f6a576ade
|
[
"MIT"
] | 2
|
2020-02-16T22:17:25.000Z
|
2020-02-17T19:21:10.000Z
|
py_casim/casim.py
|
Sergeileduc/py-casim
|
a464c99c2acea003061dde3f49245a3f6a576ade
|
[
"MIT"
] | 2
|
2021-03-17T08:48:51.000Z
|
2021-03-17T14:59:38.000Z
|
py_casim/casim.py
|
Sergeileduc/py-casim
|
a464c99c2acea003061dde3f49245a3f6a576ade
|
[
"MIT"
] | 1
|
2020-07-22T08:55:53.000Z
|
2020-07-22T08:55:53.000Z
|
"""Code to upload an image and get his share url."""
import logging
from pathlib import Path
import requests
from .tools import (get_all_shares, get_all_shares_loggedin, get_folder_id,
get_image_id, get_share, get_share_loggedin)
logger = logging.getLogger(__name__)
class Casim():
"""Upload image to Casimages and get share url/code."""
# CASIMAGES
_url = "https://www.casimages.com/"
_url_upload = "https://www.casimages.com/upload_ano_multi.php"
_url_casi_share = "https://www.casimages.com/codes_ano_multi.php"
_url_resize = "https://www.casimages.com/ajax/s_ano_resize.php"
#: Valid resize values for resize keyword argument
resize_values = ["100", "125", "320", "640", "800", "1024", "1280", "1600"]
_headers = {
"Accept": "application/json",
"Accept-Language": "fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3",
"Accept-Encoding": "gzip, deflate, br",
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0", # noqa: E501
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"Pragma": "no-cache",
"X-Requested-With": "XMLHttpRequest"
}
def __init__(self, image, resize=None):
"""Init Casim() object with image path, and optionnal resize value."""
self.image = image
self.resize = resize if str(resize) in self.resize_values else None
self.image_id = None
self.session = requests.Session() # Session (keep cookies)
self.session.get(Casim._url) # Init for cookies
self._set_resize()
logger.info('casim created with image: "%s" and resize: %s',
self.image, self.resize)
def __repr__(self):
return f'Casim({self.image}, resize={self.resize})'
def _set_resize(self):
if self.resize:
params = {"dim": self.resize}
self.session.get(Casim._url_resize, params=params)
logger.info('ask for resize with value %s', self.resize)
def _upload_image(self):
"""Upload image and return id."""
with open(self.image, 'rb') as f:
file_ = {'Filedata': ('image', f, 'image/jpg')}
r = self.session.post(Casim._url_upload,
files=file_, headers=Casim._headers)
self.image_id = r.text # casimages share page ID
logger.info('upload is ok, image id is %s', self.image_id)
return self.image_id
def _get_share(self, index=None):
"""Get share link/code.
Args:
index (int, optional): share format choice (see values below). Defaults to None.
0 : Direct link (Mail & Messenger)
1 : Direct link (Forum, Blog, Site)
2 : HTML Code Thumbnail
3 : HTML Code Big
4 : Forum BBCode Thumbnail
5 : Forum BBCode Big
6 : Source Link Thumbnail
7 : Source Link Big
Returns:
str (or list): image share URL (or the list of all share URLs when index is None)
"""
params = {"img": self.image_id}
r = self.session.get(Casim._url_casi_share, params=params)
logger.info('get() on share page returns code : %d', r.status_code)
# "is not None" so that index 0 (direct link) is not treated as falsy
return get_share(r.text, index) if index is not None else get_all_shares(r.text)
def get_link(self):
"""Upload image and return share link (Big source link).
Perform same thing as get_share_code(index=7).
"""
self._upload_image()
return self._get_share(7)
def get_share_code(self, index=0):
"""Get share link/code.
Args:
index (int, optional): Url/code choice. Defaults to 0.
0 : Direct link (Mail & Messenger)
1 : Direct link (Forum, Blog, Site)
2 : HTML Code Thumbnail
3 : HTML Code Big
4 : Forum BBCode Thumbnail
5 : Forum BBCode Big
6 : Source Link Thumbnail
7 : Source Link Big
Returns:
str: image share url/code
"""
self._upload_image()
return self._get_share(index)
def get_all(self):
"""Get list of all links/code.
* Direct link (Mail & Messenger)
* Direct link (Forum, Blog, Site)
* HTML Code Thumbnail
* HTML Code Big
* Forum BBCode Thumbnail
* Forum BBCode Big
* Source Link Thumbnail
* Source Link Big
Returns:
list -- all image share codes/links
"""
self._upload_image()
return self._get_share()
class CasimLogged():
"""Upload image to Casimages account and get share url/code."""
# CASIMAGES
_url = "https://www.casimages.com/"
_url_login = "https://www.casimages.com/connexion"
_url_resize = "https://www.casimages.com/ajax/m_photos_p_resize.php"
_url_upload = "https://www.casimages.com/upload_mb_dz_img.php"
_url_casi_share = "https://www.casimages.com/ajax/m_photos_codes_img.php" # noqa: E501
_url_m_photos = "https://www.casimages.com/m_photos"
_url_search = "https://www.casimages.com/m_rechercher"
#: Valid resize values for resize keyword argument
resize_values = ["100", "125", "320", "640", "800", "1024", "1280", "1600"]
_headers = {
"Accept": "application/json",
"Accept-Language": "fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3",
"Accept-Encoding": "gzip, deflate, br",
"User-Agent": "Mozilla/5.0 (X11; Linux x86_64; rv:69.0) Gecko/20100101 Firefox/69.0", # noqa: E501
"Cache-Control": "no-cache",
"Connection": "keep-alive",
"Pragma": "no-cache",
"X-Requested-With": "XMLHttpRequest"
}
def __init__(self, image, resize=None):
"""Init Casim() object with image path, and optionnal resize value."""
self.image = Path(image)
self.resize = resize if str(resize) in self.resize_values else None
self.image_id = None
self.loggedin = False
self.session = requests.Session() # Session (keep cookies)
self.session.get(CasimLogged._url)  # Init for cookies
logger.info('casim created with image: "%s" and resize: %s',
self.image, self.resize)
def __repr__(self):
return f'CasimLogged({self.image}, resize={self.resize})'
def login(self, email, mdp):
"""Connect to Casimages with your own account."""
self.session.get(CasimLogged._url_login)
payload = {"email": email, "mdp": mdp}
self.session.post(CasimLogged._url_login, data=payload)
self.loggedin = True
def change_folder(self, name):
"""Change destination folder. Folder named "name" should exist."""
r = self.session.get(CasimLogged._url_m_photos)
folder_id = get_folder_id(r.text, name)
if folder_id:
payload = {"alb": folder_id}
r = self.session.get(CasimLogged._url_m_photos, params=payload)
def _set_resize(self):
if self.resize:
params = {"dim": self.resize}
self.session.get(self._url_resize, params=params)
logger.info('ask for resize with value %s', self.resize)
def _upload_image(self):
"""Upload image and return id."""
self._set_resize()
with open(self.image, 'rb') as f:
file_ = {'Filedata': (self.image.name, f, 'image/jpg')}
self.session.post(CasimLogged._url_upload,
files=file_, headers=CasimLogged._headers)
logger.info('upload is ok')
def _get_share(self, index=None):
"""Get share link/code.
Args:
index (int, optional): share format choice (see values below). Defaults to None.
0 : Direct link (Mail & Messenger)
1 : Direct link (Forum, Blog, Site)
2 : HTML Code Thumbnail
3 : HTML Code Big
4 : Forum BBCode Thumbnail
5 : Forum BBCode Big
6 : Source Link Thumbnail
7 : Source Link Big
Returns:
str (or list): image share URL (or the list of all share URLs when index is None)
"""
payload = {"wf": "images", "q": self.image.name}
r = self.session.post(CasimLogged._url_search, params=payload)
image_id = get_image_id(r.text, self.image.name)
if image_id:
params = {"codimg": image_id}
r = self.session.get(CasimLogged._url_casi_share, params=params)
# "is not None" so that index 0 (direct link) is not treated as falsy
return get_share_loggedin(r.text, index) if index is not None else get_all_shares_loggedin(r.text)  # noqa: E501
def get_link(self):
"""Upload image and return share link (Big source link).
Perform same thing as get_share_code(index=7).
"""
self._upload_image()
return self._get_share(7)
def get_share_code(self, index=0):
"""Get share link/code.
Args:
index (int, optional): Url/code choice. Defaults to 0.
0 : Direct link (Mail & Messenger)
1 : Direct link (Forum, Blog, Site)
2 : HTML Code Thumbnail
3 : HTML Code Big
4 : Forum BBCode Thumbnail
5 : Forum BBCode Big
6 : Source Link Thumbnail
7 : Source Link Big
Returns:
str: image share url/code
"""
self._upload_image()
return self._get_share(index)
def get_all(self):
"""Get list of all links/code.
* Direct link (Mail & Messenger)
* Direct link (Forum, Blog, Site)
* HTML Code Thumbnail
* HTML Code Big
* Forum BBCode Thumbnail
* Forum BBCode Big
* Source Link Thumbnail
* Source Link Big
Returns:
list -- all image share codes/links
"""
self._upload_image()
return self._get_share()
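# Hedged usage sketch (not part of the original module): assumes a local
# file "photo.jpg" exists, the Casimages endpoints are reachable, and that
# the credentials and folder below are placeholders.
if __name__ == "__main__":
    anon = Casim("photo.jpg", resize="800")
    print(anon.get_link())  # big source link (share index 7)
    logged = CasimLogged("photo.jpg")
    logged.login("user@example.com", "secret")  # hypothetical credentials
    logged.change_folder("holidays")  # folder must already exist
    print(logged.get_share_code(index=1))  # direct link (forum, blog, site)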
| 35.443262
| 112
| 0.573787
| 1,255
| 9,995
| 4.416733
| 0.155378
| 0.031752
| 0.033736
| 0.03969
| 0.800108
| 0.746166
| 0.741295
| 0.7081
| 0.682482
| 0.658308
| 0
| 0.023553
| 0.311856
| 9,995
| 281
| 113
| 35.569395
| 0.78235
| 0.316158
| 0
| 0.569231
| 0
| 0.030769
| 0.232432
| 0.018346
| 0
| 0
| 0
| 0
| 0
| 1
| 0.138462
| false
| 0
| 0.030769
| 0.015385
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
52b5400d362fd2f963ce671487afb6fba86ac6db
| 1,539
|
py
|
Python
|
network/tests/common.py
|
glasser/integrations-core
|
1dd515d49b1690a1369ee5195713605b1b072b1f
|
[
"BSD-3-Clause"
] | 2
|
2019-05-28T03:48:29.000Z
|
2019-07-05T07:05:58.000Z
|
network/tests/common.py
|
glasser/integrations-core
|
1dd515d49b1690a1369ee5195713605b1b072b1f
|
[
"BSD-3-Clause"
] | 4
|
2019-07-03T02:53:19.000Z
|
2019-07-10T14:52:14.000Z
|
network/tests/common.py
|
glasser/integrations-core
|
1dd515d49b1690a1369ee5195713605b1b072b1f
|
[
"BSD-3-Clause"
] | 1
|
2019-12-23T13:35:17.000Z
|
2019-12-23T13:35:17.000Z
|
# (C) Datadog, Inc. 2019
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
from datadog_checks.dev import get_here
HERE = get_here()
SERVICE_CHECK_NAME = "network"
INSTANCE = {"collect_connection_state": True}
INSTANCE_BLACKLIST = {"collect_connection_state": True, "blacklist_conntrack_metrics": ["count"]}
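# Hedged note: INSTANCE_BLACKLIST mirrors INSTANCE but adds
# blacklist_conntrack_metrics=["count"], which is expected to suppress the
# conntrack "count" metric in the corresponding blacklist test.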
EXPECTED_METRICS = [
'system.net.bytes_rcvd',
'system.net.bytes_sent',
'system.net.packets_in.count',
'system.net.packets_in.error',
'system.net.packets_out.count',
'system.net.packets_out.error',
]
CONNTRACK_METRICS = [
'system.net.conntrack.acct',
'system.net.conntrack.buckets',
'system.net.conntrack.checksum',
'system.net.conntrack.events',
'system.net.conntrack.expect_max',
'system.net.conntrack.generic_timeout',
'system.net.conntrack.helper',
'system.net.conntrack.log_invalid',
'system.net.conntrack.max',
'system.net.conntrack.tcp_loose',
'system.net.conntrack.tcp_max_retrans',
'system.net.conntrack.tcp_timeout_close',
'system.net.conntrack.tcp_timeout_close_wait',
'system.net.conntrack.tcp_timeout_established',
'system.net.conntrack.tcp_timeout_fin_wait',
'system.net.conntrack.tcp_timeout_last_ack',
'system.net.conntrack.tcp_timeout_max_retrans',
'system.net.conntrack.tcp_timeout_syn_recv',
'system.net.conntrack.tcp_timeout_syn_sent',
'system.net.conntrack.tcp_timeout_time_wait',
'system.net.conntrack.tcp_timeout_unacknowledged',
'system.net.conntrack.timestamp',
]
| 33.456522
| 97
| 0.741391
| 196
| 1,539
| 5.55102
| 0.341837
| 0.231618
| 0.363971
| 0.231618
| 0.301471
| 0.224265
| 0.069853
| 0
| 0
| 0
| 0
| 0.002965
| 0.123457
| 1,539
| 45
| 98
| 34.2
| 0.803558
| 0.061079
| 0
| 0
| 0
| 0
| 0.705066
| 0.696738
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.027027
| 0
| 0.027027
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
52b7fb203d8c122ada32eb085f88a8b51fc19ac9
| 70
|
py
|
Python
|
ColorCode.py
|
ashams-random/Data-Structures-using-Python
|
50f6cf55320c6cdbc2f27ea31a9ef943cc1dbc89
|
[
"Apache-2.0"
] | null | null | null |
ColorCode.py
|
ashams-random/Data-Structures-using-Python
|
50f6cf55320c6cdbc2f27ea31a9ef943cc1dbc89
|
[
"Apache-2.0"
] | 7
|
2021-10-05T17:31:16.000Z
|
2021-10-05T18:12:28.000Z
|
ColorCode.py
|
ashams-random/Data-Structures-using-Python
|
50f6cf55320c6cdbc2f27ea31a9ef943cc1dbc89
|
[
"Apache-2.0"
] | 7
|
2021-10-04T05:33:50.000Z
|
2021-10-05T18:09:30.000Z
|
print('\x1b[38;2;5;86;243m' + 'Welcome to my color code' + '\x1b[0m')
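# A note on the escape sequence above: '\x1b[38;2;R;G;Bm' is the 24-bit
# ("truecolor") ANSI code that sets the foreground colour to (R, G, B), and
# '\x1b[0m' resets all attributes. For example, on a truecolor-capable
# terminal the line below prints red text:
print('\x1b[38;2;255;0;0m' + 'This text is red' + '\x1b[0m')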
| 35
| 69
| 0.614286
| 14
| 70
| 3.071429
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.196721
| 0.128571
| 70
| 1
| 70
| 70
| 0.508197
| 0
| 0
| 0
| 0
| 0
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 5
|
52d28d70576546acbe3716d57f9fe9af5b2a0a19
| 10,169
|
py
|
Python
|
tests/gpfunction_test.py
|
ccube-eml/worker
|
a0e413bb330bf06db6880ab1a85647069c385fbb
|
[
"MIT"
] | null | null | null |
tests/gpfunction_test.py
|
ccube-eml/worker
|
a0e413bb330bf06db6880ab1a85647069c385fbb
|
[
"MIT"
] | null | null | null |
tests/gpfunction_test.py
|
ccube-eml/worker
|
a0e413bb330bf06db6880ab1a85647069c385fbb
|
[
"MIT"
] | null | null | null |
import unittest
import os
import tempfile
import shutil
from click.testing import CliRunner
from worker import __main__
from worker.amqp_manager import AMQPManager
THIS_DIRECTORY_PATH = os.path.dirname(os.path.abspath(__file__))
JOB_NAME = 'gpfunction'
AMQP_HOSTNAME = 'localhost'
LEARN_TASKS = [
{
'job_name': 'gpfunction',
'task_number': 0,
'dataset_name': 'higgs',
'training_rate': 0.5,
'fusion_rate': 0.3,
'sample_rate': 0.1,
'class_attribute': 'label',
'class_attribute_type': 'integer',
'true_class_value': '1',
'include_attributes': [],
'exclude_attributes': [],
'attributes_rate': 0.5,
'random_seed': 0,
'include_header': False,
'duration': 60,
'learn_parameters': {
'xover_op': 'operator.SinglePointKozaCrossover',
'external_threads': 4,
'false_negative_weight': 0.5,
'pop_size': 500,
},
},
]
LEARN_COMMAND = 'java -jar gpfunction.jar ' \
'-train ${CCUBE_LEARN_DATASET_FILE} ' \
'-minutes ${CCUBE_LEARN_DURATION_MINUTES} ' \
'-properties ${CCUBE_LEARN_PARAMETERS_PROPERTIES_FILE}'
LEARN_OUTPUT_FILES = '${CCUBE_LEARN_WORKING_DIRECTORY}/mostAccurate.txt'
LEARN_EXECUTABLE_FILE = 'resources/learners/gpfunction.jar'
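# Hedged note: the ${CCUBE_*} placeholders in LEARN_COMMAND and
# LEARN_OUTPUT_FILES above are presumably substituted by the worker at run
# time from its task context; they are not shell variables, even though the
# tests below hand the templates over via os.environ.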
LEARN_OUTPUTS = [
{
'success': True,
'files': 'UEsDBBQAAAAIADsDQUpa/W2KrgAAAIABAAAQAAAAbW9zdEFjY3VyYXRlLnR4dI1Oyw6CMBC8+xV7\nbBFJX3S7f9KrAjEkIipi5O8tDxESTZyk3dnHTIZtgRXPC7BQd8CqLi8fZV6AN+Cl4cAi8BpYc233\nt2K4qrpTfZyWTXnuZx4HcdYewklWN2ufT7Ng0Sj2wWV4cvoCQuUjAgv0nS3q26W1HhW/M84R+839\njwvv+AzwxHksErJKo0mdJE1IZL6PFK2hw8waic4YS4KkIMRYi0RblUpKSSE6KXHzAlBLAQIUAxQA\nAAAIADsDQUpa/W2KrgAAAIABAAAQAAAAAAAAAAAAAACkgQAAAABtb3N0QWNjdXJhdGUudHh0UEsF\nBgAAAAABAAEAPgAAANwAAAAAAA==\n',
},
{
'success': False,
'files': False,
},
{
'success': True,
'files': 'UEsDBBQAAAAIAEuWQUoxobvZzAAAAAACAAAQAAAAbW9zdEFjY3VyYXRlLnR4dG1QSQ7CMAy884oc\nE7qoztIkP8kVaIUqAaUbgt+TrQW1zcGKPR6PxzhBOEN4joYjfGkHhIeuH+13OtcePHpk6KZTbwv3\nT9W8mqqODAOczL0hLg0WsrTm4cc79ujAW3u1PEKQYSQ+hOv3M9IdFIph8r+cm7POw9pJTJ3kmucs\nGaBB8JcuChvL22Uif7/dX2jtfcevP4U3vRwzbOT05mOE0sqCAQgrp0WulaKaM+BMAZWF3i+VaSZy\nyQQwSUtBFRRapTyXSpa0ECCFUpofvlBLAQIUAxQAAAAIAEuWQUoxobvZzAAAAAACAAAQAAAAAAAA\nAAAAAACkgQAAAABtb3N0QWNjdXJhdGUudHh0UEsFBgAAAAABAAEAPgAAAPoAAAAAAA==\n',
},
]
FILTER_TASKS = [
{
'job_name': 'gpfunction',
'learner_outputs_number': 3,
'dataset_name': 'higgs',
'training_rate': 0.5,
'fusion_rate': 0.3,
'sample_rate': 0.1,
'class_attribute': 'label',
'class_attribute_type': 'integer',
'true_class_value': '1',
'include_attributes': [],
'exclude_attributes': [],
'attributes_rate': 0.5,
'random_seed': 0,
'include_header': False,
'threshold': 0.47,
'predict_parameters': None,
},
]
FUSER_TASKS = [
{
'job_name': 'gpfunction',
'dataset_name': 'higgs',
'training_rate': 0.5,
'fusion_rate': 0.3,
'sample_rate': 0.1,
'class_attribute': 'label',
'class_attribute_type': 'integer',
'true_class_value': '1',
'include_attributes': [],
'exclude_attributes': [],
'attributes_rate': 0.5,
'random_seed': 0,
'include_header': False,
'predict_parameters': None,
},
]
PREDICT_COMMAND = 'java -jar gpfunction.jar ' \
'-predict ${CCUBE_PREDICT_DATASET_FILE} ' \
'-model ${CCUBE_PREDICT_INPUT_FILES}/mostAccurate.txt ' \
'-o predictions.csv'
PREDICT_PREDICTIONS_FILE = '${CCUBE_PREDICT_WORKING_DIRECTORY}/predictions.csv'
PREDICT_EXECUTABLE_FILE = 'resources/learners/gpfunction.jar'
FACTORIZER_HOSTNAME = 'localhost'
FACTORIZER_PORT = '5000'
FILTER_OUTPUTS = [
[
{
'files': 'UEsDBBQAAAAIADsDQUpa/W2KrgAAAIABAAAQAAAAbW9zdEFjY3VyYXRlLnR4dI1Oyw6CMBC8+xV7\nbBFJX3S7f9KrAjEkIipi5O8tDxESTZyk3dnHTIZtgRXPC7BQd8CqLi8fZV6AN+Cl4cAi8BpYc233\nt2K4qrpTfZyWTXnuZx4HcdYewklWN2ufT7Ng0Sj2wWV4cvoCQuUjAgv0nS3q26W1HhW/M84R+839\njwvv+AzwxHksErJKo0mdJE1IZL6PFK2hw8waic4YS4KkIMRYi0RblUpKSSE6KXHzAlBLAQIUAxQA\nAAAIADsDQUpa/W2KrgAAAIABAAAQAAAAAAAAAAAAAACkgQAAAABtb3N0QWNjdXJhdGUudHh0UEsF\nBgAAAAABAAEAPgAAANwAAAAAAA==\n'
},
{
'files': 'UEsDBBQAAAAIAEuWQUoxobvZzAAAAAACAAAQAAAAbW9zdEFjY3VyYXRlLnR4dG1QSQ7CMAy884oc\nE7qoztIkP8kVaIUqAaUbgt+TrQW1zcGKPR6PxzhBOEN4joYjfGkHhIeuH+13OtcePHpk6KZTbwv3\nT9W8mqqODAOczL0hLg0WsrTm4cc79ujAW3u1PEKQYSQ+hOv3M9IdFIph8r+cm7POw9pJTJ3kmucs\nGaBB8JcuChvL22Uif7/dX2jtfcevP4U3vRwzbOT05mOE0sqCAQgrp0WulaKaM+BMAZWF3i+VaSZy\nyQQwSUtBFRRapTyXSpa0ECCFUpofvlBLAQIUAxQAAAAIAEuWQUoxobvZzAAAAAACAAAQAAAAAAAA\nAAAAAACkgQAAAABtb3N0QWNjdXJhdGUudHh0UEsFBgAAAAABAAEAPgAAAPoAAAAAAA==\n'
},
{
'files': 'UEsDBBQAAAAIAEuWQUoxobvZzAAAAAACAAAQAAAAbW9zdEFjY3VyYXRlLnR4dG1QSQ7CMAy884oc\nE7qoztIkP8kVaIUqAaUbgt+TrQW1zcGKPR6PxzhBOEN4joYjfGkHhIeuH+13OtcePHpk6KZTbwv3\nT9W8mqqODAOczL0hLg0WsrTm4cc79ujAW3u1PEKQYSQ+hOv3M9IdFIph8r+cm7POw9pJTJ3kmucs\nGaBB8JcuChvL22Uif7/dX2jtfcevP4U3vRwzbOT05mOE0sqCAQgrp0WulaKaM+BMAZWF3i+VaSZy\nyQQwSUtBFRRapTyXSpa0ECCFUpofvlBLAQIUAxQAAAAIAEuWQUoxobvZzAAAAAACAAAQAAAAAAAA\nAAAAAACkgQAAAABtb3N0QWNjdXJhdGUudHh0UEsFBgAAAAABAAEAPgAAAPoAAAAAAA==\n'
},
]
]
class GPFunctionTest(unittest.TestCase):
def setUp(self):
self.__amqp_manager = AMQPManager(AMQP_HOSTNAME)
self.__learner_tasks_queue_name = __main__.LEARN_TASKS_QUEUE_NAME.format(job_name_=JOB_NAME)
self.__learner_outputs_queue_name = __main__.LEARN_OUTPUTS_QUEUE_NAME.format(job_name_=JOB_NAME)
self.__amqp_manager.delete_queue(self.__learner_tasks_queue_name)
self.__amqp_manager.delete_queue(self.__learner_outputs_queue_name)
self.__filter_tasks_queue_name = __main__.FILTER_TASKS_QUEUE_NAME.format(job_name_=JOB_NAME)
self.__filter_outputs_queue_name = __main__.FILTER_OUTPUTS_QUEUE_NAME.format(job_name_=JOB_NAME)
self.__amqp_manager.delete_queue(self.__filter_tasks_queue_name)
self.__amqp_manager.delete_queue(self.__filter_outputs_queue_name)
self.__fuser_tasks_queue_name = __main__.FUSER_TASKS_QUEUE_NAME.format(job_name_=JOB_NAME)
self.__amqp_manager.delete_queue(self.__fuser_tasks_queue_name)
self.__runner = CliRunner()
self.__temporary_working_directory = tempfile.TemporaryDirectory()
shutil.copy(os.path.join(THIS_DIRECTORY_PATH, LEARN_EXECUTABLE_FILE), self.__temporary_working_directory.name)
def tearDown(self):
self.__temporary_working_directory.cleanup()
self.__amqp_manager.delete_queue(self.__learner_tasks_queue_name)
self.__amqp_manager.delete_queue(self.__learner_outputs_queue_name)
self.__amqp_manager.delete_queue(self.__filter_tasks_queue_name)
self.__amqp_manager.delete_queue(self.__filter_outputs_queue_name)
self.__amqp_manager.delete_queue(self.__fuser_tasks_queue_name)
def test_learn(self):
# Publishes the tasks.
self.__amqp_manager.create_queue(self.__learner_tasks_queue_name)
self.__amqp_manager.publish_messages(self.__learner_tasks_queue_name, LEARN_TASKS)
# Adds the environment variables.
os.environ['AMQP_HOSTNAME'] = AMQP_HOSTNAME
os.environ['FACTORIZER_HOSTNAME'] = FACTORIZER_HOSTNAME
os.environ['FACTORIZER_PORT'] = FACTORIZER_PORT
os.environ['CCUBE_LEARN_COMMAND'] = LEARN_COMMAND
os.environ['CCUBE_LEARN_WORKING_DIRECTORY'] = self.__temporary_working_directory.name
os.environ['CCUBE_LEARN_OUTPUT_FILES'] = LEARN_OUTPUT_FILES
result = self.__runner.invoke(
__main__.cli,
[
'learn',
'--job', JOB_NAME,
],
catch_exceptions=False,
)
print(result.output)
def test_filter(self):
# Publishes the tasks.
self.__amqp_manager.create_queue(self.__learner_outputs_queue_name)
self.__amqp_manager.publish_messages(self.__learner_outputs_queue_name, LEARN_OUTPUTS)
self.__amqp_manager.create_queue(self.__filter_tasks_queue_name)
self.__amqp_manager.publish_messages(self.__filter_tasks_queue_name, FILTER_TASKS)
# Adds the environment variables.
os.environ['AMQP_HOSTNAME'] = AMQP_HOSTNAME
os.environ['FACTORIZER_HOSTNAME'] = FACTORIZER_HOSTNAME
os.environ['FACTORIZER_PORT'] = FACTORIZER_PORT
os.environ['CCUBE_PREDICT_COMMAND'] = PREDICT_COMMAND
os.environ['CCUBE_PREDICT_WORKING_DIRECTORY'] = self.__temporary_working_directory.name
os.environ['CCUBE_PREDICT_PREDICTIONS_FILE'] = PREDICT_PREDICTIONS_FILE
result = self.__runner.invoke(
__main__.cli,
[
'filter',
'--job', JOB_NAME,
],
catch_exceptions=False,
)
print(result.output)
def test_fuser(self):
# Publishes the tasks.
self.__amqp_manager.create_queue(self.__filter_outputs_queue_name)
self.__amqp_manager.publish_messages(self.__filter_outputs_queue_name, FILTER_OUTPUTS)
self.__amqp_manager.create_queue(self.__fuser_tasks_queue_name)
self.__amqp_manager.publish_messages(self.__fuser_tasks_queue_name, FUSER_TASKS)
# Adds the environment variables.
os.environ['AMQP_HOSTNAME'] = AMQP_HOSTNAME
os.environ['FACTORIZER_HOSTNAME'] = FACTORIZER_HOSTNAME
os.environ['FACTORIZER_PORT'] = FACTORIZER_PORT
os.environ['CCUBE_PREDICT_COMMAND'] = PREDICT_COMMAND
os.environ['CCUBE_PREDICT_WORKING_DIRECTORY'] = self.__temporary_working_directory.name
os.environ['CCUBE_PREDICT_PREDICTIONS_FILE'] = PREDICT_PREDICTIONS_FILE
result = self.__runner.invoke(
__main__.cli,
[
'fuser',
'--job', JOB_NAME,
],
catch_exceptions=False,
)
print(result.output)
| 43.457265
| 483
| 0.719835
| 924
| 10,169
| 7.402597
| 0.170996
| 0.039474
| 0.046053
| 0.038889
| 0.795468
| 0.755263
| 0.738158
| 0.727339
| 0.727339
| 0.666082
| 0
| 0.036326
| 0.190579
| 10,169
| 233
| 484
| 43.643777
| 0.794679
| 0.015537
| 0
| 0.451282
| 0
| 0.025641
| 0.398141
| 0.284986
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025641
| false
| 0
| 0.035897
| 0
| 0.066667
| 0.015385
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
52d5f7a99898cc5cd224a1f35291456f9ac5ca21
| 87
|
py
|
Python
|
samo/__main__.py
|
AlexMontgomerie/samo
|
6e1deaeaadf69950e71d7e5da7ed06e79a830632
|
[
"MIT"
] | 13
|
2021-12-06T23:13:11.000Z
|
2022-03-12T16:21:59.000Z
|
samo/__main__.py
|
AlexMontgomerie/samo
|
6e1deaeaadf69950e71d7e5da7ed06e79a830632
|
[
"MIT"
] | null | null | null |
samo/__main__.py
|
AlexMontgomerie/samo
|
6e1deaeaadf69950e71d7e5da7ed06e79a830632
|
[
"MIT"
] | null | null | null |
import sys
import samo.cli
if __name__ == "__main__":
samo.cli.main(sys.argv[1:])
| 14.5
| 31
| 0.678161
| 14
| 87
| 3.642857
| 0.642857
| 0.27451
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013699
| 0.16092
| 87
| 5
| 32
| 17.4
| 0.684932
| 0
| 0
| 0
| 0
| 0
| 0.091954
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
52f55f952ecf5fd99ff450014f24f7c9fff70718
| 239
|
py
|
Python
|
src/transporters/transporters_factory.py
|
mucharafal/optics_generator_python
|
c14d4e5f19f921f4dc0a98129bca9d31754b72ad
|
[
"MIT"
] | null | null | null |
src/transporters/transporters_factory.py
|
mucharafal/optics_generator_python
|
c14d4e5f19f921f4dc0a98129bca9d31754b72ad
|
[
"MIT"
] | null | null | null |
src/transporters/transporters_factory.py
|
mucharafal/optics_generator_python
|
c14d4e5f19f921f4dc0a98129bca9d31754b72ad
|
[
"MIT"
] | null | null | null |
def get_transporter(configuration):
transporter_module = configuration.get_module_transporter()
transporter_configuration = configuration.get_configuration()
return transporter_module.get_transporter(transporter_configuration)
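# Hedged illustration (names are hypothetical): get_transporter() only needs
# a configuration object exposing get_module_transporter() (a module that
# itself provides get_transporter(conf)) and get_configuration(), e.g.:
#
#     class MadxConfiguration:
#         def get_module_transporter(self):
#             return madx_transporter            # hypothetical module
#         def get_configuration(self):
#             return {"path": "optics.madx"}     # hypothetical payload
#
#     transporter = get_transporter(MadxConfiguration())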
| 47.8
| 72
| 0.849372
| 22
| 239
| 8.818182
| 0.272727
| 0.371134
| 0.360825
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.09205
| 239
| 4
| 73
| 59.75
| 0.894009
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 5
|
5e0cc1019a453b9a3d13fc13ee423001612f69b3
| 244
|
py
|
Python
|
otp/distributed/CentralLoggerAI.py
|
TheFamiliarScoot/open-toontown
|
678313033174ea7d08e5c2823bd7b473701ff547
|
[
"BSD-3-Clause"
] | 99
|
2019-11-02T22:25:00.000Z
|
2022-02-03T03:48:00.000Z
|
otp/distributed/CentralLoggerAI.py
|
TheFamiliarScoot/open-toontown
|
678313033174ea7d08e5c2823bd7b473701ff547
|
[
"BSD-3-Clause"
] | 42
|
2019-11-03T05:31:08.000Z
|
2022-03-16T22:50:32.000Z
|
otp/distributed/CentralLoggerAI.py
|
TheFamiliarScoot/open-toontown
|
678313033174ea7d08e5c2823bd7b473701ff547
|
[
"BSD-3-Clause"
] | 57
|
2019-11-03T07:47:37.000Z
|
2022-03-22T00:41:49.000Z
|
from direct.directnotify import DirectNotifyGlobal
from direct.distributed.DistributedObjectAI import DistributedObjectAI
class CentralLoggerAI(DistributedObjectAI):
notify = DirectNotifyGlobal.directNotify.newCategory('CentralLoggerAI')
| 34.857143
| 75
| 0.868852
| 19
| 244
| 11.157895
| 0.578947
| 0.09434
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077869
| 244
| 6
| 76
| 40.666667
| 0.942222
| 0
| 0
| 0
| 0
| 0
| 0.061475
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 1
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
5e29a1f1fdeeebc279ea6eed6b4f9540403d01f9
| 34,060
|
py
|
Python
|
src/frr/tests/topotests/multicast_pim_sm_topo3/test_multicast_pim_sm_topo4.py
|
zhouhaifeng/vpe
|
9c644ffd561988e5740021ed26e0f7739844353d
|
[
"Apache-2.0"
] | null | null | null |
src/frr/tests/topotests/multicast_pim_sm_topo3/test_multicast_pim_sm_topo4.py
|
zhouhaifeng/vpe
|
9c644ffd561988e5740021ed26e0f7739844353d
|
[
"Apache-2.0"
] | null | null | null |
src/frr/tests/topotests/multicast_pim_sm_topo3/test_multicast_pim_sm_topo4.py
|
zhouhaifeng/vpe
|
9c644ffd561988e5740021ed26e0f7739844353d
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright (c) 2020 by VMware, Inc. ("VMware")
# Used Copyright (c) 2018 by Network Device Education Foundation,
# Inc. ("NetDEF") in this file.
#
# Permission to use, copy, modify, and/or distribute this software
# for any purpose with or without fee is hereby granted, provided
# that the above copyright notice and this permission notice appear
# in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND VMWARE DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL VMWARE BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
#
"""
Following tests are covered to test multicast pim sm:
Test steps
- Create topology (setup module)
- Bring up topology
Following tests are covered:
1. TC:48 Verify mroute after configuring black-hole route for RP and source
2. TC:49 Verify mroute when RP is reachable using default route
3. TC:50 Verify mroute when LHR, FHR, RP and transit routers are reachable
using default routes
4. TC:52 Verify PIM nbr after changing interface ip
5. TC:53 Verify IGMP interface updated with correct detail after changing interface config
6. TC:54 Verify received and transmit hello stats are getting cleared after PIM nbr reset
"""
import os
import sys
import time
import pytest
pytestmark = pytest.mark.pimd
# Save the Current Working Directory to find configuration files.
CWD = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(CWD, "../"))
sys.path.append(os.path.join(CWD, "../lib/"))
# Required to instantiate the topology builder class.
# pylint: disable=C0413
# Import topogen and topotest helpers
from lib.topogen import Topogen, get_topogen
from lib.common_config import (
start_topology,
write_test_header,
write_test_footer,
step,
reset_config_on_routers,
shutdown_bringup_interface,
apply_raw_config,
create_static_routes,
required_linux_kernel_version,
topo_daemons,
)
from lib.pim import (
create_pim_config,
create_igmp_config,
verify_ip_mroutes,
clear_ip_pim_interface_traffic,
verify_upstream_iif,
clear_ip_mroute,
verify_pim_rp_info,
verify_pim_interface_traffic,
McastTesterHelper,
)
from lib.topolog import logger
from lib.topojson import build_config_from_json
TOPOLOGY = """
i4-----c1-------------c2---i5
| |
| |
i1-----l1------r2-----f1---i2
| | | |
| | | |
i7 i6 i3 i8
Description:
i1, i2, i3, i4, i5, i6, i7, i8 - FRR routers running iperf to send IGMP
joins and traffic
l1 - LHR
f1 - FHR
r2 - FRR router
c1 - FRR router
c2 - FRR router
"""
# Global variables
GROUP_RANGE = "224.0.0.0/4"
IGMP_GROUP = "225.1.1.1/32"
IGMP_JOIN = "225.1.1.1"
GROUP_RANGE_1 = [
"225.1.1.1/32",
"225.1.1.2/32",
"225.1.1.3/32",
"225.1.1.4/32",
"225.1.1.5/32",
]
IGMP_JOIN_RANGE_1 = ["225.1.1.1", "225.1.1.2", "225.1.1.3", "225.1.1.4", "225.1.1.5"]
NEW_ADDRESS_1 = "192.168.20.1"
NEW_ADDRESS_2 = "192.168.20.2"
NEW_ADDRESS_1_SUBNET = "192.168.20.1/24"
NEW_ADDRESS_2_SUBNET = "192.168.20.2/24"
def setup_module(mod):
"""
Sets up the pytest environment
* `mod`: module name
"""
# Required linux kernel version for this suite to run.
result = required_linux_kernel_version("4.19")
if result is not True:
pytest.skip("Kernel requirements are not met")
testsuite_run_time = time.asctime(time.localtime(time.time()))
logger.info("Testsuite start time: {}".format(testsuite_run_time))
logger.info("=" * 40)
logger.info("Master Topology: \n {}".format(TOPOLOGY))
logger.info("Running setup_module to create topology")
json_file = "{}/multicast_pim_sm_topo4.json".format(CWD)
tgen = Topogen(json_file, mod.__name__)
global topo
topo = tgen.json_topo
# ... and here it calls Mininet initialization functions.
# get the list of daemons that need to be started for this suite.
daemons = topo_daemons(tgen, topo)
# Starting topology: create tmp files which are loaded to routers
# to start daemons, and then start the routers
start_topology(tgen, daemons)
# Don"t run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
build_config_from_json(tgen, topo)
# XXX Replace this using "with McastTesterHelper()... " in each test if possible.
global app_helper
app_helper = McastTesterHelper(tgen)
logger.info("Running setup_module() done")
def teardown_module():
"""Teardown the pytest environment"""
logger.info("Running teardown_module to delete topology")
tgen = get_topogen()
app_helper.cleanup()
# Stop topology and remove tmp files
tgen.stop_topology()
logger.info(
"Testsuite end time: {}".format(time.asctime(time.localtime(time.time())))
)
logger.info("=" * 40)
#####################################################
#
# Testcases
#
#####################################################
def verify_state_incremented(state_before, state_after):
"""
API to verify that interface traffic state counters have incremented
Parameters
----------
* `state_before` : State dictionary for any particular instance
* `state_after` : State dictionary for any particular instance
"""
for router, state_data in state_before.items():
for state, value in state_data.items():
if state_before[router][state] >= state_after[router][state]:
errormsg = (
"[DUT: %s]: state %s value has not"
" incremented, Initial value: %s, "
"Current value: %s [FAILED!!]"
% (
router,
state,
state_before[router][state],
state_after[router][state],
)
)
return errormsg
logger.info(
"[DUT: %s]: State %s value is "
"incremented, Initial value: %s, Current value: %s"
" [PASSED!!]",
router,
state,
state_before[router][state],
state_after[router][state],
)
return True
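# Hedged illustration of the input shape verify_state_incremented() expects:
# nested {router: {counter: value}} dicts sampled before and after an event,
# e.g.
#     state_before = {"l1": {"helloTx": 10, "helloRx": 9}}
#     state_after = {"l1": {"helloTx": 12, "helloRx": 11}}
#     assert verify_state_incremented(state_before, state_after) is True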
def test_mroute_when_RP_reachable_default_route_p2(request):
"""
TC_49 Verify mroute when RP and source are reachable using a default route
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
# Don"t run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
app_helper.stop_all_hosts()
clear_ip_mroute(tgen)
reset_config_on_routers(tgen)
clear_ip_pim_interface_traffic(tgen, topo)
step(
"Remove c1-c2 connected link to simulate topo "
"c1(FHR)---l1(RP)----r2---f1-----c2(LHR)"
)
intf_c1_c2 = topo["routers"]["c1"]["links"]["c2"]["interface"]
intf_c2_c1 = topo["routers"]["c2"]["links"]["c1"]["interface"]
shutdown_bringup_interface(tgen, "c1", intf_c1_c2, False)
shutdown_bringup_interface(tgen, "c2", intf_c2_c1, False)
step("Enable the PIM on all the interfaces of FRR1, FRR2, FRR3")
step(
"Enable IGMP of FRR1 interface and send IGMP joins "
" from FRR1 node for group range (225.1.1.1-5)"
)
intf_c2_i5 = topo["routers"]["c2"]["links"]["i5"]["interface"]
input_dict = {
"c2": {"igmp": {"interfaces": {intf_c2_i5: {"igmp": {"version": "2"}}}}}
}
result = create_igmp_config(tgen, topo, input_dict)
assert result is True, "Testcase {}: Failed Error: {}".format(tc_name, result)
input_join = {"i5": topo["routers"]["i5"]["links"]["c2"]["interface"]}
for recvr, recvr_intf in input_join.items():
result = app_helper.run_join(recvr, IGMP_JOIN_RANGE_1, join_intf=recvr_intf)
assert result is True, "Testcase {}: Failed Error: {}".format(tc_name, result)
step("Configure static RP for (225.1.1.1-5) as R2")
input_dict = {
"l1": {
"pim": {
"rp": [
{
"rp_addr": topo["routers"]["l1"]["links"]["lo"]["ipv4"].split(
"/"
)[0],
"group_addr_range": GROUP_RANGE,
}
]
}
}
}
result = create_pim_config(tgen, topo, input_dict)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("Send traffic from C1 to all the groups ( 225.1.1.1 to 225.1.1.5)")
input_src = {"i4": topo["routers"]["i4"]["links"]["c1"]["interface"]}
for src, src_intf in input_src.items():
result = app_helper.run_traffic(src, IGMP_JOIN_RANGE_1, bind_intf=src_intf)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
source_i4 = topo["routers"]["i4"]["links"]["c1"]["ipv4"].split("/")[0]
input_dict_starg = [
{
"dut": "c2",
"src_address": "*",
"iif": topo["routers"]["c2"]["links"]["f1"]["interface"],
"oil": topo["routers"]["c2"]["links"]["i5"]["interface"],
}
]
input_dict_sg = [
{
"dut": "c2",
"src_address": source_i4,
"iif": topo["routers"]["c2"]["links"]["f1"]["interface"],
"oil": topo["routers"]["c2"]["links"]["i5"]["interface"],
}
]
step("Verify mroutes and iff upstream")
for data in input_dict_sg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
result = verify_upstream_iif(
tgen, data["dut"], data["iif"], data["src_address"], IGMP_JOIN_RANGE_1
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
for data in input_dict_starg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
result = verify_upstream_iif(
tgen, data["dut"], data["iif"], data["src_address"], IGMP_JOIN_RANGE_1
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("Delete static routes on c2")
input_dict = {
"c2": {
"static_routes": [
{
"network": ["1.0.4.11/32", "10.0.2.1/24", "10.0.1.2/24"],
"next_hop": "10.0.3.2",
"delete": True,
}
]
}
}
result = create_static_routes(tgen, input_dict)
assert result is True, "Testcase {} :Failed \n Error {}".format(tc_name, result)
step("Verify RP info unknown after removing static route from c2 ")
dut = "c2"
rp_address = topo["routers"]["l1"]["links"]["lo"]["ipv4"].split("/")[0]
SOURCE = "Static"
result = verify_pim_rp_info(
tgen, topo, dut, GROUP_RANGE_1, "Unknown", rp_address, SOURCE
)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
step("Verify mroute not present after Delete of static routes on c1")
for data in input_dict_sg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
expected=False,
)
assert result is not True, (
"Testcase {} : Failed \n "
"mroutes(S,G) are present after delete of static routes on c1 \n Error: {}".format(
tc_name, result
)
)
result = verify_upstream_iif(
tgen,
data["dut"],
data["iif"],
data["src_address"],
IGMP_JOIN_RANGE_1,
expected=False,
)
assert result is not True, (
"Testcase {} : Failed \n "
"upstream is present after delete of static routes on c1 \n Error: {}".format(
tc_name, result
)
)
for data in input_dict_starg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
expected=False,
)
assert result is not True, (
"Testcase {} : Failed \n "
"mroutes(*,G) are present after delete of static routes on c1 \n Error: {}".format(
tc_name, result
)
)
result = verify_upstream_iif(
tgen,
data["dut"],
data["iif"],
data["src_address"],
IGMP_JOIN_RANGE_1,
expected=False,
)
assert result is not True, (
"Testcase {} : Failed \n "
"upstream is present after delete of static routes on c1 \n Error: {}".format(
tc_name, result
)
)
step("Configure default routes on c2")
intf_f1_c2 = topo["routers"]["f1"]["links"]["c2"]["ipv4"].split("/")[0]
input_dict = {
"c2": {"static_routes": [{"network": "0.0.0.0/0", "next_hop": intf_f1_c2}]}
}
result = create_static_routes(tgen, input_dict)
assert result is True, "Testcase {} :Failed \n Error {}".format(tc_name, result)
step("applying ip nht config on c2")
raw_config = {"c2": {"raw_config": ["ip nht resolve-via-default"]}}
result = apply_raw_config(tgen, raw_config)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("Verify RP info is NOT unknown after removing static route from c2 ")
result = verify_pim_rp_info(
tgen, topo, dut, GROUP_RANGE_1, "Unknown", rp_address, SOURCE, expected=False
)
assert result is not True, (
"Testcase {} : Failed \n "
"RP info is unknown after removing static route from c2 \n Error: {}".format(
tc_name, result
)
)
step("Verify (s,g) populated after adding default route ")
for data in input_dict_sg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
result = verify_upstream_iif(
tgen, data["dut"], data["iif"], data["src_address"], IGMP_JOIN_RANGE_1
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("Verify (*,g) populated after adding default route ")
for data in input_dict_starg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
result = verify_upstream_iif(
tgen, data["dut"], data["iif"], data["src_address"], IGMP_JOIN_RANGE_1
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
write_test_footer(tc_name)
def test_mroute_with_RP_default_route_all_nodes_p2(request):
"""
    TC_50 Verify mroutes when the LHR, FHR, RP and transit routers are
    reachable using default routes
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
# Don"t run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
app_helper.stop_all_hosts()
clear_ip_mroute(tgen)
reset_config_on_routers(tgen)
clear_ip_pim_interface_traffic(tgen, topo)
step(
"Remove c1-c2 connected link to simulate topo "
"c1(LHR)---l1(RP)----r2---f1-----c2(FHR)"
)
intf_c1_c2 = topo["routers"]["c1"]["links"]["c2"]["interface"]
intf_c2_c1 = topo["routers"]["c2"]["links"]["c1"]["interface"]
shutdown_bringup_interface(tgen, "c1", intf_c1_c2, False)
shutdown_bringup_interface(tgen, "c2", intf_c2_c1, False)
step("Enable the PIM on all the interfaces of FRR1, FRR2, FRR3")
step(
"Enable IGMP of FRR1 interface and send IGMP joins "
" from FRR1 node for group range (225.1.1.1-5)"
)
intf_c1_i4 = topo["routers"]["c1"]["links"]["i4"]["interface"]
input_dict = {
"c1": {"igmp": {"interfaces": {intf_c1_i4: {"igmp": {"version": "2"}}}}}
}
result = create_igmp_config(tgen, topo, input_dict)
assert result is True, "Testcase {}: Failed Error: {}".format(tc_name, result)
input_join = {"i4": topo["routers"]["i4"]["links"]["c1"]["interface"]}
for recvr, recvr_intf in input_join.items():
result = app_helper.run_join(recvr, IGMP_JOIN_RANGE_1, join_intf=recvr_intf)
assert result is True, "Testcase {}: Failed Error: {}".format(tc_name, result)
step("Configure static RP for (225.1.1.1-5) as R2")
input_dict = {
"l1": {
"pim": {
"rp": [
{
"rp_addr": topo["routers"]["l1"]["links"]["lo"]["ipv4"].split(
"/"
)[0],
"group_addr_range": GROUP_RANGE,
}
]
}
}
}
result = create_pim_config(tgen, topo, input_dict)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("Send traffic from C2 to all the groups ( 225.1.1.1 to 225.1.1.5)")
input_src = {"i5": topo["routers"]["i5"]["links"]["c2"]["interface"]}
for src, src_intf in input_src.items():
result = app_helper.run_traffic(src, IGMP_JOIN_RANGE_1, bind_intf=src_intf)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
source_i5 = topo["routers"]["i5"]["links"]["c2"]["ipv4"].split("/")[0]
input_dict_starg = [
{
"dut": "c1",
"src_address": "*",
"iif": topo["routers"]["c1"]["links"]["l1"]["interface"],
"oil": topo["routers"]["c1"]["links"]["i4"]["interface"],
}
]
input_dict_sg = [
{
"dut": "c1",
"src_address": source_i5,
"iif": topo["routers"]["c1"]["links"]["l1"]["interface"],
"oil": topo["routers"]["c1"]["links"]["i4"]["interface"],
}
]
step("Verify mroutes and iff upstream")
for data in input_dict_sg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
result = verify_upstream_iif(
tgen, data["dut"], data["iif"], data["src_address"], IGMP_JOIN_RANGE_1
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
for data in input_dict_starg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
result = verify_upstream_iif(
tgen, data["dut"], data["iif"], data["src_address"], IGMP_JOIN_RANGE_1
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("Delete static routes RP on all the nodes")
input_dict = {
"c2": {
"static_routes": [
{"network": ["1.0.4.11/32"], "next_hop": "10.0.3.2", "delete": True}
]
},
"c1": {
"static_routes": [
{"network": ["1.0.4.11/32"], "next_hop": "10.0.2.2", "delete": True}
]
},
"r2": {
"static_routes": [
{"network": ["1.0.4.11/32"], "next_hop": "10.0.12.1", "delete": True}
]
},
"f1": {
"static_routes": [
{"network": ["1.0.4.11/32"], "next_hop": "10.0.7.2", "delete": True}
]
},
}
result = create_static_routes(tgen, input_dict)
assert result is True, "Testcase {} :Failed \n Error {}".format(tc_name, result)
step("Verify RP info unknown after removing static route from c2 ")
dut = "c2"
rp_address = topo["routers"]["l1"]["links"]["lo"]["ipv4"].split("/")[0]
SOURCE = "Static"
result = verify_pim_rp_info(
tgen, topo, dut, GROUP_RANGE_1, "Unknown", rp_address, SOURCE
)
assert result is True, "Testcase {} :Failed \n Error: {}".format(tc_name, result)
for data in input_dict_starg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
expected=False,
)
assert (
result is not True
), "Testcase {} : Failed \n " "mroutes are still present \n Error: {}".format(
tc_name, result
)
result = verify_upstream_iif(
tgen,
data["dut"],
data["iif"],
data["src_address"],
IGMP_JOIN_RANGE_1,
expected=False,
)
assert (
result is not True
), "Testcase {} : Failed \n " "upstream is still present \n Error: {}".format(
tc_name, result
)
step("Configure default routes on all the nodes")
intf_f1_c2 = topo["routers"]["f1"]["links"]["c2"]["ipv4"].split("/")[0]
intf_l1_c1 = topo["routers"]["l1"]["links"]["c1"]["ipv4"].split("/")[0]
intf_l1_r2 = topo["routers"]["l1"]["links"]["r2"]["ipv4"].split("/")[0]
intf_r2_f1 = topo["routers"]["r2"]["links"]["f1"]["ipv4"].split("/")[0]
input_dict = {
"c1": {"static_routes": [{"network": "0.0.0.0/0", "next_hop": intf_l1_c1}]},
"c2": {"static_routes": [{"network": "0.0.0.0/0", "next_hop": intf_f1_c2}]},
"r2": {"static_routes": [{"network": "0.0.0.0/0", "next_hop": intf_l1_r2}]},
"f1": {"static_routes": [{"network": "0.0.0.0/0", "next_hop": intf_r2_f1}]},
}
result = create_static_routes(tgen, input_dict)
assert result is True, "Testcase {} :Failed \n Error {}".format(tc_name, result)
step("applying ip nht config on c2")
raw_config = {
"c1": {"raw_config": ["ip nht resolve-via-default"]},
"c2": {"raw_config": ["ip nht resolve-via-default"]},
"r2": {"raw_config": ["ip nht resolve-via-default"]},
"f1": {"raw_config": ["ip nht resolve-via-default"]},
"l1": {"raw_config": ["ip nht resolve-via-default"]},
}
result = apply_raw_config(tgen, raw_config)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("Verify RP info Not unknown after removing static route from c2 ")
dut = "c2"
step("Verify RP info is NOT unknown after removing static route from c2 ")
result = verify_pim_rp_info(
tgen, topo, dut, GROUP_RANGE_1, "Unknown", rp_address, SOURCE, expected=False
)
assert result is not True, (
"Testcase {} : Failed \n "
"RP info is unknown after removing static route from c2 \n Error: {}".format(
tc_name, result
)
)
step("Verify (s,g) populated after adding default route ")
for data in input_dict_sg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
result = verify_upstream_iif(
tgen, data["dut"], data["iif"], data["src_address"], IGMP_JOIN_RANGE_1
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("Verify (*,g) populated after adding default route ")
for data in input_dict_starg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
result = verify_upstream_iif(
tgen, data["dut"], data["iif"], data["src_address"], IGMP_JOIN_RANGE_1
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
write_test_footer(tc_name)
def test_PIM_hello_tx_rx_p1(request):
"""
    TC_54 Verify that received and transmitted hello stats
    are cleared after a PIM neighbor reset
"""
tgen = get_topogen()
tc_name = request.node.name
write_test_header(tc_name)
# Don"t run this test if we have any failure.
if tgen.routers_have_failure():
pytest.skip(tgen.errors)
# Creating configuration from JSON
app_helper.stop_all_hosts()
clear_ip_mroute(tgen)
reset_config_on_routers(tgen)
clear_ip_pim_interface_traffic(tgen, topo)
step(
"Remove c1-c2 connected link to simulate topo "
"c1(LHR)---l1(RP)----r2---f1-----c2(FHR)"
)
intf_c1_c2 = topo["routers"]["c1"]["links"]["c2"]["interface"]
intf_c2_c1 = topo["routers"]["c2"]["links"]["c1"]["interface"]
shutdown_bringup_interface(tgen, "c1", intf_c1_c2, False)
shutdown_bringup_interface(tgen, "c2", intf_c2_c1, False)
step("Enable the PIM on all the interfaces of FRR1, FRR2, FRR3")
step(
"Enable IGMP of FRR1 interface and send IGMP joins "
" from FRR1 node for group range (225.1.1.1-5)"
)
intf_c1_i4 = topo["routers"]["c1"]["links"]["i4"]["interface"]
input_dict = {
"c1": {"igmp": {"interfaces": {intf_c1_i4: {"igmp": {"version": "2"}}}}}
}
result = create_igmp_config(tgen, topo, input_dict)
assert result is True, "Testcase {}: Failed Error: {}".format(tc_name, result)
input_join = {"i4": topo["routers"]["i4"]["links"]["c1"]["interface"]}
for recvr, recvr_intf in input_join.items():
result = app_helper.run_join(recvr, IGMP_JOIN_RANGE_1, join_intf=recvr_intf)
assert result is True, "Testcase {}: Failed Error: {}".format(tc_name, result)
step("Configure static RP for (225.1.1.1-5) as R2")
input_dict = {
"l1": {
"pim": {
"rp": [
{
"rp_addr": topo["routers"]["l1"]["links"]["lo"]["ipv4"].split(
"/"
)[0],
"group_addr_range": GROUP_RANGE,
}
]
}
}
}
result = create_pim_config(tgen, topo, input_dict)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("Send Mcast traffic from C2 to all the groups ( 225.1.1.1 to 225.1.1.5)")
input_src = {"i5": topo["routers"]["i5"]["links"]["c2"]["interface"]}
for src, src_intf in input_src.items():
result = app_helper.run_traffic(src, IGMP_JOIN_RANGE_1, bind_intf=src_intf)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
source_i5 = topo["routers"]["i5"]["links"]["c2"]["ipv4"].split("/")[0]
input_dict_starg = [
{
"dut": "c1",
"src_address": "*",
"iif": topo["routers"]["c1"]["links"]["l1"]["interface"],
"oil": topo["routers"]["c1"]["links"]["i4"]["interface"],
}
]
input_dict_sg = [
{
"dut": "c1",
"src_address": source_i5,
"iif": topo["routers"]["c1"]["links"]["l1"]["interface"],
"oil": topo["routers"]["c1"]["links"]["i4"]["interface"],
}
]
step("(*,G) and (S,G) created on f1 and node verify using 'show ip mroute'")
for data in input_dict_sg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
for data in input_dict_starg:
result = verify_ip_mroutes(
tgen,
data["dut"],
data["src_address"],
IGMP_JOIN_RANGE_1,
data["iif"],
data["oil"],
)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
intf_l1_c1 = topo["routers"]["l1"]["links"]["c1"]["interface"]
intf_c1_l1 = topo["routers"]["c1"]["links"]["l1"]["interface"]
step("verify before stats on C1")
state_dict = {
"c1": {
intf_c1_l1: ["helloTx", "helloRx"],
}
}
c1_state_before = verify_pim_interface_traffic(tgen, state_dict)
    assert isinstance(
        c1_state_before, dict
    ), "Testcase {} : Failed \n c1_state_before is not a dictionary \n Error: {}".format(
        tc_name, c1_state_before
    )
step("Flap PIM nbr while doing interface c1-l1 interface shut from f1 side")
shutdown_bringup_interface(tgen, "c1", intf_c1_l1, False)
    step(
        "After shut of the local interface from c1, verify that rx/tx hello "
        "counters are cleared on the c1 side using 'show ip pim interface traffic'"
    )
shutdown_bringup_interface(tgen, "c1", intf_c1_l1, True)
step("verify stats after on c1")
c1_state_after = verify_pim_interface_traffic(tgen, state_dict)
    assert isinstance(
        c1_state_after, dict
    ), "Testcase {} : Failed \n c1_state_after is not a dictionary \n Error: {}".format(
        tc_name, c1_state_after
    )
step("verify stats not increamented on c1")
result = verify_state_incremented(c1_state_before, c1_state_after)
    assert (
        result is not True
    ), "Testcase {} : Failed \n hello stats were incremented \n Error: {}".format(
        tc_name, result
    )
step("verify before stats on l1")
l1_state_dict = {
"l1": {
intf_l1_c1: ["helloTx", "helloRx"],
}
}
l1_state_before = verify_pim_interface_traffic(tgen, l1_state_dict)
    assert isinstance(
        l1_state_before, dict
    ), "Testcase {} : Failed \n l1_state_before is not a dictionary \n Error: {}".format(
        tc_name, l1_state_before
    )
step("Flap PIM nbr while doing interface r2-c1 shut from r2 side")
shutdown_bringup_interface(tgen, "l1", intf_l1_c1, False)
    step(
        "After shutting the interface from the l1 side, verify that l1's rx and "
        "tx hello counters are reset using 'show ip pim interface traffic'"
    )
shutdown_bringup_interface(tgen, "l1", intf_l1_c1, True)
step("verify stats after on l1")
l1_state_after = verify_pim_interface_traffic(tgen, l1_state_dict)
    assert isinstance(
        l1_state_after, dict
    ), "Testcase {} : Failed \n l1_state_after is not a dictionary \n Error: {}".format(
        tc_name, l1_state_after
    )
step("verify stats not increamented on l1")
result = verify_state_incremented(l1_state_before, l1_state_after)
    assert (
        result is not True
    ), "Testcase {} : Failed \n hello stats were incremented \n Error: {}".format(
        tc_name, result
    )
step("Reinit the dict")
c1_state_before = {}
l1_state_before = {}
c1_state_after = {}
l1_state_after = {}
step("verify before stats on C1")
state_dict = {
"c1": {
intf_c1_l1: ["helloTx", "helloRx"],
}
}
c1_state_before = verify_pim_interface_traffic(tgen, state_dict)
    assert isinstance(
        c1_state_before, dict
    ), "Testcase {} : Failed \n c1_state_before is not a dictionary \n Error: {}".format(
        tc_name, c1_state_before
    )
step("Flap c1-r2 pim nbr while changing ip address from c1 side")
c1_l1_ip_subnet = topo["routers"]["c1"]["links"]["l1"]["ipv4"]
raw_config = {
"c1": {
"raw_config": [
"interface {}".format(intf_c1_l1),
"no ip address {}".format(c1_l1_ip_subnet),
"ip address {}".format(NEW_ADDRESS_2_SUBNET),
]
}
}
result = apply_raw_config(tgen, raw_config)
assert result is True, "Testcase {} : Failed Error: {}".format(tc_name, result)
step("verify stats after on c1")
c1_state_after = verify_pim_interface_traffic(tgen, state_dict)
    assert isinstance(
        c1_state_after, dict
    ), "Testcase {} : Failed \n c1_state_after is not a dictionary \n Error: {}".format(
        tc_name, c1_state_after
    )
step("verify stats not increamented on c1")
result = verify_state_incremented(c1_state_before, c1_state_after)
    assert (
        result is not True
    ), "Testcase {} : Failed \n hello stats were incremented \n Error: {}".format(
        tc_name, result
    )
write_test_footer(tc_name)
if __name__ == "__main__":
args = ["-s"] + sys.argv[1:]
sys.exit(pytest.main(args))
| 32.253788 | 100 | 0.57158 | 4,270 | 34,060 | 4.365105 | 0.092974 | 0.020924 | 0.036053 | 0.05408 | 0.775685 | 0.759161 | 0.745802 | 0.730994 | 0.704652 | 0.695585 | 0 | 0.031965 | 0.285408 | 34,060 | 1,055 | 101 | 32.28436 | 0.733843 | 0.085056 | 0 | 0.6109 | 0 | 0.006337 | 0.284235 | 0.006635 | 0 | 0 | 0 | 0 | 0.070976 | 1 | 0.007605 | false | 0.001267 | 0.011407 | 0 | 0.021546 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
| 5e3cbde4c77ad9d62f6a74953c856e853ef7bc2c | 26,853 | py | Python | tests/testOrderbook3.py | blakelucey/pyziabm | b4e62aa036233e58d7b44b654c375baf57ffc2d3 | ["BSD-3-Clause"] | 35 | 2017-11-27T13:10:42.000Z | 2021-09-13T13:39:55.000Z | tests/testOrderbook3.py | blakelucey/pyziabm | b4e62aa036233e58d7b44b654c375baf57ffc2d3 | ["BSD-3-Clause"] | 2 | 2017-10-10T20:28:49.000Z | 2021-09-06T14:59:13.000Z | tests/testOrderbook3.py | blakelucey/pyziabm | b4e62aa036233e58d7b44b654c375baf57ffc2d3 | ["BSD-3-Clause"] | 23 | 2017-08-28T18:29:09.000Z | 2022-03-20T01:59:26.000Z |
from pyziabm.orderbook3 import Orderbook
import unittest
class TestOrderbook(unittest.TestCase):
'''
Attribute objects in the Orderbook class include:
order_history: list
_bid_book: dictionary
_bid_book_prices: sorted list
_ask_book: dictionary
_ask_book_prices: sorted list
confirm_modify_collector: list
confirm_trade_collector: list
sip_collector: list
trade_book: list
Each method impacts one or more of these attributes.
'''
def setUp(self):
'''
setUp creates the Orderbook instance and a set of orders
'''
self.ex1 = Orderbook()
self.q1_buy = {'order_id': 't1_1', 'timestamp': 2, 'type': 'add', 'quantity': 1, 'side': 'buy',
'price': 50}
self.q2_buy = {'order_id': 't1_2', 'timestamp': 3, 'type': 'add', 'quantity': 1, 'side': 'buy',
'price': 50}
self.q3_buy = {'order_id': 't10_1', 'timestamp': 4, 'type': 'add', 'quantity': 3, 'side': 'buy',
'price': 49}
self.q4_buy = {'order_id': 't11_1', 'timestamp': 5, 'type': 'add', 'quantity': 3, 'side': 'buy',
'price': 47}
self.q1_sell = {'order_id': 't1_3', 'timestamp': 2, 'type': 'add', 'quantity': 1, 'side': 'sell',
'price': 52}
self.q2_sell = {'order_id': 't1_4', 'timestamp': 3, 'type': 'add', 'quantity': 1, 'side': 'sell',
'price': 52}
self.q3_sell = {'order_id': 't10_2', 'timestamp': 4, 'type': 'add', 'quantity': 3, 'side': 'sell',
'price': 53}
self.q4_sell = {'order_id': 't11_2', 'timestamp': 5, 'type': 'add', 'quantity': 3, 'side': 'sell',
'price': 55}
def test_add_order_to_history(self):
'''
add_order_to_history() impacts the order_history list
'''
h1 = {'order_id': 't1_5', 'timestamp': 4, 'type': 'add', 'quantity': 5, 'side': 'sell', 'price': 55}
self.assertFalse(self.ex1.order_history)
h1['exid'] = 1
self.ex1._add_order_to_history(h1)
self.assertDictEqual(h1, self.ex1.order_history[0])
def test_add_order_to_book(self):
'''
add_order_to_book() impacts _bid_book and _bid_book_prices or _ask_book and _ask_book_prices
Add two buy orders, then two sell orders
'''
# 2 buy orders
self.assertFalse(self.ex1._bid_book_prices)
self.assertFalse(self.ex1._bid_book)
self.ex1.add_order_to_book(self.q1_buy)
self.assertTrue(50 in self.ex1._bid_book_prices)
self.assertTrue(50 in self.ex1._bid_book.keys())
self.assertEqual(self.ex1._bid_book[50]['num_orders'], 1)
self.assertEqual(self.ex1._bid_book[50]['size'], 1)
self.assertEqual(self.ex1._bid_book[50]['order_ids'][0], self.q1_buy['order_id'])
self.assertDictEqual(self.ex1._bid_book[50]['orders'][self.q1_buy['order_id']], self.q1_buy)
self.ex1.add_order_to_book(self.q2_buy)
self.assertEqual(self.ex1._bid_book[50]['num_orders'], 2)
self.assertEqual(self.ex1._bid_book[50]['size'], 2)
self.assertEqual(self.ex1._bid_book[50]['order_ids'][1], self.q2_buy['order_id'])
self.assertDictEqual(self.ex1._bid_book[50]['orders'][self.q2_buy['order_id']], self.q2_buy)
# 2 sell orders
self.assertFalse(self.ex1._ask_book_prices)
self.assertFalse(self.ex1._ask_book)
self.ex1.add_order_to_book(self.q1_sell)
self.assertTrue(52 in self.ex1._ask_book_prices)
self.assertTrue(52 in self.ex1._ask_book.keys())
self.assertEqual(self.ex1._ask_book[52]['num_orders'], 1)
self.assertEqual(self.ex1._ask_book[52]['size'], 1)
self.assertEqual(self.ex1._ask_book[52]['order_ids'][0], self.q1_sell['order_id'])
self.assertDictEqual(self.ex1._ask_book[52]['orders'][self.q1_sell['order_id']], self.q1_sell)
self.ex1.add_order_to_book(self.q2_sell)
self.assertEqual(self.ex1._ask_book[52]['num_orders'], 2)
self.assertEqual(self.ex1._ask_book[52]['size'], 2)
self.assertEqual(self.ex1._ask_book[52]['order_ids'][1], self.q2_sell['order_id'])
self.assertDictEqual(self.ex1._ask_book[52]['orders'][self.q2_sell['order_id']], self.q2_sell)
def test_remove_order(self):
'''
_remove_order() impacts _bid_book and _bid_book_prices or _ask_book and _ask_book_prices
Add two orders, remove the second order twice
'''
# buy orders
self.ex1.add_order_to_book(self.q1_buy)
self.ex1.add_order_to_book(self.q2_buy)
self.assertTrue(50 in self.ex1._bid_book_prices)
self.assertTrue(50 in self.ex1._bid_book.keys())
self.assertEqual(self.ex1._bid_book[50]['num_orders'], 2)
self.assertEqual(self.ex1._bid_book[50]['size'], 2)
self.assertEqual(len(self.ex1._bid_book[50]['order_ids']), 2)
# remove first order
self.ex1._remove_order('buy', 50, 't1_1')
self.assertEqual(self.ex1._bid_book[50]['num_orders'], 1)
self.assertEqual(self.ex1._bid_book[50]['size'], 1)
self.assertEqual(len(self.ex1._bid_book[50]['order_ids']), 1)
self.assertFalse('t1_1' in self.ex1._bid_book[50]['orders'].keys())
self.assertTrue(50 in self.ex1._bid_book_prices)
# remove second order
self.ex1._remove_order('buy', 50, 't1_2')
self.assertFalse(self.ex1._bid_book_prices)
self.assertEqual(self.ex1._bid_book[50]['num_orders'], 0)
self.assertEqual(self.ex1._bid_book[50]['size'], 0)
self.assertEqual(len(self.ex1._bid_book[50]['order_ids']), 0)
self.assertFalse('t1_2' in self.ex1._bid_book[50]['orders'].keys())
self.assertFalse(50 in self.ex1._bid_book_prices)
# remove second order again
self.ex1._remove_order('buy', 50, 't1_2')
self.assertFalse(self.ex1._bid_book_prices)
self.assertEqual(self.ex1._bid_book[50]['num_orders'], 0)
self.assertEqual(self.ex1._bid_book[50]['size'], 0)
self.assertEqual(len(self.ex1._bid_book[50]['order_ids']), 0)
self.assertFalse('t1_2' in self.ex1._bid_book[50]['orders'].keys())
# sell orders
self.ex1.add_order_to_book(self.q1_sell)
self.ex1.add_order_to_book(self.q2_sell)
self.assertTrue(52 in self.ex1._ask_book_prices)
self.assertTrue(52 in self.ex1._ask_book.keys())
self.assertEqual(self.ex1._ask_book[52]['num_orders'], 2)
self.assertEqual(self.ex1._ask_book[52]['size'], 2)
self.assertEqual(len(self.ex1._ask_book[52]['order_ids']), 2)
# remove first order
self.ex1._remove_order('sell', 52, 't1_3')
self.assertEqual(self.ex1._ask_book[52]['num_orders'], 1)
self.assertEqual(self.ex1._ask_book[52]['size'], 1)
self.assertEqual(len(self.ex1._ask_book[52]['order_ids']), 1)
self.assertFalse('t1_1' in self.ex1._ask_book[52]['orders'].keys())
self.assertTrue(52 in self.ex1._ask_book_prices)
# remove second order
self.ex1._remove_order('sell', 52, 't1_4')
self.assertFalse(self.ex1._ask_book_prices)
self.assertEqual(self.ex1._ask_book[52]['num_orders'], 0)
self.assertEqual(self.ex1._ask_book[52]['size'], 0)
self.assertEqual(len(self.ex1._ask_book[52]['order_ids']), 0)
self.assertFalse('t1_2' in self.ex1._ask_book[52]['orders'].keys())
self.assertFalse(52 in self.ex1._ask_book_prices)
# remove second order again
self.ex1._remove_order('sell', 52, 't1_4')
self.assertFalse(self.ex1._ask_book_prices)
self.assertEqual(self.ex1._ask_book[52]['num_orders'], 0)
self.assertEqual(self.ex1._ask_book[52]['size'], 0)
self.assertEqual(len(self.ex1._ask_book[52]['order_ids']), 0)
self.assertFalse('t1_2' in self.ex1._ask_book[52]['orders'].keys())
def test_modify_order(self):
'''
_modify_order() primarily impacts _bid_book or _ask_book
_modify_order() could impact _bid_book_prices or _ask_book_prices if the order results
in removing the full quantity with a call to _remove_order()
Add 1 order, remove partial, then remainder
'''
# Buy order
q1 = {'order_id': 't1_1', 'timestamp': 5, 'type': 'add', 'quantity': 2, 'side': 'buy',
'price': 50}
self.ex1.add_order_to_book(q1)
self.assertEqual(self.ex1._bid_book[50]['size'], 2)
# remove 1
self.ex1._modify_order('buy', 1, 't1_1', 50)
self.assertEqual(self.ex1._bid_book[50]['size'], 1)
self.assertEqual(self.ex1._bid_book[50]['orders']['t1_1']['quantity'], 1)
self.assertTrue(self.ex1._bid_book_prices)
# remove remainder
self.ex1._modify_order('buy', 1, 't1_1', 50)
self.assertFalse(self.ex1._bid_book_prices)
self.assertEqual(self.ex1._bid_book[50]['num_orders'], 0)
self.assertEqual(self.ex1._bid_book[50]['size'], 0)
self.assertFalse('t1_1' in self.ex1._bid_book[50]['orders'].keys())
# Sell order
q2 = {'order_id': 't1_1', 'timestamp': 5, 'type': 'add', 'quantity': 2, 'side': 'sell',
'price': 50}
self.ex1.add_order_to_book(q2)
self.assertEqual(self.ex1._ask_book[50]['size'], 2)
# remove 1
self.ex1._modify_order('sell', 1, 't1_1', 50)
self.assertEqual(self.ex1._ask_book[50]['size'], 1)
self.assertEqual(self.ex1._ask_book[50]['orders']['t1_1']['quantity'], 1)
self.assertTrue(self.ex1._ask_book_prices)
# remove remainder
self.ex1._modify_order('sell', 1, 't1_1', 50)
self.assertFalse(self.ex1._ask_book_prices)
self.assertEqual(self.ex1._ask_book[50]['num_orders'], 0)
self.assertEqual(self.ex1._ask_book[50]['size'], 0)
self.assertFalse('t1_1' in self.ex1._ask_book[50]['orders'].keys())
def test_add_trade_to_book(self):
'''
add_trade_to_book() impacts trade_book
Check trade book empty, add a trade, check non-empty, verify dict equality
'''
t1 = dict(resting_order_id='t1_1', resting_timestamp=2, incoming_order_id='t2_1',
timestamp=5, price=50, quantity=1, side='buy')
self.assertFalse(self.ex1.trade_book)
self.ex1._add_trade_to_book('t1_1', 2, 't2_1', 5, 50, 1, 'buy')
self.assertTrue(self.ex1.trade_book)
self.assertDictEqual(t1, self.ex1.trade_book[0])
def test_confirm_trade(self):
'''
confirm_trade() impacts confirm_trade_collector
Check confirm trade collector empty, add a trade, check non-empty, verify dict equality
'''
t2 = dict(timestamp=5, trader='t3', order_id='t3_1', quantity=1,
side='sell', price=50)
self.assertFalse(self.ex1.confirm_trade_collector)
self.ex1._confirm_trade(5, 'sell', 1, 't3_1', 50)
self.assertTrue(self.ex1.confirm_trade_collector)
self.assertDictEqual(t2, self.ex1.confirm_trade_collector[0])
def test_confirm_modify(self):
'''
confirm_modify() impacts confirm_modify_collector
Check confirm modify collector empty, add a trade, check non-empty, verify dict equality
'''
m1 = dict(timestamp=7, trader='t5', order_id='t5_10', quantity=5, side='buy')
self.assertFalse(self.ex1.confirm_modify_collector)
self.ex1._confirm_modify(7, 'buy', 5, 't5_10')
self.assertTrue(self.ex1.confirm_modify_collector)
self.assertDictEqual(m1, self.ex1.confirm_modify_collector[0])
def test_process_order(self):
'''
process_order() impacts confirm_modify_collector, traded indicator, order_history,
_bid_book and _bid_book_prices or _ask_book and _ask_book_prices.
process_order() is a traffic manager. An order is either an add order or not. If it is an add order,
it is either priced to go directly to the book or is sent to match_trade (which is tested below). If it
is not an add order, it is either modified or cancelled. To test, we will add some buy and sell orders,
then test for trades, cancels and modifies. process_order() also resets some object collectors.
'''
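        # Dispatch summary (inferred from the assertions below, not from the
        # pyziabm source): 'add' orders either cross the book (marketable
        # price -> trade via match_trade) or rest in the book; 'cancel' maps
        # to _remove_order(); 'modify' maps to _modify_order() and appends to
        # confirm_modify_collector.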
self.q2_buy['quantity'] = 2
self.q2_sell['quantity'] = 2
self.assertEqual(len(self.ex1._ask_book_prices), 0)
self.assertEqual(len(self.ex1._bid_book_prices), 0)
self.assertFalse(self.ex1.confirm_modify_collector)
self.assertFalse(self.ex1.order_history)
self.assertFalse(self.ex1.traded)
# seed order book
self.ex1.add_order_to_book(self.q1_buy)
self.ex1.add_order_to_book(self.q1_sell)
# process new orders
self.ex1.process_order(self.q2_buy)
self.ex1.process_order(self.q2_sell)
self.assertEqual(len(self.ex1._ask_book_prices), 1)
self.assertEqual(len(self.ex1._bid_book_prices), 1)
self.assertEqual(len(self.ex1.order_history), 2)
# marketable sell takes out 1 share
q3_sell = {'order_id': 't3_1', 'timestamp': 5, 'type': 'add', 'quantity': 1, 'side': 'sell',
'price': 0}
self.ex1.process_order(q3_sell)
self.assertEqual(len(self.ex1.order_history), 3)
self.assertEqual(self.ex1._bid_book[50]['num_orders'], 1)
self.assertEqual(self.ex1._bid_book[50]['size'], 2)
self.assertTrue(self.ex1.traded)
# marketable buy takes out 1 share
q3_buy = {'order_id': 't3_2', 'timestamp': 5, 'type': 'add', 'quantity': 1, 'side': 'buy',
'price': 10000}
self.ex1.process_order(q3_buy)
self.assertEqual(len(self.ex1.order_history), 4)
self.assertEqual(self.ex1._ask_book[52]['num_orders'], 1)
self.assertEqual(self.ex1._ask_book[52]['size'], 2)
self.assertTrue(self.ex1.traded)
# add/cancel buy order
q4_buy = {'order_id': 't4_1', 'timestamp': 10, 'type': 'add', 'quantity': 1, 'side': 'buy',
'price': 48}
self.ex1.process_order(q4_buy)
self.assertEqual(len(self.ex1.order_history), 5)
self.assertEqual(len(self.ex1._bid_book_prices), 2)
self.assertEqual(self.ex1._bid_book[48]['num_orders'], 1)
self.assertEqual(self.ex1._bid_book[48]['size'], 1)
self.assertFalse(self.ex1.traded)
q4_cancel1 = {'order_id': 't4_1', 'timestamp': 10, 'type': 'cancel', 'quantity': 1, 'side': 'buy',
'price': 48}
self.ex1.process_order(q4_cancel1)
self.assertEqual(len(self.ex1.order_history), 6)
self.assertEqual(len(self.ex1._bid_book_prices), 1)
self.assertFalse(self.ex1.traded)
# add/cancel sell order
q4_sell = {'order_id': 't4_2', 'timestamp': 10, 'type': 'add', 'quantity': 1, 'side': 'sell',
'price': 54}
self.ex1.process_order(q4_sell)
self.assertEqual(len(self.ex1.order_history), 7)
self.assertEqual(len(self.ex1._ask_book_prices), 2)
self.assertEqual(self.ex1._ask_book[54]['num_orders'], 1)
self.assertEqual(self.ex1._ask_book[54]['size'], 1)
self.assertFalse(self.ex1.traded)
q4_cancel2 = {'order_id': 't4_2', 'timestamp': 10, 'type': 'cancel', 'quantity': 1, 'side': 'sell',
'price': 54}
self.ex1.process_order(q4_cancel2)
self.assertEqual(len(self.ex1.order_history), 8)
self.assertEqual(len(self.ex1._ask_book_prices), 1)
self.assertFalse(self.ex1.traded)
# add/modify buy order
q5_buy = {'order_id': 't5_1', 'timestamp': 10, 'type': 'add', 'quantity': 5, 'side': 'buy',
'price': 48}
self.ex1.process_order(q5_buy)
self.assertEqual(len(self.ex1.order_history), 9)
self.assertEqual(len(self.ex1._bid_book_prices), 2)
self.assertEqual(self.ex1._bid_book[48]['num_orders'], 1)
self.assertEqual(self.ex1._bid_book[48]['size'], 5)
q5_modify1 = {'order_id': 't5_1', 'timestamp': 10, 'type': 'modify', 'quantity': 2, 'side': 'buy',
'price': 48}
self.ex1.process_order(q5_modify1)
self.assertEqual(len(self.ex1.order_history), 10)
self.assertEqual(len(self.ex1._bid_book_prices), 2)
self.assertEqual(self.ex1._bid_book[48]['size'], 3)
self.assertEqual(self.ex1._bid_book[48]['orders']['t5_1']['quantity'], 3)
self.assertEqual(len(self.ex1.confirm_modify_collector), 1)
self.assertFalse(self.ex1.traded)
# add/modify sell order
q5_sell = {'order_id': 't5_1', 'timestamp': 10, 'type': 'add', 'quantity': 5, 'side': 'sell',
'price': 54}
self.ex1.process_order(q5_sell)
self.assertEqual(len(self.ex1.order_history), 11)
self.assertEqual(len(self.ex1._ask_book_prices), 2)
self.assertEqual(self.ex1._ask_book[54]['num_orders'], 1)
self.assertEqual(self.ex1._ask_book[54]['size'], 5)
q5_modify2 = {'order_id': 't5_1', 'timestamp': 10, 'type': 'modify', 'quantity': 2, 'side': 'sell',
'price': 54}
self.ex1.process_order(q5_modify2)
self.assertEqual(len(self.ex1.order_history), 12)
self.assertEqual(len(self.ex1._ask_book_prices), 2)
self.assertEqual(self.ex1._ask_book[54]['size'], 3)
self.assertEqual(self.ex1._ask_book[54]['orders']['t5_1']['quantity'], 3)
self.assertEqual(len(self.ex1.confirm_modify_collector), 1)
self.assertFalse(self.ex1.traded)
def test_match_trade_sell(self):
'''
An incoming order can:
1. take out part of an order,
2. take out an entire price level,
3. if priced, take out a price level and make a new inside market.
'''
# seed order book
self.ex1.add_order_to_book(self.q1_buy)
self.ex1.add_order_to_book(self.q1_sell)
# process new orders
self.ex1.process_order(self.q2_buy)
self.ex1.process_order(self.q2_sell)
self.ex1.process_order(self.q3_buy)
self.ex1.process_order(self.q3_sell)
self.ex1.process_order(self.q4_buy)
self.ex1.process_order(self.q4_sell)
# The book: bids: 2@50, 3@49, 3@47 ; asks: 2@52, 3@53, 3@55
self.assertEqual(self.ex1._bid_book[47]['size'], 3)
self.assertEqual(self.ex1._bid_book[49]['size'], 3)
self.assertEqual(self.ex1._bid_book[50]['size'], 2)
self.assertEqual(self.ex1._ask_book[52]['size'], 2)
self.assertEqual(self.ex1._ask_book[53]['size'], 3)
self.assertEqual(self.ex1._ask_book[55]['size'], 3)
#self.assertFalse(self.ex1.sip_collector)
# market sell order takes out part of first best bid
q1 = {'order_id': 't100_1', 'timestamp': 10, 'type': 'add', 'quantity': 1, 'side': 'sell',
'price': 0}
self.ex1.process_order(q1)
self.assertEqual(self.ex1._bid_book[50]['size'], 1)
self.assertTrue(50 in self.ex1._bid_book_prices)
self.assertEqual(self.ex1._bid_book[49]['size'], 3)
self.assertEqual(self.ex1._bid_book[47]['size'], 3)
self.assertEqual(self.ex1._bid_book[50]['orders'][self.ex1._bid_book[50]['order_ids'][0]]['quantity'], 1)
#self.assertEqual(len(self.ex1.sip_collector), 1)
# market sell order takes out remainder first best bid and all of the next level
self.assertEqual(len(self.ex1._bid_book_prices), 3)
q2 = {'order_id': 't100_2', 'timestamp': 11, 'type': 'add', 'quantity': 4, 'side': 'sell',
'price': 0}
self.ex1.process_order(q2)
self.assertEqual(len(self.ex1._bid_book_prices), 1)
self.assertFalse(50 in self.ex1._bid_book_prices)
self.assertFalse(49 in self.ex1._bid_book_prices)
self.assertTrue(47 in self.ex1._bid_book_prices)
#self.assertEqual(len(self.ex1.sip_collector), 3)
# make new market
q3 = {'order_id': 't101_1', 'timestamp': 12, 'type': 'add', 'quantity': 2, 'side': 'buy',
'price': 48}
q4 = {'order_id': 't102_1', 'timestamp': 13, 'type': 'add', 'quantity': 3, 'side': 'sell',
'price': 48}
self.ex1.process_order(q3)
self.assertEqual(len(self.ex1._bid_book_prices), 2)
self.assertTrue(48 in self.ex1._bid_book_prices)
self.assertTrue(47 in self.ex1._bid_book_prices)
self.assertEqual(self.ex1._bid_book_prices[-1], 48)
self.assertEqual(self.ex1._bid_book_prices[-2], 47)
# sip_collector does not reset until new trade at new time
#self.assertEqual(len(self.ex1.sip_collector), 3)
self.ex1.process_order(q4)
self.assertEqual(len(self.ex1._bid_book_prices), 1)
self.assertFalse(48 in self.ex1._bid_book_prices)
self.assertTrue(47 in self.ex1._bid_book_prices)
self.assertEqual(len(self.ex1._ask_book_prices), 4)
self.assertTrue(48 in self.ex1._ask_book_prices)
self.assertEqual(self.ex1._ask_book_prices[0], 48)
self.assertEqual(self.ex1._bid_book_prices[-1], 47)
#self.assertEqual(len(self.ex1.sip_collector), 1)
def test_match_trade_buy(self):
'''
An incoming order can:
1. take out part of an order,
2. take out an entire price level,
3. if priced, take out a price level and make a new inside market.
'''
# seed order book
self.ex1.add_order_to_book(self.q1_buy)
self.ex1.add_order_to_book(self.q1_sell)
# process new orders
self.ex1.process_order(self.q2_buy)
self.ex1.process_order(self.q2_sell)
self.ex1.process_order(self.q3_buy)
self.ex1.process_order(self.q3_sell)
self.ex1.process_order(self.q4_buy)
self.ex1.process_order(self.q4_sell)
# The book: bids: 2@50, 3@49, 3@47 ; asks: 2@52, 3@53, 3@55
self.assertEqual(self.ex1._bid_book[47]['size'], 3)
self.assertEqual(self.ex1._bid_book[49]['size'], 3)
self.assertEqual(self.ex1._bid_book[50]['size'], 2)
self.assertEqual(self.ex1._ask_book[52]['size'], 2)
self.assertEqual(self.ex1._ask_book[53]['size'], 3)
self.assertEqual(self.ex1._ask_book[55]['size'], 3)
# market buy order takes out part of first best ask
q1 = {'order_id': 't100_1', 'timestamp': 10, 'type': 'add', 'quantity': 1, 'side': 'buy',
'price': 100000}
self.ex1.process_order(q1)
self.assertEqual(self.ex1._ask_book[52]['size'], 1)
self.assertTrue(52 in self.ex1._ask_book_prices)
self.assertEqual(self.ex1._ask_book[53]['size'], 3)
self.assertEqual(self.ex1._ask_book[55]['size'], 3)
self.assertEqual(self.ex1._ask_book[52]['orders'][self.ex1._ask_book[52]['order_ids'][0]]['quantity'], 1)
# market buy order takes out remainder first best ask and all of the next level
self.assertEqual(len(self.ex1._ask_book_prices), 3)
q2 = {'order_id': 't100_2', 'timestamp': 11, 'type': 'add', 'quantity': 4, 'side': 'buy',
'price': 100000}
self.ex1.process_order(q2)
self.assertEqual(len(self.ex1._ask_book_prices), 1)
self.assertFalse(52 in self.ex1._ask_book_prices)
self.assertFalse(53 in self.ex1._ask_book_prices)
self.assertTrue(55 in self.ex1._ask_book_prices)
# make new market
q3 = {'order_id': 't101_1', 'timestamp': 12, 'type': 'add', 'quantity': 2, 'side': 'sell',
'price': 54}
q4 = {'order_id': 't102_1', 'timestamp': 13, 'type': 'add', 'quantity': 3, 'side': 'buy',
'price': 54}
self.ex1.process_order(q3)
self.assertEqual(len(self.ex1._ask_book_prices), 2)
self.assertTrue(55 in self.ex1._ask_book_prices)
self.assertTrue(54 in self.ex1._ask_book_prices)
self.assertEqual(self.ex1._ask_book_prices[0], 54)
self.assertEqual(self.ex1._ask_book_prices[1], 55)
self.ex1.process_order(q4)
self.assertEqual(len(self.ex1._ask_book_prices), 1)
self.assertFalse(54 in self.ex1._ask_book_prices)
self.assertTrue(55 in self.ex1._ask_book_prices)
self.assertEqual(len(self.ex1._bid_book_prices), 4)
self.assertTrue(54 in self.ex1._bid_book_prices)
self.assertEqual(self.ex1._ask_book_prices[0], 55)
self.assertEqual(self.ex1._bid_book_prices[-1], 54)
def test_report_top_of_book(self):
'''
At setup(), top of book has 2 to sell at 52 and 2 to buy at 50
at time = 3
'''
self.ex1.add_order_to_book(self.q1_buy)
self.ex1.add_order_to_book(self.q2_buy)
self.ex1.add_order_to_book(self.q1_sell)
self.ex1.add_order_to_book(self.q2_sell)
tob_check = {'timestamp': 5, 'best_bid': 50, 'best_ask': 52, 'bid_size': 2, 'ask_size': 2}
self.ex1.report_top_of_book(5)
self.assertDictEqual(self.ex1._sip_collector[0], tob_check)
def test_market_collapse(self):
'''
        After seeding and processing the orders below, total bid size is 8 and
        total ask size is 8; a trade for 8 or more should collapse the market
'''
print('Market Collapse Tests to stdout:\n')
# seed order book
self.ex1.add_order_to_book(self.q1_buy)
self.ex1.add_order_to_book(self.q1_sell)
# process new orders
self.ex1.process_order(self.q2_buy)
self.ex1.process_order(self.q2_sell)
self.ex1.process_order(self.q3_buy)
self.ex1.process_order(self.q3_sell)
self.ex1.process_order(self.q4_buy)
self.ex1.process_order(self.q4_sell)
# The book: bids: 2@50, 3@49, 3@47 ; asks: 2@52, 3@53, 3@55
# market buy order takes out part of the asks: no collapse
q1 = {'order_id': 't100_1', 'timestamp': 10, 'type': 'add', 'quantity': 4, 'side': 'buy',
'price': 100000}
self.ex1.process_order(q1)
# next market buy order takes out the asks: market collapse
q2 = {'order_id': 't100_2', 'timestamp': 10, 'type': 'add', 'quantity': 5, 'side': 'buy',
'price': 100000}
self.ex1.process_order(q2)
# market sell order takes out part of the bids: no collapse
q3 = {'order_id': 't100_3', 'timestamp': 10, 'type': 'add', 'quantity': 4, 'side': 'sell',
'price': 0}
self.ex1.process_order(q3)
        # next market sell order takes out the bids: market collapse
q4 = {'order_id': 't100_4', 'timestamp': 10, 'type': 'add', 'quantity': 5, 'side': 'sell',
'price': 0}
self.ex1.process_order(q4)
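# Runner added for convenience (not part of the original test module):
if __name__ == '__main__':
    unittest.main()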
| 52.345029 | 113 | 0.625219 | 3,863 | 26,853 | 4.101734 | 0.054103 | 0.127674 | 0.053014 | 0.074219 | 0.831619 | 0.802966 | 0.771537 | 0.714673 | 0.63326 | 0.585547 | 0 | 0.06179 | 0.226753 | 26,853 | 512 | 114 | 52.447266 | 0.70131 | 0.147581 | 0 | 0.568365 | 0 | 0 | 0.11824 | 0 | 0 | 0 | 0 | 0 | 0.541555 | 1 | 0.034853 | false | 0 | 0.005362 | 0 | 0.042895 | 0.002681 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
| eae89428953de91fc2c529955a2c6bb7203a3b5b | 69 | py | Python | neural-networks-and-deep-learning/mnist/src/sigmoid.py | jitendra8911/Machine-Learning | 9f2770ce8d9c427c941462b774f99a20ea5c2078 | ["MIT"] | null | null | null | neural-networks-and-deep-learning/mnist/src/sigmoid.py | jitendra8911/Machine-Learning | 9f2770ce8d9c427c941462b774f99a20ea5c2078 | ["MIT"] | null | null | null | neural-networks-and-deep-learning/mnist/src/sigmoid.py | jitendra8911/Machine-Learning | 9f2770ce8d9c427c941462b774f99a20ea5c2078 | ["MIT"] | null | null | null |
import numpy as np
def sigmoid(z):
    return 1.0/(1.0+np.exp(-z))
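# Illustrative addition (not in the original file): sigmoid is vectorized via
# np.exp, and its derivative factors as s * (1 - s), peaking at 0.25 at z = 0.
def sigmoid_prime(z):
    # Derivative of the sigmoid, written in terms of sigmoid itself.
    s = sigmoid(z)
    return s * (1.0 - s)
if __name__ == "__main__":
    z = np.array([-2.0, 0.0, 2.0])
    print(sigmoid(z))        # approx [0.1192, 0.5, 0.8808]
    print(sigmoid_prime(z))  # approx [0.1050, 0.25, 0.1050]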
| 11.5 | 31 | 0.608696 | 15 | 69 | 2.8 | 0.733333 | 0.095238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.072727 | 0.202899 | 69 | 5 | 32 | 13.8 | 0.690909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.333333 | false | 0 | 0.333333 | 0.333333 | 1 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 0 | 5 |
| eaeb4a93b5a68227dbf4c2d02ee58d0064891d97 | 11,650 | py | Python | Code/ModelSelection/datafold-master/datafold/pcfold/tests/test_distance.py | sazio/SSVEP_IEEE_SMC_2021 | 8cae603d04dd8d623f01cf1e8979cf8ac9e0809f | ["MIT"] | 1 | 2022-02-21T01:58:12.000Z | 2022-02-21T01:58:12.000Z | Code/ModelSelection/datafold-master/datafold/pcfold/tests/test_distance.py | sazio/SSVEP_IEEE_SMC_2021 | 8cae603d04dd8d623f01cf1e8979cf8ac9e0809f | ["MIT"] | null | null | null | Code/ModelSelection/datafold-master/datafold/pcfold/tests/test_distance.py | sazio/SSVEP_IEEE_SMC_2021 | 8cae603d04dd8d623f01cf1e8979cf8ac9e0809f | ["MIT"] | null | null | null |
#!/usr/bin/env python
import unittest
import warnings
import numpy as np
import numpy.testing as nptest
import scipy
import scipy.sparse
from scipy.sparse.base import SparseEfficiencyWarning
from scipy.spatial.distance import cdist, pdist, squareform
from datafold.pcfold.distance import (
_all_available_distance_algorithm,
_ensure_kmin_nearest_neighbor,
compute_distance_matrix,
)
from datafold.utils.general import is_symmetric_matrix
class TestDistAlgorithms(unittest.TestCase):
def setUp(self) -> None:
self.data_X = np.random.rand(500, 100)
self.data_Y = np.random.rand(300, 100)
self.algos = _all_available_distance_algorithm()
def test_pdist_dense(self):
backend_options = {}
expected = squareform(pdist(self.data_X))
for metric in ["euclidean", "sqeuclidean"]:
if metric == "sqeuclidean":
expected = np.square(expected)
for algo in self.algos:
try:
actual = compute_distance_matrix(
X=self.data_X,
metric=metric,
cut_off=None,
kmin=0,
backend=algo.backend_name,
**backend_options,
)
self.assertIsInstance(actual, np.ndarray)
nptest.assert_allclose(actual, expected, atol=1e-14, rtol=1e-14)
except AssertionError as e:
print(f"{algo.backend_name} failed for metric {metric}")
raise e
def test_cdist_dense(self):
backend_options = {}
        # NOTE: first Y and then X because the Y (query points) should be in
        # rows and the X (reference points) in columns. This turned out to be a
        # better handling for equations (e.g. in geometric harmonics).
expected = cdist(self.data_Y, self.data_X)
for metric in ["euclidean", "sqeuclidean"]:
if metric == "sqeuclidean":
expected = np.square(expected)
for algo in self.algos:
try:
actual = compute_distance_matrix(
X=self.data_X,
Y=self.data_Y,
metric=metric,
cut_off=None,
kmin=0,
backend=algo.backend_name,
**backend_options,
)
self.assertIsInstance(actual, np.ndarray)
nptest.assert_allclose(actual, expected, atol=1e-15, rtol=1e-14)
except Exception as e:
print(f"{algo.backend_name} failed for metric {metric}")
raise e
def test_pdist_sparse(self):
backend_options = {}
expected = squareform(pdist(self.data_X))
cut_off = float(np.median(expected))
expected[expected > cut_off] = 0
for metric in ["euclidean", "sqeuclidean"]:
if metric == "sqeuclidean":
expected = np.square(expected)
for algo in self.algos:
try:
actual = compute_distance_matrix(
X=self.data_X,
metric=metric,
cut_off=cut_off,
kmin=0,
backend=algo.backend_name,
**backend_options,
)
self.assertIsInstance(actual, scipy.sparse.csr_matrix)
nptest.assert_allclose(
actual.toarray(), expected, atol=1e-14, rtol=1e-14
)
self.assertTrue(is_symmetric_matrix(actual, tol=0))
except Exception as e:
print(f"{algo.backend_name} failed for metric {metric}")
raise e
def test_cdist_sparse(self):
backend_options = {}
# See also comment in 'test_cdist_dense'
expected = cdist(self.data_Y, self.data_X)
cut_off = float(np.median(expected))
expected[expected > cut_off] = 0
for metric in ["euclidean", "sqeuclidean"]:
if metric == "sqeuclidean":
expected = np.square(expected)
for algo in self.algos:
try:
actual = compute_distance_matrix(
X=self.data_X,
Y=self.data_Y,
metric=metric,
cut_off=cut_off,
kmin=0,
backend=algo.backend_name,
**backend_options,
)
self.assertIsInstance(actual, scipy.sparse.csr_matrix)
nptest.assert_allclose(
actual.toarray(), expected, atol=1e-15, rtol=1e-14
)
except Exception as e:
print(f"{algo.backend_name} failed with metric {metric}")
raise e
def test_pdist_sparse_zeros(self):
backend_options = {}
expected = squareform(pdist(self.data_X))
cut_off = float(np.median(expected))
with warnings.catch_warnings():
warnings.simplefilter("ignore", SparseEfficiencyWarning)
expected[expected > cut_off] = 0
expected = scipy.sparse.csr_matrix(expected)
expected.eliminate_zeros()
expected.setdiag(0)
expected.sort_indices()
for metric in ["euclidean", "sqeuclidean"]:
if metric == "sqeuclidean":
expected.data = np.square(expected.data)
for algo in self.algos:
try:
actual = compute_distance_matrix(
X=self.data_X,
metric=metric,
cut_off=cut_off,
kmin=0,
backend=algo.backend_name,
**backend_options,
)
self.assertTrue(is_symmetric_matrix(actual, tol=0))
self.assertIsInstance(actual, scipy.sparse.csr_matrix)
nptest.assert_allclose(
expected.data, actual.data, atol=1e-14, rtol=1e-14
)
except Exception as e:
print(f"{algo.backend_name} failed for metric {metric}")
raise e
def test_cdist_sparse_duplicate_zeros(self):
backend_options = {}
data_Y = self.data_Y.copy() # make copy to manipulate values
data_Y[0:3, :] = self.data_X[0:3, :] # make duplicate values
expected = cdist(data_Y, self.data_X)
cut_off = float(np.median(expected))
expected[expected > cut_off] = 0
with warnings.catch_warnings():
warnings.simplefilter("ignore", SparseEfficiencyWarning)
expected = scipy.sparse.csr_matrix(expected)
expected[0, 0] = 0
expected[1, 1] = 0
expected[2, 2] = 0
expected.sort_indices()
for metric in ["euclidean", "sqeuclidean"]:
if metric == "sqeuclidean":
expected.data = np.square(expected.data)
for algo in self.algos:
try:
actual = compute_distance_matrix(
X=self.data_X,
Y=data_Y,
metric=metric,
cut_off=cut_off,
backend=algo.backend_name,
**backend_options,
)
self.assertIsInstance(actual, scipy.sparse.csr_matrix)
nptest.assert_allclose(
actual.data, expected.data, atol=1e-15, rtol=1e-14
)
except Exception as e:
print(f"{algo.backend_name} failed for metric {metric}")
raise e
def test_ensure_kmin_nearest_neighbours_pdist(self):
print("SUPRESSED SPARSITY WARNINGS. TODO: See #93")
warnings.filterwarnings("ignore", category=SparseEfficiencyWarning)
for quantile in [0.1, 0.2, 0.3, 0.7, 0.8, 0.9]:
for kmin in np.linspace(1, self.data_X.shape[1], 5).astype(int):
cut_off = np.quantile(pdist(self.data_X), q=quantile)
                # Most distances fall outside the cut-off, so the matrix is
                # nearly empty; only the diagonal zeros are stored explicitly
pdist_distance_matrix = compute_distance_matrix(
self.data_X, cut_off=cut_off
)
distance_matrix = _ensure_kmin_nearest_neighbor(
self.data_X,
Y=None,
metric="euclidean",
kmin=kmin,
distance_matrix=pdist_distance_matrix,
)
try:
self.assertTrue((distance_matrix.getnnz(axis=1) >= kmin).all())
self.assertTrue(is_symmetric_matrix(distance_matrix))
rows, columns = distance_matrix.nonzero()
actual = scipy.sparse.csr_matrix(
(
pdist_distance_matrix[rows, columns].A1,
(rows, columns),
),
shape=distance_matrix.shape,
)
self.assertTrue(is_symmetric_matrix(actual))
nptest.assert_array_equal(
actual.toarray(),
distance_matrix.toarray(),
)
except AssertionError as e:
print(f"Failed for quantile={quantile} and kmin={kmin}")
raise e
def test_ensure_kmin_nearest_neighbours_cdist(self):
print("SUPRESSED SPARSITY WARNINGS. TODO: See #93")
warnings.filterwarnings("ignore", category=SparseEfficiencyWarning)
for quantile in [0.1, 0.2, 0.3, 0.7, 0.8, 0.9]:
for kmin in np.linspace(1, self.data_X.shape[1], 5).astype(int):
cut_off = np.quantile(pdist(self.data_X), q=quantile)
                # Most distances fall outside the cut-off, so the matrix is
                # nearly empty; only the diagonal zeros are stored explicitly
cdist_distance_matrix = compute_distance_matrix(
self.data_X, Y=self.data_Y, cut_off=cut_off
)
# TODO: resolve SparsityWarning, see issue #93
distance_matrix = _ensure_kmin_nearest_neighbor(
self.data_X,
Y=self.data_Y,
metric="euclidean",
kmin=kmin,
distance_matrix=cdist_distance_matrix,
)
try:
rows, columns = distance_matrix.nonzero()
actual = scipy.sparse.csr_matrix(
(
cdist_distance_matrix[rows, columns].A1,
(rows, columns),
),
shape=distance_matrix.shape,
)
nptest.assert_array_equal(
actual.toarray(), distance_matrix.toarray()
)
self.assertTrue((distance_matrix.getnnz(axis=1) >= kmin).all())
except AssertionError as e:
print(f"Failed for quantile={quantile} and kmin={kmin}")
raise e
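# Runner added for convenience (not part of the original test module):
if __name__ == "__main__":
    unittest.main()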
| 36.180124 | 89 | 0.505579 | 1,148 | 11,650 | 4.952091 | 0.148084 | 0.042216 | 0.034829 | 0.012665 | 0.803166 | 0.794019 | 0.773087 | 0.751275 | 0.688478 | 0.616535 | 0 | 0.016496 | 0.412017 | 11,650 | 321 | 90 | 36.292835 | 0.813431 | 0.041631 | 0 | 0.688259 | 0 | 0 | 0.061054 | 0 | 0 | 0 | 0 | 0.003115 | 0.093117 | 1 | 0.036437 | false | 0 | 0.040486 | 0 | 0.080972 | 0.040486 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 |
| dc2d538cffa6e7812c3a0eaf4ec2ed79d7c98320 | 40 | py | Python | src/server_design/algorithms/compressor/designSolutions/sol_600.py | robertpardillo/Funnel | f45e419f55e085bbb95e17c47b4c94a7c625ba9b | ["MIT"] | 1 | 2021-05-18T16:10:49.000Z | 2021-05-18T16:10:49.000Z | src/server_design/algorithms/compressor/designSolutions/sol_600.py | robertpardillo/Funnel | f45e419f55e085bbb95e17c47b4c94a7c625ba9b | ["MIT"] | null | null | null | src/server_design/algorithms/compressor/designSolutions/sol_600.py | robertpardillo/Funnel | f45e419f55e085bbb95e17c47b4c94a7c625ba9b | ["MIT"] | null | null | null |
def sol600(design_parameters):
    pass
| 20 | 31 | 0.75 | 5 | 40 | 5.8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.090909 | 0.175 | 40 | 2 | 32 | 20 | 0.787879 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.5 | false | 0.5 | 0 | 0 | 0.5 | 0 | 1 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 5 |
| dc2ffc94a7cdc227630c7f8f6be61fff02127643 | 170 | py | Python | (a+b)2.py | keerthana1502/python_practice | 8c0499e014826af78f9a88730551ace3fa79686d | ["bzip2-1.0.6"] | null | null | null | (a+b)2.py | keerthana1502/python_practice | 8c0499e014826af78f9a88730551ace3fa79686d | ["bzip2-1.0.6"] | null | null | null | (a+b)2.py | keerthana1502/python_practice | 8c0499e014826af78f9a88730551ace3fa79686d | ["bzip2-1.0.6"] | null | null | null |
def formula():
    a = int(input("Enter a "))
    b = int(input("Enter b "))
    print((for1(a, b)) * (for1(a, b)))
    print(for1(a, b))
def for1(a, b):
    return (a + b)
formula()
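# Example run (illustrative): entering a = 2 and b = 3 prints 25, i.e.
# (a+b)**2, followed by 5, i.e. (a+b).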
| 18.888889 | 34 | 0.541176 | 31 | 170 | 2.967742 | 0.322581 | 0.130435 | 0.26087 | 0.23913 | 0.26087 | 0 | 0 | 0 | 0 | 0 | 0 | 0.029412 | 0.2 | 170 | 8 | 35 | 21.25 | 0.647059 | 0 | 0 | 0 | 0 | 0 | 0.094118 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | false | 0 | 0 | 0.125 | 0.25 | 0.25 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 5 |
| dc8412de9117f48342eb9c7b616f1bda7d1ed248 | 162 | py | Python | dynamic_initial_data/apps.py | wesleykendall/django-dynamic-initial-data | 22764dd1e8d6be92b54909604101890513a8379f | ["MIT"] | 9 | 2015-05-13T17:27:41.000Z | 2021-01-18T01:06:16.000Z | dynamic_initial_data/apps.py | wesleykendall/django-dynamic-initial-data | 22764dd1e8d6be92b54909604101890513a8379f | ["MIT"] | 9 | 2015-03-26T20:51:49.000Z | 2021-04-12T16:21:18.000Z | dynamic_initial_data/apps.py | wesleykendall/django-dynamic-initial-data | 22764dd1e8d6be92b54909604101890513a8379f | ["MIT"] | 9 | 2015-03-26T19:21:02.000Z | 2019-06-06T18:11:24.000Z |
from django.apps import AppConfig
class DynamicInitialDataConfig(AppConfig):
    name = 'dynamic_initial_data'
    verbose_name = 'Django Dynamic Initial Data'
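# Usage note (added; not in the original file): listing 'dynamic_initial_data'
# in INSTALLED_APPS makes Django load this AppConfig, and verbose_name is the
# label the admin displays for the app.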
| 23.142857 | 48 | 0.783951 | 18 | 162 | 6.888889 | 0.666667 | 0.225806 | 0.290323 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.154321 | 162 | 6 | 49 | 27 | 0.905109 | 0 | 0 | 0 | 0 | 0 | 0.290123 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 5 |
| dc89cbb03ca8101fdfda78a7fe75dce383a9fcf5 | 182 | py | Python | pydirector/pdnetwork.py | skyline75489/pythondirector | 47cffd9ce4581d7587a1f8ff91a20d453f96d5d8 | ["Unlicense"] | null | null | null | pydirector/pdnetwork.py | skyline75489/pythondirector | 47cffd9ce4581d7587a1f8ff91a20d453f96d5d8 | ["Unlicense"] | null | null | null | pydirector/pdnetwork.py | skyline75489/pythondirector | 47cffd9ce4581d7587a1f8ff91a20d453f96d5d8 | ["Unlicense"] | null | null | null |
import pdlogging
try:
    from pdnetworktwisted import *
except ImportError:
    pdlogging.log("no twisted available - falling back to asyncore")
    from pdnetworkasyncore import *
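# Equivalent explicit form of the same fallback (illustrative sketch only; the
# star imports above are kept because callers rely on the re-exported names):
# try:
#     import pdnetworktwisted as _impl
# except ImportError:
#     import pdnetworkasyncore as _impl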
| 26 | 68 | 0.763736 | 20 | 182 | 6.95 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.181319 | 182 | 6 | 69 | 30.333333 | 0.932886 | 0 | 0 | 0 | 0 | 0 | 0.258242 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.666667 | 0 | 0.666667 | 0 | 1 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 5 |
| dca8c4e34c5019f4bb145dc197ec7370b23bd6d0 | 14,871 | py | Python | python/medifor/v1/pipeline_pb2_grpc.py | lanl/medifor | a20e71d1f0e7473a29c4ed5323c7d9368458d5ba | ["Apache-2.0"] | null | null | null | python/medifor/v1/pipeline_pb2_grpc.py | lanl/medifor | a20e71d1f0e7473a29c4ed5323c7d9368458d5ba | ["Apache-2.0"] | null | null | null | python/medifor/v1/pipeline_pb2_grpc.py | lanl/medifor | a20e71d1f0e7473a29c4ed5323c7d9368458d5ba | ["Apache-2.0"] | null | null | null |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc

from medifor.v1 import analytic_pb2 as medifor_dot_v1_dot_analytic__pb2
from medifor.v1 import pipeline_pb2 as medifor_dot_v1_dot_pipeline__pb2


class PipelineStub(object):
    # missing associated documentation comment in .proto file
    pass

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Detect = channel.unary_unary(
            '/mediforproto.Pipeline/Detect',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfo.FromString,
        )
        self.GetDetectionInfo = channel.unary_unary(
            '/mediforproto.Pipeline/GetDetectionInfo',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfoRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfo.FromString,
        )
        self.GetDetectionList = channel.unary_unary(
            '/mediforproto.Pipeline/GetDetectionList',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionListRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionList.FromString,
        )
        self.DeleteDetection = channel.unary_unary(
            '/mediforproto.Pipeline/DeleteDetection',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.DeleteDetectionRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_analytic__pb2.Empty.FromString,
        )
        self.Fuse = channel.unary_unary(
            '/mediforproto.Pipeline/Fuse',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.FusionRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.FusionInfo.FromString,
        )
        self.FuseByID = channel.unary_unary(
            '/mediforproto.Pipeline/FuseByID',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.FusionRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.FusionInfo.FromString,
        )
        self.FuseAllIDs = channel.unary_unary(
            '/mediforproto.Pipeline/FuseAllIDs',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.FuseAllIDsRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.FuseAllIDsResponse.FromString,
        )
        self.UpdateDetectionTags = channel.unary_unary(
            '/mediforproto.Pipeline/UpdateDetectionTags',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.UpdateDetectionTagsRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfo.FromString,
        )
        self.GetDetectionTagInfo = channel.unary_unary(
            '/mediforproto.Pipeline/GetDetectionTagInfo',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionTagInfoRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionTagInfo.FromString,
        )
        self.GetAnalyticMeta = channel.unary_unary(
            '/mediforproto.Pipeline/GetAnalyticMeta',
            request_serializer=medifor_dot_v1_dot_analytic__pb2.Empty.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.AnalyticList.FromString,
        )
        self.ListDetections = channel.unary_unary(
            '/mediforproto.Pipeline/ListDetections',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.ListDetectionsRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionList.FromString,
        )
        self.UpdateDetectionMetadata = channel.unary_unary(
            '/mediforproto.Pipeline/UpdateDetectionMetadata',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.UpdateDetectionMetadataRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfo.FromString,
        )
        self.GetAnalyticStats = channel.unary_unary(
            '/mediforproto.Pipeline/GetAnalyticStats',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.GetAnalyticStatsRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.GetAnalyticStatsResponse.FromString,
        )
        self.GetHistogram = channel.unary_unary(
            '/mediforproto.Pipeline/GetHistogram',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.GetHistogramRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.GetHistogramResponse.FromString,
        )
        self.DeleteFailedAnalytics = channel.unary_unary(
            '/mediforproto.Pipeline/DeleteFailedAnalytics',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.DeleteFailedAnalyticsRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.DeleteFailedAnalyticsResponse.FromString,
        )
        self.GetAnalyticsWithScores = channel.unary_unary(
            '/mediforproto.Pipeline/GetAnalyticsWithScores',
            request_serializer=medifor_dot_v1_dot_pipeline__pb2.GetAnalyticsWithScoresRequest.SerializeToString,
            response_deserializer=medifor_dot_v1_dot_pipeline__pb2.GetAnalyticsWithScoresResponse.FromString,
        )


class PipelineServicer(object):
    # missing associated documentation comment in .proto file
    pass

    def Detect(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetDetectionInfo(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetDetectionList(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteDetection(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def Fuse(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def FuseByID(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def FuseAllIDs(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdateDetectionTags(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetDetectionTagInfo(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAnalyticMeta(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def ListDetections(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def UpdateDetectionMetadata(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAnalyticStats(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetHistogram(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def DeleteFailedAnalytics(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')

    def GetAnalyticsWithScores(self, request, context):
        # missing associated documentation comment in .proto file
        pass
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_PipelineServicer_to_server(servicer, server):
    rpc_method_handlers = {
        'Detect': grpc.unary_unary_rpc_method_handler(
            servicer.Detect,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfo.SerializeToString,
        ),
        'GetDetectionInfo': grpc.unary_unary_rpc_method_handler(
            servicer.GetDetectionInfo,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfoRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfo.SerializeToString,
        ),
        'GetDetectionList': grpc.unary_unary_rpc_method_handler(
            servicer.GetDetectionList,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionListRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionList.SerializeToString,
        ),
        'DeleteDetection': grpc.unary_unary_rpc_method_handler(
            servicer.DeleteDetection,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.DeleteDetectionRequest.FromString,
            response_serializer=medifor_dot_v1_dot_analytic__pb2.Empty.SerializeToString,
        ),
        'Fuse': grpc.unary_unary_rpc_method_handler(
            servicer.Fuse,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.FusionRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.FusionInfo.SerializeToString,
        ),
        'FuseByID': grpc.unary_unary_rpc_method_handler(
            servicer.FuseByID,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.FusionRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.FusionInfo.SerializeToString,
        ),
        'FuseAllIDs': grpc.unary_unary_rpc_method_handler(
            servicer.FuseAllIDs,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.FuseAllIDsRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.FuseAllIDsResponse.SerializeToString,
        ),
        'UpdateDetectionTags': grpc.unary_unary_rpc_method_handler(
            servicer.UpdateDetectionTags,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.UpdateDetectionTagsRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfo.SerializeToString,
        ),
        'GetDetectionTagInfo': grpc.unary_unary_rpc_method_handler(
            servicer.GetDetectionTagInfo,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.DetectionTagInfoRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionTagInfo.SerializeToString,
        ),
        'GetAnalyticMeta': grpc.unary_unary_rpc_method_handler(
            servicer.GetAnalyticMeta,
            request_deserializer=medifor_dot_v1_dot_analytic__pb2.Empty.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.AnalyticList.SerializeToString,
        ),
        'ListDetections': grpc.unary_unary_rpc_method_handler(
            servicer.ListDetections,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.ListDetectionsRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionList.SerializeToString,
        ),
        'UpdateDetectionMetadata': grpc.unary_unary_rpc_method_handler(
            servicer.UpdateDetectionMetadata,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.UpdateDetectionMetadataRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.DetectionInfo.SerializeToString,
        ),
        'GetAnalyticStats': grpc.unary_unary_rpc_method_handler(
            servicer.GetAnalyticStats,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.GetAnalyticStatsRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.GetAnalyticStatsResponse.SerializeToString,
        ),
        'GetHistogram': grpc.unary_unary_rpc_method_handler(
            servicer.GetHistogram,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.GetHistogramRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.GetHistogramResponse.SerializeToString,
        ),
        'DeleteFailedAnalytics': grpc.unary_unary_rpc_method_handler(
            servicer.DeleteFailedAnalytics,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.DeleteFailedAnalyticsRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.DeleteFailedAnalyticsResponse.SerializeToString,
        ),
        'GetAnalyticsWithScores': grpc.unary_unary_rpc_method_handler(
            servicer.GetAnalyticsWithScores,
            request_deserializer=medifor_dot_v1_dot_pipeline__pb2.GetAnalyticsWithScoresRequest.FromString,
            response_serializer=medifor_dot_v1_dot_pipeline__pb2.GetAnalyticsWithScoresResponse.SerializeToString,
        ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
        'mediforproto.Pipeline', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
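
For context, a minimal sketch of how these generated classes are typically wired together on each side of the channel. The address, thread-pool size, servicer subclass, and choice of RPC are illustrative assumptions, not part of the medifor codebase:

from concurrent import futures
import grpc


class EchoPipeline(PipelineServicer):
    """Hypothetical servicer that overrides a single method."""

    def GetDetectionInfo(self, request, context):
        # Return an empty message just to complete the RPC.
        return medifor_dot_v1_dot_pipeline__pb2.DetectionInfo()


def serve(address='localhost:50051'):
    # Server side: register the servicer's handlers and listen.
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
    add_PipelineServicer_to_server(EchoPipeline(), server)
    server.add_insecure_port(address)
    server.start()
    return server


def query(address='localhost:50051'):
    # Client side: the stub exposes each RPC as a callable attribute.
    with grpc.insecure_channel(address) as channel:
        stub = PipelineStub(channel)
        return stub.GetDetectionInfo(
            medifor_dot_v1_dot_pipeline__pb2.DetectionInfoRequest())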
| 49.079208
| 112
| 0.779705
| 1,492
| 14,871
| 7.373995
| 0.071046
| 0.059989
| 0.071987
| 0.089984
| 0.837393
| 0.783585
| 0.776677
| 0.636975
| 0.532994
| 0.440829
| 0
| 0.010802
| 0.153386
| 14,871
| 302
| 113
| 49.241722
| 0.863066
| 0.075516
| 0
| 0.401575
| 1
| 0
| 0.116476
| 0.050397
| 0
| 0
| 0
| 0
| 0
| 1
| 0.070866
| false
| 0.070866
| 0.011811
| 0
| 0.090551
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 5
|
f4bcebf3e4c54d50058c5c2dcf5af491fbc618b3
| 66
|
py
|
Python
|
sequencer/__init__.py
|
dalya/Sequencer
|
53a8920f1a19f4399338613c2b7b747dac545502
|
[
"MIT"
] | 100
|
2020-03-05T20:11:57.000Z
|
2022-03-28T15:28:48.000Z
|
sequencer/__init__.py
|
dalya/Sequencer
|
53a8920f1a19f4399338613c2b7b747dac545502
|
[
"MIT"
] | 2
|
2020-06-21T20:05:56.000Z
|
2022-03-28T14:11:17.000Z
|
sequencer/__init__.py
|
dalya/Sequencer
|
53a8920f1a19f4399338613c2b7b747dac545502
|
[
"MIT"
] | 16
|
2020-03-31T19:58:12.000Z
|
2022-01-31T11:37:05.000Z
|
"""
Sequencer code in Python
"""
from .sequencer_ import Sequencer
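
This one-line __init__ is the standard re-export idiom: callers import the class from the package root while the trailing-underscore module stays an internal detail. A usage note:

# Consumer code enabled by the re-export above:
from sequencer import Sequencer          # public import path
# equivalent to the internal path: from sequencer.sequencer_ import Sequencer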
| 16.5
| 33
| 0.757576
| 8
| 66
| 6.125
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 66
| 4
| 33
| 16.5
| 0.859649
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 5
|
f4de8dc4edc3a5ec18e7b4e7e143c217f12b05b1
| 80
|
py
|
Python
|
Tests/Bugs/lepage/r_lepage_3_t.py
|
jwilk/Pyrex
|
83dfbae1261788933472e3f9c501ad74c61a37c5
|
[
"Apache-2.0"
] | 5
|
2019-05-26T20:48:36.000Z
|
2021-07-09T01:38:38.000Z
|
Tests/Bugs/lepage/r_lepage_3_t.py
|
jwilk/Pyrex
|
83dfbae1261788933472e3f9c501ad74c61a37c5
|
[
"Apache-2.0"
] | null | null | null |
Tests/Bugs/lepage/r_lepage_3_t.py
|
jwilk/Pyrex
|
83dfbae1261788933472e3f9c501ad74c61a37c5
|
[
"Apache-2.0"
] | 1
|
2022-02-10T07:14:58.000Z
|
2022-02-10T07:14:58.000Z
|
import r_lepage_3
g = r_lepage_3.Grail()
g("spam", 42, ["tomato", "sandwich"])
| 16
| 37
| 0.6625
| 14
| 80
| 3.5
| 0.714286
| 0.285714
| 0.326531
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 0.125
| 80
| 4
| 38
| 20
| 0.642857
| 0
| 0
| 0
| 0
| 0
| 0.225
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 5
|
521c6c3a4bc112762cec88bf0297957c37b0fea8
| 2,226
|
py
|
Python
|
core/protocols/transport/tcp.py
|
CarboniDavide/rtscan
|
3fe447235d0534a7d66e7d6f5e8b9c00d8832b4f
|
[
"MIT"
] | null | null | null |
core/protocols/transport/tcp.py
|
CarboniDavide/rtscan
|
3fe447235d0534a7d66e7d6f5e8b9c00d8832b4f
|
[
"MIT"
] | null | null | null |
core/protocols/transport/tcp.py
|
CarboniDavide/rtscan
|
3fe447235d0534a7d66e7d6f5e8b9c00d8832b4f
|
[
"MIT"
] | null | null | null |
import socket
from struct import *

HEADER_BASE_LENGTH = 20


class TCPHeader:
    def __init__(self, value=None):
        self.__value = value
        self.__unpacked = unpack('!HHLLBBHHH', value)

    def __getitem__(self, key):
        return self.__value[key]

    def __repr__(self):
        return self.__value

    def __str__(self):
        return str(self.__value)

    @property
    def source_port(self):
        return self.__unpacked[0]

    @property
    def destination_port(self):
        return self.__unpacked[1]

    @property
    def sequence(self):
        return self.__unpacked[2]

    @property
    def acknowledgement(self):
        return self.__unpacked[3]

    @property
    def doff_reserved(self):
        return self.__unpacked[4]

    @property
    def control_flags(self):
        return self.__unpacked[5]

    @property
    def window_size(self):
        return self.__unpacked[6]

    @property
    def check_sum(self):
        return self.__unpacked[7]

    @property
    def urgent_pointer(self):
        return self.__unpacked[8]

    @property
    def header_length(self):
        return self.doff_reserved >> 4

    @property
    def size(self):
        return (self.doff_reserved >> 4) * 4

    @property
    def unpacked(self):
        return self.__unpacked


class TCPData:
    def __init__(self, value=None):
        self.__value = value

    def __getitem__(self, key):
        return self.__value[key]

    def __repr__(self):
        return self.__value

    def __str__(self):
        return str(self.__value)

    @property
    def value(self):
        return self.__value

    @property
    def size(self):
        return len(self.__value)


class TCP:
    def __init__(self, packet=None):
        self.__value = packet
        self.__header = TCPHeader(packet[:HEADER_BASE_LENGTH])
        self.__data = TCPData(packet[self.__header.size:])

    def __repr__(self):
        return self.__value

    def __str__(self):
        return str(self.__value)

    @property
    def value(self):
        return self.__value

    @property
    def header(self):
        return self.__header

    @property
    def data(self):
        return self.__data
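
A small self-check sketch for the classes above: pack a synthetic header with the same '!HHLLBBHHH' layout TCPHeader unpacks, append a payload, and read the fields back. The port and flag values are arbitrary test data, not from the repository:

from struct import pack

raw = pack('!HHLLBBHHH',
           12345,    # source port
           80,       # destination port
           1,        # sequence number
           0,        # acknowledgement number
           5 << 4,   # data offset: 5 32-bit words = 20 bytes, reserved bits 0
           0x02,     # control flags: SYN
           65535,    # window size
           0,        # checksum (left blank here)
           0)        # urgent pointer

segment = TCP(raw + b"payload")
assert segment.header.source_port == 12345
assert segment.header.size == 20      # (doff_reserved >> 4) * 4
assert segment.data.value == b"payload"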
| 19.356522
| 62
| 0.61186
| 255
| 2,226
| 4.878431
| 0.207843
| 0.184887
| 0.213826
| 0.176849
| 0.436495
| 0.371383
| 0.327974
| 0.327974
| 0.277331
| 0.277331
| 0
| 0.008929
| 0.295597
| 2,226
| 114
| 63
| 19.526316
| 0.784439
| 0
| 0
| 0.52439
| 0
| 0
| 0.004492
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.341463
| false
| 0
| 0.02439
| 0.304878
| 0.707317
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 5
|